merge crypsinous into master in crypsinousintomaster branch

This commit is contained in:
mohab metwally
2022-09-15 18:38:11 +02:00
382 changed files with 26304 additions and 7598 deletions

View File

@@ -0,0 +1,23 @@
name: "Push Notification IRC"
on: [push, pull_request]
jobs:
test:
runs-on: ubuntu-latest
steps:
- name: irc push
uses: rectalogic/notify-irc@v1
if: github.event_name == 'push'
with:
channel: "#dev"
server: "irc1.dark.fi"
nickname: github-notifier
message: ${{ github.actor }} pushed ${{ join(github.event.commits.*.message) }} ${{ github.event.ref }}
- name: irc pull request
uses: rectalogic/notify-irc@v1
if: github.event_name == 'pull_request'
with:
channel: "#dev"
server: "irc1.dark.fi"
nickname: github-notifier
message: ${{ github.actor }} opened PR ${{ github.event.pull_request.html_url }}

View File

@@ -1,39 +0,0 @@
on: [push]
jobs:
send-message:
runs-on: ubuntu-latest
name: Send message via Matrix
steps:
- name: checkout
uses: actions/checkout@v2
with:
fetch-depth: 0
- run: |
ALL_MSGS=""
for i in ${{ join(github.event.commits.*.id, ' ') }}; do
MSG=$(git --no-pager show -s --format='%h <b>%an</b>: %s' $i)
ALL_MSGS="$ALL_MSGS$MSG<br>"
done
echo "::set-output name=COMMIT_MESSAGE::$ALL_MSGS"
id: commit-message
- uses: narodnik/matrix-action@main
with:
server: 'matrix.dark.fi'
room-id: '!BQEjGPeQwWMEvvOLtO:dark.fi'
#access_token: ${{ secrets.MATRIX_TOKEN }}
status: 'OK'
user: 'b1-66er'
password: ${{ secrets.MATRIX_PASSWORD }}
message: '${{ steps.commit-message.outputs.COMMIT_MESSAGE }}'
#- name: Send message to test channel
# id: matrix-chat-message
# uses: fadenb/matrix-chat-message@v0.0.6
# with:
# homeserver: 'dark.fi'
# token: ${{ secrets.MATRIX_TOKEN }}
# channel: '!MODZOZydPqCRdulXmR:dark.fi'
# message: |
# This is an *example message* using **markdown** for formatting.\
# Use a `\` character at the end of a line to cause a linebreak (the whole message is treated as markdown).\
# You can use variables like ${{ github.sha }} anywhere.

5
.gitignore vendored
View File

@@ -3,6 +3,7 @@
*.zk.bin
*_circuit_layout.png
.vscode/
.DS_Store
/target/*
/tmp/*
@@ -11,6 +12,7 @@
/dao-cli
/daod
/darkfid
/darkotc
/dnetview
/drk
/faucetd
@@ -19,3 +21,6 @@
/taud
/vanityaddr
/zkas
/lilith
/darkwiki
/darkwikid

1578
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -19,35 +19,41 @@ name = "darkfi"
[workspace]
members = [
"bin/zkas",
#"bin/cashierd",
#"bin/cashierd",
"bin/darkwiki",
"bin/darkwikid",
"bin/darkfid",
"bin/darkotc",
"bin/drk",
"bin/faucetd",
"bin/ircd",
"bin/irc-raft",
#"bin/irc-raft",
"bin/dnetview",
"bin/daod",
"bin/dao-cli",
"bin/tau/taud",
"bin/tau/tau-cli",
"bin/vanityaddr",
"bin/lilith",
"src/sdk",
"src/util/derive",
"src/util/derive-internal",
"example/dchat",
]
[dependencies]
# Hard dependencies
libc = "0.2.126"
libc = "0.2.132"
log = "0.4.17"
thiserror = "1.0.31"
thiserror = "1.0.34"
# async-runtime
smol = {version = "1.2.5", optional = true}
futures = {version = "0.3.21", optional = true}
async-std = {version = "1.11.0", features = ["attributes"], optional = true}
async-trait = {version = "0.1.53", optional = true}
async-channel = {version = "1.6.1", optional = true}
futures = {version = "0.3.24", optional = true}
async-std = {version = "1.12.0", features = ["attributes"], optional = true}
async-trait = {version = "0.1.57", optional = true}
async-channel = {version = "1.7.1", optional = true}
async-executor = {version = "1.4.1", optional = true}
# async-net
@@ -55,88 +61,87 @@ async-native-tls = {version = "0.4.0", optional = true}
native-tls = {version = "0.2.10", optional = true}
# Networking
socket2 = {version = "0.4.4", optional = true}
futures-rustls = {version = "0.22.1", features = ["dangerous_configuration"], optional = true}
socket2 = {version = "0.4.7", optional = true}
futures-rustls = {version = "0.22.2", features = ["dangerous_configuration"], optional = true}
# TLS cert utilities
ed25519-compact = {version = "1.0.11", features = ["pem"], optional = true}
rcgen = {version = "0.9.2", features = ["pem"], optional = true}
rustls-pemfile = {version = "1.0.0", optional = true}
rcgen = {version = "0.9.3", features = ["pem"], optional = true}
rustls-pemfile = {version = "1.0.1", optional = true}
# Encoding
hex = {version = "0.4.3", optional = true}
bs58 = {version = "0.4.0", optional = true}
toml = {version = "0.5.9", optional = true}
bytes = {version = "1.1.0", optional = true}
bytes = {version = "1.2.1", optional = true}
bincode = {version = "2.0.0-rc.1", features = ["serde"], optional = true}
num-bigint = {version = "0.4.3", features = ["serde"], optional = true}
serde_json = {version = "1.0.81", optional = true}
serde = {version = "1.0.137", features = ["derive"], optional = true}
serde_json = {version = "1.0.85", optional = true}
serde = {version = "1.0.144", features = ["derive"], optional = true}
structopt = {version= "0.3.26", optional = true}
structopt-toml = {version= "0.5.0", optional = true}
structopt-toml = {version= "0.5.1", optional = true}
# Utilities
url = {version = "2.2.2", features = ["serde"], optional = true}
dirs = {version = "4.0.0", optional = true}
subtle = {version = "2.4.1", optional = true}
lazy_static = {version = "1.4.0", optional = true}
lazy-init = {version = "0.5.0", optional = true}
lazy-init = {version = "0.5.1", optional = true}
fxhash = {version = "0.2.1", optional = true}
indexmap = {version = "1.8.1", optional = true}
indexmap = {version = "1.9.1", optional = true}
itertools = {version = "0.10.3", optional = true}
darkfi-derive = {path = "src/util/derive", optional = true}
darkfi-derive-internal = {path = "src/util/derive-internal", optional = true}
chrono = {version = "0.4.19", optional = true}
regex = {version = "1.5.6", optional = true}
chrono = {version = "0.4.22", optional = true}
# Misc
termion = {version = "1.5.6", optional = true}
simplelog = {version = "0.12.0", optional = true}
indicatif = {version = "0.17.0", optional = true}
# Websockets
tungstenite = {version = "0.17.2", optional = true}
tungstenite = {version = "0.17.3", optional = true}
async-tungstenite = {version = "0.17.2", optional = true}
# socks5
fast-socks5 = {git = "https://github.com/ghassmo/fast-socks5", optional = true}
fast-socks5 = {version = "0.4.3", optional = true}
# Crypto
bitvec = {version = "1.0.0", optional = true}
bitvec = {version = "1.0.1", optional = true}
rand = {version = "0.8.5", optional = true}
blake3 = {version = "1.3.1", optional = true}
sha2 = {version = "0.10.2", optional = true}
sha2 = {version = "0.10.5", optional = true}
group = {version = "0.12.0", optional = true}
arrayvec = {version = "0.7.2", optional = true}
blake2b_simd = {version = "1.0.0", optional = true}
pasta_curves = {version = "0.4.0", optional = true}
crypto_api_chachapoly = {version = "0.5.0", optional = true}
incrementalmerkletree = {version = "0.3.0", optional = true}
#halo2_proofs = {version = "0.1.0", optional = true}
#halo2_gadgets = {version = "0.1.0", optional = true}
halo2_proofs = {git = "https://github.com/zcash/halo2.git", rev = "a898d65ae3ad3d41987666f6a03cfc15edae01c4", optional = true}
halo2_gadgets = {git = "https://github.com/zcash/halo2.git", rev = "a898d65ae3ad3d41987666f6a03cfc15edae01c4", optional = true}
halo2_proofs = {version = "0.2.0", optional = true}
halo2_gadgets = {version = "0.2.0", optional = true}
#halo2_proofs = {git = "https://github.com/zcash/halo2.git", rev = "a898d65ae3ad3d41987666f6a03cfc15edae01c4", optional = true}
#halo2_gadgets = {git = "https://github.com/zcash/halo2.git", rev = "a898d65ae3ad3d41987666f6a03cfc15edae01c4", optional = true}
# Smart contract runtime
drk-sdk = {path = "src/sdk", optional = true}
wasmer = {version = "2.2.1", optional = true}
wasmer-compiler-singlepass = {version = "2.2.1", optional = true}
wasmer-middlewares = {version = "2.2.1", optional = true}
wasmer = {version = "2.3.0", optional = true}
wasmer-compiler-singlepass = {version = "2.3.0", optional = true}
wasmer-middlewares = {version = "2.3.0", optional = true}
# Wallet management
sqlx = {version = "0.5.13", features = ["runtime-async-std-native-tls", "sqlite"], optional = true}
libsqlite3-sys = {version = "0.24.2", features = ["bundled-sqlcipher"], optional = true }
sqlx = {version = "0.6.1", features = ["runtime-async-std-native-tls", "sqlite"], optional = true}
libsqlite3-sys = {version = "0.24.1", features = ["bundled-sqlcipher"], optional = true }
# Blockchain store
sled = {version = "0.34.7", optional = true}
[dev-dependencies]
clap = {version = "3.1.18", features = ["derive"]}
#halo2_proofs = {version = "0.1.0", features = ["dev-graph", "gadget-traces", "sanity-checks"]}
#halo2_gadgets = {version = "0.1.0", features = ["dev-graph", "test-dependencies"]}
halo2_proofs = {git = "https://github.com/zcash/halo2.git", rev = "a898d65ae3ad3d41987666f6a03cfc15edae01c4", features = ["dev-graph", "gadget-traces", "sanity-checks"]}
halo2_gadgets = {git = "https://github.com/zcash/halo2.git", rev = "a898d65ae3ad3d41987666f6a03cfc15edae01c4", features = ["dev-graph", "test-dependencies"]}
clap = {version = "3.2.20", features = ["derive"]}
halo2_proofs = {version = "0.2.0", features = ["dev-graph", "gadget-traces", "sanity-checks"]}
halo2_gadgets = {version = "0.2.0", features = ["dev-graph", "test-dependencies"]}
#halo2_proofs = {git = "https://github.com/zcash/halo2.git", rev = "a898d65ae3ad3d41987666f6a03cfc15edae01c4", features = ["dev-graph", "gadget-traces", "sanity-checks"]}
#halo2_gadgets = {git = "https://github.com/zcash/halo2.git", rev = "a898d65ae3ad3d41987666f6a03cfc15edae01c4", features = ["dev-graph", "test-dependencies"]}
plotters = "0.3.1"
plotters = "0.3.3"
[features]
async-runtime = [
@@ -159,6 +164,7 @@ websockets = [
]
util = [
"blake3",
"bs58",
"hex",
"bincode",
@@ -168,9 +174,10 @@ util = [
"simplelog",
"serde_json",
"dirs",
"num-bigint",
"fxhash",
"chrono",
"indicatif",
"termion",
"async-net",
"async-runtime",
@@ -217,7 +224,6 @@ net = [
"fast-socks5",
"ed25519-compact",
"rcgen",
"regex",
"rustls-pemfile",
"structopt",
"structopt-toml",
@@ -298,6 +304,10 @@ tx = [
"util",
]
dht = [
"util",
]
[[example]]
name = "net"
path = "example/net.rs"

View File

@@ -10,7 +10,8 @@ CARGO = cargo
#RUSTFLAGS = -C target-cpu=native
# Binaries to be built
BINS = zkas drk darkfid tau taud ircd dnetview darkotc
BINS = drk darkfid tau taud ircd dnetview darkotc darkwikid darkwiki
# Common dependencies which should force the binaries to be rebuilt
BINDEPS = \
@@ -21,7 +22,22 @@ BINDEPS = \
$(shell find script/sql -type f) \
$(shell find contrib/token -type f)
all: $(BINS)
# ZK proofs to compile with zkas
PROOFS = \
$(shell find bin/daod/proof -type f -name '*.zk') \
$(shell find proof -type f -name '*.zk') \
example/simple.zk
PROOFS_BIN = $(PROOFS:=.bin)
all: zkas $(PROOFS_BIN) $(BINS)
zkas: $(BINDEPS)
RUSTFLAGS="$(RUSTFLAGS)" $(CARGO) build --all-features --release --package $@
cp -f target/release/$@ $@
$(PROOFS_BIN): $(PROOFS)
./zkas $(basename $@) -o $@
token_lists:
$(MAKE) -C contrib/token all
@@ -30,30 +46,23 @@ $(BINS): token_lists $(BINDEPS)
RUSTFLAGS="$(RUSTFLAGS)" $(CARGO) build --all-features --release --package $@
cp -f target/release/$@ $@
check: token_lists
check: token_lists zkas $(PROOFS_BIN)
RUSTFLAGS="$(RUSTFLAGS)" $(CARGO) hack check --release --feature-powerset --all
fix: token_lists
fix: token_lists zkas $(PROOFS_BIN)
RUSTFLAGS="$(RUSTFLAGS)" $(CARGO) clippy --release --all-features --fix --allow-dirty --all
clippy: token_lists
clippy: token_lists zkas $(PROOFS_BIN)
RUSTFLAGS="$(RUSTFLAGS)" $(CARGO) clippy --release --all-features --all
rustdoc: token_lists
rustdoc: token_lists zkas
RUSTFLAGS="$(RUSTFLAGS)" $(CARGO) doc --release --workspace --all-features \
--no-deps --document-private-items
# zkas source files which we want to compile for tests
VM_SRC = proof/arithmetic.zk proof/mint.zk proof/burn.zk example/simple.zk
VM_BIN = $(VM_SRC:=.bin)
$(VM_BIN): zkas $(VM_SRC)
./zkas $(basename $@) -o $@
test: token_lists $(VM_BIN) test-tx
test: token_lists zkas $(PROOFS_BIN) test-tx
RUSTFLAGS="$(RUSTFLAGS)" $(CARGO) test --release --all-features --all
test-tx:
test-tx: zkas
RUSTFLAGS="$(RUSTFLAGS)" $(CARGO) run --release --features=node,zkas --example tx
clean:

View File

@@ -5,6 +5,10 @@
[![Manifesto - unsystem](https://img.shields.io/badge/Manifesto-unsystem-informational?logo=minutemailer&logoColor=white&style=flat-square)](https://dark.fi/manifesto.html)
[![Book - mdbook](https://img.shields.io/badge/Book-mdbook-orange?logo=gitbook&logoColor=white&style=flat-square)](https://darkrenaissance.github.io/darkfi)
## Connect to DarkFi IRC
Follow the [installation instructions](https://darkrenaissance.github.io/darkfi/misc/ircd.html#installation)
for the P2P IRC daemon.
## Build
@@ -27,8 +31,8 @@ The following dependencies are also required:
| freetype2 libs | libfreetype6-dev |
| expat xml lib | libexpat1-dev |
Users of Debian-based systems (e.g. Ubuntu) can simply run the following
to install the required dependencies:
Users of Debian-based systems (e.g. Ubuntu) can simply run the
following to install the required dependencies:
```shell
# apt-get update
@@ -37,18 +41,21 @@ to install the required dependencies:
libexpat1-dev
```
Alternatively users can use the automated script in
`contrib/mac_setup.sh` by running. This script will install brew
if it does not already exists on the system and then automatically
install the dependencies:
Alternatively, users can try using the automated script under `contrib`
folder by executing:
```shell
% bash contrib/mac_setup.sh
% sh contrib/dependency_setup.sh
```
To build the necessary binaries, we can just clone the repo, and use the
provided Makefile to build the project. This will download the trusted
setup params, and compile the source code.
The script will try to recognize which system you are running,
and install dependencies accordingly. In case it does not find your
package manager, please consider adding support for it into the script
and sending a patch.
To build the necessary binaries, we can just clone the repo, and use
the provided Makefile to build the project. This will download the
trusted setup params, and compile the source code.
```shell
% git clone https://github.com/darkrenaissance/darkfi

View File

@@ -10,10 +10,10 @@ features = ["wallet", "node", "rpc"]
[dependencies]
# Async
smol = "1.2.5"
futures = "0.3.21"
async-std = "1.11.0"
async-trait = "0.1.53"
async-channel = "1.6.1"
futures = "0.3.24"
async-std = "1.12.0"
async-trait = "0.1.57"
async-channel = "1.7.1"
async-executor = "1.4.1"
easy-parallel = "3.2.0"
@@ -21,23 +21,23 @@ easy-parallel = "3.2.0"
rand = "0.8.5"
# Misc
clap = {version = "3.1.18", features = ["derive"]}
clap = {version = "3.2.20", features = ["derive"]}
log = "0.4.17"
num_cpus = "1.13.1"
simplelog = "0.12.0"
thiserror = "1.0.31"
thiserror = "1.0.34"
url = "2.2.2"
fxhash = "0.2.1"
# Encoding and parsing
serde = {version = "1.0.137", features = ["derive"]}
serde_json = "1.0.81"
serde = {version = "1.0.144", features = ["derive"]}
serde_json = "1.0.85"
# Bitcoin bridge dependencies
bdk = {version = "0.18.0", optional = true}
anyhow = {version = "1.0.57", optional = true}
bitcoin = {version = "0.28.1", optional = true}
secp256k1 = {version = "0.22.1", default-features = false, features = ["rand-std"], optional = true}
bdk = {version = "0.21.0", optional = true}
anyhow = {version = "1.0.64", optional = true}
bitcoin = {version = "0.29.1", optional = true}
secp256k1 = {version = "0.24.0", default-features = false, features = ["rand-std"], optional = true}
# Ethereum bridge dependencies
hex = {version = "0.4.3", optional = true}
@@ -49,11 +49,11 @@ num-bigint = {version = "0.4.3", features = ["rand", "serde"], optional = true}
# Solana bridge dependencies
native-tls = {version = "0.2.10", optional = true}
async-native-tls = {version = "0.4.0", optional = true}
solana-client = {version = "1.10.17", optional = true}
solana-sdk = {version = "1.10.17", optional = true}
spl-associated-token-account = {version = "1.0.5", features = ["no-entrypoint"], optional = true}
spl-token = {version = "3.4.0-alpha", features = ["no-entrypoint"], optional = true}
tungstenite = {version = "0.17.2", optional = true}
solana-client = {version = "1.11.5", optional = true}
solana-sdk = {version = "1.11.5", optional = true}
spl-associated-token-account = {version = "1.1.1", features = ["no-entrypoint"], optional = true}
spl-token = {version = "3.5.0", features = ["no-entrypoint"], optional = true}
tungstenite = {version = "0.17.3", optional = true}
[features]
btc = [

View File

@@ -10,18 +10,18 @@ features = ["rpc"]
[dependencies]
# Async
smol = "1.2.5"
futures = "0.3.21"
async-std = {version = "1.11.0", features = ["attributes"]}
async-trait = "0.1.53"
async-channel = "1.6.1"
futures = "0.3.24"
async-std = {version = "1.12.0", features = ["attributes"]}
async-trait = "0.1.57"
async-channel = "1.7.1"
async-executor = "1.4.1"
# Misc
clap = {version = "3.1.18", features = ["derive"]}
clap = {version = "3.2.20", features = ["derive"]}
log = "0.4.17"
num_cpus = "1.13.1"
simplelog = "0.12.0"
url = "2.2.2"
# Encoding and parsing
serde_json = "1.0.81"
serde_json = "1.0.85"

View File

@@ -5,15 +5,15 @@ edition = "2021"
[dependencies.darkfi]
path = "../../"
features = ["rpc"]
features = ["rpc", "crypto", "tx", "node"]
[dependencies]
# Async
smol = "1.2.5"
futures = "0.3.21"
async-std = {version = "1.11.0", features = ["attributes"]}
async-trait = "0.1.53"
async-channel = "1.6.1"
futures = "0.3.24"
async-std = {version = "1.12.0", features = ["attributes"]}
async-trait = "0.1.57"
async-channel = "1.7.1"
async-executor = "1.4.1"
easy-parallel = "3.2.0"
@@ -21,7 +21,20 @@ easy-parallel = "3.2.0"
log = "0.4.17"
num_cpus = "1.13.1"
simplelog = "0.12.0"
url = "2.2.2"
thiserror = "1.0.34"
# Crypto
incrementalmerkletree = "0.3.0"
pasta_curves = "0.4.0"
halo2_gadgets = "0.2.0"
halo2_proofs = "0.2.0"
rand = "0.8.5"
crypto_api_chachapoly = "0.5.0"
group = "0.12.0"
# Encoding and parsing
serde_json = "1.0.81"
serde_json = "1.0.85"
# Utilities
lazy_static = "1.4.0"
url = "2.2.2"

9
bin/daod/Makefile Normal file
View File

@@ -0,0 +1,9 @@
ZK_SRC_FILES := $(wildcard proof/*.zk)
ZK_BIN_FILES := $(patsubst proof/%.zk, proof/%.zk.bin, $(ZK_SRC_FILES))
daod: $(ZK_BIN_FILES)
cargo run --release
proof/%.zk.bin: proof/%.zk
zkas $<

251
bin/daod/demo-spec.md Normal file
View File

@@ -0,0 +1,251 @@
---
title: DAO demo architecture
author: jstark
---
This document outlines a simple demo to showcase the smart contract
schema underlying the initial DAO MVP. We have tried to mimic the
basic DarkFi architecture while remaining as simple as possible.
We do not have a blockchain, p2p network, or encrypted wallet database
in this highly simplified demo. It is just a local network of 4 nodes
and a relayer. The values are all stored in memory.
# Layers
**bin**
Located in darkfi/bin/dao.
* **daod/**: receives rpc requests and operates a `client`.
* **dao-cli/**: command-line interface that receives input and sends rpc requests.
* **relayerd/**: receives transactions on TCP and relays them to all nodes.
**src**
Located in darkfi/bin/dao/daod.
* **contract/**: source code for dao and money contracts.
* **util/**: demo-wide utilities.
* **state/**: stores dao and money states.
* **tx**: underlying types required by transactions, function calls and call data.
* **node/**: a dao full node.
**node**
A dao node containing `client` and `wallet` submodules.
* **client/**: operates a wallet and performs state transition and validate methods.
* **wallet/**: owns and operates secret values.
**proof**
Located in darkfi/bin/dao/proof. Contains the zk proofs.
# Command Flow
The following assumes that a user has already compiled the zk contracts
by running `make`.
This requires a `Makefile` as follows:
```
ZK_SRC_FILES := $(wildcard proof/*.zk)
ZK_BIN_FILES := $(patsubst proof/%.zk, proof/%.zk.bin, $(ZK_SRC_FILES))
daod: $(ZK_BIN_FILES)
cargo run --release
proof/%.zk.bin: proof/%.zk
zkas $<
```
We will also need to write a shell script that opens 9 terminals and
runs the following:
*Terminal 1:* relayerd.
*Terminal 2-5:* 4 instances of daod.
*Terminal 6-9:* 4 instances of dao-cli.
Relayerd and daod should be sent to the background, so the demo will
consist visually of 4 terminals running dao-cli.
## Start relayer
1. `relayerd` starts a listener for all TCP ports specified in config.
## Initialize DAO
Note: this happens automatically on daod first run.
1. `daod`: starts a listener on the relayer TCP port.
2. `daod:` creates a client and calls `client.init()`.
3. `client`: creates a money wallet.
4. `money-wallet`: generates cashier, faucet keys.
5. `client`: gets public keys from wallet and calls `state.new()`.
6. `state`: creates ZkContractTable, StateRegistry.
7. `state`: loads all zk binaries and saves them in ZkContractTable.
8. `state`: creates a new money/dao state and registers them in StateRegistry.
## Stage 1: create DAO
1. `dao-cli:` sends `create()` rpc request to daod.
2. `daod`: receives rpc request and calls `client.create()`.
3. `client`: creates a dao wallet.
4. `dao-wallet`: specifies the dao params.
5. `dao-wallet`: creates a dao keypair, bulla blind, and signature secret.
**build sequence.**
Note: Builders differ according to the FuncCall, but the basic sequence
is the same.
6. `dao-wallet`: build: creates dao_contract::mint::wallet::Builder.
7. `dao-wallet`: generates a FuncCall from builder.build().
8. `dao-wallet`: adds FuncCall to a vector.
9. `dao-wallet`: sign the vector of FuncCalls.
**send sequence.**
10. `dao-wallet`: create a Transaction.
11. `dao-wallet`: send the Transaction to the relayer.
12. `relayer`: receives a Transaction on one of its connections.
13. `relayer`: relays the Transaction to all connected nodes.
**recv sequence.**
14. `daod`: receives a Transaction on its relayerd listener.
15. `daod`: sends the Transaction to Client.
**validate sequence.**
16. `client`: validate: creates an empty vector of updates.
17. `client`: loops through all FuncCalls in the Transaction.
18. `client`: runs a match statement on the FUNC_ID.
19. `client`: finds mint FUNC_ID and runs a state transition function.
20. `client`: pushes the result to Vec<Update>
21. `client`: outside the loop, atomically applies all updates.
22. `client`: calls zk_verify() on the Transaction.
23. `client`: verifies signatures.
------------------------------------------------------------------------
24. `client`: sends Transaction to the relayer.
25. `relayer`: receives Transaction and relays.
* TODO: `dao-wallet`: waits until Transaction is confirmed. (how?)
27. `dao-wallet`: look up the dao state and call witness().
28. `dao-wallet`: get the dao bulla from the Transaction.
29. `dao-cli`: print "Created DAO {}".
## Stage 2: fund DAO
* TODO: for the demo it might be better to call mint() first and then
fund(), passing the values into fund()
Here we are creating a treasury token and sending it to the DAO.
1. `dao-cli:` `fund()` rpc request to daod
2. `daod`: receives rpc request and calls `client.fund()`.
3. `client`: creates treasury token, random token ID and supply
Note: dao-wallet must manually track coins to retrieve coins belonging
to its private key.
4. `dao-wallet`: looks up the money state, and calls state.wallet_cache.track()
5. `money-wallet`: sets spend hook to dao_contract::exec::FUNC_ID
6. `money-wallet`: sets user_data to dao_bulla
* TODO: how does it get the dao_bulla? Must be stored somewhere.
7. `money-wallet`: specifies dao public key and treasury token BuilderOutputInfo.
8. `money-wallet`: runs the build sequence for money::transfer.
9. `money-wallet`: create Transaction and send.
10. `relayer`: receives Transaction and relays.
11. `daod`: receives a Transaction and sends to client.
12. `client`: runs the validate sequence.
Note: here we get all coins associated with the private key.
13. `dao-wallet`: looks up the state and calls WalletCache.get_received()
14. `dao-wallet`: check the coin is valid by recreating Coin
15. `daod`: sends token ID and balance to dao-cli.
16. `dao-cli`: displays data using pretty table.
## Stage 3: airdrop
1. `dao-cli`: calls keygen()
2. `daod`: client.keygen()
3. `daod`: money-wallet.keygen()
4. `money-wallet`: creates new keypair
5. `money-wallet`: looks up the money_contract State and calls WalletCache.track()
6. `money-wallet`: return the public key
7. `dao-cli`: prints the public key
Note: do this 3 times to generate 3 public keys for different daod instances.
8. `dao-cli`: calls mint()
9. `daod`: call client.mint()
10. `client:` creates governance token with random token ID and supply
11. `dao-cli`: prints "created token {} with supply {}"
12. `dao-cli`: calls airdrop() and passes a value and a pubkey.
13. `dao-wallet:` runs the build sequence for money::transfer.
14. `dao-wallet`: create Transaction and send.
15. `relayer`: receives Transaction and relays.
16. `daod`: receives a Transaction and sends to client.
17. `client`: runs the validate sequence.
18. `money-wallet`: state.wallet_cache.get_received()
19. `money-wallet`: check the coin is valid by recreating Coin
20. `daod`: sends token ID and balance to cli
21. `dao-cli`: prints "received coin {} with value {}".
* TODO: money-wallet must keep track of Coins and have a flag for whether or not they are spent.
* Hashmap of <Coin, bool> ?
## Stage 4: create proposal
* TODO: maybe for the demo we should just hardcode a user/proposal recipient.
1. `dao-cli`: calls propose() and enters a user pubkey and an amount
2. `dao-wallet`: runs the build sequence for dao_contract::propose
3. `dao-wallet`: specifies user pubkey, amount and token ID in Proposal
4. `dao-cli`: prints "Created proposal to send {} xDRK to {}"
5. `dao-wallet`: create Transaction and send.
6. `relayer`: receives Transaction and relays.
7. `daod`: receives a Transaction and sends to client.
8. `client`: runs the validate sequence.
* TODO: how does everyone have access to DAO private key?
9. `dao-wallet`: reads received proposal and tries to decrypt Note
10. `dao-wallet`: sends decrypted values to daod
11. `dao-cli`: prints "Proposal is now active"
## Stage 5 vote
1. `dao-cli`: calls vote() and enters a vote option (yes or no) and an amount
2. `daod`: calls client.vote()
3. `money-wallet`: get money_leaf_position and money_merkle_path
4. `money-wallet`: create builder sequence for dao_contract::vote
5. `money-wallet`: specify dao_keypair in vote_keypair field
* TODO: this implies that money-wallet is able to access private values in dao-wallet
6. `money-wallet`: signs and sends
7. `relayer`: receives Transaction and relays.
8. `daod`: receives a Transaction and sends to client.
9. `client`: runs the validate sequence.
10. `dao-wallet`: tries to decrypt the Vote.
11. `dao-cli`: prints "Received vote {} value {}"
Note: repeat 3 times with different values and vote options.
* TODO: ignore section re: vote commitments?
* TODO: determine outcome: yes_votes_value/ all_votes_value
e.g. when the quorum is reached, print "Quorum reached! Outcome {}"
or just hardcode it for X n. of voters
## Stage 6: Executing the proposal
1. `dao-cli`: calls exec()
* TODO: how does dao have access to user data?
2. `dao-wallet`: get money_leaf_position and money_merkle_path
3. `dao-wallet`: specifies user_keypair and proposal amount in 1st output
4. `dao-wallet`: specifies change in 2nd output
5. `dao-wallet`: run build sequence for money_contract::transfer
6. `dao-wallet`: run build sequence for dao_contract::exec
7. `dao-wallet`: signs transaction and sends
8. `relayer`: receives Transaction and relays.
9. `daod`: receives a Transaction and sends to client.
10. `client`: runs the validate sequence.

View File

@@ -525,9 +525,9 @@ class VoteProof:
revealed.token_commit = crypto.pedersen_encrypt(
self.token_id, self.token_blind, self.ec
)
revealed.vote_option_commit = crypto.ff_hash(
self.ec.p, self.vote_option, self.vote_option_blind
)
#revealed.vote_option_commit = crypto.ff_hash(
# self.ec.p, self.vote_option, self.vote_option_blind
#)
return revealed
def verify(self, public):
@@ -537,8 +537,9 @@ class VoteProof:
return False
return all([
revealed.value_commit == public.value_commit,
revealed.vote_commit == public.vote_commit,
revealed.token_commit == public.token_commit,
revealed.vote_option_commit == public.vote_option_commit
#revealed.vote_option_commit == public.vote_option_commit
])
class VoteTx:
@@ -903,7 +904,9 @@ class DaoExecProof:
assert self.total_votes >= self.dao.quorum
assert self.win_votes / self.total_votes >= self.dao.approval_ratio
# Approval ratio should be actually 2 values ffs
#assert self.win_votes / self.total_votes >= self.dao.approval_ratio
assert self.win_votes >= self.dao.approval_ratio * self.total_votes
return all([
revealed.all_proposals == public.all_proposals,
@@ -1308,9 +1311,9 @@ def main(argv):
gov_token_id, note.token_blind, ec)
assert tx.vote.revealed.token_commit == token_commit
vote_option_commit = crypto.ff_hash(
ec.p, note.vote_option, note.vote_option_blind)
assert tx.vote.revealed.vote_option_commit == vote_option_commit
#vote_option_commit = crypto.ff_hash(
# ec.p, note.vote_option, note.vote_option_blind)
#assert tx.vote.revealed.vote_option_commit == vote_option_commit
value_commit = crypto.pedersen_encrypt(
note.value, note.value_blind, ec)

View File

@@ -319,10 +319,6 @@ class MintProof:
self.token_id, self.token_blind, self.ec
)
revealed.dest_commit = ff_hash(
self.ec.p, self.public[0], self.public[1]
)
return revealed
def verify(self, public):
@@ -331,6 +327,5 @@ class MintProof:
revealed.coin == public.coin,
revealed.value_commit == public.value_commit,
revealed.token_commit == public.token_commit,
revealed.dest_commit == public.dest_commit
])

View File

@@ -1,80 +0,0 @@
constant "DaoBurn" {
EcFixedPointShort VALUE_COMMIT_VALUE,
EcFixedPoint VALUE_COMMIT_RANDOM,
}
contract "DaoBurn" {
Base treasury_value,
Base authority_key_x,
Base authority_key_y,
Base governance_token_id,
Base treasury_token_id,
Scalar treasury_value_blind,
Base authority_key_x_blind,
Base authority_key_y_blind,
Base governance_token_id_blind,
Base treasury_token_id_blind,
# BullaBurn subroutine
Base serial,
Base coin_blind,
Uint32 leaf_pos,
MerklePath path,
Base signature_secret,
}
contract "DaoBurn" {
# Pedersen commitment for coin's value
vcv = ec_mul_short(treasury_value, VALUE_COMMIT_VALUE);
vcr = ec_mul(treasury_value_blind, VALUE_COMMIT_RANDOM);
value_commit = ec_add(vcv, vcr);
# Since the value commit is a curve point, we fetch its coordinates
# and constrain them:
value_commit_x = ec_get_x(value_commit);
value_commit_y = ec_get_y(value_commit);
constrain_instance(value_commit_x);
constrain_instance(value_commit_y);
authority_x_commit = poseidon_hash(authority_key_x, authority_key_x_blind);
constrain_instance(authority_x_commit);
authority_y_commit = poseidon_hash(authority_key_y, authority_key_y_blind);
constrain_instance(authority_y_commit);
gov_token_id_commit = poseidon_hash(
governance_token_id, governance_token_id_blind);
constrain_instance(gov_token_id_commit);
treasury_token_id_commit = poseidon_hash(
treasury_token_id, treasury_token_id_blind);
constrain_instance(treasury_token_id_commit);
# BullaBurn subroutine
# Poseidon hash of the nullifier
nullifier = poseidon_hash(serial);
constrain_instance(nullifier);
B = poseidon_hash(
treasury_value,
authority_key_x,
authority_key_y,
governance_token_id,
treasury_token_id,
serial,
coin_blind
);
# Merkle root
root = calculate_merkle_root(leaf_pos, path, B);
constrain_instance(root);
# Finally, we derive a public key for the signature and
# constrain its coordinates:
signature_public = ec_mul_base(signature_secret, NULLIFIER_K);
signature_x = ec_get_x(signature_public);
signature_y = ec_get_y(signature_public);
constrain_instance(signature_x);
constrain_instance(signature_y);
}

View File

@@ -4,109 +4,165 @@ constant "DaoExec" {
}
contract "DaoExec" {
# Main attributes for DAO
Base treasury_value,
Base authority_key_x,
Base authority_key_y,
Base governance_token_id,
Base treasury_token_id,
# proposal params
Base proposal_dest_x,
Base proposal_dest_y,
Base proposal_amount,
Base proposal_serial,
Base proposal_token_id,
Base proposal_blind,
# Used for proving old bulla is in the set
Uint32 leaf_pos,
MerklePath path,
# DAO params
Base dao_proposer_limit,
Base dao_quorum,
Base dao_approval_ratio_quot,
Base dao_approval_ratio_base,
Base gov_token_id,
Base dao_public_x,
Base dao_public_y,
Base dao_bulla_blind,
Base old_bulla_serial,
Base old_bulla_blind,
# votes
Base yes_votes_value,
Base all_votes_value,
Scalar yes_votes_blind,
Scalar all_votes_blind,
# outputs + inputs
Base user_serial,
Base user_coin_blind,
Base dao_serial,
Base dao_coin_blind,
Base input_value,
Scalar input_value_blind,
Base coin_serial,
Base coin_blind,
Base new_bulla_serial,
Base new_bulla_blind,
###
# Instruction data
Base instr_value,
Base instr_key_x,
Base instr_key_y,
Base instr_token_id,
Base message_blind,
# Same blinding factor used for governance token ID hash on votes
Base governance_token_id_blind,
# Votes info
Base vote_value,
Scalar vote_blind,
# misc
Base dao_spend_hook,
Base user_spend_hook,
Base user_data,
}
circuit "DaoExec" {
# instr = OPCODE, u, P
message = poseidon_hash(instr_value, instr_key_x, instr_key_y,
instr_token_id, message_blind);
constrain_instance(message);
# Vote commitment
vote_cv = ec_mul_short(vote_value, VALUE_COMMIT_VALUE);
vote_cr = ec_mul(vote_blind, VALUE_COMMIT_RANDOM);
vote_commit = ec_add(vote_cv, vote_cr);
vote_commit_x = ec_get_x(vote_commit);
vote_commit_y = ec_get_y(vote_commit);
constrain_instance(vote_commit_x);
constrain_instance(vote_commit_y);
gov_token_id_commit = poseidon_hash(
governance_token_id, governance_token_id_blind);
constrain_instance(gov_token_id_commit);
change_value = base_sub(treasury_value, instr_value);
# TODO: missing instructions
# greater_than_zero(instr_value);
# greater_than_or_equal_zero(change_value);
# bulla_smash
# Poseidon hash of the nullifier
nullifier = poseidon_hash(old_bulla_serial);
constrain_instance(nullifier);
old_bulla = poseidon_hash(
treasury_value,
authority_key_x,
authority_key_y,
governance_token_id,
treasury_token_id,
old_bulla_serial,
old_bulla_blind
dao_bulla = poseidon_hash(
dao_proposer_limit,
dao_quorum,
dao_approval_ratio_quot,
dao_approval_ratio_base,
gov_token_id,
dao_public_x,
dao_public_y,
dao_bulla_blind,
);
# Proposal bulla is valid means DAO bulla is also valid
# because of dao-propose-main.zk, already checks that when
# we first create the proposal. So it is redundant here.
# Merkle root
root = calculate_merkle_root(leaf_pos, path, old_bulla);
constrain_instance(root);
# coin_mint
# Poseidon hash of the coin
coin = poseidon_hash(
instr_key_x,
instr_key_y,
instr_value,
instr_token_id,
coin_serial,
coin_blind
proposal_bulla = poseidon_hash(
proposal_dest_x,
proposal_dest_y,
proposal_amount,
proposal_serial,
proposal_token_id,
dao_bulla,
proposal_blind,
# @tmp-workaround
proposal_blind,
);
constrain_instance(coin);
constrain_instance(proposal_bulla);
# bulla_mint
bulla = poseidon_hash(
change_value,
authority_key_x,
authority_key_y,
governance_token_id,
treasury_token_id,
new_bulla_serial,
new_bulla_blind
coin_0 = poseidon_hash(
proposal_dest_x,
proposal_dest_y,
proposal_amount,
proposal_token_id,
proposal_serial,
user_spend_hook,
user_data,
proposal_blind,
);
constrain_instance(bulla);
constrain_instance(coin_0);
change = base_sub(input_value, proposal_amount);
coin_1 = poseidon_hash(
dao_public_x,
dao_public_y,
change,
proposal_token_id,
dao_serial,
dao_spend_hook,
proposal_bulla,
dao_coin_blind,
);
constrain_instance(coin_1);
# Create pedersen commits for win_votes, and total_votes
# and make public
yes_votes_value_c = ec_mul_short(yes_votes_value, VALUE_COMMIT_VALUE);
yes_votes_blind_c = ec_mul(yes_votes_blind, VALUE_COMMIT_RANDOM);
yes_votes_commit = ec_add(yes_votes_value_c, yes_votes_blind_c);
# get curve points and constrain
yes_votes_commit_x = ec_get_x(yes_votes_commit);
yes_votes_commit_y = ec_get_y(yes_votes_commit);
constrain_instance(yes_votes_commit_x);
constrain_instance(yes_votes_commit_y);
all_votes_c = ec_mul_short(all_votes_value, VALUE_COMMIT_VALUE);
all_votes_blind_c = ec_mul(all_votes_blind, VALUE_COMMIT_RANDOM);
all_votes_commit = ec_add(all_votes_c, all_votes_blind_c);
# get curve points and constrain
all_votes_commit_x = ec_get_x(all_votes_commit);
all_votes_commit_y = ec_get_y(all_votes_commit);
constrain_instance(all_votes_commit_x);
constrain_instance(all_votes_commit_y);
# Create pedersen commit for input_value and make public
input_value_v = ec_mul_short(input_value, VALUE_COMMIT_VALUE);
input_value_r = ec_mul(input_value_blind, VALUE_COMMIT_RANDOM);
input_value_commit = ec_add(input_value_v, input_value_r);
# get curve points and constrain
input_value_x = ec_get_x(input_value_commit);
input_value_y = ec_get_y(input_value_commit);
constrain_instance(input_value_x);
constrain_instance(input_value_y);
constrain_instance(dao_spend_hook);
constrain_instance(user_spend_hook);
constrain_instance(user_data);
# Check that dao_quorum is less than or equal to all_votes_value
one = witness_base(1);
all_votes_value_1 = base_add(all_votes_value, one);
less_than(dao_quorum, all_votes_value_1);
# approval_ratio_quot / approval_ratio_base <= yes_votes / all_votes
#
# The above is also equivalent to this:
#
# all_votes * approval_ratio_quot <= yes_votes * approval_ratio_base
rhs = base_mul(all_votes_value, dao_approval_ratio_quot);
lhs = base_mul(yes_votes_value, dao_approval_ratio_base);
lhs_1 = base_add(lhs, one);
less_than(rhs, lhs_1);
####
# Create coin 0
# Create coin 1
# Check values of coin 0 + coin 1 == input value
# Check value of coin 0 == proposal_amount
# Check public key matches too
# Create the input value commit
# Create the value commits
# NOTE: there is a vulnerability here where someone can create the exec
# transaction with a bad note so it cannot be decrypted by the receiver
# TODO: research verifiable encryption inside ZK
}

View File

@@ -1,60 +1,31 @@
constant "DaoMint" {
EcFixedPointShort VALUE_COMMIT_VALUE,
EcFixedPoint VALUE_COMMIT_RANDOM,
}
contract "DaoMint" {
Base treasury_value,
Base authority_key_x,
Base authority_key_y,
Base governance_token_id,
Base treasury_token_id,
Scalar treasury_value_blind,
Base authority_key_x_blind,
Base authority_key_y_blind,
Base governance_token_id_blind,
Base treasury_token_id_blind,
# BullaMint subroutine
Base serial,
Base coin_blind,
Base dao_proposer_limit,
Base dao_quorum,
Base dao_approval_ratio_quot,
Base dao_approval_ratio_base,
Base gdrk_token_id,
Base dao_public_x,
Base dao_public_y,
Base dao_bulla_blind,
}
circuit "DaoMint" {
# Pedersen commitment for coin's value
vcv = ec_mul_short(treasury_value, VALUE_COMMIT_VALUE);
vcr = ec_mul(treasury_value_blind, VALUE_COMMIT_RANDOM);
value_commit = ec_add(vcv, vcr);
# Since the value commit is a curve point, we fetch its coordinates
# and constrain them:
value_commit_x = ec_get_x(value_commit);
value_commit_y = ec_get_y(value_commit);
constrain_instance(value_commit_x);
constrain_instance(value_commit_y);
authority_x_commit = poseidon_hash(authority_key_x, authority_key_x_blind);
constrain_instance(authority_x_commit);
authority_y_commit = poseidon_hash(authority_key_y, authority_key_y_blind);
constrain_instance(authority_y_commit);
gov_token_id_commit = poseidon_hash(
governance_token_id, governance_token_id_blind);
constrain_instance(gov_token_id_commit);
treasury_token_id_commit = poseidon_hash(
treasury_token_id, treasury_token_id_blind);
constrain_instance(treasury_token_id_commit);
# This circuit is not that interesting.
# It just states the bulla is a hash of 8 values.
# BullaMint subroutine
bulla = poseidon_hash(
treasury_value,
authority_key_x,
authority_key_y,
governance_token_id,
treasury_token_id,
serial,
coin_blind
bulla = poseidon_hash(
dao_proposer_limit,
dao_quorum,
dao_approval_ratio_quot,
dao_approval_ratio_base,
gdrk_token_id,
dao_public_x,
dao_public_y,
dao_bulla_blind,
);
constrain_instance(bulla);
}

View File

@@ -0,0 +1,63 @@
# DaoProposeInput: proves knowledge of a coin used as input weight when
# creating a DAO proposal. Publishes the coin's value commitment, its
# token-ID commitment, the merkle root proving set membership, and the
# coordinates of the transaction signature key.
constant "DaoProposeInput" {
EcFixedPointShort VALUE_COMMIT_VALUE,
EcFixedPoint VALUE_COMMIT_RANDOM,
EcFixedPointBase NULLIFIER_K,
}
# Private witnesses.
contract "DaoProposeInput" {
# Secret key; its derived public key is hashed into the coin
Base secret,
# Coin serial number
Base serial,
Base spend_hook,
Base user_data,
# Coin value and its pedersen blinding factor
Base value,
# Coin token ID and its commitment blind
Base token,
Base coin_blind,
Scalar value_blind,
Base token_blind,
# Position and path of the coin in the merkle tree
Uint32 leaf_pos,
MerklePath path,
# Secret key used to sign the transaction
Base signature_secret,
}
circuit "DaoProposeInput" {
# Poseidon hash of the nullifier
# NOTE(review): the nullifier is commented out here, unlike
# DaoVoteInput which constrains it — confirm this is intentional.
#nullifier = poseidon_hash(secret, serial);
#constrain_instance(nullifier);
# Pedersen commitment for coin's value
vcv = ec_mul_short(value, VALUE_COMMIT_VALUE);
vcr = ec_mul(value_blind, VALUE_COMMIT_RANDOM);
value_commit = ec_add(vcv, vcr);
# Since value_commit is a curve point, we fetch its coordinates
# and constrain them:
value_commit_x = ec_get_x(value_commit);
value_commit_y = ec_get_y(value_commit);
constrain_instance(value_commit_x);
constrain_instance(value_commit_y);
# Commitment for coin's token ID
token_commit = poseidon_hash(token, token_blind);
constrain_instance(token_commit);
# Coin hash
# Derive the coin owner's public key from the secret key
pub = ec_mul_base(secret, NULLIFIER_K);
pub_x = ec_get_x(pub);
pub_y = ec_get_y(pub);
C = poseidon_hash(pub_x, pub_y, value, token, serial, spend_hook, user_data, coin_blind);
# Merkle root
# Proves the coin C is a member of the tree with this public root
root = merkle_root(leaf_pos, path, C);
constrain_instance(root);
# Finally, we derive a public key for the signature and
# constrain its coordinates:
signature_public = ec_mul_base(signature_secret, NULLIFIER_K);
signature_x = ec_get_x(signature_public);
signature_y = ec_get_y(signature_public);
constrain_instance(signature_x);
constrain_instance(signature_y);
# At this point we've enforced all of our public inputs.
}

View File

@@ -0,0 +1,88 @@
# DaoProposeMain: proves that a proposal is well-formed for an existing DAO.
# Publishes: the governance token commitment, the merkle root containing the
# DAO bulla, the proposal bulla, and the pedersen commitment to the
# proposer's total funds. Also enforces proposal_amount > 0 and
# dao_proposer_limit <= total_funds.
constant "DaoProposeMain" {
EcFixedPointShort VALUE_COMMIT_VALUE,
EcFixedPoint VALUE_COMMIT_RANDOM,
}
contract "DaoProposeMain" {
# Proposers total number of gov tokens
Base total_funds,
Scalar total_funds_blind,
# Check the inputs and this proof are for the same token
Base gov_token_blind,
# proposal params
Base proposal_dest_x,
Base proposal_dest_y,
Base proposal_amount,
Base proposal_serial,
Base proposal_token_id,
Base proposal_blind,
# DAO params
Base dao_proposer_limit,
Base dao_quorum,
Base dao_approval_ratio_quot,
Base dao_approval_ratio_base,
Base gov_token_id,
Base dao_public_x,
Base dao_public_y,
Base dao_bulla_blind,
# Position and path of the DAO bulla in the merkle tree
Uint32 dao_leaf_pos,
MerklePath dao_path,
}
circuit "DaoProposeMain" {
# Commitment to the governance token; must match the one published
# by the input proofs
token_commit = poseidon_hash(gov_token_id, gov_token_blind);
constrain_instance(token_commit);
# Recompute the DAO bulla from the DAO parameters
dao_bulla = poseidon_hash(
dao_proposer_limit,
dao_quorum,
dao_approval_ratio_quot,
dao_approval_ratio_base,
gov_token_id,
dao_public_x,
dao_public_y,
dao_bulla_blind,
);
dao_root = merkle_root(dao_leaf_pos, dao_path, dao_bulla);
constrain_instance(dao_root);
# Proves this DAO is valid
proposal_bulla = poseidon_hash(
proposal_dest_x,
proposal_dest_y,
proposal_amount,
proposal_serial,
proposal_token_id,
dao_bulla,
proposal_blind,
# @tmp-workaround
proposal_blind,
);
constrain_instance(proposal_bulla);
# Rangeproof check for proposal amount
# Enforces 0 < proposal_amount
zero = witness_base(0);
less_than(zero, proposal_amount);
# This is the main check
# We check that dao_proposer_limit <= total_funds
# (implemented as dao_proposer_limit < total_funds + 1)
one = witness_base(1);
total_funds_1 = base_add(total_funds, one);
less_than(dao_proposer_limit, total_funds_1);
# Pedersen commitment for coin's value
vcv = ec_mul_short(total_funds, VALUE_COMMIT_VALUE);
vcr = ec_mul(total_funds_blind, VALUE_COMMIT_RANDOM);
total_funds_commit = ec_add(vcv, vcr);
# Since total_funds_commit is a curve point, we fetch its coordinates
# and constrain them:
total_funds_commit_x = ec_get_x(total_funds_commit);
total_funds_commit_y = ec_get_y(total_funds_commit);
constrain_instance(total_funds_commit_x);
constrain_instance(total_funds_commit_y);
}

View File

@@ -0,0 +1,64 @@
# DaoVoteInput: proves ownership of a coin used as voting weight.
# Publishes: the coin's nullifier (prevents double-voting with the same
# coin), its value commitment, the governance token commitment, the merkle
# root proving set membership, and the signature key coordinates.
constant "DaoVoteInput" {
EcFixedPointShort VALUE_COMMIT_VALUE,
EcFixedPoint VALUE_COMMIT_RANDOM,
EcFixedPointBase NULLIFIER_K,
}
# Private witnesses.
contract "DaoVoteInput" {
# Secret key; its derived public key is hashed into the coin
Base secret,
# Coin serial number
Base serial,
Base spend_hook,
Base user_data,
# Coin value and its pedersen blinding factor
Base value,
# Governance token ID and its commitment blind
Base gov_token_id,
Base coin_blind,
Scalar value_blind,
Base gov_token_blind,
# Position and path of the coin in the merkle tree
Uint32 leaf_pos,
MerklePath path,
# Secret key used to sign the transaction
Base signature_secret,
}
circuit "DaoVoteInput" {
# Poseidon hash of the nullifier
nullifier = poseidon_hash(secret, serial);
constrain_instance(nullifier);
# Pedersen commitment for coin's value
vcv = ec_mul_short(value, VALUE_COMMIT_VALUE);
vcr = ec_mul(value_blind, VALUE_COMMIT_RANDOM);
value_commit = ec_add(vcv, vcr);
# Since value_commit is a curve point, we fetch its coordinates
# and constrain them:
value_commit_x = ec_get_x(value_commit);
value_commit_y = ec_get_y(value_commit);
constrain_instance(value_commit_x);
constrain_instance(value_commit_y);
# Commitment for coin's token ID
token_commit = poseidon_hash(gov_token_id, gov_token_blind);
constrain_instance(token_commit);
# Coin hash
# Derive the coin owner's public key from the secret key
pub = ec_mul_base(secret, NULLIFIER_K);
pub_x = ec_get_x(pub);
pub_y = ec_get_y(pub);
C = poseidon_hash(pub_x, pub_y, value, gov_token_id, serial, spend_hook, user_data, coin_blind);
# Merkle root
# Proves the coin C is a member of the tree with this public root
root = merkle_root(leaf_pos, path, C);
constrain_instance(root);
# Finally, we derive a public key for the signature and
# constrain its coordinates:
signature_public = ec_mul_base(signature_secret, NULLIFIER_K);
signature_x = ec_get_x(signature_public);
signature_y = ec_get_y(signature_public);
constrain_instance(signature_x);
constrain_instance(signature_y);
# At this point we've enforced all of our public inputs.
}

View File

@@ -0,0 +1,98 @@
# DaoVoteMain: proves a vote on an existing proposal. Publishes the
# governance token commitment, the proposal bulla, and pedersen
# commitments to the yes-vote value and the total vote value.
# The vote option is constrained to be boolean (0 = no, 1 = yes).
constant "DaoVoteMain" {
EcFixedPointShort VALUE_COMMIT_VALUE,
EcFixedPoint VALUE_COMMIT_RANDOM,
}
contract "DaoVoteMain" {
# proposal params
Base proposal_dest_x,
Base proposal_dest_y,
Base proposal_amount,
Base proposal_serial,
Base proposal_token_id,
Base proposal_blind,
# DAO params
Base dao_proposer_limit,
Base dao_quorum,
Base dao_approval_ratio_quot,
Base dao_approval_ratio_base,
Base gov_token_id,
Base dao_public_x,
Base dao_public_y,
Base dao_bulla_blind,
# Is the vote yes or no
Base vote_option,
Scalar yes_vote_blind,
# Total amount of capital allocated to vote
Base all_votes_value,
Scalar all_votes_blind,
# Check the inputs and this proof are for the same token
Base gov_token_blind,
}
circuit "DaoVoteMain" {
# Commitment to the governance token; must match the one published
# by the vote input proofs
token_commit = poseidon_hash(gov_token_id, gov_token_blind);
constrain_instance(token_commit);
# Recompute the DAO bulla from the DAO parameters
dao_bulla = poseidon_hash(
dao_proposer_limit,
dao_quorum,
dao_approval_ratio_quot,
dao_approval_ratio_base,
gov_token_id,
dao_public_x,
dao_public_y,
dao_bulla_blind,
);
# Proposal bulla is valid means DAO bulla is also valid
# because of dao-propose-main.zk, already checks that when
# we first create the proposal. So it is redundant here.
proposal_bulla = poseidon_hash(
proposal_dest_x,
proposal_dest_y,
proposal_amount,
proposal_serial,
proposal_token_id,
dao_bulla,
proposal_blind,
# @tmp-workaround
proposal_blind,
);
constrain_instance(proposal_bulla);
# TODO: we need to check the proposal isn't invalidated
# that is expired or already executed.
# normally we call this yes vote
# Pedersen commitment for vote option
# yes_votes_value is all_votes_value when voting yes, 0 when voting no
yes_votes_value = base_mul(vote_option, all_votes_value);
yes_votes_value_c = ec_mul_short(yes_votes_value, VALUE_COMMIT_VALUE);
yes_votes_blind_c = ec_mul(yes_vote_blind, VALUE_COMMIT_RANDOM);
yes_votes_commit = ec_add(yes_votes_value_c, yes_votes_blind_c);
# get curve points and constrain
yes_votes_commit_x = ec_get_x(yes_votes_commit);
yes_votes_commit_y = ec_get_y(yes_votes_commit);
constrain_instance(yes_votes_commit_x);
constrain_instance(yes_votes_commit_y);
# Pedersen commitment for vote value
all_votes_c = ec_mul_short(all_votes_value, VALUE_COMMIT_VALUE);
all_votes_blind_c = ec_mul(all_votes_blind, VALUE_COMMIT_RANDOM);
all_votes_commit = ec_add(all_votes_c, all_votes_blind_c);
# get curve points and constrain
all_votes_commit_x = ec_get_x(all_votes_commit);
all_votes_commit_y = ec_get_y(all_votes_commit);
constrain_instance(all_votes_commit_x);
constrain_instance(all_votes_commit_y);
# Vote option should be 0 or 1
bool_check(vote_option);
}

14
bin/daod/proof/foo.zk Normal file
View File

@@ -0,0 +1,14 @@
# Minimal example circuit: constrains the single public input c = a + b.
# NOTE(review): the name "DaoMint" collides with the real DaoMint circuit
# in this commit — confirm the collision is intentional for this example.
constant "DaoMint" {
}
# Private witnesses: the two addends.
contract "DaoMint" {
Base a,
Base b,
}
circuit "DaoMint" {
c = base_add(a, b);
constrain_instance(c);
}

View File

@@ -0,0 +1,10 @@
use lazy_static::lazy_static;
use pasta_curves::{group::ff::Field, pallas};
use rand::rngs::OsRng;
pub mod validate;
pub mod wallet;
lazy_static! {
pub static ref FUNC_ID: pallas::Base = pallas::Base::random(&mut OsRng);
}

View File

@@ -0,0 +1,202 @@
use pasta_curves::{
arithmetic::CurveAffine,
group::{Curve, Group},
pallas,
};
use darkfi::{
crypto::{coin::Coin, keypair::PublicKey, types::DrkCircuitField},
util::serial::{Encodable, SerialDecodable, SerialEncodable},
Error as DarkFiError,
};
use std::any::{Any, TypeId};
use crate::{
dao_contract,
dao_contract::CONTRACT_ID,
demo::{CallDataBase, HashableBase, StateRegistry, Transaction, UpdateBase},
money_contract,
};
type Result<T> = std::result::Result<T, Error>;
#[derive(Debug, Clone, thiserror::Error)]
pub enum Error {
#[error("DarkFi error: {0}")]
DarkFiError(String),
#[error("InvalidNumberOfFuncCalls")]
InvalidNumberOfFuncCalls,
#[error("InvalidIndex")]
InvalidIndex,
#[error("InvalidCallData")]
InvalidCallData,
#[error("InvalidNumberOfOutputs")]
InvalidNumberOfOutputs,
#[error("InvalidOutput")]
InvalidOutput,
#[error("InvalidValueCommit")]
InvalidValueCommit,
#[error("InvalidVoteCommit")]
InvalidVoteCommit,
}
impl From<DarkFiError> for Error {
fn from(err: DarkFiError) -> Self {
Self::DarkFiError(err.to_string())
}
}
/// Public call data for `DAO::exec()`.
///
/// Carries the proposal bulla, the two expected output coins of the
/// accompanying `Money::transfer()` call, and the pedersen commitments
/// that are checked against the stored proposal vote tallies.
#[derive(Clone, SerialEncodable, SerialDecodable)]
pub struct CallData {
// Proposal bulla being executed
pub proposal: pallas::Base,
// Coin sent to the proposal destination
pub coin_0: pallas::Base,
// Change coin returned to the DAO treasury
pub coin_1: pallas::Base,
// Pedersen commitment to the yes-vote tally
pub yes_votes_commit: pallas::Point,
// Pedersen commitment to the total vote tally
pub all_votes_commit: pallas::Point,
// Pedersen commitment to the treasury input value
pub input_value_commit: pallas::Point,
}
impl CallDataBase for CallData {
// Public inputs for the "dao-exec" proof, in circuit constrain order.
// The trailing FUNC_ID and two zero fields presumably correspond to the
// spend-hook/user-data public inputs of the circuit — TODO confirm
// against the dao-exec circuit definition.
fn zk_public_values(&self) -> Vec<(String, Vec<DrkCircuitField>)> {
let yes_votes_commit_coords = self.yes_votes_commit.to_affine().coordinates().unwrap();
let all_votes_commit_coords = self.all_votes_commit.to_affine().coordinates().unwrap();
let input_value_commit_coords = self.input_value_commit.to_affine().coordinates().unwrap();
vec![(
"dao-exec".to_string(),
vec![
self.proposal,
self.coin_0,
self.coin_1,
*yes_votes_commit_coords.x(),
*yes_votes_commit_coords.y(),
*all_votes_commit_coords.x(),
*all_votes_commit_coords.y(),
*input_value_commit_coords.x(),
*input_value_commit_coords.y(),
*super::FUNC_ID,
pallas::Base::from(0),
pallas::Base::from(0),
],
)]
}
// Allows downcasting from the trait object in state_transition().
fn as_any(&self) -> &dyn Any {
self
}
// exec() requires no transaction signatures.
fn signature_public_keys(&self) -> Vec<PublicKey> {
vec![]
}
// Serializes this call data for inclusion in the transaction.
fn encode_bytes(
&self,
mut writer: &mut dyn std::io::Write,
) -> std::result::Result<usize, darkfi::Error> {
self.encode(&mut writer)
}
}
/// Validates a `DAO::exec()` function call within its parent transaction.
///
/// Enforces the transaction shape (exactly two func calls: a
/// `Money::transfer()` followed by this exec call), checks the transfer's
/// two output coins match the call data, checks the sum of the transfer's
/// input value commitments equals the declared input value commitment, and
/// checks the vote commitments against the stored `ProposalVote` state.
///
/// Returns an `Update` that removes the proposal's vote record on apply.
pub fn state_transition(
states: &StateRegistry,
func_call_index: usize,
parent_tx: &Transaction,
) -> Result<Box<dyn UpdateBase>> {
let func_call = &parent_tx.func_calls[func_call_index];
let call_data = func_call.call_data.as_any();
assert_eq!((&*call_data).type_id(), TypeId::of::<CallData>());
let call_data = call_data.downcast_ref::<CallData>();
// This will be inside wasm so unwrap is fine.
let call_data = call_data.unwrap();
// Enforce tx has correct format:
// 1. There should only be 2 func_call's
if parent_tx.func_calls.len() != 2 {
return Err(Error::InvalidNumberOfFuncCalls)
}
// 2. func_call_index == 1
if func_call_index != 1 {
return Err(Error::InvalidIndex)
}
// 3. First item should be a Money::transfer() calldata
if parent_tx.func_calls[0].func_id != *money_contract::transfer::FUNC_ID {
return Err(Error::InvalidCallData)
}
let money_transfer_call_data = parent_tx.func_calls[0].call_data.as_any();
let money_transfer_call_data =
money_transfer_call_data.downcast_ref::<money_contract::transfer::validate::CallData>();
let money_transfer_call_data = money_transfer_call_data.unwrap();
// NOTE(review): this type assertion runs after the downcast/unwrap above,
// so it can never fail if we reach it — confirm whether it is redundant.
assert_eq!(
money_transfer_call_data.type_id(),
TypeId::of::<money_contract::transfer::validate::CallData>()
);
// 4. Money::transfer() has exactly 2 outputs
if money_transfer_call_data.outputs.len() != 2 {
return Err(Error::InvalidNumberOfOutputs)
}
// Checks:
// 1. Check both coins in Money::transfer() are equal to our coin_0, coin_1
if money_transfer_call_data.outputs[0].revealed.coin != Coin(call_data.coin_0) {
return Err(Error::InvalidOutput)
}
if money_transfer_call_data.outputs[1].revealed.coin != Coin(call_data.coin_1) {
return Err(Error::InvalidOutput)
}
// 2. sum of Money::transfer() calldata input_value_commits == our input value commit
let mut input_value_commits = pallas::Point::identity();
for input in &money_transfer_call_data.inputs {
input_value_commits += input.revealed.value_commit;
}
if input_value_commits != call_data.input_value_commit {
return Err(Error::InvalidValueCommit)
}
// 3. get the ProposalVote from DAO::State
let state = states
.lookup::<dao_contract::State>(*CONTRACT_ID)
.expect("Return type is not of type State");
// NOTE(review): unwrap panics if the proposal has no vote record —
// confirm an unknown proposal should abort rather than return an error.
let proposal_votes = state.proposal_votes.get(&HashableBase(call_data.proposal)).unwrap();
// 4. check yes_votes_commit is the same as in ProposalVote
if proposal_votes.yes_votes_commit != call_data.yes_votes_commit {
return Err(Error::InvalidVoteCommit)
}
// 5. also check all_votes_commit
if proposal_votes.all_votes_commit != call_data.all_votes_commit {
return Err(Error::InvalidVoteCommit)
}
Ok(Box::new(Update { proposal: call_data.proposal }))
}
/// State update produced by a validated `DAO::exec()` call.
#[derive(Clone)]
pub struct Update {
// Bulla of the executed proposal
pub proposal: pallas::Base,
}
impl UpdateBase for Update {
// Removes the executed proposal's vote record from the DAO state.
// Panics if the record is missing (it was checked in state_transition).
fn apply(self: Box<Self>, states: &mut StateRegistry) {
let state = states
.lookup_mut::<dao_contract::State>(*CONTRACT_ID)
.expect("Return type is not of type State");
state.proposal_votes.remove(&HashableBase(self.proposal)).unwrap();
}
}

View File

@@ -0,0 +1,198 @@
use log::debug;
use rand::rngs::OsRng;
use halo2_proofs::circuit::Value;
use pasta_curves::{arithmetic::CurveAffine, group::Curve, pallas};
use darkfi::{
crypto::{
keypair::SecretKey,
util::{pedersen_commitment_u64, poseidon_hash},
Proof,
},
zk::vm::{Witness, ZkCircuit},
};
use crate::{
dao_contract::{
exec::validate::CallData, mint::wallet::DaoParams, propose::wallet::Proposal, CONTRACT_ID,
},
demo::{FuncCall, ZkContractInfo, ZkContractTable},
};
pub struct Builder {
pub proposal: Proposal,
pub dao: DaoParams,
pub yes_votes_value: u64,
pub all_votes_value: u64,
pub yes_votes_blind: pallas::Scalar,
pub all_votes_blind: pallas::Scalar,
pub user_serial: pallas::Base,
pub user_coin_blind: pallas::Base,
pub dao_serial: pallas::Base,
pub dao_coin_blind: pallas::Base,
pub input_value: u64,
pub input_value_blind: pallas::Scalar,
pub hook_dao_exec: pallas::Base,
pub signature_secret: SecretKey,
}
impl Builder {
    /// Consumes the builder and produces the `DAO::exec()` function call.
    ///
    /// Recomputes the DAO bulla, the proposal bulla and the two output
    /// coins, builds pedersen commitments for the vote tallies and the
    /// treasury input value, proves the "dao-exec" circuit over those
    /// public inputs, and wraps the call data plus proof into a `FuncCall`.
    pub fn build(self, zk_bins: &ZkContractTable) -> FuncCall {
        debug!(target: "dao_contract::exec::wallet::Builder", "build()");
        // Fixed log text: previously read "proposalserial{:?}" (no separator).
        debug!(target: "dao_contract::exec::wallet", "proposal serial: {:?}", self.proposal.serial);

        let mut proofs = vec![];

        let proposal_dest_coords = self.proposal.dest.0.to_affine().coordinates().unwrap();
        let proposal_amount = pallas::Base::from(self.proposal.amount);

        let dao_proposer_limit = pallas::Base::from(self.dao.proposer_limit);
        let dao_quorum = pallas::Base::from(self.dao.quorum);
        let dao_approval_ratio_quot = pallas::Base::from(self.dao.approval_ratio_quot);
        let dao_approval_ratio_base = pallas::Base::from(self.dao.approval_ratio_base);
        let dao_pubkey_coords = self.dao.public_key.0.to_affine().coordinates().unwrap();

        // The user-facing output coin carries no spend hook or user data.
        let user_spend_hook = pallas::Base::from(0);
        let user_data = pallas::Base::from(0);

        let input_value = pallas::Base::from(self.input_value);
        // Treasury change returned to the DAO: input minus amount sent out.
        let change = input_value - proposal_amount;

        // DAO bulla; must match the hash used in dao-mint.
        let dao_bulla = poseidon_hash::<8>([
            dao_proposer_limit,
            dao_quorum,
            dao_approval_ratio_quot,
            dao_approval_ratio_base,
            self.dao.gov_token_id,
            *dao_pubkey_coords.x(),
            *dao_pubkey_coords.y(),
            self.dao.bulla_blind,
        ]);

        // Proposal bulla; must match the hash used in dao-propose-main.
        let proposal_bulla = poseidon_hash::<8>([
            *proposal_dest_coords.x(),
            *proposal_dest_coords.y(),
            proposal_amount,
            self.proposal.serial,
            self.proposal.token_id,
            dao_bulla,
            self.proposal.blind,
            // @tmp-workaround
            self.proposal.blind,
        ]);

        // Output coin 0: funds sent to the proposal destination.
        let coin_0 = poseidon_hash::<8>([
            *proposal_dest_coords.x(),
            *proposal_dest_coords.y(),
            proposal_amount,
            self.proposal.token_id,
            self.proposal.serial,
            user_spend_hook,
            user_data,
            self.proposal.blind,
        ]);

        // Output coin 1: change back to the DAO, bound to the exec hook.
        let coin_1 = poseidon_hash::<8>([
            *dao_pubkey_coords.x(),
            *dao_pubkey_coords.y(),
            change,
            self.proposal.token_id,
            self.dao_serial,
            self.hook_dao_exec,
            proposal_bulla,
            self.dao_coin_blind,
        ]);

        // Pedersen commitments published in the call data.
        let yes_votes_commit = pedersen_commitment_u64(self.yes_votes_value, self.yes_votes_blind);
        let yes_votes_commit_coords = yes_votes_commit.to_affine().coordinates().unwrap();

        let all_votes_commit = pedersen_commitment_u64(self.all_votes_value, self.all_votes_blind);
        let all_votes_commit_coords = all_votes_commit.to_affine().coordinates().unwrap();

        let input_value_commit = pedersen_commitment_u64(self.input_value, self.input_value_blind);
        let input_value_commit_coords = input_value_commit.to_affine().coordinates().unwrap();

        let zk_info = zk_bins.lookup(&"dao-exec".to_string()).unwrap();
        let zk_info = if let ZkContractInfo::Binary(info) = zk_info {
            info
        } else {
            panic!("Not binary info")
        };
        let zk_bin = zk_info.bincode.clone();

        // Witness order must match the "dao-exec" contract section.
        let prover_witnesses = vec![
            // proposal params
            Witness::Base(Value::known(*proposal_dest_coords.x())),
            Witness::Base(Value::known(*proposal_dest_coords.y())),
            Witness::Base(Value::known(proposal_amount)),
            Witness::Base(Value::known(self.proposal.serial)),
            Witness::Base(Value::known(self.proposal.token_id)),
            Witness::Base(Value::known(self.proposal.blind)),
            // DAO params
            Witness::Base(Value::known(dao_proposer_limit)),
            Witness::Base(Value::known(dao_quorum)),
            Witness::Base(Value::known(dao_approval_ratio_quot)),
            Witness::Base(Value::known(dao_approval_ratio_base)),
            Witness::Base(Value::known(self.dao.gov_token_id)),
            Witness::Base(Value::known(*dao_pubkey_coords.x())),
            Witness::Base(Value::known(*dao_pubkey_coords.y())),
            Witness::Base(Value::known(self.dao.bulla_blind)),
            // votes
            Witness::Base(Value::known(pallas::Base::from(self.yes_votes_value))),
            Witness::Base(Value::known(pallas::Base::from(self.all_votes_value))),
            Witness::Scalar(Value::known(self.yes_votes_blind)),
            Witness::Scalar(Value::known(self.all_votes_blind)),
            // outputs + inputs
            Witness::Base(Value::known(self.user_serial)),
            Witness::Base(Value::known(self.user_coin_blind)),
            Witness::Base(Value::known(self.dao_serial)),
            Witness::Base(Value::known(self.dao_coin_blind)),
            Witness::Base(Value::known(input_value)),
            Witness::Scalar(Value::known(self.input_value_blind)),
            // misc
            Witness::Base(Value::known(self.hook_dao_exec)),
            Witness::Base(Value::known(user_spend_hook)),
            Witness::Base(Value::known(user_data)),
        ];

        // Public input order must match the circuit's constrain_instance order.
        let public_inputs = vec![
            proposal_bulla,
            coin_0,
            coin_1,
            *yes_votes_commit_coords.x(),
            *yes_votes_commit_coords.y(),
            *all_votes_commit_coords.x(),
            *all_votes_commit_coords.y(),
            *input_value_commit_coords.x(),
            *input_value_commit_coords.y(),
            self.hook_dao_exec,
            user_spend_hook,
            user_data,
        ];

        let circuit = ZkCircuit::new(prover_witnesses, zk_bin);
        // Fixed log target: was copy-pasted "example_contract::foo::wallet::Builder".
        debug!(target: "dao_contract::exec::wallet::Builder", "input_proof Proof::create()");
        let proving_key = &zk_info.proving_key;
        // Fixed stray ')' in the panic message.
        let input_proof = Proof::create(proving_key, &[circuit], &public_inputs, &mut OsRng)
            .expect("DAO::exec() proving error!");
        proofs.push(input_proof);

        let call_data = CallData {
            proposal: proposal_bulla,
            coin_0,
            coin_1,
            yes_votes_commit,
            all_votes_commit,
            input_value_commit,
        };

        FuncCall {
            contract_id: *CONTRACT_ID,
            func_id: *super::FUNC_ID,
            call_data: Box::new(call_data),
            proofs,
        }
    }
}

View File

@@ -0,0 +1,44 @@
use lazy_static::lazy_static;
use pasta_curves::{group::ff::Field, pallas};
use rand::rngs::OsRng;
pub mod validate;
/// This is an anonymous contract function that mutates the internal DAO state.
///
/// Corresponds to `mint(proposer_limit, quorum, approval_ratio, dao_pubkey, dao_blind)`
///
/// The prover creates a `Builder`, which then constructs the `Tx` that the verifier can
/// check using `state_transition()`.
///
/// # Arguments
///
/// * `proposer_limit` - Number of governance tokens that holder must possess in order to
/// propose a new vote.
/// * `quorum` - Number of minimum votes that must be met for a proposal to pass.
/// * `approval_ratio` - Ratio of winning to total votes for a proposal to pass.
/// * `dao_pubkey` - Public key of the DAO for permissioned access. This can also be
/// shared publicly if you want a full decentralized DAO.
/// * `dao_blind` - Blinding factor for the DAO bulla.
///
/// # Example
///
/// ```rust
/// let dao_proposer_limit = 110;
/// let dao_quorum = 110;
/// let dao_approval_ratio = 2;
///
/// let builder = dao_contract::Mint::Builder(
/// dao_proposer_limit,
/// dao_quorum,
/// dao_approval_ratio,
/// gov_token_id,
/// dao_pubkey,
/// dao_blind
/// );
/// let tx = builder.build();
/// ```
pub mod wallet;
lazy_static! {
pub static ref FUNC_ID: pallas::Base = pallas::Base::random(&mut OsRng);
}

View File

@@ -0,0 +1,73 @@
use std::any::{Any, TypeId};
use darkfi::{
crypto::{keypair::PublicKey, types::DrkCircuitField},
util::serial::{Encodable, SerialDecodable, SerialEncodable},
};
use crate::{
dao_contract::{DaoBulla, State, CONTRACT_ID},
demo::{CallDataBase, StateRegistry, Transaction, UpdateBase},
};
/// Validates a `DAO::mint()` function call.
///
/// Downcasts the call data to this module's `CallData` (the ZK proof over
/// the bulla is verified elsewhere via `zk_public_values`) and returns an
/// `Update` that registers the new DAO bulla on apply. No state is read
/// here, hence the unused `_states` parameter.
pub fn state_transition(
_states: &StateRegistry,
func_call_index: usize,
parent_tx: &Transaction,
) -> Result<Box<dyn UpdateBase>> {
let func_call = &parent_tx.func_calls[func_call_index];
let call_data = func_call.call_data.as_any();
assert_eq!((&*call_data).type_id(), TypeId::of::<CallData>());
let call_data = call_data.downcast_ref::<CallData>();
// This will be inside wasm so unwrap is fine.
let call_data = call_data.unwrap();
Ok(Box::new(Update { dao_bulla: call_data.dao_bulla.clone() }))
}
#[derive(Clone)]
pub struct Update {
pub dao_bulla: DaoBulla,
}
impl UpdateBase for Update {
fn apply(self: Box<Self>, states: &mut StateRegistry) {
// Lookup dao_contract state from registry
let state = states.lookup_mut::<State>(*CONTRACT_ID).unwrap();
// Add dao_bulla to state.dao_bullas
state.add_dao_bulla(self.dao_bulla);
}
}
#[derive(Debug, Clone, thiserror::Error)]
pub enum Error {}
type Result<T> = std::result::Result<T, Error>;
/// Public call data for `DAO::mint()`: just the newly created DAO bulla.
#[derive(Clone, SerialEncodable, SerialDecodable)]
pub struct CallData {
pub dao_bulla: DaoBulla,
}
impl CallDataBase for CallData {
// The "dao-mint" circuit has a single public input: the DAO bulla.
fn zk_public_values(&self) -> Vec<(String, Vec<DrkCircuitField>)> {
vec![("dao-mint".to_string(), vec![self.dao_bulla.0])]
}
// Allows downcasting from the trait object in state_transition().
fn as_any(&self) -> &dyn Any {
self
}
// mint() requires no transaction signatures.
fn signature_public_keys(&self) -> Vec<PublicKey> {
vec![]
}
// Serializes this call data for inclusion in the transaction.
fn encode_bytes(
&self,
mut writer: &mut dyn std::io::Write,
) -> std::result::Result<usize, darkfi::Error> {
self.encode(&mut writer)
}
}

View File

@@ -0,0 +1,98 @@
use crate::dao_contract::state::DaoBulla;
use darkfi::{
crypto::{
keypair::{PublicKey, SecretKey},
util::poseidon_hash,
Proof,
},
zk::vm::{Witness, ZkCircuit},
};
use halo2_proofs::circuit::Value;
use pasta_curves::{arithmetic::CurveAffine, group::Curve, pallas};
use rand::rngs::OsRng;
use crate::{
dao_contract::{mint::validate::CallData, CONTRACT_ID},
demo::{FuncCall, ZkContractInfo, ZkContractTable},
};
#[derive(Clone)]
pub struct DaoParams {
pub proposer_limit: u64,
pub quorum: u64,
pub approval_ratio_quot: u64,
pub approval_ratio_base: u64,
pub gov_token_id: pallas::Base,
pub public_key: PublicKey,
pub bulla_blind: pallas::Base,
}
pub struct Builder {
pub dao_proposer_limit: u64,
pub dao_quorum: u64,
pub dao_approval_ratio_quot: u64,
pub dao_approval_ratio_base: u64,
pub gov_token_id: pallas::Base,
pub dao_pubkey: PublicKey,
pub dao_bulla_blind: pallas::Base,
pub _signature_secret: SecretKey,
}
impl Builder {
/// Consumes self, and produces the function call
///
/// Hashes the DAO parameters into the DAO bulla, creates the "dao-mint"
/// ZK proof with the bulla as the sole public input, and returns the
/// resulting `FuncCall` carrying the bulla in its call data.
pub fn build(self, zk_bins: &ZkContractTable) -> FuncCall {
// Dao bulla
// Lift the integer DAO parameters into pallas base field elements.
let dao_proposer_limit = pallas::Base::from(self.dao_proposer_limit);
let dao_quorum = pallas::Base::from(self.dao_quorum);
let dao_approval_ratio_quot = pallas::Base::from(self.dao_approval_ratio_quot);
let dao_approval_ratio_base = pallas::Base::from(self.dao_approval_ratio_base);
let dao_pubkey_coords = self.dao_pubkey.0.to_affine().coordinates().unwrap();
// Poseidon hash of the 8 DAO parameters; must match the dao-mint circuit.
let dao_bulla = poseidon_hash::<8>([
dao_proposer_limit,
dao_quorum,
dao_approval_ratio_quot,
dao_approval_ratio_base,
self.gov_token_id,
*dao_pubkey_coords.x(),
*dao_pubkey_coords.y(),
self.dao_bulla_blind,
]);
let dao_bulla = DaoBulla(dao_bulla);
// Now create the mint proof
let zk_info = zk_bins.lookup(&"dao-mint".to_string()).unwrap();
let zk_info = if let ZkContractInfo::Binary(info) = zk_info {
info
} else {
panic!("Not binary info")
};
let zk_bin = zk_info.bincode.clone();
// Witness order must match the "dao-mint" contract section.
let prover_witnesses = vec![
Witness::Base(Value::known(dao_proposer_limit)),
Witness::Base(Value::known(dao_quorum)),
Witness::Base(Value::known(dao_approval_ratio_quot)),
Witness::Base(Value::known(dao_approval_ratio_base)),
Witness::Base(Value::known(self.gov_token_id)),
Witness::Base(Value::known(*dao_pubkey_coords.x())),
Witness::Base(Value::known(*dao_pubkey_coords.y())),
Witness::Base(Value::known(self.dao_bulla_blind)),
];
// Single public input: the DAO bulla.
let public_inputs = vec![dao_bulla.0];
let circuit = ZkCircuit::new(prover_witnesses, zk_bin);
let proving_key = &zk_info.proving_key;
let mint_proof = Proof::create(proving_key, &[circuit], &public_inputs, &mut OsRng)
.expect("DAO::mint() proving error!");
let call_data = CallData { dao_bulla };
FuncCall {
contract_id: *CONTRACT_ID,
func_id: *super::FUNC_ID,
call_data: Box::new(call_data),
proofs: vec![mint_proof],
}
}
}

View File

@@ -0,0 +1,20 @@
use lazy_static::lazy_static;
use pasta_curves::{group::ff::Field, pallas};
use rand::rngs::OsRng;
// mint()
pub mod mint;
// propose()
pub mod propose;
// vote()
pub mod vote;
// exec()
pub mod exec;

pub mod state;
pub use state::{DaoBulla, State};

lazy_static! {
    /// Contract ID, randomly generated at first use. NOTE(review): being
    /// random per-process, this is only suitable for the in-memory demo.
    pub static ref CONTRACT_ID: pallas::Base = pallas::Base::random(&mut OsRng);
}

View File

@@ -0,0 +1,10 @@
use lazy_static::lazy_static;
use pasta_curves::{group::ff::Field, pallas};
use rand::rngs::OsRng;
pub mod validate;
pub mod wallet;

lazy_static! {
    /// Function ID for DAO::propose(), randomly generated at first use
    /// (demo only — not stable across processes).
    pub static ref FUNC_ID: pallas::Base = pallas::Base::random(&mut OsRng);
}

View File

@@ -0,0 +1,171 @@
use darkfi::{
crypto::{keypair::PublicKey, merkle_node::MerkleNode, types::DrkCircuitField},
util::serial::{Encodable, SerialDecodable, SerialEncodable},
Error as DarkFiError,
};
use log::error;
use pasta_curves::{
arithmetic::CurveAffine,
group::{Curve, Group},
pallas,
};
use std::any::{Any, TypeId};
use crate::{
dao_contract,
dao_contract::State as DaoState,
demo::{CallDataBase, StateRegistry, Transaction, UpdateBase},
money_contract,
money_contract::state::State as MoneyState,
note::EncryptedNote2,
};
// used for debugging
// const TARGET: &str = "dao_contract::propose::validate::state_transition()";
/// Failure modes of the DAO::propose() state transition.
#[derive(Debug, Clone, thiserror::Error)]
pub enum Error {
    #[error("Invalid input merkle root")]
    InvalidInputMerkleRoot,

    #[error("Invalid DAO merkle root")]
    InvalidDaoMerkleRoot,

    #[error("DarkFi error: {0}")]
    DarkFiError(String),
}

/// Local result alias using the contract-level `Error`.
type Result<T> = std::result::Result<T, Error>;
impl From<DarkFiError> for Error {
fn from(err: DarkFiError) -> Self {
Self::DarkFiError(err.to_string())
}
}
/// Call data for DAO::propose(): shared header plus one entry per burnt input.
#[derive(Clone, SerialEncodable, SerialDecodable)]
pub struct CallData {
    pub header: Header,
    pub inputs: Vec<Input>,
}
impl CallDataBase for CallData {
    /// Public inputs for each ZK proof of this call, keyed by circuit name.
    ///
    /// Emits one "dao-propose-burn" tuple per input followed by a single
    /// "dao-propose-main" tuple. NOTE(review): ordering must match the order
    /// proofs are pushed in `wallet::Builder::build()`.
    fn zk_public_values(&self) -> Vec<(String, Vec<DrkCircuitField>)> {
        let mut zk_publics = Vec::new();
        let mut total_funds_commit = pallas::Point::identity();

        assert!(self.inputs.len() > 0, "inputs length cannot be zero");
        for input in &self.inputs {
            // Homomorphically accumulate the per-input value commitments.
            total_funds_commit += input.value_commit;
            let value_coords = input.value_commit.to_affine().coordinates().unwrap();
            let sigpub_coords = input.signature_public.0.to_affine().coordinates().unwrap();
            zk_publics.push((
                "dao-propose-burn".to_string(),
                vec![
                    *value_coords.x(),
                    *value_coords.y(),
                    self.header.token_commit,
                    input.merkle_root.0,
                    *sigpub_coords.x(),
                    *sigpub_coords.y(),
                ],
            ));
        }

        let total_funds_coords = total_funds_commit.to_affine().coordinates().unwrap();
        zk_publics.push((
            "dao-propose-main".to_string(),
            vec![
                self.header.token_commit,
                self.header.dao_merkle_root.0,
                self.header.proposal_bulla,
                *total_funds_coords.x(),
                *total_funds_coords.y(),
            ],
        ));
        zk_publics
    }

    /// Downcast support used by `state_transition()`.
    fn as_any(&self) -> &dyn Any {
        self
    }

    /// One signature key per input coin being burnt.
    fn signature_public_keys(&self) -> Vec<PublicKey> {
        let mut signature_public_keys = vec![];
        for input in self.inputs.clone() {
            signature_public_keys.push(input.signature_public);
        }
        signature_public_keys
    }

    /// Serialize this call data; returns the number of bytes written.
    fn encode_bytes(
        &self,
        mut writer: &mut dyn std::io::Write,
    ) -> std::result::Result<usize, darkfi::Error> {
        self.encode(&mut writer)
    }
}
/// Per-call data shared by all inputs of a propose() call.
#[derive(Clone, SerialEncodable, SerialDecodable)]
pub struct Header {
    // Merkle root proving the DAO bulla is registered.
    pub dao_merkle_root: MerkleNode,
    // Commitment to the governance token ID.
    pub token_commit: pallas::Base,
    // Commitment to the proposal parameters.
    pub proposal_bulla: pallas::Base,
    // Proposal note encrypted to the DAO public key.
    pub enc_note: EncryptedNote2,
}
/// One governance-token input backing the proposal.
#[derive(Clone, SerialEncodable, SerialDecodable)]
pub struct Input {
    // Pedersen commitment to the coin's value.
    pub value_commit: pallas::Point,
    // Money-state merkle root the coin is proven against.
    pub merkle_root: MerkleNode,
    // Key this input's transaction signature is checked against.
    pub signature_public: PublicKey,
}
/// Validate a DAO::propose() call against current state.
///
/// Checks that every input coin's merkle root is a valid money-state root and
/// that the DAO merkle root from the proof is known, then returns the update
/// that registers the proposal bulla.
pub fn state_transition(
    states: &StateRegistry,
    func_call_index: usize,
    parent_tx: &Transaction,
) -> Result<Box<dyn UpdateBase>> {
    let func_call = &parent_tx.func_calls[func_call_index];
    let call_data = func_call.call_data.as_any();

    assert_eq!((&*call_data).type_id(), TypeId::of::<CallData>());
    let call_data = call_data.downcast_ref::<CallData>();
    // This will be inside wasm so unwrap is fine.
    let call_data = call_data.unwrap();

    // The money-state lookup is loop-invariant; hoisted out of the loop
    // (previously performed once per input).
    let money_state = states.lookup::<MoneyState>(*money_contract::CONTRACT_ID).unwrap();

    // Check the merkle roots for the input coins are valid
    for input in &call_data.inputs {
        if !money_state.is_valid_merkle(&input.merkle_root) {
            return Err(Error::InvalidInputMerkleRoot)
        }
    }

    let state = states.lookup::<DaoState>(*dao_contract::CONTRACT_ID).unwrap();

    // Is the DAO bulla generated in the ZK proof valid
    if !state.is_valid_dao_merkle(&call_data.header.dao_merkle_root) {
        return Err(Error::InvalidDaoMerkleRoot)
    }

    // TODO: look at gov tokens avoid using already spent ones
    // Need to spend original coin and generate 2 nullifiers?

    Ok(Box::new(Update { proposal_bulla: call_data.header.proposal_bulla }))
}
/// State update produced by a valid propose() call.
#[derive(Clone)]
pub struct Update {
    // The proposal bulla to register.
    pub proposal_bulla: pallas::Base,
}
impl UpdateBase for Update {
    /// Apply the update: register the proposal bulla in DAO state.
    fn apply(self: Box<Self>, states: &mut StateRegistry) {
        let state = states.lookup_mut::<DaoState>(*dao_contract::CONTRACT_ID).unwrap();
        state.add_proposal_bulla(self.proposal_bulla)
    }
}

View File

@@ -0,0 +1,269 @@
use halo2_proofs::circuit::Value;
use incrementalmerkletree::Hashable;
use pasta_curves::{
arithmetic::CurveAffine,
group::{ff::Field, Curve},
pallas,
};
use rand::rngs::OsRng;
use darkfi::{
crypto::{
keypair::{PublicKey, SecretKey},
merkle_node::MerkleNode,
util::{pedersen_commitment_u64, poseidon_hash},
Proof,
},
util::serial::{SerialDecodable, SerialEncodable},
zk::vm::{Witness, ZkCircuit},
};
use crate::{
dao_contract::{
mint::wallet::DaoParams,
propose::validate::{CallData, Header, Input},
CONTRACT_ID,
},
demo::{FuncCall, ZkContractInfo, ZkContractTable},
money_contract, note,
};
/// Plaintext of the encrypted note attached to a propose() call.
#[derive(SerialEncodable, SerialDecodable)]
pub struct Note {
    pub proposal: Proposal,
}
/// A governance-token coin the proposer uses to back the proposal.
pub struct BuilderInput {
    // Secret key owning the coin.
    pub secret: SecretKey,
    pub note: money_contract::transfer::wallet::Note,
    // Leaf position and authentication path in the money merkle tree.
    pub leaf_position: incrementalmerkletree::Position,
    pub merkle_path: Vec<MerkleNode>,
    // Key used to sign the transaction for this input.
    pub signature_secret: SecretKey,
}
/// Parameters of a spending proposal; committed to in the proposal bulla.
#[derive(SerialEncodable, SerialDecodable, Clone)]
pub struct Proposal {
    // Recipient of the proposed payment.
    pub dest: PublicKey,
    pub amount: u64,
    pub serial: pallas::Base,
    pub token_id: pallas::Base,
    // Blinding factor for the proposal bulla.
    pub blind: pallas::Base,
}
/// Builder for a DAO::propose() call.
pub struct Builder {
    // Governance coins backing the proposal.
    pub inputs: Vec<BuilderInput>,
    pub proposal: Proposal,
    pub dao: DaoParams,
    // Leaf position and path of the DAO bulla in the DAO merkle tree.
    pub dao_leaf_position: incrementalmerkletree::Position,
    pub dao_merkle_path: Vec<MerkleNode>,
    pub dao_merkle_root: MerkleNode,
}
impl Builder {
    /// Consumes self and produces a DAO::propose() function call: one
    /// "dao-propose-burn" proof per input, then one "dao-propose-main" proof.
    pub fn build(self, zk_bins: &ZkContractTable) -> FuncCall {
        let mut proofs = vec![];

        // Blind for the exported governance token ID commitment.
        let gov_token_blind = pallas::Base::random(&mut OsRng);

        let mut inputs = vec![];
        let mut total_funds = 0;
        let mut total_funds_blinds = pallas::Scalar::from(0);

        for input in self.inputs {
            // Fresh blind per input; their sum blinds the total commitment.
            let funds_blind = pallas::Scalar::random(&mut OsRng);
            total_funds += input.note.value;
            total_funds_blinds += funds_blind;

            let signature_public = PublicKey::from_secret(input.signature_secret);

            let zk_info = zk_bins.lookup(&"dao-propose-burn".to_string()).unwrap();
            let zk_info = if let ZkContractInfo::Binary(info) = zk_info {
                info
            } else {
                panic!("Not binary info")
            };
            let zk_bin = zk_info.bincode.clone();

            // Note from the previous output
            let note = input.note;
            let leaf_pos: u64 = input.leaf_position.into();

            // NOTE(review): witness order must match the circuit layout.
            let prover_witnesses = vec![
                Witness::Base(Value::known(input.secret.0)),
                Witness::Base(Value::known(note.serial)),
                // Two coin fields fixed to zero, mirrored in `coin` below.
                Witness::Base(Value::known(pallas::Base::from(0))),
                Witness::Base(Value::known(pallas::Base::from(0))),
                Witness::Base(Value::known(pallas::Base::from(note.value))),
                Witness::Base(Value::known(note.token_id)),
                Witness::Base(Value::known(note.coin_blind)),
                Witness::Scalar(Value::known(funds_blind)),
                Witness::Base(Value::known(gov_token_blind)),
                Witness::Uint32(Value::known(leaf_pos.try_into().unwrap())),
                Witness::MerklePath(Value::known(input.merkle_path.clone().try_into().unwrap())),
                Witness::Base(Value::known(input.signature_secret.0)),
            ];

            // Recompute the coin commitment as the money contract defines it.
            let public_key = PublicKey::from_secret(input.secret);
            let coords = public_key.0.to_affine().coordinates().unwrap();
            let coin = poseidon_hash::<8>([
                *coords.x(),
                *coords.y(),
                pallas::Base::from(note.value),
                note.token_id,
                note.serial,
                pallas::Base::from(0),
                pallas::Base::from(0),
                note.coin_blind,
            ]);

            // Fold the authentication path to get the expected merkle root.
            let merkle_root = {
                let position: u64 = input.leaf_position.into();
                let mut current = MerkleNode(coin);
                for (level, sibling) in input.merkle_path.iter().enumerate() {
                    let level = level as u8;
                    // Bit `level` of the position decides left/right ordering.
                    current = if position & (1 << level) == 0 {
                        MerkleNode::combine(level.into(), &current, sibling)
                    } else {
                        MerkleNode::combine(level.into(), sibling, &current)
                    };
                }
                current
            };

            let token_commit = poseidon_hash::<2>([note.token_id, gov_token_blind]);
            // Only governance tokens may back a proposal.
            assert_eq!(self.dao.gov_token_id, note.token_id);

            let value_commit = pedersen_commitment_u64(note.value, funds_blind);
            let value_coords = value_commit.to_affine().coordinates().unwrap();
            let sigpub_coords = signature_public.0.to_affine().coordinates().unwrap();

            // Must match validate::CallData::zk_public_values() ordering.
            let public_inputs = vec![
                *value_coords.x(),
                *value_coords.y(),
                token_commit,
                merkle_root.0,
                *sigpub_coords.x(),
                *sigpub_coords.y(),
            ];

            let circuit = ZkCircuit::new(prover_witnesses, zk_bin);
            let proving_key = &zk_info.proving_key;
            let input_proof = Proof::create(proving_key, &[circuit], &public_inputs, &mut OsRng)
                .expect("DAO::propose() proving error!");
            proofs.push(input_proof);

            let input = Input { value_commit, merkle_root, signature_public };
            inputs.push(input);
        }

        // Total committed funds; blind is the sum of the per-input blinds.
        let total_funds_commit = pedersen_commitment_u64(total_funds, total_funds_blinds);
        let total_funds_coords = total_funds_commit.to_affine().coordinates().unwrap();
        let total_funds = pallas::Base::from(total_funds);

        let token_commit = poseidon_hash::<2>([self.dao.gov_token_id, gov_token_blind]);

        let proposal_dest_coords = self.proposal.dest.0.to_affine().coordinates().unwrap();
        let proposal_dest_x = *proposal_dest_coords.x();
        let proposal_dest_y = *proposal_dest_coords.y();

        let proposal_amount = pallas::Base::from(self.proposal.amount);

        let dao_proposer_limit = pallas::Base::from(self.dao.proposer_limit);
        let dao_quorum = pallas::Base::from(self.dao.quorum);
        let dao_approval_ratio_quot = pallas::Base::from(self.dao.approval_ratio_quot);
        let dao_approval_ratio_base = pallas::Base::from(self.dao.approval_ratio_base);

        let dao_pubkey_coords = self.dao.public_key.0.to_affine().coordinates().unwrap();

        // Must reproduce mint::wallet's bulla construction exactly.
        let dao_bulla = poseidon_hash::<8>([
            dao_proposer_limit,
            dao_quorum,
            dao_approval_ratio_quot,
            dao_approval_ratio_base,
            self.dao.gov_token_id,
            *dao_pubkey_coords.x(),
            *dao_pubkey_coords.y(),
            self.dao.bulla_blind,
        ]);

        let dao_leaf_position: u64 = self.dao_leaf_position.into();

        let proposal_bulla = poseidon_hash::<8>([
            proposal_dest_x,
            proposal_dest_y,
            proposal_amount,
            self.proposal.serial,
            self.proposal.token_id,
            dao_bulla,
            self.proposal.blind,
            // @tmp-workaround: blind repeated to fill the 8-ary hash.
            self.proposal.blind,
        ]);

        let zk_info = zk_bins.lookup(&"dao-propose-main".to_string()).unwrap();
        let zk_info = if let ZkContractInfo::Binary(info) = zk_info {
            info
        } else {
            panic!("Not binary info")
        };
        let zk_bin = zk_info.bincode.clone();

        let prover_witnesses = vec![
            // Proposers total number of gov tokens
            Witness::Base(Value::known(total_funds)),
            Witness::Scalar(Value::known(total_funds_blinds)),
            // Used for blinding exported gov token ID
            Witness::Base(Value::known(gov_token_blind)),
            // proposal params
            Witness::Base(Value::known(proposal_dest_x)),
            Witness::Base(Value::known(proposal_dest_y)),
            Witness::Base(Value::known(proposal_amount)),
            Witness::Base(Value::known(self.proposal.serial)),
            Witness::Base(Value::known(self.proposal.token_id)),
            Witness::Base(Value::known(self.proposal.blind)),
            // DAO params
            Witness::Base(Value::known(dao_proposer_limit)),
            Witness::Base(Value::known(dao_quorum)),
            Witness::Base(Value::known(dao_approval_ratio_quot)),
            Witness::Base(Value::known(dao_approval_ratio_base)),
            Witness::Base(Value::known(self.dao.gov_token_id)),
            Witness::Base(Value::known(*dao_pubkey_coords.x())),
            Witness::Base(Value::known(*dao_pubkey_coords.y())),
            Witness::Base(Value::known(self.dao.bulla_blind)),
            Witness::Uint32(Value::known(dao_leaf_position.try_into().unwrap())),
            Witness::MerklePath(Value::known(self.dao_merkle_path.try_into().unwrap())),
        ];

        // Must match validate::CallData::zk_public_values() ordering.
        let public_inputs = vec![
            token_commit,
            self.dao_merkle_root.0,
            proposal_bulla,
            *total_funds_coords.x(),
            *total_funds_coords.y(),
        ];

        let circuit = ZkCircuit::new(prover_witnesses, zk_bin);
        let proving_key = &zk_info.proving_key;
        let main_proof = Proof::create(proving_key, &[circuit], &public_inputs, &mut OsRng)
            .expect("DAO::propose() proving error!");
        proofs.push(main_proof);

        // Encrypt the proposal note to the DAO public key.
        let note = Note { proposal: self.proposal };
        let enc_note = note::encrypt(&note, &self.dao.public_key).unwrap();

        let header = Header {
            dao_merkle_root: self.dao_merkle_root,
            proposal_bulla,
            token_commit,
            enc_note,
        };

        let call_data = CallData { header, inputs };

        FuncCall {
            contract_id: *CONTRACT_ID,
            func_id: *super::FUNC_ID,
            call_data: Box::new(call_data),
            proofs,
        }
    }
}

View File

@@ -0,0 +1,97 @@
use incrementalmerkletree::{bridgetree::BridgeTree, Tree};
use pasta_curves::{group::Group, pallas};
use std::{any::Any, collections::HashMap};
use crate::demo::HashableBase;
use darkfi::{
crypto::{constants::MERKLE_DEPTH, merkle_node::MerkleNode, nullifier::Nullifier},
util::serial::{SerialDecodable, SerialEncodable},
};
/// Commitment ("bulla") to a DAO's configuration parameters.
#[derive(Clone, SerialEncodable, SerialDecodable)]
pub struct DaoBulla(pub pallas::Base);

/// Merkle tree type used for both DAO bullas and proposal bullas.
type MerkleTree = BridgeTree<MerkleNode, MERKLE_DEPTH>;
/// Running vote tally for a single proposal.
pub struct ProposalVotes {
    // TODO: might be more logical to have 'yes_votes_commit' and 'no_votes_commit'
    /// Weighted vote commit
    pub yes_votes_commit: pallas::Point,
    /// All value staked in the vote
    pub all_votes_commit: pallas::Point,
    /// Vote nullifiers
    pub vote_nulls: Vec<Nullifier>,
}
impl ProposalVotes {
    /// Returns true when `nullifier` was already recorded for this proposal.
    pub fn nullifier_exists(&self, nullifier: &Nullifier) -> bool {
        self.vote_nulls.contains(nullifier)
    }
}
/// This DAO state is for all DAOs on the network. There should only be a single instance.
pub struct State {
    // Every registered DAO bulla, with its merkle tree and historical roots.
    dao_bullas: Vec<DaoBulla>,
    pub dao_tree: MerkleTree,
    pub dao_roots: Vec<MerkleNode>,

    //proposal_bullas: Vec<pallas::Base>,
    // Registered proposals, their merkle roots, and per-proposal tallies.
    pub proposal_tree: MerkleTree,
    pub proposal_roots: Vec<MerkleNode>,
    pub proposal_votes: HashMap<HashableBase, ProposalVotes>,
}
impl State {
    /// Create a fresh, empty DAO state, boxed as `Any` for the state registry.
    pub fn new() -> Box<dyn Any> {
        Box::new(Self {
            dao_bullas: Vec::new(),
            // 100 is the number of checkpoints kept by the bridge tree.
            dao_tree: MerkleTree::new(100),
            dao_roots: Vec::new(),
            //proposal_bullas: Vec::new(),
            proposal_tree: MerkleTree::new(100),
            proposal_roots: Vec::new(),
            proposal_votes: HashMap::new(),
        })
    }

    /// Register a new DAO bulla: append to the tree and record the new root.
    pub fn add_dao_bulla(&mut self, bulla: DaoBulla) {
        let node = MerkleNode(bulla.0);
        self.dao_bullas.push(bulla);
        self.dao_tree.append(&node);
        self.dao_roots.push(self.dao_tree.root(0).unwrap());
    }

    /// Register a proposal bulla and initialize its (empty) vote tally.
    pub fn add_proposal_bulla(&mut self, bulla: pallas::Base) {
        let node = MerkleNode(bulla);
        //self.proposal_bullas.push(bulla);
        self.proposal_tree.append(&node);
        self.proposal_roots.push(self.proposal_tree.root(0).unwrap());
        self.proposal_votes.insert(
            HashableBase(bulla),
            ProposalVotes {
                // Commitments start at the identity point (zero votes).
                yes_votes_commit: pallas::Point::identity(),
                all_votes_commit: pallas::Point::identity(),
                vote_nulls: Vec::new(),
            },
        );
    }

    /// Read-only tally lookup; `None` means the proposal is unknown.
    pub fn lookup_proposal_votes(&self, proposal_bulla: pallas::Base) -> Option<&ProposalVotes> {
        self.proposal_votes.get(&HashableBase(proposal_bulla))
    }

    /// Mutable tally lookup; `None` means the proposal is unknown.
    pub fn lookup_proposal_votes_mut(
        &mut self,
        proposal_bulla: pallas::Base,
    ) -> Option<&mut ProposalVotes> {
        self.proposal_votes.get_mut(&HashableBase(proposal_bulla))
    }

    /// True if `root` is any historical root of the DAO bulla tree.
    pub fn is_valid_dao_merkle(&self, root: &MerkleNode) -> bool {
        self.dao_roots.iter().any(|m| m == root)
    }

    // TODO: This never gets called.
    /// True if `root` is any historical root of the proposal bulla tree.
    pub fn _is_valid_proposal_merkle(&self, root: &MerkleNode) -> bool {
        self.proposal_roots.iter().any(|m| m == root)
    }
}

View File

@@ -0,0 +1,10 @@
use lazy_static::lazy_static;
use pasta_curves::{group::ff::Field, pallas};
use rand::rngs::OsRng;
pub mod validate;
pub mod wallet;

lazy_static! {
    /// Function ID for DAO::vote(), randomly generated at first use
    /// (demo only — not stable across processes).
    pub static ref FUNC_ID: pallas::Base = pallas::Base::random(&mut OsRng);
}

View File

@@ -0,0 +1,206 @@
use darkfi::{
crypto::{
keypair::PublicKey, merkle_node::MerkleNode, nullifier::Nullifier, types::DrkCircuitField,
},
util::serial::{Encodable, SerialDecodable, SerialEncodable},
Error as DarkFiError,
};
use log::error;
use pasta_curves::{
arithmetic::CurveAffine,
group::{Curve, Group},
pallas,
};
use std::any::{Any, TypeId};
use crate::{
dao_contract,
dao_contract::State as DaoState,
demo::{CallDataBase, StateRegistry, Transaction, UpdateBase},
money_contract,
money_contract::state::State as MoneyState,
note::EncryptedNote2,
};
/// Failure modes of the DAO::vote() state transition.
#[derive(Debug, Clone, thiserror::Error)]
pub enum Error {
    #[error("Invalid proposal")]
    InvalidProposal,

    #[error("Voting with already spent coinage")]
    SpentCoin,

    #[error("Double voting")]
    DoubleVote,

    #[error("Invalid input merkle root")]
    InvalidInputMerkleRoot,

    #[error("DarkFi error: {0}")]
    DarkFiError(String),
}

/// Local result alias using the contract-level `Error`.
type Result<T> = std::result::Result<T, Error>;
impl From<DarkFiError> for Error {
fn from(err: DarkFiError) -> Self {
Self::DarkFiError(err.to_string())
}
}
/// Call data for DAO::vote(): shared header plus one entry per voting input.
#[derive(Clone, SerialEncodable, SerialDecodable)]
pub struct CallData {
    pub header: Header,
    pub inputs: Vec<Input>,
}
impl CallDataBase for CallData {
    /// Public inputs for each ZK proof of this call, keyed by circuit name.
    ///
    /// Emits one "dao-vote-burn" tuple per input followed by a single
    /// "dao-vote-main" tuple. NOTE(review): ordering must match the order
    /// proofs are pushed in `wallet::Builder::build()`.
    fn zk_public_values(&self) -> Vec<(String, Vec<DrkCircuitField>)> {
        let mut zk_publics = Vec::new();
        let mut all_votes_commit = pallas::Point::identity();

        assert!(self.inputs.len() > 0, "inputs length cannot be zero");
        for input in &self.inputs {
            // Homomorphically accumulate the per-input vote commitments.
            all_votes_commit += input.vote_commit;
            let value_coords = input.vote_commit.to_affine().coordinates().unwrap();
            let sigpub_coords = input.signature_public.0.to_affine().coordinates().unwrap();
            zk_publics.push((
                "dao-vote-burn".to_string(),
                vec![
                    input.nullifier.0,
                    *value_coords.x(),
                    *value_coords.y(),
                    self.header.token_commit,
                    input.merkle_root.0,
                    *sigpub_coords.x(),
                    *sigpub_coords.y(),
                ],
            ));
        }

        let yes_vote_commit_coords = self.header.yes_vote_commit.to_affine().coordinates().unwrap();
        let vote_commit_coords = all_votes_commit.to_affine().coordinates().unwrap();
        zk_publics.push((
            "dao-vote-main".to_string(),
            vec![
                self.header.token_commit,
                self.header.proposal_bulla,
                *yes_vote_commit_coords.x(),
                *yes_vote_commit_coords.y(),
                *vote_commit_coords.x(),
                *vote_commit_coords.y(),
            ],
        ));
        zk_publics
    }

    /// Downcast support used by `state_transition()`.
    fn as_any(&self) -> &dyn Any {
        self
    }

    /// One signature key per voting input.
    fn signature_public_keys(&self) -> Vec<PublicKey> {
        let mut signature_public_keys = vec![];
        for input in self.inputs.clone() {
            signature_public_keys.push(input.signature_public);
        }
        signature_public_keys
    }

    /// Serialize this call data; returns the number of bytes written.
    fn encode_bytes(
        &self,
        mut writer: &mut dyn std::io::Write,
    ) -> std::result::Result<usize, darkfi::Error> {
        self.encode(&mut writer)
    }
}
/// Per-call data shared by all inputs of a vote() call.
#[derive(Clone, SerialEncodable, SerialDecodable)]
pub struct Header {
    // Commitment to the governance token ID.
    pub token_commit: pallas::Base,
    // Proposal being voted on.
    pub proposal_bulla: pallas::Base,
    // Commitment to the weighted "yes" portion of the vote.
    pub yes_vote_commit: pallas::Point,
    // Vote note encrypted to the vote keypair.
    pub enc_note: EncryptedNote2,
}
/// One governance-token input contributing voting weight.
#[derive(Clone, SerialEncodable, SerialDecodable)]
pub struct Input {
    // Nullifier preventing reuse of the coin for this proposal.
    pub nullifier: Nullifier,
    // Pedersen commitment to the coin's value (= voting weight).
    pub vote_commit: pallas::Point,
    // Money-state merkle root the coin is proven against.
    pub merkle_root: MerkleNode,
    // Key this input's transaction signature is checked against.
    pub signature_public: PublicKey,
}
/// Validate a DAO::vote() call against current state.
///
/// Checks that the proposal exists, that every input coin is unspent, has a
/// valid money-state merkle root, and has not already voted on this proposal,
/// then returns the update accumulating the vote.
pub fn state_transition(
    states: &StateRegistry,
    func_call_index: usize,
    parent_tx: &Transaction,
) -> Result<Box<dyn UpdateBase>> {
    let func_call = &parent_tx.func_calls[func_call_index];
    let call_data = func_call.call_data.as_any();

    assert_eq!((&*call_data).type_id(), TypeId::of::<CallData>());
    let call_data = call_data.downcast_ref::<CallData>();
    // This will be inside wasm so unwrap is fine.
    let call_data = call_data.unwrap();

    let dao_state = states.lookup::<DaoState>(*dao_contract::CONTRACT_ID).unwrap();

    // Check proposal_bulla exists
    let votes_info = dao_state.lookup_proposal_votes(call_data.header.proposal_bulla);
    if votes_info.is_none() {
        return Err(Error::InvalidProposal)
    }
    let votes_info = votes_info.unwrap();

    // The money-state lookup is loop-invariant; hoisted out of the loop
    // (previously performed once per input).
    let money_state = states.lookup::<MoneyState>(*money_contract::CONTRACT_ID).unwrap();

    // Check the merkle roots for the input coins are valid
    let mut vote_nulls = Vec::new();
    let mut all_vote_commit = pallas::Point::identity();
    for input in &call_data.inputs {
        if !money_state.is_valid_merkle(&input.merkle_root) {
            return Err(Error::InvalidInputMerkleRoot)
        }

        // The coin must not be spent on-chain...
        if money_state.nullifier_exists(&input.nullifier) {
            return Err(Error::SpentCoin)
        }

        // ...and must not have voted on this proposal before.
        if votes_info.nullifier_exists(&input.nullifier) {
            return Err(Error::DoubleVote)
        }

        all_vote_commit += input.vote_commit;
        vote_nulls.push(input.nullifier);
    }

    Ok(Box::new(Update {
        proposal_bulla: call_data.header.proposal_bulla,
        vote_nulls,
        yes_vote_commit: call_data.header.yes_vote_commit,
        all_vote_commit,
    }))
}
/// State update produced by a valid vote() call.
#[derive(Clone)]
pub struct Update {
    // Proposal whose tally is being updated.
    proposal_bulla: pallas::Base,
    // Nullifiers of the coins used to vote.
    vote_nulls: Vec<Nullifier>,
    pub yes_vote_commit: pallas::Point,
    pub all_vote_commit: pallas::Point,
}
impl UpdateBase for Update {
    /// Apply the vote: homomorphically add commitments to the proposal tally
    /// and record the spent vote nullifiers.
    fn apply(mut self: Box<Self>, states: &mut StateRegistry) {
        let state = states.lookup_mut::<DaoState>(*dao_contract::CONTRACT_ID).unwrap();
        let votes_info = state.lookup_proposal_votes_mut(self.proposal_bulla).unwrap();
        votes_info.yes_votes_commit += self.yes_vote_commit;
        votes_info.all_votes_commit += self.all_vote_commit;
        votes_info.vote_nulls.append(&mut self.vote_nulls);
    }
}

View File

@@ -0,0 +1,289 @@
use darkfi::{
crypto::{
keypair::{Keypair, PublicKey, SecretKey},
merkle_node::MerkleNode,
nullifier::Nullifier,
util::{pedersen_commitment_u64, poseidon_hash},
Proof,
},
util::serial::{SerialDecodable, SerialEncodable},
zk::vm::{Witness, ZkCircuit},
};
use halo2_proofs::circuit::Value;
use incrementalmerkletree::Hashable;
use pasta_curves::{
arithmetic::CurveAffine,
group::{ff::Field, Curve},
pallas,
};
use rand::rngs::OsRng;
use crate::{
dao_contract::{
mint::wallet::DaoParams,
propose::wallet::Proposal,
vote::validate::{CallData, Header, Input},
CONTRACT_ID,
},
demo::{FuncCall, ZkContractInfo, ZkContractTable},
money_contract, note,
};
use log::debug;
/// Plaintext of the encrypted note attached to a vote() call.
#[derive(SerialEncodable, SerialDecodable)]
pub struct Note {
    pub vote: Vote,
    // Total voting weight and the blind of its Pedersen commitment.
    pub vote_value: u64,
    pub vote_value_blind: pallas::Scalar,
}
/// The vote choice and the blind for its commitment.
#[derive(SerialEncodable, SerialDecodable)]
pub struct Vote {
    // true = yes, false = no.
    pub vote_option: bool,
    pub vote_option_blind: pallas::Scalar,
}
/// A governance-token coin used as voting weight.
pub struct BuilderInput {
    // Secret key owning the coin.
    pub secret: SecretKey,
    pub note: money_contract::transfer::wallet::Note,
    // Leaf position and authentication path in the money merkle tree.
    pub leaf_position: incrementalmerkletree::Position,
    pub merkle_path: Vec<MerkleNode>,
    // Key used to sign the transaction for this input.
    pub signature_secret: SecretKey,
}
// TODO: should be token locking voting?
// Inside ZKproof, check proposal is correct.
/// Builder for a DAO::vote() call.
pub struct Builder {
    // Governance coins contributing voting weight.
    pub inputs: Vec<BuilderInput>,
    pub vote: Vote,
    // Keypair the vote note is encrypted to.
    pub vote_keypair: Keypair,
    pub proposal: Proposal,
    pub dao: DaoParams,
}
impl Builder {
    /// Consumes self and produces a DAO::vote() function call: one
    /// "dao-vote-burn" proof per input, then one "dao-vote-main" proof.
    pub fn build(self, zk_bins: &ZkContractTable) -> FuncCall {
        debug!(target: "dao_contract::vote::wallet::Builder", "build()");
        let mut proofs = vec![];

        // Blind for the exported governance token ID commitment.
        let gov_token_blind = pallas::Base::random(&mut OsRng);

        let mut inputs = vec![];
        let mut vote_value = 0;
        let mut vote_value_blind = pallas::Scalar::from(0);

        for input in self.inputs {
            // Fresh blind per input; their sum blinds the total commitment.
            let value_blind = pallas::Scalar::random(&mut OsRng);
            vote_value += input.note.value;
            vote_value_blind += value_blind;

            let signature_public = PublicKey::from_secret(input.signature_secret);

            let zk_info = zk_bins.lookup(&"dao-vote-burn".to_string()).unwrap();
            let zk_info = if let ZkContractInfo::Binary(info) = zk_info {
                info
            } else {
                panic!("Not binary info")
            };
            let zk_bin = zk_info.bincode.clone();

            // Note from the previous output
            let note = input.note;
            let leaf_pos: u64 = input.leaf_position.into();

            let prover_witnesses = vec![
                Witness::Base(Value::known(input.secret.0)),
                Witness::Base(Value::known(note.serial)),
                // Two coin fields fixed to zero, mirrored in `coin` below.
                Witness::Base(Value::known(pallas::Base::from(0))),
                Witness::Base(Value::known(pallas::Base::from(0))),
                Witness::Base(Value::known(pallas::Base::from(note.value))),
                Witness::Base(Value::known(note.token_id)),
                Witness::Base(Value::known(note.coin_blind)),
                // BUGFIX: witness the per-input blind `value_blind`, not the
                // running total `vote_value_blind`. With more than one input
                // the accumulated blind made the sum of per-input vote
                // commitments diverge from pedersen(vote_value,
                // vote_value_blind), so the "dao-vote-main" proof could not
                // be satisfied. Mirrors propose::wallet::Builder.
                Witness::Scalar(Value::known(value_blind)),
                Witness::Base(Value::known(gov_token_blind)),
                Witness::Uint32(Value::known(leaf_pos.try_into().unwrap())),
                Witness::MerklePath(Value::known(input.merkle_path.clone().try_into().unwrap())),
                Witness::Base(Value::known(input.signature_secret.0)),
            ];

            // Recompute the coin commitment as the money contract defines it.
            let public_key = PublicKey::from_secret(input.secret);
            let coords = public_key.0.to_affine().coordinates().unwrap();
            let coin = poseidon_hash::<8>([
                *coords.x(),
                *coords.y(),
                pallas::Base::from(note.value),
                note.token_id,
                note.serial,
                pallas::Base::from(0),
                pallas::Base::from(0),
                note.coin_blind,
            ]);

            // Fold the authentication path to get the expected merkle root.
            let merkle_root = {
                let position: u64 = input.leaf_position.into();
                let mut current = MerkleNode(coin);
                for (level, sibling) in input.merkle_path.iter().enumerate() {
                    let level = level as u8;
                    // Bit `level` of the position decides left/right ordering.
                    current = if position & (1 << level) == 0 {
                        MerkleNode::combine(level.into(), &current, sibling)
                    } else {
                        MerkleNode::combine(level.into(), sibling, &current)
                    };
                }
                current
            };

            let token_commit = poseidon_hash::<2>([note.token_id, gov_token_blind]);
            // Only governance tokens carry voting weight.
            assert_eq!(self.dao.gov_token_id, note.token_id);

            let nullifier = poseidon_hash::<2>([input.secret.0, note.serial]);

            // BUGFIX: commit with the per-input blind (see witness note above)
            // so that Σ input.vote_commit == pedersen(vote_value,
            // vote_value_blind), as summed in vote::validate.
            let vote_commit = pedersen_commitment_u64(note.value, value_blind);
            let vote_commit_coords = vote_commit.to_affine().coordinates().unwrap();
            let sigpub_coords = signature_public.0.to_affine().coordinates().unwrap();

            // Must match validate::CallData::zk_public_values() ordering.
            let public_inputs = vec![
                nullifier,
                *vote_commit_coords.x(),
                *vote_commit_coords.y(),
                token_commit,
                merkle_root.0,
                *sigpub_coords.x(),
                *sigpub_coords.y(),
            ];

            let circuit = ZkCircuit::new(prover_witnesses, zk_bin);
            let proving_key = &zk_info.proving_key;
            debug!(target: "dao_contract::vote::wallet::Builder", "input_proof Proof::create()");
            let input_proof = Proof::create(proving_key, &[circuit], &public_inputs, &mut OsRng)
                .expect("DAO::vote() proving error!");
            proofs.push(input_proof);

            let input = Input {
                nullifier: Nullifier(nullifier),
                vote_commit,
                merkle_root,
                signature_public,
            };
            inputs.push(input);
        }

        let token_commit = poseidon_hash::<2>([self.dao.gov_token_id, gov_token_blind]);

        let proposal_dest_coords = self.proposal.dest.0.to_affine().coordinates().unwrap();
        let proposal_amount = pallas::Base::from(self.proposal.amount);

        let dao_proposer_limit = pallas::Base::from(self.dao.proposer_limit);
        let dao_quorum = pallas::Base::from(self.dao.quorum);
        let dao_approval_ratio_quot = pallas::Base::from(self.dao.approval_ratio_quot);
        let dao_approval_ratio_base = pallas::Base::from(self.dao.approval_ratio_base);

        let dao_pubkey_coords = self.dao.public_key.0.to_affine().coordinates().unwrap();

        // Must reproduce mint::wallet's bulla construction exactly.
        let dao_bulla = poseidon_hash::<8>([
            dao_proposer_limit,
            dao_quorum,
            dao_approval_ratio_quot,
            dao_approval_ratio_base,
            self.dao.gov_token_id,
            *dao_pubkey_coords.x(),
            *dao_pubkey_coords.y(),
            self.dao.bulla_blind,
        ]);

        // Must reproduce propose::wallet's bulla construction exactly.
        let proposal_bulla = poseidon_hash::<8>([
            *proposal_dest_coords.x(),
            *proposal_dest_coords.y(),
            proposal_amount,
            self.proposal.serial,
            self.proposal.token_id,
            dao_bulla,
            self.proposal.blind,
            // @tmp-workaround
            self.proposal.blind,
        ]);

        // bool → {0, 1}; the assert documents the circuit's expectation.
        let vote_option = self.vote.vote_option as u64;
        assert!(vote_option == 0 || vote_option == 1);

        // Commitment to the weighted "yes" portion (0 if voting no).
        let yes_vote_commit =
            pedersen_commitment_u64(vote_option * vote_value, self.vote.vote_option_blind);
        let yes_vote_commit_coords = yes_vote_commit.to_affine().coordinates().unwrap();

        // Commitment to the total voting weight.
        let all_vote_commit = pedersen_commitment_u64(vote_value, vote_value_blind);
        let all_vote_commit_coords = all_vote_commit.to_affine().coordinates().unwrap();

        let zk_info = zk_bins.lookup(&"dao-vote-main".to_string()).unwrap();
        let zk_info = if let ZkContractInfo::Binary(info) = zk_info {
            info
        } else {
            panic!("Not binary info")
        };
        let zk_bin = zk_info.bincode.clone();

        let prover_witnesses = vec![
            // proposal params
            Witness::Base(Value::known(*proposal_dest_coords.x())),
            Witness::Base(Value::known(*proposal_dest_coords.y())),
            Witness::Base(Value::known(proposal_amount)),
            Witness::Base(Value::known(self.proposal.serial)),
            Witness::Base(Value::known(self.proposal.token_id)),
            Witness::Base(Value::known(self.proposal.blind)),
            // DAO params
            Witness::Base(Value::known(dao_proposer_limit)),
            Witness::Base(Value::known(dao_quorum)),
            Witness::Base(Value::known(dao_approval_ratio_quot)),
            Witness::Base(Value::known(dao_approval_ratio_base)),
            Witness::Base(Value::known(self.dao.gov_token_id)),
            Witness::Base(Value::known(*dao_pubkey_coords.x())),
            Witness::Base(Value::known(*dao_pubkey_coords.y())),
            Witness::Base(Value::known(self.dao.bulla_blind)),
            // Vote
            Witness::Base(Value::known(pallas::Base::from(vote_option))),
            Witness::Scalar(Value::known(self.vote.vote_option_blind)),
            // Total number of gov tokens allocated
            Witness::Base(Value::known(pallas::Base::from(vote_value))),
            Witness::Scalar(Value::known(vote_value_blind)),
            // gov token
            Witness::Base(Value::known(gov_token_blind)),
        ];

        // Must match validate::CallData::zk_public_values() ordering.
        let public_inputs = vec![
            token_commit,
            proposal_bulla,
            // this should be a value commit??
            *yes_vote_commit_coords.x(),
            *yes_vote_commit_coords.y(),
            *all_vote_commit_coords.x(),
            *all_vote_commit_coords.y(),
        ];

        let circuit = ZkCircuit::new(prover_witnesses, zk_bin);
        let proving_key = &zk_info.proving_key;
        debug!(target: "dao_contract::vote::wallet::Builder", "main_proof = Proof::create()");
        let main_proof = Proof::create(proving_key, &[circuit], &public_inputs, &mut OsRng)
            .expect("DAO::vote() proving error!");
        proofs.push(main_proof);

        // Encrypt the vote note to the vote keypair.
        let note = Note { vote: self.vote, vote_value, vote_value_blind };
        let enc_note = note::encrypt(&note, &self.vote_keypair.public).unwrap();

        let header = Header { token_commit, proposal_bulla, yes_vote_commit, enc_note };

        let call_data = CallData { header, inputs };

        FuncCall {
            contract_id: *CONTRACT_ID,
            func_id: *super::FUNC_ID,
            call_data: Box::new(call_data),
            proofs,
        }
    }
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,10 @@
use lazy_static::lazy_static;
use pasta_curves::{group::ff::Field, pallas};
use rand::rngs::OsRng;
pub mod validate;
pub mod wallet;

lazy_static! {
    /// Function ID for the example contract's foo(), randomly generated at
    /// first use (demo only — not stable across processes).
    pub static ref FUNC_ID: pallas::Base = pallas::Base::random(&mut OsRng);
}

View File

@@ -0,0 +1,93 @@
use pasta_curves::pallas;
use darkfi::{
crypto::{keypair::PublicKey, types::DrkCircuitField},
util::serial::{Encodable, SerialDecodable, SerialEncodable},
Error as DarkFiError,
};
use std::any::{Any, TypeId};
use crate::{
demo::{CallDataBase, StateRegistry, Transaction, UpdateBase},
example_contract::{state::State, CONTRACT_ID},
};
/// Local result alias using the contract-level `Error`.
type Result<T> = std::result::Result<T, Error>;

/// Failure modes of the example contract's foo() state transition.
#[derive(Debug, Clone, thiserror::Error)]
pub enum Error {
    #[error("ValueExists")]
    ValueExists,

    #[error("DarkFi error: {0}")]
    DarkFiError(String),
}
impl From<DarkFiError> for Error {
    /// Wrap any underlying DarkFi failure as a stringified contract error.
    fn from(source: DarkFiError) -> Self {
        let rendered = source.to_string();
        Self::DarkFiError(rendered)
    }
}
/// Call data for the example contract's foo() call.
#[derive(Clone, SerialEncodable, SerialDecodable)]
pub struct CallData {
    // Public value registered by this call; must not already exist in state.
    pub public_value: pallas::Base,
    // Key the transaction signature is checked against.
    pub signature_public: PublicKey,
}
impl CallDataBase for CallData {
    /// Single proof: the "example-foo" circuit with one public input.
    fn zk_public_values(&self) -> Vec<(String, Vec<DrkCircuitField>)> {
        vec![("example-foo".to_string(), vec![self.public_value])]
    }

    /// Downcast support used by `state_transition()`.
    fn as_any(&self) -> &dyn Any {
        self
    }

    /// The single signature key for this call.
    fn signature_public_keys(&self) -> Vec<PublicKey> {
        vec![self.signature_public]
    }

    /// Serialize this call data; returns the number of bytes written.
    fn encode_bytes(
        &self,
        mut writer: &mut dyn std::io::Write,
    ) -> std::result::Result<usize, darkfi::Error> {
        self.encode(&mut writer)
    }
}
/// Validate a foo() call: the proposed public value must not already exist
/// in the example contract state.
///
/// Returns the `Update` to apply on success, or `Error::ValueExists` if the
/// value was already published.
pub fn state_transition(
    states: &StateRegistry,
    func_call_index: usize,
    parent_tx: &Transaction,
) -> Result<Box<dyn UpdateBase>> {
    let func_call = &parent_tx.func_calls[func_call_index];
    let call_data = func_call.call_data.as_any();

    // Runtime type check before downcasting the type-erased call data.
    assert_eq!((&*call_data).type_id(), TypeId::of::<CallData>());
    let call_data = call_data.downcast_ref::<CallData>();
    // This will be inside wasm so unwrap is fine.
    let call_data = call_data.unwrap();

    let example_state = states.lookup::<State>(*CONTRACT_ID).unwrap();
    if example_state.public_exists(&call_data.public_value) {
        return Err(Error::ValueExists)
    }

    Ok(Box::new(Update { public_value: call_data.public_value }))
}
/// State update produced by a validated foo() call.
#[derive(Clone)]
pub struct Update {
    // The value to register in State::public_values.
    public_value: pallas::Base,
}

impl UpdateBase for Update {
    /// Apply the update: record the new public value in the example state.
    fn apply(self: Box<Self>, states: &mut StateRegistry) {
        let example_state = states.lookup_mut::<State>(*CONTRACT_ID).unwrap();
        example_state.add_public_value(self.public_value);
    }
}

View File

@@ -0,0 +1,74 @@
use log::debug;
use rand::rngs::OsRng;
use halo2_proofs::circuit::Value;
use pasta_curves::pallas;
use darkfi::{
crypto::{
keypair::{PublicKey, SecretKey},
Proof,
},
zk::vm::{Witness, ZkCircuit},
};
use crate::{
demo::{FuncCall, ZkContractInfo, ZkContractTable},
example_contract::{foo::validate::CallData, CONTRACT_ID},
};
pub struct Foo {
pub a: u64,
pub b: u64,
}
pub struct Builder {
pub foo: Foo,
pub signature_secret: SecretKey,
}
impl Builder {
    /// Build the foo() function call: prove in zk knowledge of `a` and `b`
    /// such that the public value `c = a + b`, and attach the signing key.
    pub fn build(self, zk_bins: &ZkContractTable) -> FuncCall {
        debug!(target: "example_contract::foo::wallet::Builder", "build()");
        let mut proofs = vec![];

        // Look up the compiled zk binary for the "example-foo" circuit.
        let zk_info = zk_bins.lookup(&"example-foo".to_string()).unwrap();
        let zk_info = if let ZkContractInfo::Binary(info) = zk_info {
            info
        } else {
            panic!("Not binary info")
        };
        let zk_bin = zk_info.bincode.clone();

        // Private witnesses: the two summands.
        let prover_witnesses = vec![
            Witness::Base(Value::known(pallas::Base::from(self.foo.a))),
            Witness::Base(Value::known(pallas::Base::from(self.foo.b))),
        ];

        // Public input: the claimed sum c = a + b.
        let a = pallas::Base::from(self.foo.a);
        let b = pallas::Base::from(self.foo.b);
        let c = a + b;
        let public_inputs = vec![c];

        let circuit = ZkCircuit::new(prover_witnesses, zk_bin);

        debug!(target: "example_contract::foo::wallet::Builder", "input_proof Proof::create()");
        let proving_key = &zk_info.proving_key;
        // Fixed: removed the stray ')' that was inside the panic message.
        let input_proof = Proof::create(proving_key, &[circuit], &public_inputs, &mut OsRng)
            .expect("Example::foo() proving error!");
        proofs.push(input_proof);

        let signature_public = PublicKey::from_secret(self.signature_secret);
        let call_data = CallData { public_value: c, signature_public };

        FuncCall {
            contract_id: *CONTRACT_ID,
            func_id: *super::FUNC_ID,
            call_data: Box::new(call_data),
            proofs,
        }
    }
}

View File

@@ -0,0 +1,12 @@
use lazy_static::lazy_static;
use pasta_curves::{group::ff::Field, pallas};
use rand::rngs::OsRng;
// foo()
pub mod foo;
pub mod state;
lazy_static! {
pub static ref CONTRACT_ID: pallas::Base = pallas::Base::random(&mut OsRng);
}

View File

@@ -0,0 +1,21 @@
use std::any::Any;
use pasta_curves::pallas;
pub struct State {
pub public_values: Vec<pallas::Base>,
}
impl State {
    /// Allocate a fresh, empty state behind a type-erased `Box<dyn Any>`
    /// (the state registry stores heterogeneous contract states).
    pub fn new() -> Box<dyn Any> {
        let state = Self { public_values: Vec::new() };
        Box::new(state)
    }

    /// Record a newly published public value.
    pub fn add_public_value(&mut self, public_value: pallas::Base) {
        self.public_values.push(public_value)
    }

    /// True if `public_value` has already been published.
    pub fn public_exists(&self, public_value: &pallas::Base) -> bool {
        self.public_values.contains(public_value)
    }
}

View File

@@ -14,7 +14,16 @@ use darkfi::{
Result,
};
async fn start() -> Result<()> {
mod dao_contract;
mod example_contract;
mod money_contract;
mod demo;
mod note;
use crate::demo::demo;
async fn _start() -> Result<()> {
let rpc_addr = Url::parse("tcp://127.0.0.1:7777")?;
let rpc_interface = Arc::new(JsonRpcInterface {});
@@ -57,6 +66,7 @@ async fn main() -> Result<()> {
ColorChoice::Auto,
)?;
start().await?;
//start().await?;
demo().await.unwrap();
Ok(())
}

View File

@@ -0,0 +1,13 @@
use lazy_static::lazy_static;
use pasta_curves::{group::ff::Field, pallas};
use rand::rngs::OsRng;
// transfer()
pub mod transfer;
pub mod state;
pub use state::State;
lazy_static! {
pub static ref CONTRACT_ID: pallas::Base = pallas::Base::random(&mut OsRng);
}

View File

@@ -0,0 +1,116 @@
use incrementalmerkletree::{bridgetree::BridgeTree, Tree};
use darkfi::crypto::{
coin::Coin,
constants::MERKLE_DEPTH,
keypair::{PublicKey, SecretKey},
merkle_node::MerkleNode,
nullifier::Nullifier,
};
use super::transfer;
use crate::note::EncryptedNote2;
type MerkleTree = BridgeTree<MerkleNode, MERKLE_DEPTH>;
/// A coin received by one of our tracked secret keys, together with the
/// decrypted note and its witnessed position in the Merkle tree.
pub struct OwnCoin {
    pub coin: Coin,
    pub note: transfer::wallet::Note,
    pub leaf_position: incrementalmerkletree::Position,
}

/// Per-secret-key cache of received coins, filled by `try_decrypt_note()`
/// and drained by `get_received()`.
pub struct WalletCache {
    // Normally this would be a HashMap, but SecretKey is not Hash-able
    // TODO: This can be HashableBase
    cache: Vec<(SecretKey, Vec<OwnCoin>)>,
}
impl WalletCache {
    /// Create an empty cache tracking no secret keys.
    pub fn new() -> Self {
        Self { cache: Vec::new() }
    }

    /// Must be called at the start to begin tracking received coins for this secret.
    pub fn track(&mut self, secret: SecretKey) {
        self.cache.push((secret, Vec::new()));
    }

    /// Get all coins received by this secret key, draining them from the cache.
    /// track() must be called on this secret before calling this or the function will panic.
    pub fn get_received(&mut self, secret: &SecretKey) -> Vec<OwnCoin> {
        for (other_secret, own_coins) in self.cache.iter_mut() {
            if *secret == *other_secret {
                // Hand back the accumulated coins, leaving an empty Vec in place.
                // (mem::take is the idiomatic form of mem::replace(.., Vec::new()).)
                return std::mem::take(own_coins)
            }
        }
        // Fixed grammar in the panic message ("forget" -> "forgot").
        panic!("you forgot to track() this secret!");
    }

    /// Try to decrypt `ciphertext` with every tracked secret key; each
    /// successful decryption is recorded as an OwnCoin witnessed at the
    /// current tree frontier (the coin must already be appended to `tree`).
    pub fn try_decrypt_note(
        &mut self,
        coin: Coin,
        ciphertext: EncryptedNote2,
        tree: &mut MerkleTree,
    ) {
        // Loop through all our secret keys...
        for (secret, own_coins) in self.cache.iter_mut() {
            // .. attempt to decrypt the note ...
            if let Ok(note) = ciphertext.decrypt(secret) {
                let leaf_position = tree.witness().expect("coin should be in tree");
                own_coins.push(OwnCoin { coin, note, leaf_position });
            }
        }
    }
}
/// The state machine, held in memory.
pub struct State {
/// The entire Merkle tree state
pub tree: MerkleTree,
/// List of all previous and the current Merkle roots.
/// This is the hashed value of all the children.
pub merkle_roots: Vec<MerkleNode>,
/// Nullifiers prevent double spending
pub nullifiers: Vec<Nullifier>,
/// Public key of the cashier
pub cashier_signature_public: PublicKey,
/// Public key of the faucet
pub faucet_signature_public: PublicKey,
pub wallet_cache: WalletCache,
}
impl State {
    /// Construct a fresh money state: empty 100-checkpoint Merkle tree, no
    /// known roots or nullifiers, and the two whitelisted signing keys.
    pub fn new(
        cashier_signature_public: PublicKey,
        faucet_signature_public: PublicKey,
    ) -> Box<Self> {
        let state = Self {
            tree: MerkleTree::new(100),
            merkle_roots: Vec::new(),
            nullifiers: Vec::new(),
            cashier_signature_public,
            faucet_signature_public,
            wallet_cache: WalletCache::new(),
        };
        Box::new(state)
    }

    /// True if `public` is the registered cashier signing key.
    pub fn is_valid_cashier_public_key(&self, public: &PublicKey) -> bool {
        *public == self.cashier_signature_public
    }

    /// True if `public` is the registered faucet signing key.
    pub fn is_valid_faucet_public_key(&self, public: &PublicKey) -> bool {
        *public == self.faucet_signature_public
    }

    /// True if `merkle_root` matches any current or historical tree root.
    pub fn is_valid_merkle(&self, merkle_root: &MerkleNode) -> bool {
        self.merkle_roots.contains(merkle_root)
    }

    /// True if `nullifier` has already been revealed (double-spend check).
    pub fn nullifier_exists(&self, nullifier: &Nullifier) -> bool {
        self.nullifiers.contains(nullifier)
    }
}

View File

@@ -0,0 +1,11 @@
use lazy_static::lazy_static;
use pasta_curves::{group::ff::Field, pallas};
use rand::rngs::OsRng;
pub mod validate;
pub mod wallet;
pub use wallet::{Builder, BuilderClearInputInfo, BuilderInputInfo, BuilderOutputInfo, Note};
lazy_static! {
pub static ref FUNC_ID: pallas::Base = pallas::Base::random(&mut OsRng);
}

View File

@@ -0,0 +1,374 @@
use std::any::{Any, TypeId};
use incrementalmerkletree::Tree;
use log::{debug, error};
use pasta_curves::{group::Group, pallas};
use darkfi::{
crypto::{
coin::Coin,
keypair::PublicKey,
merkle_node::MerkleNode,
nullifier::Nullifier,
types::{DrkCircuitField, DrkTokenId, DrkValueBlind, DrkValueCommit},
util::{pedersen_commitment_base, pedersen_commitment_u64},
BurnRevealedValues, MintRevealedValues,
},
util::serial::{Encodable, SerialDecodable, SerialEncodable},
Error as DarkFiError,
};
use crate::{
dao_contract,
demo::{CallDataBase, StateRegistry, Transaction, UpdateBase},
money_contract::{state::State, CONTRACT_ID},
note::EncryptedNote2,
};
const TARGET: &str = "money_contract::transfer::validate::state_transition()";
/// A struct representing a state update.
/// This gets applied on top of an existing state.
#[derive(Clone)]
pub struct Update {
/// All nullifiers in a transaction
pub nullifiers: Vec<Nullifier>,
/// All coins in a transaction
pub coins: Vec<Coin>,
/// All encrypted notes in a transaction
pub enc_notes: Vec<EncryptedNote2>,
}
impl UpdateBase for Update {
    /// Apply this verified update to the money state: record the spent
    /// nullifiers, append each new coin to the Merkle tree, remember the
    /// resulting roots, and attempt note decryption for tracked wallets.
    fn apply(mut self: Box<Self>, states: &mut StateRegistry) {
        let state = states.lookup_mut::<State>(*CONTRACT_ID).unwrap();

        // Extend our list of nullifiers with the ones from the update
        state.nullifiers.append(&mut self.nullifiers);

        //// Update merkle tree and witnesses
        for (coin, enc_note) in self.coins.into_iter().zip(self.enc_notes.into_iter()) {
            // Add the new coins to the Merkle tree
            let node = MerkleNode(coin.0);
            state.tree.append(&node);

            // Keep track of all Merkle roots that have existed
            state.merkle_roots.push(state.tree.root(0).unwrap());

            state.wallet_cache.try_decrypt_note(coin, enc_note, &mut state.tree);
        }
    }
}
/// Validate a transfer() call against the current money state and produce
/// the `Update` to apply if it verifies.
///
/// Checks performed:
/// * clear inputs are signed by a whitelisted cashier or faucet key
/// * each anonymous input references a known Merkle root
/// * spend_hook-encumbered coins invoke the required contract function
/// * no nullifier is reused, either against state or within this call
/// * the revealed commitments balance (`CallData::verify`)
pub fn state_transition(
    states: &StateRegistry,
    func_call_index: usize,
    parent_tx: &Transaction,
) -> Result<Box<dyn UpdateBase>> {
    // Check the public keys in the clear inputs to see if they're coming
    // from a valid cashier or faucet.
    debug!(target: TARGET, "Iterate clear_inputs");

    let func_call = &parent_tx.func_calls[func_call_index];
    let call_data = func_call.call_data.as_any();

    // Runtime type check before downcasting the type-erased call data.
    assert_eq!((&*call_data).type_id(), TypeId::of::<CallData>());
    let call_data = call_data.downcast_ref::<CallData>();
    // This will be inside wasm so unwrap is fine.
    let call_data = call_data.unwrap();

    let state = states.lookup::<State>(*CONTRACT_ID).expect("Return type is not of type State");

    for (i, input) in call_data.clear_inputs.iter().enumerate() {
        let pk = &input.signature_public;
        // TODO: this depends on the token ID
        if !state.is_valid_cashier_public_key(pk) && !state.is_valid_faucet_public_key(pk) {
            error!(target: TARGET, "Invalid pubkey for clear input: {:?}", pk);
            return Err(Error::VerifyFailed(VerifyFailed::InvalidCashierOrFaucetKey(i)))
        }
    }

    // Nullifiers in the transaction
    let mut nullifiers = Vec::with_capacity(call_data.inputs.len());
    debug!(target: TARGET, "Iterate inputs");
    for (i, input) in call_data.inputs.iter().enumerate() {
        let merkle = &input.revealed.merkle_root;

        // The Merkle root is used to know whether this is a coin that
        // existed in a previous state.
        if !state.is_valid_merkle(merkle) {
            error!(target: TARGET, "Invalid Merkle root (input {})", i);
            debug!(target: TARGET, "root: {:?}", merkle);
            return Err(Error::VerifyFailed(VerifyFailed::InvalidMerkle(i)))
        }

        // Check the spend_hook is satisfied
        // The spend_hook says a coin must invoke another contract function when being spent
        // If the value is set, then we check the function call exists
        let spend_hook = &input.revealed.spend_hook;
        if spend_hook != &pallas::Base::from(0) {
            // spend_hook is set so we enforce the rules
            let mut is_found = false;
            for (i, func_call) in parent_tx.func_calls.iter().enumerate() {
                // Skip current func_call
                if i == func_call_index {
                    continue
                }

                // TODO: we need to change these to pallas::Base
                // temporary workaround for now
                // if func_call.func_id == spend_hook ...
                if func_call.func_id == *dao_contract::exec::FUNC_ID {
                    is_found = true;
                    break
                }
            }
            if !is_found {
                return Err(Error::VerifyFailed(VerifyFailed::SpendHookNotSatisfied))
            }
        }

        // The nullifiers should not already exist.
        // It is the double-spend protection.
        // BUGFIX: the previous pairwise scan over `nullifiers` never compared
        // the *current* nullifier against the ones already collected, so a
        // duplicate in the final input slipped through undetected. A direct
        // membership check catches every duplicate at its second occurrence
        // (and is O(n) per input instead of O(n^2)).
        let nullifier = &input.revealed.nullifier;
        if state.nullifier_exists(nullifier) || nullifiers.contains(nullifier) {
            error!(target: TARGET, "Duplicate nullifier found (input {})", i);
            debug!(target: TARGET, "nullifier: {:?}", nullifier);
            return Err(Error::VerifyFailed(VerifyFailed::NullifierExists(i)))
        }

        nullifiers.push(input.revealed.nullifier);
    }

    debug!(target: TARGET, "Verifying call data");
    match call_data.verify() {
        Ok(()) => {
            debug!(target: TARGET, "Verified successfully")
        }
        Err(e) => {
            error!(target: TARGET, "Failed verifying zk proofs: {}", e);
            return Err(Error::VerifyFailed(VerifyFailed::ProofVerifyFailed(e.to_string())))
        }
    }

    // Newly created coins for this transaction
    let mut coins = Vec::with_capacity(call_data.outputs.len());
    let mut enc_notes = Vec::with_capacity(call_data.outputs.len());
    for output in &call_data.outputs {
        // Gather all the coins
        coins.push(output.revealed.coin);
        enc_notes.push(output.enc_note.clone());
    }

    Ok(Box::new(Update { nullifiers, coins, enc_notes }))
}
/// A DarkFi transaction
#[derive(Debug, Clone, PartialEq, Eq, SerialEncodable, SerialDecodable)]
pub struct CallData {
/// Clear inputs
pub clear_inputs: Vec<ClearInput>,
/// Anonymous inputs
pub inputs: Vec<Input>,
/// Anonymous outputs
pub outputs: Vec<Output>,
}
impl CallDataBase for CallData {
    /// One (circuit-name, public-inputs) pair per anonymous input (burn)
    /// followed by one per output (mint).
    fn zk_public_values(&self) -> Vec<(String, Vec<DrkCircuitField>)> {
        let burns = self
            .inputs
            .iter()
            .map(|input| ("money-transfer-burn".to_string(), input.revealed.make_outputs()));
        let mints = self
            .outputs
            .iter()
            .map(|output| ("money-transfer-mint".to_string(), output.revealed.make_outputs()));
        burns.chain(mints).collect()
    }

    /// Type-erased view used by the validation machinery to downcast.
    fn as_any(&self) -> &dyn Any {
        self
    }

    /// Only clear inputs carry explicit signatures; anonymous inputs are
    /// authorized inside the zk proofs.
    fn signature_public_keys(&self) -> Vec<PublicKey> {
        self.clear_inputs.iter().map(|input| input.signature_public.clone()).collect()
    }

    /// Serialize the call data for transaction encoding.
    fn encode_bytes(
        &self,
        mut writer: &mut dyn std::io::Write,
    ) -> std::result::Result<usize, darkfi::Error> {
        self.encode(&mut writer)
    }
}
impl CallData {
    /// Verify the transaction
    ///
    /// Checks input/output presence, that the homomorphic value commitments
    /// sum back to the identity point (money in == money out), and that all
    /// token commitments agree. Zk proof verification happens separately.
    pub fn verify(&self) -> VerifyResult<()> {
        // must have minimum 1 clear or anon input, and 1 output
        if self.clear_inputs.len() + self.inputs.len() == 0 {
            error!("tx::verify(): Missing inputs");
            return Err(VerifyFailed::LackingInputs)
        }
        if self.outputs.len() == 0 {
            error!("tx::verify(): Missing outputs");
            return Err(VerifyFailed::LackingOutputs)
        }

        // Accumulator for the value commitments
        let mut valcom_total = DrkValueCommit::identity();

        // Add values from the clear inputs
        for input in &self.clear_inputs {
            valcom_total += pedersen_commitment_u64(input.value, input.value_blind);
        }
        // Add values from the inputs
        for input in &self.inputs {
            valcom_total += &input.revealed.value_commit;
        }
        // Subtract values from the outputs
        for output in &self.outputs {
            valcom_total -= &output.revealed.value_commit;
        }

        // If the accumulator is not back in its initial state,
        // there's a value mismatch.
        if valcom_total != DrkValueCommit::identity() {
            error!("tx::verify(): Missing funds");
            return Err(VerifyFailed::MissingFunds)
        }

        // Verify that the token commitments match
        if !self.verify_token_commitments() {
            error!("tx::verify(): Token ID mismatch");
            return Err(VerifyFailed::TokenMismatch)
        }

        Ok(())
    }

    /// True if every input, output and clear input commits to the same
    /// token ID as the first output (the reference commitment).
    fn verify_token_commitments(&self) -> bool {
        assert_ne!(self.outputs.len(), 0);
        let token_commit_value = self.outputs[0].revealed.token_commit;

        let mut failed =
            self.inputs.iter().any(|input| input.revealed.token_commit != token_commit_value);

        failed = failed ||
            self.outputs.iter().any(|output| output.revealed.token_commit != token_commit_value);

        failed = failed ||
            self.clear_inputs.iter().any(|input| {
                pedersen_commitment_base(input.token_id, input.token_blind) != token_commit_value
            });

        !failed
    }
}
/// A transaction's clear input
#[derive(Debug, Clone, PartialEq, Eq, SerialEncodable, SerialDecodable)]
pub struct ClearInput {
/// Input's value (amount)
pub value: u64,
/// Input's token ID
pub token_id: DrkTokenId,
/// Blinding factor for `value`
pub value_blind: DrkValueBlind,
/// Blinding factor for `token_id`
pub token_blind: DrkValueBlind,
/// Public key for the signature
pub signature_public: PublicKey,
}
/// A transaction's anonymous input
#[derive(Debug, Clone, PartialEq, Eq, SerialEncodable, SerialDecodable)]
pub struct Input {
/// Public inputs for the zero-knowledge proof
pub revealed: BurnRevealedValues,
}
/// A transaction's anonymous output
#[derive(Debug, Clone, PartialEq, Eq, SerialEncodable, SerialDecodable)]
pub struct Output {
/// Public inputs for the zero-knowledge proof
pub revealed: MintRevealedValues,
/// The encrypted note
pub enc_note: EncryptedNote2,
}
#[derive(Debug, Clone, thiserror::Error)]
pub enum Error {
#[error(transparent)]
VerifyFailed(#[from] VerifyFailed),
#[error("DarkFi error: {0}")]
DarkFiError(String),
}
/// Transaction verification errors
#[derive(Debug, Clone, thiserror::Error)]
pub enum VerifyFailed {
#[error("Transaction has no inputs")]
LackingInputs,
#[error("Transaction has no outputs")]
LackingOutputs,
#[error("Invalid cashier/faucet public key for clear input {0}")]
InvalidCashierOrFaucetKey(usize),
#[error("Invalid Merkle root for input {0}")]
InvalidMerkle(usize),
#[error("Spend hook invoking function is not attached")]
SpendHookNotSatisfied,
#[error("Nullifier already exists for input {0}")]
NullifierExists(usize),
#[error("Token commitments in inputs or outputs to not match")]
TokenMismatch,
#[error("Money in does not match money out (value commitments)")]
MissingFunds,
#[error("Failed verifying zk proofs: {0}")]
ProofVerifyFailed(String),
#[error("Internal error: {0}")]
InternalError(String),
#[error("DarkFi error: {0}")]
DarkFiError(String),
}
type Result<T> = std::result::Result<T, Error>;
impl From<Error> for VerifyFailed {
fn from(err: Error) -> Self {
Self::InternalError(err.to_string())
}
}
impl From<DarkFiError> for VerifyFailed {
fn from(err: DarkFiError) -> Self {
Self::DarkFiError(err.to_string())
}
}
impl From<DarkFiError> for Error {
fn from(err: DarkFiError) -> Self {
Self::DarkFiError(err.to_string())
}
}
/// Result type used in transaction verifications
pub type VerifyResult<T> = std::result::Result<T, VerifyFailed>;

View File

@@ -0,0 +1,219 @@
use pasta_curves::group::ff::Field;
use rand::rngs::OsRng;
use darkfi::{
crypto::{
burn_proof::create_burn_proof,
keypair::{PublicKey, SecretKey},
merkle_node::MerkleNode,
mint_proof::create_mint_proof,
types::{
DrkCoinBlind, DrkSerial, DrkSpendHook, DrkTokenId, DrkUserData, DrkUserDataBlind,
DrkValueBlind,
},
},
util::serial::{SerialDecodable, SerialEncodable},
Result,
};
use crate::{
demo::{FuncCall, ZkContractInfo, ZkContractTable},
money_contract::{
transfer::validate::{CallData, ClearInput, Input, Output},
CONTRACT_ID,
},
note,
};
#[derive(Clone, SerialEncodable, SerialDecodable)]
pub struct Note {
pub serial: DrkSerial,
pub value: u64,
pub token_id: DrkTokenId,
pub spend_hook: DrkSpendHook,
pub user_data: DrkUserData,
pub coin_blind: DrkCoinBlind,
pub value_blind: DrkValueBlind,
pub token_blind: DrkValueBlind,
}
pub struct Builder {
pub clear_inputs: Vec<BuilderClearInputInfo>,
pub inputs: Vec<BuilderInputInfo>,
pub outputs: Vec<BuilderOutputInfo>,
}
pub struct BuilderClearInputInfo {
pub value: u64,
pub token_id: DrkTokenId,
pub signature_secret: SecretKey,
}
pub struct BuilderInputInfo {
pub leaf_position: incrementalmerkletree::Position,
pub merkle_path: Vec<MerkleNode>,
pub secret: SecretKey,
pub note: Note,
pub user_data_blind: DrkUserDataBlind,
pub value_blind: DrkValueBlind,
pub signature_secret: SecretKey,
}
pub struct BuilderOutputInfo {
pub value: u64,
pub token_id: DrkTokenId,
pub public: PublicKey,
pub serial: DrkSerial,
pub coin_blind: DrkCoinBlind,
pub spend_hook: DrkSpendHook,
pub user_data: DrkUserData,
}
impl Builder {
    /// Compute the value blind for the final output so that the pedersen
    /// value commitments balance: sum(clear-input blinds) + sum(input
    /// blinds) - sum(prior output blinds).
    fn compute_remainder_blind(
        clear_inputs: &[ClearInput],
        input_blinds: &[DrkValueBlind],
        output_blinds: &[DrkValueBlind],
    ) -> DrkValueBlind {
        let mut total = DrkValueBlind::zero();

        for input in clear_inputs {
            total += input.value_blind;
        }

        for input_blind in input_blinds {
            total += input_blind;
        }

        for output_blind in output_blinds {
            total -= output_blind;
        }

        total
    }

    /// Build the transfer() function call: a burn proof per anonymous input,
    /// a mint proof per output, and an encrypted note for each recipient.
    /// Requires at least one (clear or anonymous) input and one output.
    pub fn build(self, zk_bins: &ZkContractTable) -> Result<FuncCall> {
        assert!(self.clear_inputs.len() + self.inputs.len() > 0);
        let mut clear_inputs = vec![];
        // All inputs/outputs share one token blind so token commitments agree.
        let token_blind = DrkValueBlind::random(&mut OsRng);
        for input in &self.clear_inputs {
            let signature_public = PublicKey::from_secret(input.signature_secret);
            let value_blind = DrkValueBlind::random(&mut OsRng);

            let clear_input = ClearInput {
                value: input.value,
                token_id: input.token_id,
                value_blind,
                token_blind,
                signature_public,
            };
            clear_inputs.push(clear_input);
        }

        let mut proofs = vec![];
        let mut inputs = vec![];
        let mut input_blinds = vec![];

        for input in self.inputs {
            let value_blind = input.value_blind;
            input_blinds.push(value_blind);

            // Look up the natively-compiled burn circuit.
            let zk_info = zk_bins.lookup(&"money-transfer-burn".to_string()).unwrap();
            let zk_info = if let ZkContractInfo::Native(info) = zk_info {
                info
            } else {
                panic!("Not native info")
            };
            let burn_pk = &zk_info.proving_key;

            // Note from the previous output
            let note = input.note.clone();

            let (burn_proof, revealed) = create_burn_proof(
                burn_pk,
                note.value,
                note.token_id,
                value_blind,
                token_blind,
                note.serial,
                note.spend_hook,
                note.user_data,
                input.user_data_blind,
                note.coin_blind,
                input.secret,
                input.leaf_position,
                input.merkle_path.clone(),
                input.signature_secret,
            )?;
            proofs.push(burn_proof);

            let input = Input { revealed };
            inputs.push(input);
        }

        let mut outputs = vec![];
        let mut output_blinds = vec![];
        // This value_blind calc assumes there will always be at least a single output
        assert!(self.outputs.len() > 0);

        for (i, output) in self.outputs.iter().enumerate() {
            // The last output's blind balances the commitment equation;
            // every other output gets a fresh random blind.
            let value_blind = if i == self.outputs.len() - 1 {
                Self::compute_remainder_blind(&clear_inputs, &input_blinds, &output_blinds)
            } else {
                DrkValueBlind::random(&mut OsRng)
            };
            output_blinds.push(value_blind);

            let serial = output.serial;
            let coin_blind = output.coin_blind;

            // Look up the natively-compiled mint circuit.
            let zk_info = zk_bins.lookup(&"money-transfer-mint".to_string()).unwrap();
            let zk_info = if let ZkContractInfo::Native(info) = zk_info {
                info
            } else {
                panic!("Not native info")
            };
            let mint_pk = &zk_info.proving_key;

            let (mint_proof, revealed) = create_mint_proof(
                mint_pk,
                output.value,
                output.token_id,
                value_blind,
                token_blind,
                serial,
                output.spend_hook,
                output.user_data,
                coin_blind,
                output.public,
            )?;
            proofs.push(mint_proof);

            // Encrypt the note to the recipient so they can later spend the coin.
            let note = Note {
                serial,
                value: output.value,
                token_id: output.token_id,
                spend_hook: output.spend_hook,
                user_data: output.user_data,
                coin_blind,
                value_blind,
                token_blind,
            };

            let encrypted_note = note::encrypt(&note, &output.public)?;

            let output = Output { revealed, enc_note: encrypted_note };
            outputs.push(output);
        }

        let call_data = CallData { clear_inputs, inputs, outputs };
        Ok(FuncCall {
            contract_id: *CONTRACT_ID,
            func_id: *super::FUNC_ID,
            call_data: Box::new(call_data),
            proofs,
        })
    }
}

98
bin/daod/src/note.rs Normal file
View File

@@ -0,0 +1,98 @@
use crypto_api_chachapoly::ChachaPolyIetf;
use rand::rngs::OsRng;
use darkfi::{
crypto::{
diffie_hellman::{kdf_sapling, sapling_ka_agree},
keypair::{PublicKey, SecretKey},
},
util::serial::{Decodable, Encodable, SerialDecodable, SerialEncodable},
Error, Result,
};
pub const AEAD_TAG_SIZE: usize = 16;
/// Encrypt `note` to `public` using an ephemeral Diffie-Hellman exchange
/// (sapling-style note encryption).
///
/// A fresh ephemeral keypair is generated per call, so the derived AEAD key
/// is single-use and the fixed all-zero nonce below is safe.
pub fn encrypt<T: Encodable>(note: &T, public: &PublicKey) -> Result<EncryptedNote2> {
    let ephem_secret = SecretKey::random(&mut OsRng);
    let ephem_public = PublicKey::from_secret(ephem_secret);
    let shared_secret = sapling_ka_agree(&ephem_secret, public);
    let key = kdf_sapling(&shared_secret, &ephem_public);

    let mut input = Vec::new();
    note.encode(&mut input)?;

    // Ciphertext is plaintext plus the 16-byte authentication tag.
    let mut ciphertext = vec![0; input.len() + AEAD_TAG_SIZE];
    assert_eq!(
        ChachaPolyIetf::aead_cipher()
            .seal_to(&mut ciphertext, &input, &[], key.as_ref(), &[0u8; 12])
            .unwrap(),
        input.len() + AEAD_TAG_SIZE
    );

    Ok(EncryptedNote2 { ciphertext, ephem_public })
}
#[derive(Debug, Clone, PartialEq, Eq, SerialEncodable, SerialDecodable)]
pub struct EncryptedNote2 {
ciphertext: Vec<u8>,
ephem_public: PublicKey,
}
impl EncryptedNote2 {
    /// Decrypt the note with `secret`, recovering the shared AEAD key via
    /// Diffie-Hellman with the embedded ephemeral public key.
    ///
    /// Returns `Error::NoteDecryptionFailed` if authentication fails
    /// (wrong key or tampered ciphertext).
    pub fn decrypt<T: Decodable>(&self, secret: &SecretKey) -> Result<T> {
        let shared_secret = sapling_ka_agree(secret, &self.ephem_public);
        let key = kdf_sapling(&shared_secret, &self.ephem_public);

        // open_to returns the plaintext length (ciphertext minus the tag);
        // the buffer is oversized by AEAD_TAG_SIZE and the tail is unused.
        let mut plaintext = vec![0; self.ciphertext.len()];
        assert_eq!(
            ChachaPolyIetf::aead_cipher()
                .open_to(&mut plaintext, &self.ciphertext, &[], key.as_ref(), &[0u8; 12])
                .map_err(|_| Error::NoteDecryptionFailed)?,
            self.ciphertext.len() - AEAD_TAG_SIZE
        );
        T::decode(&plaintext[..])
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use darkfi::crypto::{
        keypair::Keypair,
        types::{DrkCoinBlind, DrkSerial, DrkTokenId, DrkValueBlind},
    };
    use group::ff::Field;

    /// Round-trip: encrypting a note to a keypair and decrypting with the
    /// matching secret must recover every field.
    #[test]
    fn test_note_encdec() {
        #[derive(SerialEncodable, SerialDecodable)]
        struct MyNote {
            serial: DrkSerial,
            value: u64,
            token_id: DrkTokenId,
            coin_blind: DrkCoinBlind,
            value_blind: DrkValueBlind,
            token_blind: DrkValueBlind,
            memo: Vec<u8>,
        }
        let note = MyNote {
            serial: DrkSerial::random(&mut OsRng),
            value: 110,
            token_id: DrkTokenId::random(&mut OsRng),
            coin_blind: DrkCoinBlind::random(&mut OsRng),
            value_blind: DrkValueBlind::random(&mut OsRng),
            token_blind: DrkValueBlind::random(&mut OsRng),
            memo: vec![32, 223, 231, 3, 1, 1],
        };

        let keypair = Keypair::random(&mut OsRng);
        let encrypted_note = encrypt(&note, &keypair.public).unwrap();
        let note2: MyNote = encrypted_note.decrypt(&keypair.secret).unwrap();
        // Check every field survives the round trip, not just a subset
        // (previously serial, coin_blind and value_blind were unchecked).
        assert_eq!(note.serial, note2.serial);
        assert_eq!(note.value, note2.value);
        assert_eq!(note.token_id, note2.token_id);
        assert_eq!(note.coin_blind, note2.coin_blind);
        assert_eq!(note.value_blind, note2.value_blind);
        assert_eq!(note.token_blind, note2.token_blind);
        assert_eq!(note.memo, note2.memo);
    }
}

View File

@@ -9,30 +9,30 @@ license = "AGPL-3.0-only"
edition = "2021"
[dependencies]
async-channel = "1.6.1"
async-channel = "1.7.1"
async-executor = "1.4.1"
async-std = "1.11.0"
async-trait = "0.1.53"
async-std = "1.12.0"
async-trait = "0.1.57"
blake3 = "1.3.1"
bs58 = "0.4.0"
chrono = "0.4.19"
ctrlc-async = {version = "3.2.2", default-features = false, features = ["async-std", "termination"]}
chrono = "0.4.22"
ctrlc = { version = "3.2.3", features = ["termination"] }
darkfi = {path = "../../", features = ["blockchain", "wallet", "rpc", "net", "node"]}
easy-parallel = "3.2.0"
futures-lite = "1.12.0"
fxhash = "0.2.1"
lazy-init = "0.5.0"
incrementalmerkletree = "0.3.0"
lazy-init = "0.5.1"
log = "0.4.17"
num-bigint = {version = "0.4.3", features = ["serde"]}
pasta_curves = "0.4.0"
rand = "0.8.5"
serde_json = "1.0.81"
serde_json = "1.0.85"
simplelog = "0.12.0"
sled = "0.34.7"
url = "2.2.2"
# Argument parsing
serde = "1.0.137"
serde_derive = "1.0.137"
serde = "1.0.144"
serde_derive = "1.0.144"
structopt = "0.3.26"
structopt-toml = "0.5.0"
structopt-toml = "0.5.1"

View File

@@ -24,11 +24,11 @@
# Participate in the consensus protocol
#consensus = false
# P2P accept address for the consensus protocol
#consensus_p2p_accept = "tls://127.0.0.1:8341"
# P2P accept addresses for the consensus protocol
#consensus_p2p_accept = ["tls://127.0.0.1:8341"]
# P2P external address for the consensus protocol
#consensus_p2p_external = "tls://127.0.0.1:8341"
# P2P external addresses for the consensus protocol
#consensus_p2p_external = ["tls://127.0.0.1:8341"]
# Connection slots for the consensus protocol
#consensus_slots = 8
@@ -45,11 +45,14 @@
# Peers JSON-RPC listen URL for clock synchronization
#consensus_peer_rpc = []
# P2P accept address for the syncing protocol
#sync_p2p_accept = "tls://127.0.0.1:8342"
# Preferred transports of outbound connections for the consensus protocol
#consensus_p2p_transports = ["tls", "tcp"]
# P2P external address for the syncing protocol
#sync_p2p_external = "tls://127.0.0.1:8342"
# P2P accept addresses for the syncing protocol
#sync_p2p_accept = ["tls://127.0.0.1:8342"]
# P2P external addresses for the syncing protocol
#sync_p2p_external = ["tls://127.0.0.1:8342"]
# Connection slots for the syncing protocol
#sync_slots = 8
@@ -60,6 +63,12 @@
# Peers to connect to for the syncing protocol
#sync_p2p_peer = []
# Preferred transports of outbound connections for the syncing protocol
#sync_p2p_transports = ["tls", "tcp"]
# Enable localnet hosts
#localnet = true
# Whitelisted cashier addresses
#cashier_pub = []

View File

@@ -11,12 +11,13 @@ pub enum RpcError {
InvalidKeypair = -32106,
UnknownSlot = -32107,
TxBuildFail = -32108,
NetworkNameError = -32109,
// NetworkNameError = -32109,
ParseError = -32110,
TxBroadcastFail = -32111,
NotYetSynced = -32112,
InvalidAddressParam = -32113,
InvalidAmountParam = -32114,
// InvalidAmountParam = -32114,
DecryptionFailed = -32115,
}
fn to_tuple(e: RpcError) -> (i64, String) {
@@ -29,12 +30,13 @@ fn to_tuple(e: RpcError) -> (i64, String) {
RpcError::InvalidKeypair => "Invalid keypair",
RpcError::UnknownSlot => "Did not find slot",
RpcError::TxBuildFail => "Failed building transaction",
RpcError::NetworkNameError => "Unknown network name",
// RpcError::NetworkNameError => "Unknown network name",
RpcError::ParseError => "Parse error",
RpcError::TxBroadcastFail => "Failed broadcasting transaction",
RpcError::NotYetSynced => "Blockchain not yet synced",
RpcError::InvalidAddressParam => "Invalid address parameter",
RpcError::InvalidAmountParam => "invalid amount parameter",
// RpcError::InvalidAmountParam => "invalid amount parameter",
RpcError::DecryptionFailed => "Decryption failed",
};
(e as i64, msg.to_string())

View File

@@ -22,7 +22,7 @@ use darkfi::{
ValidatorState, MAINNET_GENESIS_HASH_BYTES, MAINNET_GENESIS_TIMESTAMP,
TESTNET_GENESIS_HASH_BYTES, TESTNET_GENESIS_TIMESTAMP,
},
crypto::{address::Address, keypair::PublicKey, token_list::DrkTokenList},
crypto::{address::Address, keypair::PublicKey},
net,
net::P2pPtr,
node::Client,
@@ -82,12 +82,12 @@ struct Args {
rpc_listen: Url,
#[structopt(long)]
/// P2P accept address for the consensus protocol
consensus_p2p_accept: Option<Url>,
/// P2P accept addresses for the consensus protocol (repeatable flag)
consensus_p2p_accept: Vec<Url>,
#[structopt(long)]
/// P2P external address for the consensus protocol
consensus_p2p_external: Option<Url>,
/// P2P external addresses for the consensus protocol (repeatable flag)
consensus_p2p_external: Vec<Url>,
#[structopt(long, default_value = "8")]
/// Connection slots for the consensus protocol
@@ -110,12 +110,16 @@ struct Args {
consensus_seed_rpc: Vec<Url>,
#[structopt(long)]
/// P2P accept address for the syncing protocol
sync_p2p_accept: Option<Url>,
/// Preferred transports of outbound connections for the consensus protocol (repeatable flag)
consensus_p2p_transports: Vec<String>,
#[structopt(long)]
/// P2P external address for the syncing protocol
sync_p2p_external: Option<Url>,
/// P2P accept addresses for the syncing protocol (repeatable flag)
sync_p2p_accept: Vec<Url>,
#[structopt(long)]
/// P2P external addresses for the syncing protocol (repeatable flag)
sync_p2p_external: Vec<Url>,
#[structopt(long, default_value = "8")]
/// Connection slots for the syncing protocol
@@ -129,6 +133,14 @@ struct Args {
/// Connect to seed for the syncing protocol (repeatable flag)
sync_p2p_seed: Vec<Url>,
#[structopt(long)]
/// Preferred transports of outbound connections for the syncing protocol (repeatable flag)
sync_p2p_transports: Vec<String>,
#[structopt(long)]
/// Enable localnet hosts
localnet: bool,
#[structopt(long)]
/// Whitelisted cashier address (repeatable flag)
cashier_pub: Vec<String>,
@@ -176,13 +188,16 @@ impl RequestHandler for Darkfid {
Some("blockchain.merkle_roots") => return self.merkle_roots(req.id, params).await,
Some("tx.transfer") => return self.transfer(req.id, params).await,
Some("wallet.keygen") => return self.keygen(req.id, params).await,
Some("wallet.get_key") => return self.get_key(req.id, params).await,
Some("wallet.get_addrs") => return self.get_addrs(req.id, params).await,
Some("wallet.export_keypair") => return self.export_keypair(req.id, params).await,
Some("wallet.import_keypair") => return self.import_keypair(req.id, params).await,
Some("wallet.set_default_address") => {
return self.set_default_address(req.id, params).await
}
Some("wallet.get_balances") => return self.get_balances(req.id, params).await,
Some("wallet.get_coins_valtok") => return self.get_coins_valtok(req.id, params).await,
Some("wallet.get_merkle_path") => return self.get_merkle_path(req.id, params).await,
Some("wallet.decrypt_note") => return self.decrypt_note(req.id, params).await,
Some(_) | None => return JsonError::new(MethodNotFound, None, req.id).into(),
}
}
@@ -234,8 +249,8 @@ async fn realmain(args: Args, ex: Arc<Executor<'_>>) -> Result<()> {
// tasks, and to catch a shutdown signal, where we can clean up and
// exit gracefully.
let (signal, shutdown) = async_channel::bounded::<()>(1);
ctrlc_async::set_async_handler(async move {
signal.send(()).await.unwrap();
ctrlc::set_handler(move || {
async_std::task::block_on(signal.send(())).unwrap();
})
.unwrap();
@@ -256,18 +271,9 @@ async fn realmain(args: Args, ex: Arc<Executor<'_>>) -> Result<()> {
}
};
debug!("Parsing token lists...");
let tokenlist = Arc::new(DrkTokenList::new(&[
("drk", include_bytes!("../../../contrib/token/darkfi_token_list.min.json")),
("btc", include_bytes!("../../../contrib/token/bitcoin_token_list.min.json")),
("eth", include_bytes!("../../../contrib/token/erc20_token_list.min.json")),
("sol", include_bytes!("../../../contrib/token/solana_token_list.min.json")),
])?);
debug!("Finished parsing token lists");
// TODO: sqldb init cleanup
// Initialize Client
let client = Arc::new(Client::new(wallet, tokenlist).await?);
let client = Arc::new(Client::new(wallet).await?);
// Parse cashier addresses
let mut cashier_pubkeys = vec![];
@@ -304,6 +310,8 @@ async fn realmain(args: Args, ex: Arc<Executor<'_>>) -> Result<()> {
external_addr: args.sync_p2p_external,
peers: args.sync_p2p_peer.clone(),
seeds: args.sync_p2p_seed.clone(),
outbound_transports: net::settings::get_outbound_transports(args.sync_p2p_transports),
localnet: args.localnet,
..Default::default()
};
@@ -345,6 +353,10 @@ async fn realmain(args: Args, ex: Arc<Executor<'_>>) -> Result<()> {
external_addr: args.consensus_p2p_external,
peers: args.consensus_p2p_peer.clone(),
seeds: args.consensus_p2p_seed.clone(),
outbound_transports: net::settings::get_outbound_transports(
args.consensus_p2p_transports,
),
localnet: args.localnet,
..Default::default()
};
let p2p = net::P2p::new(consensus_network_settings).await;
@@ -411,6 +423,9 @@ async fn realmain(args: Args, ex: Arc<Executor<'_>>) -> Result<()> {
})
.detach();
info!("Waiting for sync P2P outbound connections");
sync_p2p.clone().unwrap().wait_for_outbound(ex.clone()).await?;
match block_sync_task(sync_p2p.clone().unwrap(), state.clone()).await {
Ok(()) => *darkfid.synced.lock().await = true,
Err(e) => error!("Failed syncing blockchain: {}", e),
@@ -429,6 +444,9 @@ async fn realmain(args: Args, ex: Arc<Executor<'_>>) -> Result<()> {
})
.detach();
info!("Waiting for consensus P2P outbound connections");
consensus_p2p.clone().unwrap().wait_for_outbound(ex.clone()).await?;
info!("Starting consensus protocol task");
ex.spawn(proposal_task(consensus_p2p.unwrap(), sync_p2p.unwrap(), state)).detach();
} else {

View File

@@ -4,12 +4,9 @@ use log::{error, warn};
use serde_json::{json, Value};
use darkfi::{
crypto::{address::Address, keypair::PublicKey, token_id::generate_id},
rpc::jsonrpc::{
ErrorCode::{InternalError, InvalidParams},
JsonError, JsonResponse, JsonResult,
},
util::{decode_base10, serial::serialize, NetworkName},
crypto::{address::Address, keypair::PublicKey, token_id},
rpc::jsonrpc::{ErrorCode::InvalidParams, JsonError, JsonResponse, JsonResult},
util::serial::serialize,
};
use super::Darkfid;
@@ -19,28 +16,31 @@ impl Darkfid {
// RPCAPI:
// Transfer a given amount of some token to the given address.
// Returns a transaction ID upon success.
// --> {"jsonrpc": "2.0", "method": "tx.transfer", "params": ["darkfi" "gdrk", "1DarkFi...", 12.0], "id": 1}
//
// * `dest_addr` -> Recipient's DarkFi address
// * `token_id` -> ID of the token to send
// * `12345` -> Amount in `u64` of the funds to send
//
// --> {"jsonrpc": "2.0", "method": "tx.transfer", "params": ["dest_addr", "token_id", 12345], "id": 1}
// <-- {"jsonrpc": "2.0", "result": "txID...", "id": 1}
pub async fn transfer(&self, id: Value, params: &[Value]) -> JsonResult {
if params.len() != 4 ||
if params.len() != 3 ||
!params[0].is_string() ||
!params[1].is_string() ||
!params[2].is_string() ||
!params[3].is_f64()
!params[2].is_u64()
{
return JsonError::new(InvalidParams, None, id).into()
}
let network = params[0].as_str().unwrap();
let token = params[1].as_str().unwrap();
let address = params[2].as_str().unwrap();
let amount = params[3].as_f64().unwrap();
if !(*self.synced.lock().await) {
error!("transfer(): Blockchain is not yet synced");
return server_error(RpcError::NotYetSynced, id)
}
let address = params[0].as_str().unwrap();
let token = params[1].as_str().unwrap();
let amount = params[2].as_u64().unwrap();
let address = match Address::from_str(address) {
Ok(v) => v,
Err(e) => {
@@ -57,42 +57,13 @@ impl Darkfid {
}
};
let amount = amount.to_string();
let amount = match decode_base10(&amount, 8, true) {
let token_id = match token_id::parse_b58(token) {
Ok(v) => v,
Err(e) => {
error!("transfer(): Failed parsing amount from string: {}", e);
return server_error(RpcError::InvalidAmountParam, id)
error!("transfer(): Failed parsing Token ID from string: {}", e);
return server_error(RpcError::ParseError, id)
}
};
let amount: u64 = match amount.try_into() {
Ok(v) => v,
Err(e) => {
error!("transfer(): Failed converting biguint to u64: {}", e);
return JsonError::new(InternalError, None, id).into()
}
};
let network = match NetworkName::from_str(network) {
Ok(v) => v,
Err(e) => {
error!("transfer(): Failed parsing NetworkName: {}", e);
return server_error(RpcError::NetworkNameError, id)
}
};
let token_id =
if let Some(tok) = self.client.tokenlist.by_net[&network].get(token.to_uppercase()) {
tok.drk_address
} else {
match generate_id(&network, token) {
Ok(v) => v,
Err(e) => {
error!("transfer(): Failed generate_id(): {}", e);
return JsonError::new(InternalError, None, id).into()
}
}
};
let tx = match self
.client

View File

@@ -1,6 +1,6 @@
use fxhash::FxHashMap;
use log::{error, warn};
use num_bigint::BigUint;
use incrementalmerkletree::Tree;
use log::error;
use pasta_curves::group::ff::PrimeField;
use serde_json::{json, Value};
@@ -8,12 +8,14 @@ use darkfi::{
crypto::{
address::Address,
keypair::{Keypair, PublicKey, SecretKey},
token_id,
},
node::State,
rpc::jsonrpc::{
ErrorCode::{InternalError, InvalidParams},
ErrorCode::{InternalError, InvalidParams, ParseError},
JsonError, JsonResponse, JsonResult,
},
util::{decode_base10, encode_base10, NetworkName},
util::serial::{deserialize, serialize},
};
use super::Darkfid;
@@ -37,9 +39,9 @@ impl Darkfid {
// RPCAPI:
// Fetches public keys by given indexes from the wallet and returns it in an
// encoded format. `-1` is supported to fetch all available keys.
// --> {"jsonrpc": "2.0", "method": "wallet.get_key", "params": [1, 2], "id": 1}
// --> {"jsonrpc": "2.0", "method": "wallet.get_addrs", "params": [1, 2], "id": 1}
// <-- {"jsonrpc": "2.0", "result": ["foo", "bar"], "id": 1}
pub async fn get_key(&self, id: Value, params: &[Value]) -> JsonResult {
pub async fn get_addrs(&self, id: Value, params: &[Value]) -> JsonResult {
if params.is_empty() {
return JsonError::new(InvalidParams, None, id).into()
}
@@ -191,10 +193,10 @@ impl Darkfid {
}
// RPCAPI:
// Queries the wallet for known balances.
// Returns a map of balances, indexed by `network`, and token ID.
// Queries the wallet for known tokens with active balances.
// Returns a map of balances, indexed by the token ID.
// --> {"jsonrpc": "2.0", "method": "wallet.get_balances", "params": [], "id": 1}
// <-- {"jsonrpc": "2.0", "result": [{"btc": [100, "Bitcoin"]}, {...}], "id": 1}
// <-- {"jsonrpc": "2.0", "result": [{"1Foobar...": 100}, {...}]", "id": 1}
pub async fn get_balances(&self, id: Value, _params: &[Value]) -> JsonResult {
let balances = match self.client.get_balances().await {
Ok(v) => v,
@@ -204,52 +206,126 @@ impl Darkfid {
}
};
// k: ticker/drk_addr, v: (amount, network, net_addr, drk_addr)
let mut ret: FxHashMap<String, (String, String, String, String)> = FxHashMap::default();
// k: token_id, v: [amount]
let mut ret: FxHashMap<String, u64> = FxHashMap::default();
for balance in balances.list {
let drk_addr = bs58::encode(balance.token_id.to_repr()).into_string();
let mut amount = BigUint::from(balance.value);
let token_id = bs58::encode(balance.token_id.to_repr()).into_string();
let mut amount = balance.value;
let (net_name, net_addr) =
if let Some((net, tok)) = self.client.tokenlist.by_addr.get(&drk_addr) {
(net, tok.net_address.clone())
} else {
warn!("Could not find network name and token info for {}", drk_addr);
(&NetworkName::DarkFi, "unknown".to_string())
};
let mut ticker = None;
for (k, v) in self.client.tokenlist.by_net[net_name].0.iter() {
if v.net_address == net_addr {
ticker = Some(k.clone());
break
}
if let Some(prev) = ret.get(&token_id) {
amount += prev;
}
if ticker.is_none() {
ticker = Some(drk_addr.clone())
}
let ticker = ticker.unwrap();
if let Some(prev) = ret.get(&ticker) {
// TODO: We shouldn't be hardcoding everything to 8 decimals.
let prev_amnt = match decode_base10(&prev.0, 8, false) {
Ok(v) => v,
Err(e) => {
error!("Failed to decode_base10(): {}", e);
return JsonError::new(InternalError, None, id).into()
}
};
amount += prev_amnt;
}
let amount = encode_base10(amount, 8);
ret.insert(ticker, (amount, net_name.to_string(), net_addr, drk_addr));
ret.insert(token_id, amount);
}
JsonResponse::new(json!(ret), id).into()
}
// RPCAPI:
// Queries the wallet for a coin containing given parameters (value, token_id, unspent),
// and returns the entire row with the coin's data:
//
// --> {"jsonrpc": "2.0", "method": "wallet.get_coins_valtok", "params": [1234, "F00b4r...", true], "id": 1}
// <-- {"jsonrpc": "2.0", "result": ["coin", "data", ...], "id": 1}
pub async fn get_coins_valtok(&self, id: Value, params: &[Value]) -> JsonResult {
if params.len() != 3 ||
!params[0].is_u64() ||
!params[1].is_string() ||
!params[2].is_boolean()
{
return JsonError::new(InvalidParams, None, id).into()
}
let value = params[0].as_u64().unwrap();
let unspent = params[2].as_bool().unwrap();
let token_id = match token_id::parse_b58(params[1].as_str().unwrap()) {
Ok(v) => v,
Err(e) => {
error!("Failed parsing token_id from base58 string: {}", e);
return JsonError::new(ParseError, None, id).into()
}
};
let coins = match self.client.get_coins_valtok(value, token_id, unspent).await {
Ok(v) => v,
Err(e) => {
error!("Failed fetching coins by valtok from wallet: {}", e);
return JsonError::new(InternalError, None, id).into()
}
};
let ret: Vec<String> =
coins.iter().map(|x| bs58::encode(serialize(x)).into_string()).collect();
JsonResponse::new(json!(ret), id).into()
}
// RPCAPI:
// Query the state merkle tree for the merkle path of a given leaf position.
// --> {"jsonrpc": "2.0", "method": "wallet.get_merkle_path", "params": [3], "id": 1}
// <-- {"jsonrpc": "2.0", "result": ["f091uf1...", "081ff0h10w1h0...", ...], "id": 1}
pub async fn get_merkle_path(&self, id: Value, params: &[Value]) -> JsonResult {
if params.len() != 1 || !params[0].is_u64() {
return JsonError::new(InvalidParams, None, id).into()
}
let leaf_pos: incrementalmerkletree::Position =
((params[0].as_u64().unwrap() as u64) as usize).into();
let validator_state = self.validator_state.read().await;
let state = validator_state.state_machine.lock().await;
let root = state.tree.root(0).unwrap();
let merkle_path = state.tree.authentication_path(leaf_pos, &root).unwrap();
drop(state);
drop(validator_state);
let ret: Vec<String> =
merkle_path.iter().map(|x| bs58::encode(serialize(x)).into_string()).collect();
JsonResponse::new(json!(ret), id).into()
}
// RPCAPI:
// Try to decrypt a given encrypted note with the secret keys
// found in the wallet.
// --> {"jsonrpc": "2.0", "method": "wallet.decrypt_note", params": [ciphertext], "id": 1}
// <-- {"jsonrpc": "2.0", "result": "base58_encoded_plain_note", "id": 1}
pub async fn decrypt_note(&self, id: Value, params: &[Value]) -> JsonResult {
if params.len() != 1 || !params[0].is_string() {
return JsonError::new(InvalidParams, None, id).into()
}
let bytes = match bs58::decode(params[0].as_str().unwrap()).into_vec() {
Ok(v) => v,
Err(e) => {
error!("decrypt_note(): Failed decoding base58 string: {}", e);
return JsonError::new(ParseError, None, id).into()
}
};
let enc_note = match deserialize(&bytes) {
Ok(v) => v,
Err(e) => {
error!("decrypt_note(): Failed deserializing bytes into EncryptedNote: {}", e);
return JsonError::new(InternalError, None, id).into()
}
};
let keypairs = match self.client.get_keypairs().await {
Ok(v) => v,
Err(e) => {
error!("decrypt_note(): Failed fetching keypairs: {}", e);
return JsonError::new(InternalError, None, id).into()
}
};
for kp in keypairs {
if let Some(note) = State::try_decrypt_note(&enc_note, kp.secret) {
let s = bs58::encode(&serialize(&note)).into_string();
return JsonResponse::new(json!(s), id).into()
}
}
server_error(RpcError::DecryptionFailed, id)
}
}

20
bin/darkotc/Cargo.toml Normal file
View File

@@ -0,0 +1,20 @@
[package]
name = "darkotc"
version = "0.3.0"
homepage = "https://dark.fi"
description = "Atomic swap commandline tool"
authors = ["darkfi <dev@dark.fi>"]
repository = "https://github.com/darkrenaissance/darkfi"
license = "AGPL-3.0-only"
edition = "2021"
[dependencies]
async-std = {version = "1.12.0", features = ["attributes"]}
bs58 = "0.4.0"
clap = {version = "3.2.20", features = ["derive"]}
darkfi = {path = "../../", features = ["crypto", "rpc", "util", "tx"]}
halo2_proofs = "0.2.0"
halo2_gadgets = "0.2.0"
rand = "0.8.5"
serde_json = "1.0.85"
url = "2.2.2"

4
bin/darkotc/README.md Normal file
View File

@@ -0,0 +1,4 @@
darkotc
=======
Commandline tool for atomic swaps.

View File

@@ -0,0 +1,58 @@
use std::process::exit;
use halo2_proofs::pasta::group::ff::PrimeField;
use darkfi::{crypto::types::DrkTokenId, util::decode_base10, Result};
pub fn parse_value_pair(s: &str) -> Result<(u64, u64)> {
let v: Vec<&str> = s.split(':').collect();
if v.len() != 2 {
eprintln!("Invalid value pair. Use a pair such as '13.37:11.0'.");
exit(1);
}
// TODO: We shouldn't be hardcoding everything to 8 decimals.
let val0 = decode_base10(v[0], 8, true);
let val1 = decode_base10(v[1], 8, true);
if val0.is_err() || val1.is_err() {
eprintln!("Invalid value pair. Use a pair such as '13.37:11.0'.");
exit(1);
}
Ok((val0.unwrap(), val1.unwrap()))
}
pub fn parse_token_pair(s: &str) -> Result<(String, String)> {
let v: Vec<&str> = s.split(':').collect();
if v.len() != 2 {
eprintln!("Invalid token pair. Use a pair such as:");
eprintln!("A7f1RKsCUUHrSXA7a9ogmwg8p3bs6F47ggsW826HD4yd:FCuoMii64H5Ee4eVWBjP18WTFS8iLUJmGi16Qti1xFQ2");
exit(1);
}
let tok0 = bs58::decode(v[0]).into_vec();
let tok1 = bs58::decode(v[1]).into_vec();
if tok0.is_err() || tok1.is_err() {
eprintln!("Invalid token pair. Use a pair such as:");
eprintln!("A7f1RKsCUUHrSXA7a9ogmwg8p3bs6F47ggsW826HD4yd:FCuoMii64H5Ee4eVWBjP18WTFS8iLUJmGi16Qti1xFQ2");
exit(1);
}
if tok0.as_ref().unwrap().len() != 32 ||
DrkTokenId::from_repr(tok0.unwrap().try_into().unwrap()).is_some().unwrap_u8() == 0
{
eprintln!("Error: {} is not a valid token ID", v[0]);
exit(1);
}
if tok1.as_ref().unwrap().len() != 32 ||
DrkTokenId::from_repr(tok1.unwrap().try_into().unwrap()).is_some().unwrap_u8() == 0
{
eprintln!("Error: {} is not a valid token ID", v[1]);
exit(1);
}
Ok((v[0].to_string(), v[1].to_string()))
}

View File

@@ -1,6 +1,8 @@
use std::{
io::{stdin, Read},
process::exit,
/*
<<<<<<< HEAD
str::FromStr,
};
@@ -10,11 +12,21 @@ use halo2_proofs::{arithmetic::Field, pasta::group::ff::PrimeField};
use rand::rngs::OsRng;
use serde_json::json;
use termion::color;
=======
*/
};
use clap::{Parser, Subcommand};
use halo2_proofs::{arithmetic::Field, pasta::group::ff::PrimeField};
use rand::rngs::OsRng;
use url::Url;
use darkfi::{
cli_desc,
crypto::{
/*
<<<<<<< HEAD
address::Address,
burn_proof::{create_burn_proof, verify_burn_proof},
keypair::{PublicKey, SecretKey},
@@ -31,6 +43,32 @@ use darkfi::{
cli::progress_bar,
encode_base10,
serial::{deserialize, serialize, SerialDecodable, SerialEncodable},
=======
*/
burn_proof::{create_burn_proof, verify_burn_proof},
keypair::{PublicKey, SecretKey},
mint_proof::{create_mint_proof, verify_mint_proof},
note::{EncryptedNote, Note},
proof::{ProvingKey, VerifyingKey},
schnorr,
schnorr::SchnorrSecret,
token_id,
types::{
DrkCoinBlind, DrkSerial, DrkSpendHook, DrkTokenId, DrkUserData, DrkUserDataBlind,
DrkValueBlind,
},
util::{pedersen_commitment_base, pedersen_commitment_u64},
BurnRevealedValues, MintRevealedValues, Proof,
},
rpc::client::RpcClient,
tx::{
partial::{PartialTransaction, PartialTransactionInput},
Transaction, TransactionInput, TransactionOutput,
},
util::{
cli::{fg_green, fg_red, progress_bar},
encode_base10,
serial::{deserialize, serialize, Encodable, SerialDecodable, SerialEncodable},
},
zk::circuit::{BurnContract, MintContract},
Result,
@@ -39,6 +77,9 @@ use darkfi::{
mod cli_util;
use cli_util::{parse_token_pair, parse_value_pair};
mod rpc;
use rpc::Rpc;
#[derive(Parser)]
#[clap(name = "darkotc", about = cli_desc!(), version)]
#[clap(arg_required_else_help(true))]
@@ -60,6 +101,8 @@ enum Subcmd {
/// Initialize an atomic swap
Init {
#[clap(short, long)]
/*
<<<<<<< HEAD
/// Pair of token IDs to swap: e.g. token_to_send:token_to_recv
token_pair: String,
@@ -195,12 +238,72 @@ impl Rpc {
Ok(ret)
}
=======
*/
/// Pair of token IDs to swap: token_to_send:token_to_recv
token_pair: String,
#[clap(short, long)]
/// Pair of values to swap: value_to_send:value_to_recv
value_pair: String,
},
/// Inspect partial swap data from stdin.
InspectPartial,
/// Join two partial swap data files and build a tx
Join { data0: String, data1: String },
/// Sign a transaction given from stdin.
SignTx,
}
#[derive(SerialEncodable, SerialDecodable)]
/// Half of the swap data, includes the coin that is supposed to be received,
/// and the coin that is supposed to be sent.
struct PartialSwapData {
/// Mint proof of coin to be received
mint_proof: Proof,
/// Public values for the mint proof
mint_revealed: MintRevealedValues,
/// Value of the coin to be received
mint_value: u64,
/// Token ID of the coin to be received
mint_token: DrkTokenId,
/// Blinding factor for the minted value pedersen commitment
mint_value_blind: DrkValueBlind,
/// Blinding factor for the minted token ID pedersen commitment
mint_token_blind: DrkValueBlind,
/// Burn proof of the coin to be sent
burn_proof: Proof,
/// Public values for the burn proof
burn_revealed: BurnRevealedValues,
/// Value of the coin to be sent
burn_value: u64,
/// Token ID of the coin to be sent
burn_token: DrkTokenId,
/// Blinding factor for the burned value pedersen commitment
burn_value_blind: DrkValueBlind,
/// Blinding factor for the burned token ID pedersen commitment
burn_token_blind: DrkValueBlind,
/// Encrypted note
encrypted_note: EncryptedNote,
}
#[derive(SerialEncodable, SerialDecodable)]
/// Full swap data, containing two instances of `PartialSwapData`, which
/// represent an atomic swap.
struct SwapData {
swap0: PartialSwapData,
swap1: PartialSwapData,
}
async fn init_swap(
endpoint: Url,
token_pair: (String, String),
value_pair: (u64, u64),
/*
<<<<<<< HEAD
) -> Result<()> {
let rpc_client = RpcClient::new(endpoint).await?;
let rpc = Rpc { rpc_client };
@@ -209,18 +312,30 @@ async fn init_swap(
let tp = (token_id::parse_b58(&token_pair.0)?, token_id::parse_b58(&token_pair.1)?);
let vp: (u64, u64) =
(value_pair.0.clone().try_into().unwrap(), value_pair.1.clone().try_into().unwrap());
=======
*/
) -> Result<PartialSwapData> {
let rpc_client = RpcClient::new(endpoint).await?;
let rpc = Rpc { rpc_client };
// TODO: Implement metadata for decimals, don't hardcode.
let tp = (token_id::parse_b58(&token_pair.0)?, token_id::parse_b58(&token_pair.1)?);
let vp = value_pair;
// Connect to darkfid and see if there's available funds.
let balance = rpc.balance_of(&token_pair.0).await?;
if balance < vp.0 {
eprintln!(
"Error: There is not enough balance for token \"{}\" in your wallet.",
"Error: There's not enough balance for token \"{}\" in your wallet.",
token_pair.0
);
eprintln!("Available balance is {} ({})", encode_base10(balance, 8), balance);
exit(1);
}
/*
<<<<<<< HEAD
// If not enough funds in a single coin, mint a single new coin
// with the funds. We do this to minimize the size of the swap
// transaction, i.e. 2 inputs and 2 outputs.
@@ -231,16 +346,38 @@ async fn init_swap(
let coins = rpc.get_coins_valtok(vp.0, &token_pair.0).await?;
if coins.is_empty() {
eprintln!("Error: Did not manage to find a coin with enough value to spend");
=======
*/
// If there's not enough funds in a single coin, mint a single new coin
// with the funds. We do this to minimize the size of the swap transaction.
// i.e. 2 inputs and 2 outputs.
// TODO: Implement ^
// TODO: Maybe this should be done by the user beforehand?
// Find a coin to spend. We can find multiple, but we'll pick the first one.
let coins = rpc.get_coins_valtok(vp.0, &token_pair.0).await?;
if coins.is_empty() {
eprintln!("Error: Did not manage to find a coin with enough value to spend.");
exit(1);
}
eprintln!("Initializing swap data for:");
/*
<<<<<<< HEAD
eprintln!("Send: {} {} tokens", encode_base10(value_pair.0, 8), token_pair.0);
eprintln!("Recv: {} {} tokens", encode_base10(value_pair.1, 8), token_pair.1);
// Fetch our default address
let our_address = rpc.wallet_address().await?;
let our_publickey = match PublicKey::try_from(our_address) {
=======
*/
eprintln!("Send: {} {} tokens", encode_base10(vp.0, 8), token_pair.0);
eprintln!("Recv: {} {} tokens", encode_base10(vp.1, 8), token_pair.1);
// Fetch our default address
let our_addr = rpc.wallet_address().await?;
let our_pubk = match PublicKey::try_from(our_addr) {
Ok(v) => v,
Err(e) => {
eprintln!("Error converting our address into PublicKey: {}", e);
@@ -248,6 +385,8 @@ async fn init_swap(
}
};
/*
<<<<<<< HEAD
// Build proving keys
let pb = progress_bar("Building proving key for the mint contract");
let mint_pk = ProvingKey::build(8, &MintContract::default());
@@ -258,12 +397,34 @@ async fn init_swap(
pb.finish();
// The coin we want to receive.
=======
*/
// Build ZK proving keys
let pb = progress_bar("Building proving key for the Mint contract");
let mint_pk = ProvingKey::build(11, &MintContract::default());
pb.finish();
let pb = progress_bar("Building proving key for the Burn contract");
let burn_pk = ProvingKey::build(11, &BurnContract::default());
pb.finish();
// The coin we want to receive
let recv_value_blind = DrkValueBlind::random(&mut OsRng);
let recv_token_blind = DrkValueBlind::random(&mut OsRng);
let recv_coin_blind = DrkCoinBlind::random(&mut OsRng);
let recv_serial = DrkSerial::random(&mut OsRng);
/*
<<<<<<< HEAD
let pb = progress_bar("Building mint proof for receiving coin");
=======
*/
// Spend hook and user data disabled
let spend_hook = DrkSpendHook::from(0);
let user_data = DrkUserData::from(0);
let pb = progress_bar("Building Mint proof for the receiving coin");
let (mint_proof, mint_revealed) = create_mint_proof(
&mint_pk,
vp.1,
@@ -271,25 +432,50 @@ async fn init_swap(
recv_value_blind,
recv_token_blind,
recv_serial,
/*
<<<<<<< HEAD
recv_coin_blind,
our_publickey,
=======
*/
spend_hook,
user_data,
recv_coin_blind,
our_pubk,
)?;
pb.finish();
// The coin we are spending.
/*
<<<<<<< HEAD
// We'll spend the first one we've found.
let coin = coins[0];
let pb = progress_bar("Building burn proof for spending coin");
=======
*/
let coin = coins[0].clone();
let pb = progress_bar("Building Burn proof for the spending coin");
let signature_secret = SecretKey::random(&mut OsRng);
let merkle_path = match rpc.get_merkle_path(usize::from(coin.leaf_position)).await {
Ok(v) => v,
Err(e) => {
eprintln!("Failed to get merkle path for our coin from darkfid RPC: {}", e);
eprintln!("Failed to get Merkle path for our coin from darkfid RPC: {}", e);
exit(1);
}
};
// Spend hook and user data disabled
let spend_hook = DrkSpendHook::from(0);
let user_data = DrkUserData::from(0);
let user_data_blind = DrkUserDataBlind::random(&mut OsRng);
let (burn_proof, burn_revealed) = create_burn_proof(
&burn_pk,
vp.0,
@@ -297,6 +483,9 @@ async fn init_swap(
coin.note.value_blind,
coin.note.token_blind,
coin.note.serial,
spend_hook,
user_data,
user_data_blind,
coin.note.coin_blind,
coin.secret,
coin.leaf_position,
@@ -305,9 +494,30 @@ async fn init_swap(
)?;
pb.finish();
/*
<<<<<<< HEAD
// Pack proofs together with pedersen commitment openings so
// counterparty can verify correctness.
let swap_data = SwapData {
=======
*/
// Create encrypted note
let note = Note {
serial: recv_serial,
value: vp.1,
token_id: tp.1,
coin_blind: recv_coin_blind,
value_blind: recv_value_blind,
token_blind: recv_token_blind,
// Here we store our secret key we used for signing
memo: signature_secret.to_bytes().to_vec(),
};
let encrypted_note = note.encrypt(&our_pubk)?;
// Pack proofs together with pedersen commitment openings so
// counterparty can verify correctness.
let partial_swap_data = PartialSwapData {
mint_proof,
mint_revealed,
mint_value: vp.1,
@@ -320,6 +530,8 @@ async fn init_swap(
burn_revealed,
burn_value_blind: coin.note.value_blind,
burn_token_blind: coin.note.token_blind,
/*
<<<<<<< HEAD
};
// Print encoded data.
@@ -329,6 +541,16 @@ async fn init_swap(
}
fn inspect(data: &str) -> Result<()> {
=======
*/
encrypted_note,
};
Ok(partial_swap_data)
}
fn inspect_partial(data: &str) -> Result<()> {
let mut mint_valid = false;
let mut burn_valid = false;
let mut mint_value_valid = false;
@@ -344,14 +566,25 @@ fn inspect(data: &str) -> Result<()> {
}
};
/*
<<<<<<< HEAD
let sd: SwapData = match deserialize(&bytes) {
Ok(v) => v,
Err(e) => {
eprintln!("Error: Failed to deserialize swap data into struct: {}", e);
=======
*/
let sd: PartialSwapData = match deserialize(&bytes) {
Ok(v) => v,
Err(e) => {
eprintln!("Error deserializing partial swap data into struct: {}", e);
exit(1);
}
};
/*
<<<<<<< HEAD
eprintln!("Successfully decoded data into SwapData struct");
// Build verifying keys
@@ -364,18 +597,45 @@ fn inspect(data: &str) -> Result<()> {
pb.finish();
let pb = progress_bar("Verifying burn proof");
=======
*/
eprintln!("Successfully decoded partial swap data");
// Build ZK verifying keys
let pb = progress_bar("Building verifying key for the Mint contract");
let mint_vk = VerifyingKey::build(11, &MintContract::default());
pb.finish();
let pb = progress_bar("Building verifying key for the Burn contract");
let burn_vk = VerifyingKey::build(11, &BurnContract::default());
pb.finish();
let pb = progress_bar("Verifying Burn proof");
if verify_burn_proof(&burn_vk, &sd.burn_proof, &sd.burn_revealed).is_ok() {
burn_valid = true;
}
pb.finish();
/*
<<<<<<< HEAD
let pb = progress_bar("Verifying mint proof");
=======
*/
let pb = progress_bar("Verifying Mint proof");
if verify_mint_proof(&mint_vk, &sd.mint_proof, &sd.mint_revealed).is_ok() {
mint_valid = true;
}
pb.finish();
/*
<<<<<<< HEAD
eprintln!(" Verifying pedersen commitments");
=======
*/
eprintln!(" Verifying Pedersen commitments");
if pedersen_commitment_u64(sd.burn_value, sd.burn_value_blind) == sd.burn_revealed.value_commit
{
@@ -402,49 +662,49 @@ fn inspect(data: &str) -> Result<()> {
eprint!(" Burn proof: ");
if burn_valid {
eprintln!("{}VALID{}", color::Fg(color::Green), color::Fg(color::Reset));
eprintln!("{}", fg_green("VALID"));
} else {
eprintln!("{}INVALID{}", color::Fg(color::Red), color::Fg(color::Reset));
eprintln!("{}", fg_red("INVALID"));
valid = false;
}
eprint!(" Burn proof value commitment: ");
if burn_value_valid {
eprintln!("{}VALID{}", color::Fg(color::Green), color::Fg(color::Reset));
eprintln!("{}", fg_green("VALID"));
} else {
eprintln!("{}INVALID{}", color::Fg(color::Red), color::Fg(color::Reset));
eprintln!("{}", fg_red("INVALID"));
valid = false;
}
eprint!(" Burn proof token commitment: ");
if burn_token_valid {
eprintln!("{}VALID{}", color::Fg(color::Green), color::Fg(color::Reset));
eprintln!("{}", fg_green("VALID"));
} else {
eprintln!("{}INVALID{}", color::Fg(color::Red), color::Fg(color::Reset));
eprintln!("{}", fg_red("INVALID"));
valid = false;
}
eprint!(" Mint proof: ");
if mint_valid {
eprintln!("{}VALID{}", color::Fg(color::Green), color::Fg(color::Reset));
eprintln!("{}", fg_green("VALID"));
} else {
eprintln!("{}INVALID{}", color::Fg(color::Red), color::Fg(color::Reset));
eprintln!("{}", fg_red("INVALID"));
valid = false;
}
eprint!(" Mint proof value commitment: ");
if mint_value_valid {
eprintln!("{}VALID{}", color::Fg(color::Green), color::Fg(color::Reset));
eprintln!("{}", fg_green("VALID"));
} else {
eprintln!("{}INVALID{}", color::Fg(color::Red), color::Fg(color::Reset));
eprintln!("{}", fg_red("INVALID"));
valid = false;
}
eprint!(" Mint proof token commitment: ");
if mint_token_valid {
eprintln!("{}VALID{}", color::Fg(color::Green), color::Fg(color::Reset));
eprintln!("{}", fg_green("VALID"));
} else {
eprintln!("{}INVALID{}", color::Fg(color::Red), color::Fg(color::Reset));
eprintln!("{}", fg_red("INVALID"));
valid = false;
}
@@ -461,6 +721,8 @@ fn inspect(data: &str) -> Result<()> {
bs58::encode(sd.burn_token.to_repr()).into_string()
);
/*
<<<<<<< HEAD
if !valid {
eprintln!(
"\nThe ZK proofs and commitments inspected are {}NOT VALID{}",
@@ -474,11 +736,21 @@ fn inspect(data: &str) -> Result<()> {
color::Fg(color::Green),
color::Fg(color::Reset)
);
=======
*/
eprint!("\nThe ZK proofs and commitments inspected are ");
if !valid {
println!("{}", fg_red("NOT VALID"));
exit(1);
} else {
eprintln!("{}", fg_green("VALID"));
}
Ok(())
}
/*
<<<<<<< HEAD
#[derive(SerialEncodable, SerialDecodable)]
struct SwapData {
mint_proof: Proof,
@@ -493,6 +765,171 @@ struct SwapData {
burn_token: DrkTokenId,
burn_value_blind: DrkValueBlind,
burn_token_blind: DrkValueBlind,
=======
*/
async fn join(endpoint: Url, d0: PartialSwapData, d1: PartialSwapData) -> Result<Transaction> {
eprintln!("Joining data into a transaction");
let input0 = PartialTransactionInput { burn_proof: d0.burn_proof, revealed: d0.burn_revealed };
let input1 = PartialTransactionInput { burn_proof: d1.burn_proof, revealed: d1.burn_revealed };
let inputs = vec![input0, input1];
let output0 = TransactionOutput {
mint_proof: d0.mint_proof,
revealed: d0.mint_revealed,
enc_note: d0.encrypted_note.clone(),
};
let output1 = TransactionOutput {
mint_proof: d1.mint_proof,
revealed: d1.mint_revealed,
enc_note: d1.encrypted_note.clone(),
};
let outputs = vec![output0, output1];
let partial_tx = PartialTransaction { clear_inputs: vec![], inputs, outputs };
let mut unsigned_tx_data = vec![];
partial_tx.encode(&mut unsigned_tx_data)?;
let mut inputs = vec![];
let mut signed: bool;
eprint!("Trying to decrypt the note of the first half... ");
let rpc_client = RpcClient::new(endpoint.clone()).await?;
let rpc = Rpc { rpc_client };
let note = match rpc.decrypt_note(&d0.encrypted_note).await {
Ok(v) => v,
Err(_) => None,
};
if let Some(note) = note {
eprintln!("{}", fg_green("Success"));
let signature = try_sign_tx(&note, &unsigned_tx_data[..])?;
let input = TransactionInput::from_partial(partial_tx.inputs[0].clone(), signature);
inputs.push(input);
signed = true;
} else {
eprintln!("{}", fg_red("Failure"));
let signature = schnorr::Signature::dummy();
let input = TransactionInput::from_partial(partial_tx.inputs[0].clone(), signature);
inputs.push(input);
signed = false;
}
// If we have signed, we shouldn't have to look in the other one, but we might
// be sending to ourself for some reason.
eprint!("Trying to decrypt the note of the second half... ");
let rpc_client = RpcClient::new(endpoint).await?;
let rpc = Rpc { rpc_client };
let note = match rpc.decrypt_note(&d1.encrypted_note).await {
Ok(v) => v,
Err(_) => None,
};
if let Some(note) = note {
eprintln!("{}", fg_green("Success"));
let signature = try_sign_tx(&note, &unsigned_tx_data[..])?;
let input = TransactionInput::from_partial(partial_tx.inputs[1].clone(), signature);
inputs.push(input);
signed = true;
} else {
eprintln!("{}", fg_red("Failure"));
let signature = schnorr::Signature::dummy();
let input = TransactionInput::from_partial(partial_tx.inputs[1].clone(), signature);
inputs.push(input);
if !signed {
eprintln!("Error: Failed to sign transaction!");
exit(1);
}
}
if !signed {
eprintln!("Error: Failed to sign transaction!");
exit(1);
}
let tx = Transaction { clear_inputs: vec![], inputs, outputs: partial_tx.outputs };
Ok(tx)
}
/// Sign a base58-encoded serialized transaction read from `data`, using a
/// secret key recovered from one of the transaction's own encrypted notes.
/// Dummy signatures found in the inputs are replaced with the real one.
/// Exits the process on unrecoverable failures, consistent with this CLI.
async fn sign_tx(endpoint: Url, data: &str) -> Result<Transaction> {
    eprintln!("Trying to sign transaction");
    let mut tx: Transaction = deserialize(&bs58::decode(data).into_vec()?)?;

    let mut input_idxs = vec![];
    let mut signature = schnorr::Signature::dummy();

    // Find dummy signatures to fill. We assume we're using the same
    // signature everywhere.
    eprintln!("Looking for dummy signatures...");
    for (i, input) in tx.inputs.iter().enumerate() {
        if input.signature == schnorr::Signature::dummy() {
            eprintln!("Found dummy signature in input {}", i);
            input_idxs.push(i);
        }
    }

    if input_idxs.is_empty() {
        eprintln!("Error: Did not find any dummy signatures in the transaction.");
        exit(1);
    }

    // Find a note to decrypt that holds our secret key.
    let mut found_secret = false;
    for (i, output) in tx.outputs.iter().enumerate() {
        // TODO: FIXME: Consider not closing the RPC on failure.
        let rpc_client = RpcClient::new(endpoint.clone()).await?;
        let rpc = Rpc { rpc_client };
        let note = match rpc.decrypt_note(&output.enc_note).await {
            Ok(v) => v,
            Err(_) => continue,
        };

        if let Some(note) = note {
            eprintln!("Successfully decrypted note in output {}", i);
            eprintln!("Creating signature...");
            let mut unsigned_tx_data = vec![];
            let _ = tx.encode_without_signature(&mut unsigned_tx_data)?;
            signature = try_sign_tx(&note, &unsigned_tx_data[..])?;
            found_secret = true;
            break
        }

        // BUGFIX: previously this arm printed "Failed to find a note to
        // decrypt" and exited on the FIRST output whose note did not
        // decrypt, never trying the remaining outputs. Keep scanning; the
        // `found_secret` check below reports failure once all outputs have
        // been tried.
    }

    if !found_secret {
        eprintln!("Error: Did not manage to sign transaction. Couldn't find any secret keys.");
        exit(1);
    }

    for i in input_idxs {
        tx.inputs[i].signature = signature.clone();
    }

    Ok(tx)
}
/// Derive a `SecretKey` from the 32-byte memo carried in `note` and use it
/// to sign `tx_data`. Exits the process if the memo is not exactly 32 bytes
/// or cannot be interpreted as a secret key.
fn try_sign_tx(note: &Note, tx_data: &[u8]) -> Result<schnorr::Signature> {
    if note.memo.len() != 32 {
        eprintln!("Error: The note memo is not 32 bytes");
        exit(1);
    }

    // The memo is exactly 32 bytes here, so the conversion cannot fail.
    let memo_bytes: [u8; 32] = note.memo.clone().try_into().unwrap();

    let secret = SecretKey::from_bytes(memo_bytes).unwrap_or_else(|e| {
        eprintln!("Did not manage to cast bytes into SecretKey: {}", e);
        exit(1);
    });

    eprintln!("Signing transaction...");
    Ok(secret.sign(tx_data))
}
#[async_std::main]
@@ -503,12 +940,50 @@ async fn main() -> Result<()> {
Subcmd::Init { token_pair, value_pair } => {
let token_pair = parse_token_pair(&token_pair)?;
let value_pair = parse_value_pair(&value_pair)?;
/*
<<<<<<< HEAD
init_swap(args.endpoint, token_pair, value_pair).await
}
Subcmd::Inspect => {
let mut buf = String::new();
stdin().read_to_string(&mut buf)?;
inspect(&buf.trim())
=======
*/
let swap_data = init_swap(args.endpoint, token_pair, value_pair).await?;
println!("{}", bs58::encode(serialize(&swap_data)).into_string());
Ok(())
}
Subcmd::InspectPartial => {
let mut buf = String::new();
stdin().read_to_string(&mut buf)?;
inspect_partial(buf.trim())
}
Subcmd::Join { data0, data1 } => {
let d0 = std::fs::read_to_string(data0)?;
let d1 = std::fs::read_to_string(data1)?;
let d0 = deserialize(&bs58::decode(&d0.trim()).into_vec()?)?;
let d1 = deserialize(&bs58::decode(&d1.trim()).into_vec()?)?;
let tx = join(args.endpoint, d0, d1).await?;
println!("{}", bs58::encode(&serialize(&tx)).into_string());
eprintln!("Successfully signed transaction");
Ok(())
}
Subcmd::SignTx => {
let mut buf = String::new();
stdin().read_to_string(&mut buf)?;
let tx = sign_tx(args.endpoint, buf.trim()).await?;
println!("{}", bs58::encode(&serialize(&tx)).into_string());
eprintln!("Successfully signed transaction");
Ok(())
}
}
}

185
bin/darkotc/src/rpc.rs Normal file
View File

@@ -0,0 +1,185 @@
use std::{process::exit, str::FromStr};
use serde_json::json;
use darkfi::{
crypto::{
address::Address,
merkle_node::MerkleNode,
note::{EncryptedNote, Note},
OwnCoin,
},
rpc::{client::RpcClient, jsonrpc::JsonRequest},
util::serial::{deserialize, serialize},
Result,
};
/// The RPC object with functionality for connecting to darkfid.
pub struct Rpc {
    // Underlying JSON-RPC client used for all wallet queries below.
    pub rpc_client: RpcClient,
}
impl Rpc {
    /// Fetch wallet balance of given token ID and return its u64 representation.
    ///
    /// Returns 0 when the wallet holds no entry for `token_id`.
    /// Exits the process if darkfid returns malformed data.
    pub async fn balance_of(&self, token_id: &str) -> Result<u64> {
        let req = JsonRequest::new("wallet.get_balances", json!([]));
        let rep = self.rpc_client.request(req).await?;

        if !rep.is_object() {
            eprintln!("Error: Invalid balance data received from darkfid RPC endpoint.");
            exit(1);
        }

        // The reply is a token_id -> balance map; scan for our token.
        for i in rep.as_object().unwrap().keys() {
            if i == token_id {
                if let Some(balance) = rep[i].as_u64() {
                    return Ok(balance)
                }
                eprintln!("Error: Invalid balance data received from darkfid RPC endpoint.");
                exit(1);
            }
        }

        Ok(0)
    }

    /// Fetch default wallet address from the darkfid RPC endpoint.
    ///
    /// Queries `wallet.get_addrs` and parses the first returned string.
    /// Exits the process on malformed replies.
    pub async fn wallet_address(&self) -> Result<Address> {
        let req = JsonRequest::new("wallet.get_addrs", json!([0_i64]));
        let rep = self.rpc_client.request(req).await?;

        if !rep.is_array() || !rep.as_array().unwrap()[0].is_string() {
            eprintln!("Error: Invalid wallet address received from darkfid RPC endpoint.");
            exit(1);
        }

        match Address::from_str(rep[0].as_str().unwrap()) {
            Ok(v) => Ok(v),
            Err(e) => {
                eprintln!(
                    "Error: Invalid wallet address received from darkfid RPC endpoint: {}",
                    e
                );
                exit(1)
            }
        }
    }

    /// Query wallet for unspent coins in wallet matching value and token_id.
    ///
    /// Each reply element is expected to be a base58-encoded serialized
    /// `OwnCoin`. Exits the process on any decode/deserialize failure.
    pub async fn get_coins_valtok(&self, value: u64, token_id: &str) -> Result<Vec<OwnCoin>> {
        let req = JsonRequest::new("wallet.get_coins_valtok", json!([value, token_id, true]));
        let rep = self.rpc_client.request(req).await?;

        if !rep.is_array() {
            eprintln!("Error: Invalid coin data received from darkfid RPC endpoint.");
            exit(1);
        }

        let rep = rep.as_array().unwrap();

        let mut ret = vec![];
        for i in rep {
            if !i.is_string() {
                eprintln!(
                    "Error: Invalid base58 data for OwnCoin received from darkfid RPC endpoint."
                );
                exit(1);
            }

            let data = match bs58::decode(i.as_str().unwrap()).into_vec() {
                Ok(v) => v,
                Err(e) => {
                    eprintln!("Error: Failed decoding base58 data for OwnCoin: {}", e);
                    exit(1);
                }
            };

            let oc = match deserialize(&data) {
                Ok(v) => v,
                Err(e) => {
                    eprintln!("Error: Failed deserializing OwnCoin: {}", e);
                    exit(1);
                }
            };

            ret.push(oc);
        }

        Ok(ret)
    }

    /// Fetch the merkle path for a given leaf position in the coin tree
    ///
    /// Each reply element must be a base58 string decoding to exactly 32
    /// canonical bytes of a `MerkleNode`. Exits the process otherwise.
    pub async fn get_merkle_path(&self, leaf_pos: usize) -> Result<Vec<MerkleNode>> {
        let req = JsonRequest::new("wallet.get_merkle_path", json!([leaf_pos as u64]));
        let rep = self.rpc_client.request(req).await?;

        if !rep.is_array() {
            eprintln!("Error: Invalid merkle path data received from darkfid RPC endpoint.");
            exit(1);
        }

        let rep = rep.as_array().unwrap();

        let mut ret = vec![];
        for i in rep {
            if !i.is_string() {
                eprintln!("Error: Invalid base58 data received for MerkleNode");
                exit(1);
            }

            let n = match bs58::decode(i.as_str().unwrap()).into_vec() {
                Ok(v) => v,
                Err(e) => {
                    eprintln!("Error: Failed decoding base58 for MerkleNode: {}", e);
                    exit(1);
                }
            };

            if n.len() != 32 {
                eprintln!("error: MerkleNode byte length is not 32");
                exit(1);
            }

            let n = MerkleNode::from_bytes(&n.try_into().unwrap());
            // from_bytes returns a CtOption-style value; 0 means the bytes
            // were not a canonical field element.
            if n.is_some().unwrap_u8() == 0 {
                eprintln!("Error: Noncanonical bytes of MerkleNode");
                exit(1);
            }

            ret.push(n.unwrap());
        }

        Ok(ret)
    }

    /// Try to decrypt a given `EncryptedNote`
    ///
    /// NOTE(review): this method uses `oneshot_request` while the other
    /// methods use `request` — presumably intentional (single-use channel);
    /// confirm before unifying. Exits the process on malformed replies.
    pub async fn decrypt_note(&self, enc_note: &EncryptedNote) -> Result<Option<Note>> {
        let encoded = bs58::encode(&serialize(enc_note)).into_string();
        let req = JsonRequest::new("wallet.decrypt_note", json!([encoded]));
        let rep = self.rpc_client.oneshot_request(req).await?;

        if !rep.is_string() {
            eprintln!("Error: decrypt_note() RPC call returned invalid data");
            exit(1);
        }

        let decoded = match bs58::decode(rep.as_str().unwrap()).into_vec() {
            Ok(v) => v,
            Err(e) => {
                eprintln!("Error decoding base58 data received from RPC call: {}", e);
                exit(1);
            }
        };

        let note = match deserialize(&decoded) {
            Ok(v) => v,
            Err(e) => {
                eprintln!("Failed deserializing bytes into Note: {}", e);
                exit(1);
            }
        };

        Ok(Some(note))
    }
}

42
bin/darkwiki/Cargo.toml Normal file
View File

@@ -0,0 +1,42 @@
[package]
name = "darkwiki"
description = ""
version = "0.3.0"
edition = "2021"
authors = ["darkfi <dev@dark.fi>"]
license = "AGPL-3.0-only"
homepage = "https://dark.fi"
repository = "https://github.com/darkrenaissance/darkfi"
keywords = []
categories = []
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
darkfi = {path = "../../", features = ["rpc"]}
# Async
smol = "1.2.5"
async-std = {version = "1.12.0", features = ["attributes"]}
async-trait = "0.1.57"
async-channel = "1.7.1"
futures = "0.3.24"
# Misc
log = "0.4.17"
simplelog = "0.12.0"
rand = "0.8.5"
url = "2.2.2"
# Encoding and parsing
serde = {version = "1.0.144", features = ["derive"]}
serde_json = "1.0.85"
structopt = "0.3.26"

3
bin/darkwiki/README.md Normal file
View File

@@ -0,0 +1,3 @@
# Darkwiki
See the [DarkFi Book](https://darkrenaissance.github.io/darkfi/misc/darkwiki.html) for the installation guide.

121
bin/darkwiki/src/main.rs Normal file
View File

@@ -0,0 +1,121 @@
use serde_json::json;
use simplelog::{ColorChoice, TermLogger, TerminalMode};
use structopt::StructOpt;
use url::Url;
use darkfi::{
rpc::{client::RpcClient, jsonrpc::JsonRequest},
util::cli::{get_log_config, get_log_level},
Result,
};
// Command-line arguments for the darkwiki CLI client.
// NOTE: field `///` doc comments double as structopt help text (runtime
// output), so they are left byte-identical.
#[derive(Clone, Debug, StructOpt)]
#[structopt(name = "darkwikiupdate")]
struct Args {
    #[structopt(subcommand)]
    sub_command: ArgsSubCommand,
    #[structopt(short, parse(from_occurrences))]
    /// Increase verbosity (-vvv supported)
    verbose: u8,
    #[structopt(short, long, default_value = "tcp://127.0.0.1:24330")]
    /// darkfid JSON-RPC endpoint
    endpoint: Url,
}
// Subcommands accepted by the darkwiki CLI; each maps to a JSON-RPC method
// on darkwikid ("update" / "log" / "restore").
// NOTE: variant `///` doc comments double as structopt help text (runtime
// output), so they are left byte-identical.
#[derive(Debug, Clone, PartialEq, StructOpt)]
enum ArgsSubCommand {
    /// Publish local patches and merging received patches
    Update {
        #[structopt(long, short)]
        /// Run without applying the changes
        dry_run: bool,
        /// Names of files to update (Note: Will update all the documents if left empty)
        values: Vec<String>,
    },
    /// Show the history of patches
    Log {
        /// Names of files to log (Note: Will show all the log if left empty)
        values: Vec<String>,
    },
    /// Undo the local changes
    Restore {
        #[structopt(long, short)]
        /// Run without applying the changes
        dry_run: bool,
        /// Names of files to restore (Note: Will restore all the documents if left empty)
        values: Vec<String>,
    },
}
/// Pretty-print patch tuples received over JSON-RPC to stdout.
///
/// Each element is expected to be a JSON array of three strings:
/// `[title, workspace, changes]`; one stanza is printed per patch.
///
/// Takes a slice rather than `&Vec` (clippy `ptr_arg`); existing callers
/// passing `&Vec<Value>` keep working through deref coercion.
fn print_patches(value: &[serde_json::Value]) {
    for res in value {
        let res = res.as_array().unwrap();
        let res: Vec<&str> = res.iter().map(|r| r.as_str().unwrap()).collect();
        let (title, workspace, changes) = (res[0], res[1], res[2]);
        println!("WORKSPACE: {} FILE: {}", workspace, title);
        println!("{}", changes);
        println!("----------------------------------");
    }
}
#[async_std::main]
async fn main() -> Result<()> {
    // Entry point of the darkwiki CLI: parse args, set up logging, forward
    // the chosen subcommand to darkwikid over JSON-RPC and print the result.
    let args = Args::from_args();

    let log_level = get_log_level(args.verbose.into());
    let log_config = get_log_config();
    TermLogger::init(log_level, log_config, TerminalMode::Mixed, ColorChoice::Auto)?;

    let rpc_client = RpcClient::new(args.endpoint).await?;

    match args.sub_command {
        ArgsSubCommand::Update { dry_run, values } => {
            let req = JsonRequest::new("update", json!([dry_run, values]));
            let result = rpc_client.request(req).await?;
            // Reply layout: [local_patches, sync_patches, merge_patches].
            let result = result.as_array().unwrap();
            let local_patches = result[0].as_array().unwrap();
            let sync_patches = result[1].as_array().unwrap();
            let merge_patches = result[2].as_array().unwrap();

            if !local_patches.is_empty() {
                println!();
                println!("PUBLISH LOCAL PATCHES:");
                println!();
                print_patches(local_patches);
            }

            if !sync_patches.is_empty() {
                println!();
                println!("RECEIVED PATCHES:");
                println!();
                print_patches(sync_patches);
            }

            if !merge_patches.is_empty() {
                println!();
                println!("MERGE:");
                println!();
                print_patches(merge_patches);
            }
        }
        ArgsSubCommand::Restore { dry_run, values } => {
            let req = JsonRequest::new("restore", json!([dry_run, values]));
            let result = rpc_client.request(req).await?;
            let result = result.as_array().unwrap();
            let patches = result[0].as_array().unwrap();

            if !patches.is_empty() {
                println!();
                println!("AFTER RESTORE:");
                println!();
                print_patches(patches);
            }
        }
        // Log is declared in the CLI but not implemented here yet.
        _ => unimplemented!(),
    }

    rpc_client.close().await
}

View File

@@ -1,3 +1,2 @@
/target
Cargo.lock
raft_db

50
bin/darkwikid/Cargo.toml Normal file
View File

@@ -0,0 +1,50 @@
[package]
name = "darkwikid"
description = ""
version = "0.3.0"
edition = "2021"
authors = ["darkfi <dev@dark.fi>"]
license = "AGPL-3.0-only"
homepage = "https://dark.fi"
repository = "https://github.com/darkrenaissance/darkfi"
keywords = []
categories = []
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
darkfi = {path = "../../", features = ["raft", "net", "rpc"]}
# Async
smol = "1.2.5"
async-std = {version = "1.12.0", features = ["attributes"]}
async-trait = "0.1.57"
async-channel = "1.7.1"
async-executor = "1.4.1"
easy-parallel = "3.2.0"
futures = "0.3.24"
# Misc
log = "0.4.17"
simplelog = "0.12.0"
rand = "0.8.5"
chrono = "0.4.22"
thiserror = "1.0.34"
ctrlc = { version = "3.2.3", features = ["termination"] }
url = "2.2.2"
fxhash = "0.2.1"
colored = "2.0.0"
# Encoding and parsing
serde = {version = "1.0.144", features = ["derive"]}
serde_json = "1.0.85"
structopt = "0.3.26"
structopt-toml = "0.5.1"
unicode-segmentation = "1.9.0"
crypto_box = {version = "0.8.1", features = ["std"]}
hex = "0.4.3"
bs58 = "0.4.0"
sha2 = "0.10.5"

3
bin/darkwikid/README.md Normal file
View File

@@ -0,0 +1,3 @@
# Darkwiki
See the [DarkFi Book](https://darkrenaissance.github.io/darkfi/misc/darkwiki.html) for the installation guide.

View File

@@ -0,0 +1,38 @@
## JSON-RPC listen URL
#rpc_listen="tcp://127.0.0.1:24330"
## Sets Docs Path
# docs="~/darkwiki"
## Sets Author name
# author="name"
## Workspaces
# workspaces = ["darkfi:86MGNN31r3VxT4ULMmhQnMtV8pDnod339KwHwHCfabG2"]
## Raft net settings
[net]
## P2P accept addresses
#inbound = ["tls://127.0.0.1:24331"]
## Connection slots
outbound_connections=5
## P2P external addresses
#external_addr = ["tls://127.0.0.1:24331"]
## Peers to connect to
#peers = ["tls://127.0.0.1:24331"]
## Seed nodes to connect to
seeds = ["tls://wiki0.dark.fi:24331", "tls://wiki1.dark.fi:24331"]
## Preferred transports for outbound connections
#transports = ["tls", "tcp"]
## These are the default configuration values for the P2P network
#manual_attempt_limit=0
#seed_query_timeout_seconds=8
#connect_timeout_seconds=10
#channel_handshake_seconds=4
#channel_heartbeat_seconds=10

View File

@@ -0,0 +1,112 @@
use async_trait::async_trait;
use log::error;
use serde_json::{json, Value};
use darkfi::{
rpc::{
jsonrpc::{ErrorCode, JsonError, JsonRequest, JsonResponse, JsonResult},
server::RequestHandler,
},
Error,
};
use crate::Patch;
/// JSON-RPC facade that bridges incoming requests to the darkwiki worker
/// task over async channels.
pub struct JsonRpcInterface {
    // Sends ("update"/"restore", dry_run, file names) triples to the worker.
    sender: async_channel::Sender<(String, bool, Vec<String>)>,
    // Receives the worker's grouped patch lists in reply.
    receiver: async_channel::Receiver<Vec<Vec<Patch>>>,
}
#[async_trait]
impl RequestHandler for JsonRpcInterface {
    /// Dispatch an incoming JSON-RPC request to the matching handler.
    ///
    /// Replies with `InvalidParams` when the params are not an array and
    /// with `MethodNotFound` for unknown or missing method names.
    async fn handle_request(&self, req: JsonRequest) -> JsonResult {
        if !req.params.is_array() {
            return JsonError::new(ErrorCode::InvalidParams, None, req.id).into()
        }

        let params = req.params.as_array().unwrap();

        // Return the match expression directly instead of binding it to a
        // temporary (clippy `let_and_return`).
        match req.method.as_str() {
            Some("update") => self.update(req.id, params).await,
            Some("restore") => self.restore(req.id, params).await,
            Some("log") => self.log(req.id, params).await,
            Some(_) | None => JsonError::new(ErrorCode::MethodNotFound, None, req.id).into(),
        }
    }
}
/// Convert a `Patch` into the `(path, workspace, rendered_text)` triple sent
/// back over JSON-RPC, optionally rendering with terminal colors.
fn patch_to_tuple(p: &Patch, colorize: bool) -> (String, String, String) {
    let rendered = if colorize { p.colorize() } else { p.to_string() };
    (p.path.clone(), p.workspace.clone(), rendered)
}
/// Map every `Patch` in the nested patch lists to its printable
/// `(path, workspace, text)` triple, preserving the outer grouping.
fn printable_patches(
    patches: Vec<Vec<Patch>>,
    colorize: bool,
) -> Vec<Vec<(String, String, String)>> {
    patches
        .into_iter()
        .map(|ps| ps.iter().map(|p| patch_to_tuple(p, colorize)).collect())
        .collect()
}
impl JsonRpcInterface {
    /// Build the interface around the worker's command/response channels.
    pub fn new(
        sender: async_channel::Sender<(String, bool, Vec<String>)>,
        receiver: async_channel::Receiver<Vec<Vec<Patch>>>,
    ) -> Self {
        Self { sender, receiver }
    }

    // RPCAPI:
    // Update files
    // --> {"jsonrpc": "2.0", "method": "update", "params": [], "id": 1}
    // <-- {"jsonrpc": "2.0", "result": [[(String, String)]], "id": 1}
    async fn update(&self, id: Value, params: &[Value]) -> JsonResult {
        // params[0] = dry-run flag, params[1] = file-name filter.
        let dry = params[0].as_bool().unwrap();
        let files: Vec<String> =
            params[1].as_array().unwrap().iter().map(|f| f.as_str().unwrap().to_string()).collect();

        let res = self.sender.send(("update".into(), dry, files)).await.map_err(Error::from);
        if let Err(e) = res {
            error!("Failed to update: {}", e);
            return JsonError::new(ErrorCode::InternalError, None, id).into()
        }

        // Worker replies with colorized patches for terminal display.
        let response = self.receiver.recv().await.unwrap();
        let response = printable_patches(response, true);
        JsonResponse::new(json!(response), id).into()
    }

    // RPCAPI:
    // Undo the local changes
    // --> {"jsonrpc": "2.0", "method": "restore", "params": [dry, files], "id": 1}
    // <-- {"jsonrpc": "2.0", "result": [String, ..], "id": 1}
    async fn restore(&self, id: Value, params: &[Value]) -> JsonResult {
        let dry = params[0].as_bool().unwrap();
        let files: Vec<String> =
            params[1].as_array().unwrap().iter().map(|f| f.as_str().unwrap().to_string()).collect();

        let res = self.sender.send(("restore".into(), dry, files)).await.map_err(Error::from);
        if let Err(e) = res {
            error!("Failed to restore: {}", e);
            return JsonError::new(ErrorCode::InternalError, None, id).into()
        }

        // Restore output is rendered without colors.
        let response = self.receiver.recv().await.unwrap();
        let response = printable_patches(response, false);
        JsonResponse::new(json!(response), id).into()
    }

    // RPCAPI:
    // Show all patches
    // --> {"jsonrpc": "2.0", "method": "log", "params": [dry, files], "id": 1}
    // <-- {"jsonrpc": "2.0", "result": [[(String, String)]], "id": 1}
    //
    // NOTE: stub — ignores its params and always replies `true`.
    async fn log(&self, id: Value, _params: &[Value]) -> JsonResult {
        JsonResponse::new(json!(true), id).into()
    }
}

102
bin/darkwikid/src/lcs.rs Normal file
View File

@@ -0,0 +1,102 @@
use crate::{patch::OpMethod, str_to_chars};
/// Longest-common-subsequence diff between two strings, compared grapheme
/// cluster by grapheme cluster.
pub struct Lcs<'a> {
    // First string, split into grapheme clusters.
    a: Vec<&'a str>,
    // Second string, split into grapheme clusters.
    b: Vec<&'a str>,
    // DP table: lengths[i][j] = LCS length of a[..i] and b[..j].
    lengths: Vec<Vec<u64>>,
}
impl<'a> Lcs<'a> {
    /// Build the LCS dynamic-programming table for strings `a` and `b`,
    /// compared grapheme cluster by grapheme cluster.
    pub fn new(a: &'a str, b: &'a str) -> Self {
        let a: Vec<_> = str_to_chars(a);
        let b: Vec<_> = str_to_chars(b);
        let (na, nb) = (a.len(), b.len());

        // lengths[i][j] holds the LCS length of a[..i] and b[..j].
        let mut lengths = vec![vec![0; nb + 1]; na + 1];

        for (i, ci) in a.iter().enumerate() {
            for (j, cj) in b.iter().enumerate() {
                lengths[i + 1][j + 1] = if ci == cj {
                    lengths[i][j] + 1
                } else {
                    lengths[i][j + 1].max(lengths[i + 1][j])
                }
            }
        }

        Self { a, b, lengths }
    }

    /// Backtrack through the DP table from position (i, j), pushing edit
    /// operations in reverse order (ops() reverses them afterwards).
    fn op(&self, ops: &mut Vec<OpMethod>, i: usize, j: usize) {
        if i == 0 && j == 0 {
            return
        }

        if i == 0 {
            ops.push(OpMethod::Insert(self.b[j - 1].to_string()));
            self.op(ops, i, j - 1);
        } else if j == 0 {
            // Idiom cleanup: the original wrote `(1) as _`; a plain literal
            // infers the same integer type from OpMethod's variant.
            ops.push(OpMethod::Delete(1));
            self.op(ops, i - 1, j);
        } else if self.a[i - 1] == self.b[j - 1] {
            ops.push(OpMethod::Retain(1));
            self.op(ops, i - 1, j - 1);
        } else if self.lengths[i - 1][j] > self.lengths[i][j - 1] {
            ops.push(OpMethod::Delete(1));
            self.op(ops, i - 1, j);
        } else {
            ops.push(OpMethod::Insert(self.b[j - 1].to_string()));
            self.op(ops, i, j - 1);
        }
    }

    /// Produce the edit operations that transform `a` into `b`, in forward
    /// order.
    pub fn ops(&self) -> Vec<OpMethod> {
        let mut ops = vec![];
        self.op(&mut ops, self.a.len(), self.b.len());
        ops.reverse();
        ops
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // Checks both directions of a diff: pure insertion at the front
    // ("hello" -> "test hello") and deletion in the middle/end
    // ("hello world" -> "hello").
    #[test]
    fn test_lcs() {
        let lcs = Lcs::new("hello", "test hello");
        assert_eq!(
            lcs.ops(),
            vec![
                OpMethod::Insert("t".into()),
                OpMethod::Insert("e".into()),
                OpMethod::Insert("s".into()),
                OpMethod::Insert("t".into()),
                OpMethod::Insert(" ".into()),
                OpMethod::Retain(1),
                OpMethod::Retain(1),
                OpMethod::Retain(1),
                OpMethod::Retain(1),
                OpMethod::Retain(1),
            ]
        );

        let lcs = Lcs::new("hello world", "hello");
        assert_eq!(
            lcs.ops(),
            vec![
                OpMethod::Retain(1),
                OpMethod::Retain(1),
                OpMethod::Retain(1),
                OpMethod::Retain(1),
                OpMethod::Delete(1),
                OpMethod::Delete(1),
                OpMethod::Delete(1),
                OpMethod::Retain(1),
                OpMethod::Delete(1),
                OpMethod::Delete(1),
                OpMethod::Delete(1),
            ]
        );
    }
}

696
bin/darkwikid/src/main.rs Normal file
View File

@@ -0,0 +1,696 @@
use async_std::sync::{Arc, Mutex};
use std::{
fs::{create_dir_all, read_dir, remove_dir_all, remove_file},
io::stdin,
path::{Path, PathBuf},
};
use async_executor::Executor;
use crypto_box::{
aead::{Aead, AeadCore},
rand_core::OsRng,
SalsaBox, SecretKey,
};
use futures::{select, FutureExt};
use fxhash::FxHashMap;
use log::{error, info, warn};
use serde::Deserialize;
use sha2::Digest;
use smol::future;
use structopt::StructOpt;
use structopt_toml::StructOptToml;
use unicode_segmentation::UnicodeSegmentation;
use url::Url;
use darkfi::{
async_daemonize,
net::{self, settings::SettingsOpt},
raft::{NetMsg, ProtocolRaft, Raft, RaftSettings},
rpc::server::listen_and_serve,
util::{
cli::{get_log_config, get_log_level, spawn_config},
expand_path,
file::{load_file, load_json_file, save_file, save_json_file},
path::get_config_path,
serial::{deserialize, serialize, SerialDecodable, SerialEncodable},
},
Error, Result,
};
mod jsonrpc;
mod lcs;
mod patch;
use jsonrpc::JsonRpcInterface;
use lcs::Lcs;
use patch::{OpMethod, Patch};
/// Bundled result of an update pass, in the order returned by
/// `Darkwiki::update`: (patches to broadcast, local patches, sync patches,
/// merge patches).
type Patches = (Vec<Patch>, Vec<Patch>, Vec<Patch>, Vec<Patch>);

// Config file name and its default contents written on first run.
pub const CONFIG_FILE: &str = "darkwiki.toml";
pub const CONFIG_FILE_CONTENTS: &str = include_str!("../darkwiki.toml");
/// darkwikid cli
// NOTE: field `///` doc comments double as structopt help text (runtime
// output), so they are left byte-identical — including the "CAREFULL"
// typo below, which is user-visible; fix deliberately if desired.
#[derive(Clone, Debug, Deserialize, StructOpt, StructOptToml)]
#[serde(default)]
#[structopt(name = "darkwikid")]
pub struct Args {
    /// Sets a custom config file
    #[structopt(long)]
    pub config: Option<String>,
    /// Sets Docs Path
    #[structopt(long, default_value = "~/darkwiki")]
    pub docs: String,
    /// Sets Author Name for Patch
    #[structopt(long, default_value = "NONE")]
    pub author: String,
    /// Secret Key To Encrypt/Decrypt Patches
    #[structopt(long)]
    pub workspaces: Vec<String>,
    /// Generate A New Secret Key
    #[structopt(long)]
    pub generate: bool,
    /// Clean all the local data in docs path
    /// (BE CAREFULL) Check the docs path in the config file before running this
    #[structopt(long)]
    pub refresh: bool,
    /// JSON-RPC Listen URL
    #[structopt(long = "rpc", default_value = "tcp://127.0.0.1:24330")]
    pub rpc_listen: Url,
    #[structopt(flatten)]
    pub net: SettingsOpt,
    /// Increase Verbosity
    #[structopt(short, parse(from_occurrences))]
    pub verbose: u8,
}
/// A `Patch` serialized and sealed with a workspace's `SalsaBox`, as sent
/// over the raft channel.
#[derive(Debug, Clone, SerialEncodable, SerialDecodable)]
pub struct EncryptedPatch {
    // Nonce generated per encryption; required to decrypt the payload.
    nonce: Vec<u8>,
    // SalsaBox ciphertext of the serialized Patch.
    payload: Vec<u8>,
}
fn get_workspaces(settings: &Args, docs_path: &Path) -> Result<FxHashMap<String, SalsaBox>> {
let mut workspaces = FxHashMap::default();
for workspace in settings.workspaces.iter() {
let workspace: Vec<&str> = workspace.split(':').collect();
let (workspace, secret) = (workspace[0], workspace[1]);
let bytes: [u8; 32] = bs58::decode(secret)
.into_vec()?
.try_into()
.map_err(|_| Error::ParseFailed("Parse secret key failed"))?;
let secret = crypto_box::SecretKey::from(bytes);
let public = secret.public_key();
let salsa_box = crypto_box::SalsaBox::new(&public, &secret);
workspaces.insert(workspace.to_string(), salsa_box);
create_dir_all(docs_path.join(workspace))?;
}
Ok(workspaces)
}
/// Serialize `patch` and seal it with the workspace `salsa_box`, producing
/// an `EncryptedPatch` with a freshly generated nonce.
fn encrypt_patch(
    patch: &Patch,
    salsa_box: &SalsaBox,
    rng: &mut crypto_box::rand_core::OsRng,
) -> Result<EncryptedPatch> {
    let nonce = SalsaBox::generate_nonce(rng);

    let payload = &serialize(patch)[..];
    let payload = salsa_box
        .encrypt(&nonce, payload)
        .map_err(|_| Error::ParseFailed("Encrypting Patch failed"))?;

    // The nonce is shipped alongside the ciphertext; it is needed to decrypt.
    let nonce = nonce.to_vec();

    Ok(EncryptedPatch { nonce, payload })
}
/// Open an `EncryptedPatch` with the workspace `salsa_box` and deserialize
/// the plaintext back into a `Patch`. Fails if the key does not match or
/// the payload is corrupt.
fn decrypt_patch(encrypt_patch: &EncryptedPatch, salsa_box: &SalsaBox) -> Result<Patch> {
    let nonce = encrypt_patch.nonce.as_slice();
    let decrypted_patch = salsa_box
        .decrypt(nonce.into(), &encrypt_patch.payload[..])
        .map_err(|_| Error::ParseFailed("Decrypting Patch failed"))?;

    let patch = deserialize(&decrypted_patch)?;
    Ok(patch)
}
/// Runtime settings shared by the darkwiki worker.
pub struct DarkWikiSettings {
    // Author name stamped into every patch this node creates.
    author: String,
    // Root directory holding the workspace document trees.
    docs_path: PathBuf,
    // State directory (docs_path/.log) with "local" and "sync" subdirs.
    datastore_path: PathBuf,
}
/// Split a string into user-perceived characters (extended grapheme
/// clusters), so diffs operate on whole glyphs rather than raw bytes.
fn str_to_chars(s: &str) -> Vec<&str> {
    s.graphemes(true).collect::<Vec<&str>>()
}
/// Derive a stable document ID from a file path and workspace name:
/// SHA-256 of path concatenated with workspace, hex-encoded, then
/// base58-encoded.
/// NOTE(review): base58-encoding the hex string (rather than the raw
/// digest) is a double encoding — presumably kept for on-disk ID
/// compatibility; confirm before changing.
fn path_to_id(path: &str, workspace: &str) -> String {
    let mut hasher = sha2::Sha256::new();
    hasher.update(&format!("{}{}", path, workspace));
    bs58::encode(hex::encode(hasher.finalize())).into_string()
}
/// Recursively collect the relative paths of all `.md` documents under
/// `path` into `files`, skipping the `.log` state directory.
/// `parent` carries the relative prefix accumulated during recursion
/// (`None` at the top level). Unreadable directory entries are skipped.
fn get_docs_paths(files: &mut Vec<PathBuf>, path: &Path, parent: Option<&Path>) -> Result<()> {
    let docs = read_dir(&path)?;
    let docs = docs.filter(|d| d.is_ok()).map(|d| d.unwrap().path()).collect::<Vec<PathBuf>>();

    for doc in docs {
        if let Some(f) = doc.file_name() {
            // Build the path relative to the docs root.
            let file_name = PathBuf::from(f);
            let file_name =
                if let Some(parent) = parent { parent.join(file_name) } else { file_name };
            if doc.is_file() {
                if let Some(ext) = doc.extension() {
                    if ext == "md" {
                        files.push(file_name);
                    }
                }
            } else if doc.is_dir() {
                // The .log directory holds patch state, not documents.
                if f == ".log" {
                    continue
                }
                get_docs_paths(files, &doc, Some(&file_name))?;
            }
        }
    }

    Ok(())
}
/// A patch is a "delete patch" when its single operation deletes exactly
/// the full length of its base document.
fn is_delete_patch(patch: &Patch) -> bool {
    // Call ops() once instead of twice (it rebuilds the op list each call).
    let ops = patch.ops().0;
    if ops.len() != 1 {
        return false
    }

    if let OpMethod::Delete(d) = ops[0] {
        if patch.base.len() as u64 == d {
            return true
        }
    }

    false
}
/// The darkwiki worker: owns the channels to the JSON-RPC interface and to
/// the raft network layer, plus the per-workspace encryption keys.
struct Darkwiki {
    settings: DarkWikiSettings,
    #[allow(clippy::type_complexity)]
    // (reply sender, command receiver) pair shared with JsonRpcInterface.
    rpc: (
        async_channel::Sender<Vec<Vec<Patch>>>,
        async_channel::Receiver<(String, bool, Vec<String>)>,
    ),
    // (broadcast sender, receive channel) for encrypted patches on raft.
    raft: (async_channel::Sender<EncryptedPatch>, async_channel::Receiver<EncryptedPatch>),
    // Workspace name -> SalsaBox used to seal/open that workspace's patches.
    workspaces: FxHashMap<String, SalsaBox>,
}
impl Darkwiki {
async fn start(&self) -> Result<()> {
let mut rng = crypto_box::rand_core::OsRng;
loop {
select! {
val = self.rpc.1.recv().fuse() => {
let (cmd, dry, files) = val?;
match cmd.as_str() {
"update" => {
self.on_receive_update(dry, files, &mut rng).await?;
},
"restore" => {
self.on_receive_restore(dry, files).await?;
},
_ => {}
}
}
patch = self.raft.1.recv().fuse() => {
for (workspace, salsa_box) in self.workspaces.iter() {
if let Ok(patch) = decrypt_patch(&patch.clone()?, &salsa_box) {
info!("[{}] Receive a {:?}", workspace, patch);
self.on_receive_patch(&patch)?;
}
}
}
}
}
}
fn on_receive_patch(&self, received_patch: &Patch) -> Result<()> {
let sync_id_path = self.settings.datastore_path.join("sync").join(&received_patch.id);
let local_id_path = self.settings.datastore_path.join("local").join(&received_patch.id);
if let Ok(mut sync_patch) = load_json_file::<Patch>(&sync_id_path) {
if sync_patch.timestamp == received_patch.timestamp {
return Ok(())
}
if let Ok(local_patch) = load_json_file::<Patch>(&local_id_path) {
if local_patch.timestamp == sync_patch.timestamp {
sync_patch.base = local_patch.to_string();
sync_patch.set_ops(received_patch.ops());
} else {
sync_patch.extend_ops(received_patch.ops());
}
} else {
sync_patch.extend_ops(received_patch.ops());
}
sync_patch.timestamp = received_patch.timestamp;
sync_patch.author = received_patch.author.clone();
save_json_file::<Patch>(&sync_id_path, &sync_patch)?;
} else if !received_patch.base.is_empty() {
save_json_file::<Patch>(&sync_id_path, &received_patch)?;
}
Ok(())
}
async fn on_receive_update(
&self,
dry: bool,
files: Vec<String>,
rng: &mut OsRng,
) -> Result<()> {
let mut local: Vec<Patch> = vec![];
let mut sync: Vec<Patch> = vec![];
let mut merge: Vec<Patch> = vec![];
for (workspace, salsa_box) in self.workspaces.iter() {
let (patches, l, s, m) = self.update(
dry,
&self.settings.docs_path.join(workspace),
files.clone(),
workspace,
)?;
local.extend(l);
sync.extend(s);
merge.extend(m);
if !dry {
for patch in patches {
info!("Send a {:?}", patch);
let encrypt_patch = encrypt_patch(&patch, &salsa_box, rng)?;
self.raft.0.send(encrypt_patch).await?;
}
}
}
self.rpc.0.send(vec![local, sync, merge]).await?;
Ok(())
}
async fn on_receive_restore(&self, dry: bool, files_name: Vec<String>) -> Result<()> {
let mut patches: Vec<Patch> = vec![];
for (workspace, _) in self.workspaces.iter() {
let ps = self.restore(
dry,
&self.settings.docs_path.join(workspace),
&files_name,
workspace,
)?;
patches.extend(ps);
}
self.rpc.0.send(vec![patches]).await?;
Ok(())
}
fn restore(
&self,
dry: bool,
docs_path: &Path,
files_name: &[String],
workspace: &str,
) -> Result<Vec<Patch>> {
let local_path = self.settings.datastore_path.join("local");
let mut patches = vec![];
let local_files = read_dir(&local_path)?;
for file in local_files {
let file_id = file?.file_name();
let file_path = local_path.join(&file_id);
let local_patch: Patch = load_json_file(&file_path)?;
if local_patch.workspace != workspace {
continue
}
if !files_name.is_empty() && !files_name.contains(&local_patch.path.to_string()) {
continue
}
if let Ok(doc) = load_file(&docs_path.join(&local_patch.path)) {
if local_patch.to_string() == doc {
continue
}
}
if !dry {
self.save_doc(&local_patch.path, &local_patch.to_string(), workspace)?;
}
patches.push(local_patch);
}
Ok(patches)
}
fn update(
&self,
dry: bool,
docs_path: &Path,
files_name: Vec<String>,
workspace: &str,
) -> Result<Patches> {
let mut patches: Vec<Patch> = vec![];
let mut local_patches: Vec<Patch> = vec![];
let mut sync_patches: Vec<Patch> = vec![];
let mut merge_patches: Vec<Patch> = vec![];
let local_path = self.settings.datastore_path.join("local");
let sync_path = self.settings.datastore_path.join("sync");
// save and compare docs in darkwiki and local dirs
// then merged with sync patches if any received
let mut docs = vec![];
get_docs_paths(&mut docs, &docs_path, None)?;
for doc in docs {
let doc_path = doc.to_str().unwrap();
if !files_name.is_empty() && !files_name.contains(&doc_path.to_string()) {
continue
}
// load doc content
let edit = load_file(&docs_path.join(doc_path))?;
if edit.is_empty() {
continue
}
let doc_id = path_to_id(doc_path, workspace);
// create new patch
let mut new_patch = Patch::new(doc_path, &doc_id, &self.settings.author, workspace);
// check for any changes found with local doc and darkwiki doc
if let Ok(local_patch) = load_json_file::<Patch>(&local_path.join(&doc_id)) {
// no changes found
if local_patch.to_string() == edit {
continue
}
// check the differences with LCS algorithm
let local_patch_str = local_patch.to_string();
let lcs = Lcs::new(&local_patch_str, &edit);
let lcs_ops = lcs.ops();
// add the change ops to the new patch
for op in lcs_ops {
new_patch.add_op(&op);
}
new_patch.base = local_patch.to_string();
local_patches.push(new_patch.clone());
let mut b_patch = new_patch.clone();
b_patch.base = "".to_string();
patches.push(b_patch);
// check if the same doc has received patch from the network
if let Ok(sync_patch) = load_json_file::<Patch>(&sync_path.join(&doc_id)) {
if !is_delete_patch(&sync_patch) {
if sync_patch.timestamp != local_patch.timestamp {
sync_patches.push(sync_patch.clone());
let sync_patch_t = new_patch.transform(&sync_patch);
new_patch = new_patch.merge(&sync_patch_t);
if !dry {
self.save_doc(doc_path, &new_patch.to_string(), workspace)?;
}
merge_patches.push(new_patch.clone());
}
} else {
merge_patches.push(sync_patch);
patches = vec![];
}
}
} else {
new_patch.base = edit.to_string();
local_patches.push(new_patch.clone());
patches.push(new_patch.clone());
};
if !dry {
save_json_file(&local_path.join(&doc_id), &new_patch)?;
save_json_file(&sync_path.join(doc_id), &new_patch)?;
}
}
// check if a new patch received
// and save the new changes in both local and darkwiki dirs
let sync_files = read_dir(&sync_path)?;
for file in sync_files {
let file_id = file?.file_name();
let file_path = sync_path.join(&file_id);
let sync_patch: Patch = load_json_file(&file_path)?;
if sync_patch.workspace != workspace {
continue
}
if is_delete_patch(&sync_patch) {
if local_path.join(&sync_patch.id).exists() {
sync_patches.push(sync_patch.clone());
}
if !dry {
remove_file(docs_path.join(&sync_patch.path)).unwrap_or(());
remove_file(local_path.join(&sync_patch.id)).unwrap_or(());
remove_file(file_path).unwrap_or(());
}
continue
}
if let Ok(local_patch) = load_json_file::<Patch>(&local_path.join(&file_id)) {
if local_patch.timestamp == sync_patch.timestamp {
continue
}
}
if !files_name.is_empty() && !files_name.contains(&sync_patch.path.to_string()) {
continue
}
if !dry {
self.save_doc(&sync_patch.path, &sync_patch.to_string(), workspace)?;
save_json_file(&local_path.join(file_id), &sync_patch)?;
}
if !sync_patches.contains(&sync_patch) {
sync_patches.push(sync_patch);
}
}
// check if any doc removed from ~/darkwiki
let local_files = read_dir(&local_path)?;
for file in local_files {
let file_id = file?.file_name();
let file_path = local_path.join(&file_id);
let local_patch: Patch = load_json_file(&file_path)?;
if local_patch.workspace != workspace {
continue
}
if !files_name.is_empty() && !files_name.contains(&local_patch.path.to_string()) {
continue
}
if !docs_path.join(&local_patch.path).exists() {
let mut new_patch = Patch::new(
&local_patch.path,
&local_patch.id,
&self.settings.author,
&local_patch.workspace,
);
new_patch.add_op(&OpMethod::Delete(local_patch.to_string().len() as u64));
patches.push(new_patch.clone());
new_patch.base = local_patch.base;
local_patches.push(new_patch);
if !dry {
remove_file(file_path).unwrap_or(());
}
}
}
Ok((patches, local_patches, sync_patches, merge_patches))
}
/// Write `edit` to the document at `path` inside the given workspace,
/// creating any missing parent directories first.
fn save_doc(&self, path: &str, edit: &str, workspace: &str) -> Result<()> {
    // Documents live under <docs_path>/<workspace>/<relative path>.
    let doc_path = self.settings.docs_path.join(workspace).join(path);
    // Make sure the directory the file goes into actually exists.
    match doc_path.parent() {
        Some(parent) if !parent.exists() && !parent.to_str().unwrap().is_empty() => {
            create_dir_all(parent)?;
        }
        _ => {}
    }
    save_file(&doc_path, edit)
}
}
async_daemonize!(realmain);
async fn realmain(settings: Args, executor: Arc<Executor<'_>>) -> Result<()> {
let docs_path = expand_path(&settings.docs)?;
let datastore_path = expand_path(docs_path.join(".log").to_str().unwrap())?;
if settings.refresh {
println!("Removing local docs in: {:?} (yes/no)? ", docs_path);
let mut confirm = String::new();
stdin().read_line(&mut confirm).expect("Failed to read line");
let confirm = confirm.to_lowercase();
let confirm = confirm.trim();
if confirm == "yes" || confirm == "y" {
remove_dir_all(docs_path).unwrap_or(());
println!("Local data removed successfully.");
} else {
error!("Unexpected Value: {}", confirm);
}
return Ok(())
}
create_dir_all(docs_path.clone())?;
create_dir_all(datastore_path.clone())?;
create_dir_all(datastore_path.join("local"))?;
create_dir_all(datastore_path.join("sync"))?;
if settings.generate {
println!("Generating a new workspace");
loop {
println!("Name for the new workspace: ");
let mut workspace = String::new();
stdin().read_line(&mut workspace).ok().expect("Failed to read line");
let workspace = workspace.to_lowercase();
let workspace = workspace.trim();
if workspace.is_empty() && workspace.len() < 3 {
error!("Wrong workspace try again");
continue
}
let mut rng = crypto_box::rand_core::OsRng;
let secret_key = SecretKey::generate(&mut rng);
let encoded = bs58::encode(secret_key.as_bytes());
create_dir_all(docs_path.join(workspace))?;
println!("workspace: {}:{}", workspace, encoded.into_string());
println!("Please add it to the config file.");
break
}
return Ok(())
}
let workspaces = get_workspaces(&settings, &docs_path)?;
if workspaces.is_empty() {
error!("Please add at least on workspace to the config file.");
println!("Run `$ darkwikid --generate` to generate new workspace.");
return Ok(())
}
let (rpc_sx, rpc_rv) = async_channel::unbounded::<(String, bool, Vec<String>)>();
let (notify_sx, notify_rv) = async_channel::unbounded::<Vec<Vec<Patch>>>();
//
// RPC
//
let rpc_interface = Arc::new(JsonRpcInterface::new(rpc_sx, notify_rv));
executor.spawn(listen_and_serve(settings.rpc_listen.clone(), rpc_interface)).detach();
//
// Raft
//
let seen_net_msgs = Arc::new(Mutex::new(FxHashMap::default()));
let datastore_raft = datastore_path.join("darkwiki.db");
let raft_settings = RaftSettings { datastore_path: datastore_raft, ..RaftSettings::default() };
let mut raft = Raft::<EncryptedPatch>::new(raft_settings, seen_net_msgs.clone())?;
//
// P2p setup
//
let mut net_settings = settings.net.clone();
net_settings.app_version = Some(option_env!("CARGO_PKG_VERSION").unwrap_or("").to_string());
let (p2p_send_channel, p2p_recv_channel) = async_channel::unbounded::<NetMsg>();
let p2p = net::P2p::new(net_settings.into()).await;
let p2p = p2p.clone();
let registry = p2p.protocol_registry();
let raft_node_id = raft.id();
registry
.register(net::SESSION_ALL, move |channel, p2p| {
let raft_node_id = raft_node_id.clone();
let sender = p2p_send_channel.clone();
let seen_net_msgs_cloned = seen_net_msgs.clone();
async move {
ProtocolRaft::init(raft_node_id, channel, sender, p2p, seen_net_msgs_cloned).await
}
})
.await;
p2p.clone().start(executor.clone()).await?;
executor.spawn(p2p.clone().run(executor.clone())).detach();
//
// Darkwiki start
//
let raft_sx = raft.sender();
let raft_rv = raft.receiver();
executor
.spawn(async move {
let darkwiki_settings =
DarkWikiSettings { author: settings.author, datastore_path, docs_path };
let darkwiki = Darkwiki {
settings: darkwiki_settings,
raft: (raft_sx, raft_rv),
rpc: (notify_sx, rpc_rv),
workspaces,
};
darkwiki.start().await.unwrap_or(());
})
.detach();
//
// Waiting Exit signal
//
let (signal, shutdown) = async_channel::bounded::<()>(1);
ctrlc::set_handler(move || {
warn!(target: "darkwiki", "Catch exit signal");
// cleaning up tasks running in the background
if let Err(e) = async_std::task::block_on(signal.send(())) {
error!("Error on sending exit signal: {}", e);
}
})
.unwrap();
raft.run(p2p.clone(), p2p_recv_channel.clone(), executor.clone(), shutdown.clone()).await?;
Ok(())
}

601
bin/darkwikid/src/patch.rs Normal file
View File

@@ -0,0 +1,601 @@
use std::{cmp::Ordering, io};
use colored::Colorize;
use serde::{Deserialize, Serialize};
use darkfi::util::{
serial::{Decodable, Encodable, SerialDecodable, SerialEncodable, VarInt},
Timestamp,
};
use crate::str_to_chars;
/// A single operational-transform edit operation over a document's
/// character sequence. A patch is an ordered list of these, applied
/// left-to-right against the base text.
#[derive(PartialEq, Eq, Serialize, Deserialize, Clone, Debug)]
pub enum OpMethod {
/// Remove the next `n` characters.
Delete(u64),
/// Insert the given text at the current position.
Insert(String),
/// Keep (copy through) the next `n` characters unchanged.
Retain(u64),
}
/// An ordered sequence of `OpMethod`s forming one edit script.
#[derive(PartialEq, Eq, Serialize, Deserialize, Clone, Debug)]
pub struct OpMethods(pub Vec<OpMethod>);
/// An edit to a single darkwiki document: a base snapshot of the text
/// plus an operation list transforming it into the edited version.
#[derive(PartialEq, Eq, SerialEncodable, SerialDecodable, Serialize, Deserialize, Clone, Debug)]
pub struct Patch {
/// Document path relative to the workspace directory.
pub path: String,
/// Author name, taken from the daemon settings.
pub author: String,
/// Unique document id.
pub id: String,
/// The text this patch's operations are applied against.
pub base: String,
/// Creation time of the patch.
pub timestamp: Timestamp,
/// Workspace the document belongs to.
pub workspace: String,
// Private: mutate through add_op/set_ops/extend_ops so consecutive
// same-variant ops stay coalesced.
ops: OpMethods,
}
impl std::string::ToString for Patch {
fn to_string(&self) -> String {
if self.ops.0.is_empty() {
return self.base.clone()
}
let mut st = vec![];
st.extend(str_to_chars(&self.base));
let st = &mut st.iter();
let mut new_st: Vec<&str> = vec![];
for op in self.ops.0.iter() {
match op {
OpMethod::Retain(n) => {
for c in st.take(*n as usize) {
new_st.push(c);
}
}
OpMethod::Delete(n) => {
for _ in 0..*n {
st.next();
}
}
OpMethod::Insert(insert) => {
let chars = str_to_chars(insert);
new_st.extend(chars);
}
}
}
new_st.join("")
}
}
impl Patch {
/// Build an empty patch (no ops, empty base) for `path` in `workspace`,
/// authored by `author` and timestamped now.
pub fn new(path: &str, id: &str, author: &str, workspace: &str) -> Self {
Self {
path: path.to_string(),
id: id.to_string(),
ops: OpMethods(vec![]),
base: String::new(),
workspace: workspace.to_string(),
author: author.to_string(),
timestamp: Timestamp::current_time(),
}
}
/// Append an operation, coalescing it with the trailing op when both are
/// the same variant; no-op edits (zero counts, empty inserts) are dropped.
pub fn add_op(&mut self, method: &OpMethod) {
match method {
OpMethod::Delete(n) => {
if *n == 0 {
return
}
// Fold consecutive deletes into one.
if let Some(OpMethod::Delete(i)) = self.ops.0.last_mut() {
*i += n;
} else {
self.ops.0.push(method.to_owned());
}
}
OpMethod::Insert(insert) => {
if insert.is_empty() {
return
}
// Concatenate consecutive inserts.
if let Some(OpMethod::Insert(s)) = self.ops.0.last_mut() {
*s += insert;
} else {
self.ops.0.push(OpMethod::Insert(insert.to_owned()));
}
}
OpMethod::Retain(n) => {
if *n == 0 {
return
}
// Fold consecutive retains into one.
if let Some(OpMethod::Retain(i)) = self.ops.0.last_mut() {
*i += n;
} else {
self.ops.0.push(method.to_owned());
}
}
}
}
// Convenience wrappers around add_op, one per variant.
fn insert(&mut self, st: &str) {
self.add_op(&OpMethod::Insert(st.into()));
}
fn retain(&mut self, n: u64) {
self.add_op(&OpMethod::Retain(n));
}
fn delete(&mut self, n: u64) {
self.add_op(&OpMethod::Delete(n));
}
/// Replace the whole operation list.
pub fn set_ops(&mut self, ops: OpMethods) {
self.ops = ops;
}
/// Append `ops` verbatim (no coalescing with the existing tail).
pub fn extend_ops(&mut self, ops: OpMethods) {
self.ops.0.extend(ops.0);
}
/// Return a copy of the operation list.
pub fn ops(&self) -> OpMethods {
self.ops.clone()
}
//
// these two functions are imported from this library
// https://github.com/spebern/operational-transform-rs
// with some major modification
//
// TODO need more work to get better performance with iterators
/// Operational-transform "transform": given two concurrent edit scripts
/// (`self` and `other`) made against the same base, produce the version
/// of `other` rewritten to apply *after* `self` (see the test cases in
/// `mod tests` for the resulting behavior).
/// NOTE(review): the result is created with an empty workspace string —
/// presumably the caller fills it in; confirm.
pub fn transform(&self, other: &Self) -> Self {
let mut new_patch = Self::new(&self.path, &self.id, &self.author, "");
new_patch.base = self.base.clone();
let mut ops1 = self.ops.0.iter().cloned();
let mut ops2 = other.ops.0.iter().cloned();
let mut op1 = ops1.next();
let mut op2 = ops2.next();
loop {
// When one side is exhausted, drain the other side unchanged.
match (&op1, &op2) {
(None, None) => break,
(None, Some(op)) => {
new_patch.add_op(op);
op2 = ops2.next();
continue
}
(Some(op), None) => {
new_patch.add_op(op);
op1 = ops1.next();
continue
}
_ => {}
}
// Both sides have a pending op: resolve one step. Where both ops
// cover the same span the longer one is split and its remainder
// carried into the next iteration.
match (op1.as_ref().unwrap(), op2.as_ref().unwrap()) {
// Text inserted by `self` must be retained by the transformed
// `other` so it is not clobbered.
(OpMethod::Insert(s), _) => {
new_patch.retain(str_to_chars(s).len() as _);
op1 = ops1.next();
}
// Insertions from `other` pass through unchanged.
(_, OpMethod::Insert(s)) => {
new_patch.insert(s);
op2 = ops2.next();
}
(OpMethod::Retain(i), OpMethod::Retain(j)) => match i.cmp(j) {
Ordering::Less => {
new_patch.retain(*i);
op2 = Some(OpMethod::Retain(j - *i));
op1 = ops1.next();
}
Ordering::Greater => {
new_patch.retain(*j);
op1 = Some(OpMethod::Retain(i - j));
op2 = ops2.next();
}
Ordering::Equal => {
new_patch.retain(*i);
op1 = ops1.next();
op2 = ops2.next();
}
},
// Both deleted the same span: the text is already gone, emit nothing.
(OpMethod::Delete(i), OpMethod::Delete(j)) => match i.cmp(j) {
Ordering::Less => {
op2 = Some(OpMethod::Delete(j - *i));
op1 = ops1.next();
}
Ordering::Greater => {
op1 = Some(OpMethod::Delete(i - j));
op2 = ops2.next();
}
Ordering::Equal => {
op1 = ops1.next();
op2 = ops2.next();
}
},
// `self` deleted text `other` merely retained: drop the retain.
(OpMethod::Delete(i), OpMethod::Retain(j)) => match i.cmp(j) {
Ordering::Less => {
op2 = Some(OpMethod::Retain(j - *i));
op1 = ops1.next();
}
Ordering::Greater => {
op1 = Some(OpMethod::Delete(i - j));
op2 = ops2.next();
}
Ordering::Equal => {
op1 = ops1.next();
op2 = ops2.next();
}
},
// `other` deleted text `self` retained: keep the delete.
(OpMethod::Retain(i), OpMethod::Delete(j)) => match i.cmp(j) {
Ordering::Less => {
new_patch.delete(*i);
op2 = Some(OpMethod::Delete(j - i));
op1 = ops1.next();
}
Ordering::Greater => {
new_patch.delete(*j);
op1 = Some(OpMethod::Retain(i - j));
op2 = ops2.next();
}
Ordering::Equal => {
new_patch.delete(*i);
op1 = ops1.next();
op2 = ops2.next();
}
},
}
}
new_patch
}
// TODO need more work to get better performance with iterators
/// Compose `self` followed by `other` into a single equivalent patch.
/// Does not mutate `self` despite the `&mut` receiver (it only clones
/// the op list).
/// NOTE(review): like `transform`, the result carries an empty workspace
/// string — confirm the caller restores it.
pub fn merge(&mut self, other: &Self) -> Self {
let ops1 = self.ops.0.clone();
let mut ops1 = ops1.iter().cloned();
let mut ops2 = other.ops.0.iter().cloned();
let mut new_patch = Self::new(&self.path, &self.id, &self.author, "");
new_patch.base = self.base.clone();
let mut op1 = ops1.next();
let mut op2 = ops2.next();
loop {
// When one side is exhausted, drain the other side unchanged.
match (&op1, &op2) {
(None, None) => break,
(None, Some(op)) => {
new_patch.add_op(op);
op2 = ops2.next();
continue
}
(Some(op), None) => {
new_patch.add_op(op);
op1 = ops1.next();
continue
}
_ => {}
}
// Resolve one composition step; longer ops are split and their
// remainder carried forward, as in `transform`.
match (op1.as_ref().unwrap(), op2.as_ref().unwrap()) {
// Deletions from the first script always survive composition.
(OpMethod::Delete(i), _) => {
new_patch.delete(*i);
op1 = ops1.next();
}
// Insertions from the second script always survive composition.
(_, OpMethod::Insert(s)) => {
new_patch.insert(s);
op2 = ops2.next();
}
(OpMethod::Retain(i), OpMethod::Retain(j)) => match i.cmp(j) {
Ordering::Less => {
new_patch.retain(*i);
op2 = Some(OpMethod::Retain(*j - i));
op1 = ops1.next();
}
Ordering::Greater => {
new_patch.retain(*j);
op1 = Some(OpMethod::Retain(i - *j));
op2 = ops2.next();
}
Ordering::Equal => {
new_patch.retain(*i);
op1 = ops1.next();
op2 = ops2.next();
}
},
// Text inserted by the first script then deleted by the second
// cancels out (counts are in characters, hence str_to_chars).
(OpMethod::Insert(s), OpMethod::Delete(j)) => {
let chars = str_to_chars(s);
let chars_len = chars.len() as u64;
match chars_len.cmp(j) {
Ordering::Less => {
op1 = ops1.next();
op2 = Some(OpMethod::Delete(j - chars_len));
}
Ordering::Greater => {
let st = chars.into_iter().skip(*j as usize).collect();
op1 = Some(OpMethod::Insert(st));
op2 = ops2.next();
}
Ordering::Equal => {
op1 = ops1.next();
op2 = ops2.next();
}
}
}
// Insert retained by the second script is kept.
(OpMethod::Insert(s), OpMethod::Retain(j)) => {
let chars = str_to_chars(s);
let chars_len = chars.len() as u64;
match chars_len.cmp(j) {
Ordering::Less => {
new_patch.insert(s);
op1 = ops1.next();
op2 = Some(OpMethod::Retain(*j - chars_len));
}
Ordering::Greater => {
let st = chars.into_iter().take(*j as usize).collect::<String>();
new_patch.insert(&st);
op1 = Some(OpMethod::Insert(st));
op2 = ops2.next();
}
Ordering::Equal => {
new_patch.insert(s);
op1 = ops1.next();
op2 = ops2.next();
}
}
}
// Base text retained by the first script then deleted by the
// second becomes a delete.
(OpMethod::Retain(i), OpMethod::Delete(j)) => match i.cmp(j) {
Ordering::Less => {
new_patch.delete(*i);
op2 = Some(OpMethod::Delete(*j - *i));
op1 = ops1.next();
}
Ordering::Greater => {
new_patch.delete(*j);
op1 = Some(OpMethod::Retain(*i - *j));
op2 = ops2.next();
}
Ordering::Equal => {
new_patch.delete(*j);
op1 = ops1.next();
op2 = ops2.next();
}
},
};
}
new_patch
}
/// Render the patch as terminal-colored text: retained text plain,
/// deletions red, insertions green. Mirrors `to_string` but keeps the
/// deleted spans visible.
pub fn colorize(&self) -> String {
if self.ops.0.is_empty() {
return format!("{}", self.base.green())
}
let mut st = vec![];
st.extend(str_to_chars(&self.base));
let st = &mut st.iter();
let mut colorized_str: Vec<String> = vec![];
for op in self.ops.0.iter() {
match op {
OpMethod::Retain(n) => {
for c in st.take(*n as usize) {
colorized_str.push(c.to_string());
}
}
OpMethod::Delete(n) => {
// Collect the deleted characters so they can be shown in red.
let mut deleted_part = vec![];
for _ in 0..*n {
let s = st.next();
if let Some(s) = s {
deleted_part.push(s.to_string());
}
}
colorized_str.push(format!("{}", deleted_part.join("").red()));
}
OpMethod::Insert(insert) => {
let chars = str_to_chars(insert);
colorized_str.push(format!("{}", chars.join("").green()));
}
}
}
colorized_str.join("")
}
}
impl Decodable for OpMethod {
    /// Decode an `OpMethod` from its wire form: a one-byte variant tag
    /// (0 = Delete, 1 = Insert, 2 = Retain) followed by the payload.
    fn decode<D: io::Read>(mut d: D) -> darkfi::Result<Self> {
        let tag: u8 = Decodable::decode(&mut d)?;
        match tag {
            0 => Ok(Self::Delete(Decodable::decode(&mut d)?)),
            1 => Ok(Self::Insert(Decodable::decode(d)?)),
            2 => Ok(Self::Retain(Decodable::decode(&mut d)?)),
            // Unknown tag: the stream is corrupt or from a newer version.
            _ => Err(darkfi::Error::ParseFailed("Parse OpMethod failed")),
        }
    }
}
impl Encodable for OpMethod {
    /// Encode as a one-byte variant tag (0 = Delete, 1 = Insert,
    /// 2 = Retain) followed by the payload; returns bytes written.
    fn encode<S: io::Write>(&self, mut s: S) -> darkfi::Result<usize> {
        match self {
            Self::Delete(i) => Ok((0_u8).encode(&mut s)? + i.encode(&mut s)?),
            Self::Insert(t) => Ok((1_u8).encode(&mut s)? + t.encode(&mut s)?),
            Self::Retain(i) => Ok((2_u8).encode(&mut s)? + i.encode(&mut s)?),
        }
    }
}
impl Encodable for OpMethods {
    /// Encode as a VarInt element count followed by each op in order;
    /// returns total bytes written.
    fn encode<S: io::Write>(&self, mut s: S) -> darkfi::Result<usize> {
        let mut total = VarInt(self.0.len() as u64).encode(&mut s)?;
        for op in &self.0 {
            total += op.encode(&mut s)?;
        }
        Ok(total)
    }
}
impl Decodable for OpMethods {
    /// Decode a VarInt element count, then that many ops.
    /// NOTE(review): `with_capacity` trusts the length prefix — a hostile
    /// stream can request a huge allocation before any element is read;
    /// consider capping (preserved as-is here).
    fn decode<D: io::Read>(mut d: D) -> darkfi::Result<Self> {
        let count = VarInt::decode(&mut d)?.0;
        let mut ops = Vec::with_capacity(count as usize);
        for _ in 0..count {
            ops.push(Decodable::decode(&mut d)?);
        }
        Ok(Self(ops))
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use darkfi::util::{
        gen_id,
        serial::{deserialize, serialize},
    };

    /// Applying retain/delete/insert ops over the base yields the edited text.
    #[test]
    fn test_to_string() {
        let mut patch = Patch::new("", &gen_id(30), "", "");
        patch.base = "text example\n hello".to_string();
        patch.retain(14);
        patch.delete(5);
        patch.insert("hey");
        assert_eq!(patch.to_string(), "text example\n hey");
    }

    /// `merge` composes two sequential edit scripts into one.
    #[test]
    fn test_merge() {
        let mut patch_init = Patch::new("", &gen_id(30), "", "");
        let base = "text example\n hello";
        patch_init.base = base.to_string();

        // Both patches rewrite the same tail; the second script wins.
        let mut patch1 = patch_init.clone();
        patch1.retain(14);
        patch1.delete(5);
        patch1.insert("hey");
        let mut patch2 = patch_init.clone();
        patch2.retain(14);
        patch2.delete(5);
        patch2.insert("test");
        // BUGFIX: a redundant discarded `patch1.merge(&patch2)` call was
        // removed here; `merge` builds a new patch without mutating `self`.
        let patch3 = patch1.merge(&patch2);
        assert_eq!(patch3.to_string(), "text example\n test");

        // Non-overlapping edits compose cleanly.
        let mut patch1 = patch_init.clone();
        patch1.retain(5);
        patch1.delete(7);
        patch1.insert("ex");
        patch1.retain(7);
        let mut patch2 = patch_init.clone();
        patch2.delete(4);
        patch2.insert("new");
        patch2.retain(13);
        let patch3 = patch1.merge(&patch2);
        assert_eq!(patch3.to_string(), "new ex\n hello");
    }

    /// `transform` rewrites a concurrent patch to apply after `self`;
    /// merging the result reproduces both edits.
    #[test]
    fn test_transform() {
        let mut patch_init = Patch::new("", &gen_id(30), "", "");
        let base = "text example\n hello";
        patch_init.base = base.to_string();

        // Conflicting edits on the same span: both insertions survive.
        let mut patch1 = patch_init.clone();
        patch1.retain(14);
        patch1.delete(5);
        patch1.insert("hey");
        let mut patch2 = patch_init.clone();
        patch2.retain(14);
        patch2.delete(5);
        patch2.insert("test");
        let patch3 = patch1.transform(&patch2);
        let patch4 = patch1.merge(&patch3);
        assert_eq!(patch4.to_string(), "text example\n heytest");

        // Disjoint concurrent edits: both applied.
        let mut patch1 = patch_init.clone();
        patch1.retain(5);
        patch1.delete(7);
        patch1.insert("ex");
        patch1.retain(7);
        let mut patch2 = patch_init.clone();
        patch2.delete(4);
        patch2.insert("new");
        patch2.retain(13);
        let patch3 = patch1.transform(&patch2);
        let patch4 = patch1.merge(&patch3);
        assert_eq!(patch4.to_string(), "new ex\n hello");
    }

    /// Multi-op concurrent edits including mid-line rewrites.
    #[test]
    fn test_transform2() {
        let mut patch_init = Patch::new("", &gen_id(30), "", "");
        let base = "#hello\n hello";
        patch_init.base = base.to_string();
        let mut patch1 = patch_init.clone();
        patch1.retain(13);
        patch1.insert(" world");
        let mut patch2 = patch_init.clone();
        patch2.retain(1);
        patch2.delete(5);
        patch2.insert("this is the title");
        patch2.retain(7);
        patch2.insert("\n this is the content");
        let patch3 = patch1.transform(&patch2);
        let patch4 = patch1.merge(&patch3);
        assert_eq!(patch4.to_string(), "#this is the title\n hello world\n this is the content");
    }

    /// Round-trip through the binary serial format preserves equality.
    #[test]
    fn test_serialize() {
        // serialize & deserialize OpMethod
        let op_method = OpMethod::Delete(3);
        let op_method_ser = serialize(&op_method);
        let op_method_deser = deserialize(&op_method_ser).unwrap();
        assert_eq!(op_method, op_method_deser);

        // serialize & deserialize Patch
        let mut patch = Patch::new("", &gen_id(30), "", "");
        patch.insert("hello");
        patch.delete(2);
        let patch_ser = serialize(&patch);
        let patch_deser = deserialize(&patch_ser).unwrap();
        assert_eq!(patch, patch_deser);
    }
}

View File

@@ -1,7 +1,12 @@
[package]
name = "dnetview"
description = "P2P network monitoring TUI utility"
version = "0.3.0"
edition = "2021"
authors = ["darkfi <dev@dark.fi>"]
license = "AGPL-3.0-only"
homepage = "https://dark.fi"
repository = "https://github.com/darkrenaissance/darkfi"
[dependencies.darkfi]
path = "../../"
@@ -10,25 +15,25 @@ features = ["rpc"]
[dependencies]
# Tui
termion = "1.5.6"
tui = {version = "0.18.0", features = ["termion"]}
tui = {version = "0.19.0", features = ["termion"]}
# Async
smol = "1.2.5"
async-std = {version = "1.11.0", features = ["attributes"]}
async-std = {version = "1.12.0", features = ["attributes"]}
easy-parallel = "3.2.0"
async-channel = "1.6.1"
async-channel = "1.7.1"
# Misc
clap = "3.1.18"
clap = {version = "3.2.20", features = ["derive"]}
rand = "0.8.5"
simplelog = "0.12.0"
log = "0.4.17"
num_cpus = "1.13.1"
url = "2.2.2"
fxhash = "0.2.1"
thiserror = "1.0.31"
thiserror = "1.0.34"
# Encoding and parsing
serde_json = "1.0.81"
serde = {version = "1.0.137", features = ["derive"]}
serde_json = "1.0.85"
serde = {version = "1.0.144", features = ["derive"]}

View File

@@ -6,12 +6,15 @@
[[nodes]]
name = "Node 1"
rpc_url = "tcp://127.0.0.1:8000"
node_type = "NORMAL"
[[nodes]]
name = "Node 2"
rpc_url = "tcp://127.0.0.1:7777"
node_type = "NORMAL"
[[nodes]]
name = "Node 3"
rpc_url = "tcp://127.0.0.1:1234"
node_type = "NORMAL"

View File

@@ -1,14 +1,22 @@
use serde::{Deserialize, Serialize};
pub const CONFIG_FILE: &str = "dnetview_config.toml";
pub const CONFIG_FILE_CONTENTS: &[u8] = include_bytes!("../dnetview_config.toml");
#[derive(Clone, Serialize, Deserialize, Debug)]
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct DnvConfig {
pub nodes: Vec<IrcNode>,
pub nodes: Vec<Node>,
}
#[derive(Clone, Serialize, Deserialize, Debug)]
pub struct IrcNode {
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Node {
pub name: String,
pub rpc_url: String,
pub node_type: NodeType,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum NodeType {
LILITH,
NORMAL,
}

View File

@@ -54,26 +54,3 @@ impl From<url::ParseError> for DnetViewError {
Self::UrlParse(err.to_string())
}
}
//pub fn to_json_result(res: DnetViewResult<Value>, id: Value) -> JsonResult {
// match res {
// Ok(v) => JsonResult::Resp(jsonresp(v, id)),
// Err(err) => match err {
// DnetViewError::InvalidId => JsonResult::Err(jsonerr(
// ErrorCode::InvalidParams,
// Some("invalid task's id".into()),
// id,
// )),
// DnetViewError::InvalidData(e) | DnetViewError::SerdeJsonError(e) => {
// JsonResult::Err(jsonerr(ErrorCode::InvalidParams, Some(e), id))
// }
// DnetViewError::InvalidDueTime => JsonResult::Err(jsonerr(
// ErrorCode::InvalidParams,
// Some("invalid due time".into()),
// id,
// )),
// DnetViewError::Darkfi(e) => {
// JsonResult::Err(jsonerr(ErrorCode::InternalError, Some(e.to_string()), id))
// }
// },
// }
//}

View File

@@ -1,9 +0,0 @@
pub mod config;
pub mod error;
pub mod model;
pub mod options;
pub mod util;
pub mod view;
pub use config::{DnvConfig, CONFIG_FILE_CONTENTS};
pub use options::ProgramOptions;

View File

@@ -1,10 +1,14 @@
use async_std::sync::{Arc, Mutex};
use std::{collections::hash_map::Entry, fs::File, io, io::Read, path::PathBuf};
use async_std::sync::Arc;
use std::{fs::File, io, io::Read};
use clap::Parser;
use darkfi::util::{
cli::{get_log_config, get_log_level, spawn_config, Config},
expand_path,
path::get_config_path,
};
use easy_parallel::Parallel;
use fxhash::{FxHashMap, FxHashSet};
use log::{error, info};
use serde_json::{json, Value};
use log::info;
use simplelog::*;
use smol::Executor;
use termion::{async_stdin, event::Key, input::TermRead, raw::IntoRawMode};
@@ -12,71 +16,114 @@ use tui::{
backend::{Backend, TermionBackend},
Terminal,
};
use url::Url;
pub mod config;
pub mod error;
pub mod model;
pub mod options;
pub mod parser;
pub mod rpc;
pub mod util;
pub mod view;
use darkfi::{
error::Result,
rpc::{client::RpcClient, jsonrpc::JsonRequest},
util::{
async_util,
cli::{get_log_config, get_log_level, spawn_config, Config},
join_config_path, NanoTimestamp,
},
};
use dnetview::{
config::{DnvConfig, CONFIG_FILE_CONTENTS},
use crate::{
config::{DnvConfig, CONFIG_FILE, CONFIG_FILE_CONTENTS},
error::{DnetViewError, DnetViewResult},
model::{ConnectInfo, Model, NodeInfo, SelectableObject, Session, SessionInfo},
options::ProgramOptions,
util::{is_empty_session, make_connect_id, make_empty_id, make_node_id, make_session_id},
view::{IdListView, NodeInfoView, View},
model::Model,
options::Args,
parser::DataParser,
view::View,
};
struct DnetView {
name: String,
rpc_client: RpcClient,
model: Arc<Model>,
view: View,
}
impl DnetView {
async fn new(url: Url, name: String) -> Result<Self> {
let rpc_client = RpcClient::new(url).await?;
Ok(Self { name, rpc_client })
fn new(model: Arc<Model>, view: View) -> Self {
Self { model, view }
}
// --> {"jsonrpc": "2.0", "method": "ping", "params": [], "id": 42}
// <-- {"jsonrpc": "2.0", "result": "pong", "id": 42}
async fn _ping(&self) -> Result<Value> {
let req = JsonRequest::new("ping", json!([]));
self.rpc_client.request(req).await
}
async fn render_view<B: Backend>(&mut self, terminal: &mut Terminal<B>) -> DnetViewResult<()> {
let mut asi = async_stdin();
//--> {"jsonrpc": "2.0", "method": "poll", "params": [], "id": 42}
// <-- {"jsonrpc": "2.0", "result": {"nodeID": [], "nodeinfo" [], "id": 42}
async fn get_info(&self) -> DnetViewResult<Value> {
let req = JsonRequest::new("get_info", json!([]));
match self.rpc_client.request(req).await {
Ok(req) => Ok(req),
Err(e) => Err(DnetViewError::Darkfi(e)),
terminal.clear()?;
self.view.id_menu.state.select(Some(0));
self.view.msg_list.state.select(Some(0));
loop {
self.view.update(
self.model.msg_map.lock().await.clone(),
self.model.selectables.lock().await.clone(),
//self.model.selectables2.lock().await.clone(),
);
//debug!(target: "dnetview::render_view()", "ID MENU: {:?}", self.view.id_menu.ids);
//debug!(target: "dnetview::render_view()", "SELECTABLES ID LIST: {:?}", self.model.selectables.lock().await.keys());
let mut err: Option<DnetViewError> = None;
terminal.draw(|f| match self.view.render(f) {
Ok(()) => {}
Err(e) => {
err = Some(e);
}
})?;
match err {
Some(e) => return Err(e),
None => {}
}
self.view.msg_list.scroll()?;
for k in asi.by_ref().keys() {
match k.unwrap() {
Key::Char('q') => {
terminal.clear()?;
return Ok(())
}
Key::Char('j') => {
self.view.id_menu.next();
}
Key::Char('k') => {
self.view.id_menu.previous();
}
Key::Char('u') => {
// TODO
//view.msg_list.next();
}
Key::Char('d') => {
// TODO
//view.msg_list.previous();
}
_ => (),
}
}
util::sleep(100).await;
}
}
}
#[async_std::main]
async fn main() -> DnetViewResult<()> {
let options = ProgramOptions::load()?;
//debug!(target: "dnetview", "main() START");
let args = Args::parse();
let verbosity_level = options.app.occurrences_of("verbose");
let log_level = get_log_level(verbosity_level);
let log_level = get_log_level(args.verbose.into());
let log_config = get_log_config();
let file = File::create(&*options.log_path).unwrap();
let log_file_path = expand_path(&args.log_path)?;
if let Some(parent) = log_file_path.parent() {
std::fs::create_dir_all(parent)?;
};
let file = File::create(log_file_path)?;
WriteLogger::init(log_level, log_config, file)?;
info!("Log level: {}", log_level);
let config_path = join_config_path(&PathBuf::from("dnetview_config.toml"))?;
let config_path = get_config_path(args.config, CONFIG_FILE)?;
spawn_config(&config_path, CONFIG_FILE_CONTENTS)?;
let config = Config::<DnvConfig>::load(config_path)?;
@@ -87,24 +134,24 @@ async fn main() -> DnetViewResult<()> {
terminal.clear()?;
let ids = Mutex::new(FxHashSet::default());
let nodes = Mutex::new(FxHashMap::default());
let selectables = Mutex::new(FxHashMap::default());
let msg_log = Mutex::new(FxHashMap::default());
let model = Arc::new(Model::new(ids, nodes, selectables, msg_log));
let nthreads = num_cpus::get();
let (signal, shutdown) = async_channel::unbounded::<()>();
let model = Model::new();
let view = View::new();
let ex = Arc::new(Executor::new());
let ex2 = ex.clone();
let mut dnetview = DnetView::new(model.clone(), view);
let parser = DataParser::new(model, config);
let nthreads = num_cpus::get();
let (signal, shutdown) = async_channel::unbounded::<()>();
let (_, result) = Parallel::new()
.each(0..nthreads, |_| smol::future::block_on(ex.run(shutdown.recv())))
.finish(|| {
smol::future::block_on(async move {
poll_and_update_model(&config, ex2.clone(), model.clone()).await?;
render_view(&mut terminal, model.clone()).await?;
parser.start_connect_slots(ex2).await?;
dnetview.render_view(&mut terminal).await?;
drop(signal);
Ok(())
})
@@ -112,445 +159,3 @@ async fn main() -> DnetViewResult<()> {
result
}
// create a new RPC instance for every node in the config file
// spawn poll() and detach in the background
/// Connect to each node listed in the config and spawn one detached
/// polling task per node; a node that fails to connect is logged and
/// skipped rather than aborting the others.
async fn poll_and_update_model(
config: &DnvConfig,
ex: Arc<Executor<'_>>,
model: Arc<Model>,
) -> DnetViewResult<()> {
for node in &config.nodes {
info!("Attempting to poll {}, RPC URL: {}", node.name, node.rpc_url);
match DnetView::new(Url::parse(&node.rpc_url)?, node.name.clone()).await {
Ok(client) => ex.spawn(poll(client, model.clone())).detach(),
// Connection failure is non-fatal: report and continue.
Err(e) => error!("{}", e),
}
}
Ok(())
}
/// Poll a single node's `get_info` RPC forever (every 2 seconds),
/// feeding each reply into the shared model; on RPC error the node is
/// recorded as offline and polling continues.
async fn poll(client: DnetView, model: Arc<Model>) -> DnetViewResult<()> {
loop {
match client.get_info().await {
Ok(reply) => {
// Only a non-empty JSON object is a usable reply.
if reply.as_object().is_some() && !reply.as_object().unwrap().is_empty() {
parse_data(reply.as_object().unwrap(), &client, model.clone()).await?;
} else {
return Err(DnetViewError::EmptyRpcReply)
}
}
Err(e) => {
error!("{:?}", e);
// Mark the node offline but keep retrying.
parse_offline(&client, model.clone()).await?;
}
}
async_util::sleep(2).await;
}
}
/// Record a node as offline in the model: builds a placeholder "Offline"
/// session containing one empty connection with "Null" fields so the UI
/// still lists the node.
async fn parse_offline(client: &DnetView, model: Arc<Model>) -> DnetViewResult<()> {
let name = "Offline".to_string();
let session_type = Session::Offline;
let node_name = &client.name;
let node_id = make_node_id(node_name)?;
let session_id = make_session_id(&node_id, &session_type)?;
let mut connects: Vec<ConnectInfo> = Vec::new();
let mut sessions: Vec<SessionInfo> = Vec::new();
// initialize with empty values
let id = make_empty_id(&node_id, &session_type, 0)?;
let addr = "Null".to_string();
let state = "Null".to_string();
let parent = node_id.clone();
let msg_log = Vec::new();
let is_empty = true;
let last_msg = "Null".to_string();
let last_status = "Null".to_string();
let connect_info =
ConnectInfo::new(id, addr, state, parent.clone(), msg_log, is_empty, last_msg, last_status);
connects.push(connect_info.clone());
let accept_addr = None;
let session_info =
SessionInfo::new(session_id, name, is_empty, parent.clone(), connects, accept_addr);
sessions.push(session_info);
// Last flag marks the node itself as offline.
let node = NodeInfo::new(node_id.clone(), node_name.to_string(), sessions.clone(), None, true);
update_node(model.clone(), node.clone(), node_id.clone()).await;
update_selectable_and_ids(model.clone(), sessions, node.clone()).await?;
Ok(())
}
/// Turn a node's raw `get_info` JSON reply into model entries: parses the
/// inbound and outbound session objects (manual sessions are currently
/// disabled) and updates nodes, selectables, ids and message logs.
async fn parse_data(
reply: &serde_json::Map<String, Value>,
client: &DnetView,
model: Arc<Model>,
) -> DnetViewResult<()> {
// NOTE(review): indexing with reply["session_*"] panics if the key is
// missing — assumes the node always sends these fields; confirm.
let addr = &reply.get("external_addr");
let inbound = &reply["session_inbound"];
let _manual = &reply["session_manual"];
let outbound = &reply["session_outbound"];
let mut sessions: Vec<SessionInfo> = Vec::new();
let node_name = &client.name;
let node_id = make_node_id(node_name)?;
//let external_addr = ext_addr.unwrap().as_str().unwrap();
let ext_addr = parse_external_addr(addr).await?;
let in_session = parse_inbound(inbound, &node_id).await?;
let out_session = parse_outbound(outbound, &node_id).await?;
//let man_session = parse_manual(manual, &node_id).await?;
sessions.push(in_session.clone());
sessions.push(out_session.clone());
//sessions.push(man_session.clone());
// Final `false` marks the node as online.
let node =
NodeInfo::new(node_id.clone(), node_name.to_string(), sessions.clone(), ext_addr, false);
update_node(model.clone(), node.clone(), node_id.clone()).await;
update_selectable_and_ids(model.clone(), sessions.clone(), node.clone()).await?;
update_msgs(model.clone(), sessions.clone()).await?;
//debug!("IDS: {:?}", model.ids.lock().await);
//debug!("INFOS: {:?}", model.nodes.lock().await);
Ok(())
}
/// Append each connection's message log to the model's per-connection
/// log map, creating the entry on first sight.
async fn update_msgs(model: Arc<Model>, sessions: Vec<SessionInfo>) -> DnetViewResult<()> {
for session in sessions {
for connection in session.children {
// NOTE(review): contains_key and entry take the lock separately,
// so another task could insert in between; the Entry match below
// handles both outcomes, but the first branch is then redundant.
if !model.msg_log.lock().await.contains_key(&connection.id) {
model.msg_log.lock().await.insert(connection.id, connection.msg_log);
} else {
match model.msg_log.lock().await.entry(connection.id) {
Entry::Vacant(e) => {
e.insert(connection.msg_log);
}
Entry::Occupied(mut e) => {
// Existing entry: append the new messages.
for msg in connection.msg_log {
e.get_mut().push(msg);
}
}
}
}
}
}
Ok(())
}
/// Register a selectable object's id in the model's id set.
async fn update_ids(model: Arc<Model>, id: String) {
model.ids.lock().await.insert(id);
}
/// Insert or replace a node's info in the model, keyed by node id.
async fn update_node(model: Arc<Model>, node: NodeInfo, id: String) {
model.nodes.lock().await.insert(id, node);
}
/// Register the node, its sessions and their connections as selectable
/// UI objects, and record each of their ids, so the TUI list can cursor
/// over every level of the hierarchy.
async fn update_selectable_and_ids(
model: Arc<Model>,
sessions: Vec<SessionInfo>,
node: NodeInfo,
) -> DnetViewResult<()> {
let node_obj = SelectableObject::Node(node.clone());
model.selectables.lock().await.insert(node.id.clone(), node_obj);
update_ids(model.clone(), node.id.clone()).await;
for session in sessions {
let session_obj = SelectableObject::Session(session.clone());
model.selectables.lock().await.insert(session.clone().id, session_obj);
update_ids(model.clone(), session.clone().id).await;
for connect in session.children {
let connect_obj = SelectableObject::Connect(connect.clone());
model.selectables.lock().await.insert(connect.clone().id, connect_obj);
update_ids(model.clone(), connect.clone().id).await;
}
}
Ok(())
}
/// Extract the node's external address from the optional JSON value.
/// A missing field is an error; a present but non-string value yields
/// `None`.
async fn parse_external_addr(addr: &Option<&Value>) -> DnetViewResult<Option<String>> {
    match addr {
        None => Err(DnetViewError::NoExternalAddr),
        Some(value) => Ok(value.as_str().map(|s| s.to_string())),
    }
}
/// Build the "Inbound" `SessionInfo` for a node from the `session_inbound`
/// JSON object. An empty `connected` map produces one placeholder
/// connection with "Null" fields; otherwise each connected channel becomes
/// a `ConnectInfo` with its id, message log and status.
/// NOTE(review): relies heavily on `unwrap()` over the expected JSON shape
/// ({addr: [{accept_addr}, {random_id, log, last_msg, last_status}]});
/// a malformed reply panics — confirm the daemon guarantees this schema.
async fn parse_inbound(inbound: &Value, node_id: &String) -> DnetViewResult<SessionInfo> {
let name = "Inbound".to_string();
let session_type = Session::Inbound;
let parent = node_id.to_string();
let id = make_session_id(&parent, &session_type)?;
let mut connects: Vec<ConnectInfo> = Vec::new();
let connections = &inbound["connected"];
let mut connect_count = 0;
let mut accept_vec = Vec::new();
match connections.as_object() {
Some(connect) => {
match connect.is_empty() {
true => {
connect_count += 1;
// channel is empty. initialize with empty values
let id = make_empty_id(node_id, &session_type, connect_count)?;
let addr = "Null".to_string();
let state = "Null".to_string();
let parent = parent.clone();
let msg_log = Vec::new();
let is_empty = true;
let last_msg = "Null".to_string();
let last_status = "Null".to_string();
let connect_info = ConnectInfo::new(
id,
addr,
state,
parent,
msg_log,
is_empty,
last_msg,
last_status,
);
connects.push(connect_info);
}
false => {
// channel is not empty. initialize with whole values
for k in connect.keys() {
let node = connect.get(k);
// The map key is the peer address.
let addr = k.to_string();
let info = node.unwrap().as_array();
// get the accept address
let accept_addr = info.unwrap().get(0);
let acc_addr = accept_addr
.unwrap()
.get("accept_addr")
.unwrap()
.as_str()
.unwrap()
.to_string();
accept_vec.push(acc_addr);
// Second array element holds the channel details.
let info2 = info.unwrap().get(1);
let id = info2.unwrap().get("random_id").unwrap().as_u64().unwrap();
let id = make_connect_id(&id)?;
let state = "state".to_string();
let parent = parent.clone();
let msg_values = info2.unwrap().get("log").unwrap().as_array().unwrap();
let mut msg_log: Vec<(NanoTimestamp, String, String)> = Vec::new();
for msg in msg_values {
let msg: (NanoTimestamp, String, String) =
serde_json::from_value(msg.clone())?;
msg_log.push(msg);
}
let is_empty = false;
let last_msg =
info2.unwrap().get("last_msg").unwrap().as_str().unwrap().to_string();
let last_status = info2
.unwrap()
.get("last_status")
.unwrap()
.as_str()
.unwrap()
.to_string();
let connect_info = ConnectInfo::new(
id,
addr,
state,
parent,
msg_log,
is_empty,
last_msg,
last_status,
);
connects.push(connect_info.clone());
}
}
}
let is_empty = is_empty_session(&connects);
// TODO: clean this up
// Only the first accept address is surfaced for the session.
if accept_vec.is_empty() {
let accept_addr = None;
let session_info =
SessionInfo::new(id, name, is_empty, parent, connects, accept_addr);
Ok(session_info)
} else {
let accept_addr = Some(accept_vec[0].clone());
let session_info =
SessionInfo::new(id, name, is_empty, parent, connects, accept_addr);
Ok(session_info)
}
}
// "connected" was not a JSON object at all.
None => Err(DnetViewError::ValueIsNotObject),
}
}
// TODO: placeholder for now
/// Build a placeholder "Manual" session containing one empty connection
/// slot. `_manual` is currently unused; every field is the "Null" sentinel.
async fn _parse_manual(_manual: &Value, node_id: &String) -> DnetViewResult<SessionInfo> {
    let session_type = Session::Manual;
    let node_parent = node_id.to_string();
    let session_id = make_session_id(&node_parent, &session_type)?;
    //let id: u64 = 0;
    let connect_id = make_empty_id(node_id, &session_type, 0)?;
    //let connect_id = make_connect_id(&id)?;

    // Single dummy connection carrying "Null" sentinel values.
    let dummy = ConnectInfo::new(
        connect_id.clone(),
        "Null".to_string(),
        "Null".to_string(),
        node_parent,
        Vec::new(),
        true,
        "Null".to_string(),
        "Null".to_string(),
    );
    let connects = vec![dummy];

    // Note: the session's parent is set to the connection id, mirroring the
    // original placeholder behavior.
    Ok(SessionInfo::new(
        session_id,
        "Manual".to_string(),
        is_empty_session(&connects),
        connect_id,
        connects.clone(),
        None,
    ))
}
/// Parse the "outbound" session object of a node's RPC reply into a
/// `SessionInfo`.
///
/// `outbound["slots"]` must be a JSON array, otherwise
/// `DnetViewError::ValueIsNotObject` is returned. A slot with a null
/// `channel` becomes a placeholder `ConnectInfo` (keeping the slot's real
/// `state`); a slot with a live channel is fully populated from the channel
/// fields.
///
/// NOTE(review): the as_str()/as_u64() unwraps assume `state`, `addr`,
/// `random_id`, `log`, `last_msg` and `last_status` are always present with
/// those types — a malformed reply panics instead of erroring. Confirm
/// against the RPC schema.
async fn parse_outbound(outbound: &Value, node_id: &String) -> DnetViewResult<SessionInfo> {
let name = "Outbound".to_string();
let session_type = Session::Outbound;
let parent = node_id.to_string();
// Deterministic session id derived from the parent node id + session type.
let id = make_session_id(&parent, &session_type)?;
let mut connects: Vec<ConnectInfo> = Vec::new();
let slots = &outbound["slots"];
// Slot counter feeds make_empty_id() so placeholder ids stay unique.
let mut slot_count = 0;
match slots.as_array() {
Some(slots) => {
for slot in slots {
slot_count += 1;
match slot["channel"].is_null() {
true => {
// channel is empty. initialize with empty values
let id = make_empty_id(node_id, &session_type, slot_count)?;
let addr = "Null".to_string();
let state = &slot["state"];
let state = state.as_str().unwrap().to_string();
let parent = parent.clone();
let msg_log = Vec::new();
let is_empty = true;
let last_msg = "Null".to_string();
let last_status = "Null".to_string();
let connect_info = ConnectInfo::new(
id,
addr,
state,
parent,
msg_log,
is_empty,
last_msg,
last_status,
);
connects.push(connect_info.clone());
}
false => {
// channel is not empty. initialize with whole values
let channel = &slot["channel"];
let id = channel["random_id"].as_u64().unwrap();
let id = make_connect_id(&id)?;
let addr = &slot["addr"];
let addr = addr.as_str().unwrap().to_string();
let state = &slot["state"];
let state = state.as_str().unwrap().to_string();
let parent = parent.clone();
// Deserialize each log entry as (timestamp, event, payload).
let msg_values = channel["log"].as_array().unwrap();
let mut msg_log: Vec<(NanoTimestamp, String, String)> = Vec::new();
for msg in msg_values {
let msg: (NanoTimestamp, String, String) =
serde_json::from_value(msg.clone())?;
msg_log.push(msg);
}
let is_empty = false;
let last_msg = channel["last_msg"].as_str().unwrap().to_string();
let last_status = channel["last_status"].as_str().unwrap().to_string();
let connect_info = ConnectInfo::new(
id,
addr,
state,
parent,
msg_log,
is_empty,
last_msg,
last_status,
);
connects.push(connect_info.clone());
}
}
}
// A session is "empty" when every slot in it is a placeholder.
let is_empty = is_empty_session(&connects);
// Outbound sessions never accept connections.
let accept_addr = None;
let session_info = SessionInfo::new(id, name, is_empty, parent, connects, accept_addr);
Ok(session_info)
}
None => Err(DnetViewError::ValueIsNotObject),
}
}
/// Main TUI event/render loop.
///
/// Clears the terminal, builds an empty `View`, then repeatedly: copies the
/// shared `Model` state into the view, redraws, and processes pending
/// keystrokes. Key bindings: `q` clears the screen and exits, `j` selects
/// the next id, `k` the previous. Returns the first render error, if any.
async fn render_view<B: Backend>(
terminal: &mut Terminal<B>,
model: Arc<Model>,
) -> DnetViewResult<()> {
// Non-blocking stdin reader so the loop never stalls waiting for input.
let mut asi = async_stdin();
terminal.clear()?;
let nodes = NodeInfoView::new(FxHashMap::default());
let msg_log = FxHashMap::default();
let active_ids = IdListView::new(FxHashSet::default());
let selectables = FxHashMap::default();
let mut view = View::new(nodes, msg_log, active_ids, selectables);
// Start with the first list entry highlighted.
view.active_ids.state.select(Some(0));
loop {
// Snapshot the shared model (cloned under each mutex) into the view.
view.update(
model.nodes.lock().await.clone(),
model.msg_log.lock().await.clone(),
model.selectables.lock().await.clone(),
);
// draw() takes an infallible closure, so render errors are smuggled
// out through this Option and re-raised after the draw call.
let mut err: Option<DnetViewError> = None;
terminal.draw(|f| match view.render(f) {
Ok(()) => {}
Err(e) => {
err = Some(e);
}
})?;
match err {
Some(e) => return Err(e),
None => {}
}
// Drain all keys currently buffered on stdin.
for k in asi.by_ref().keys() {
match k.unwrap() {
Key::Char('q') => {
terminal.clear()?;
return Ok(())
}
Key::Char('j') => {
view.active_ids.next();
}
Key::Char('k') => {
view.active_ids.previous();
}
_ => (),
}
}
}
}

View File

@@ -1,13 +1,14 @@
use async_std::sync::Mutex;
use async_std::sync::{Arc, Mutex};
use fxhash::{FxHashMap, FxHashSet};
use fxhash::FxHashMap;
use serde::{Deserialize, Serialize};
use darkfi::util::NanoTimestamp;
type MsgLogMutex = Mutex<FxHashMap<String, Vec<(NanoTimestamp, String, String)>>>;
type MsgLog = Vec<(NanoTimestamp, String, String)>;
type MsgMap = Mutex<FxHashMap<String, MsgLog>>;
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, Eq)]
pub enum Session {
Inbound,
Outbound,
@@ -15,35 +16,36 @@ pub enum Session {
Offline,
}
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, Eq)]
pub enum SelectableObject {
Node(NodeInfo),
Lilith(LilithInfo),
Network(NetworkInfo),
Session(SessionInfo),
Connect(ConnectInfo),
}
#[derive(Debug)]
pub struct Model {
pub ids: Mutex<FxHashSet<String>>,
pub nodes: Mutex<FxHashMap<String, NodeInfo>>,
pub msg_log: MsgLogMutex,
pub msg_map: MsgMap,
pub msg_log: Mutex<MsgLog>,
pub selectables: Mutex<FxHashMap<String, SelectableObject>>,
}
impl Model {
pub fn new(
ids: Mutex<FxHashSet<String>>,
nodes: Mutex<FxHashMap<String, NodeInfo>>,
msg_log: MsgLogMutex,
selectables: Mutex<FxHashMap<String, SelectableObject>>,
) -> Model {
Model { ids, nodes, msg_log, selectables }
pub fn new() -> Arc<Self> {
let selectables = Mutex::new(FxHashMap::default());
let msg_map = Mutex::new(FxHashMap::default());
let msg_log = Mutex::new(Vec::new());
Arc::new(Model { msg_map, msg_log, selectables })
}
}
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, Eq)]
pub struct NodeInfo {
pub id: String,
pub name: String,
pub state: String,
pub children: Vec<SessionInfo>,
pub external_addr: Option<String>,
pub is_offline: bool,
@@ -53,23 +55,24 @@ impl NodeInfo {
pub fn new(
id: String,
name: String,
state: String,
children: Vec<SessionInfo>,
external_addr: Option<String>,
is_offline: bool,
) -> NodeInfo {
NodeInfo { id, name, children, external_addr, is_offline }
) -> Self {
Self { id, name, state, children, external_addr, is_offline }
}
}
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, Eq)]
pub struct SessionInfo {
// TODO: make all values optional to handle empty sessions
pub id: String,
pub name: String,
pub parent: String,
pub is_empty: bool,
pub children: Vec<ConnectInfo>,
pub accept_addr: Option<String>,
pub hosts: Option<Vec<String>>,
}
impl SessionInfo {
@@ -80,14 +83,14 @@ impl SessionInfo {
parent: String,
children: Vec<ConnectInfo>,
accept_addr: Option<String>,
) -> SessionInfo {
SessionInfo { id, name, is_empty, parent, children, accept_addr }
hosts: Option<Vec<String>>,
) -> Self {
Self { id, name, is_empty, parent, children, accept_addr, hosts }
}
}
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, Eq)]
pub struct ConnectInfo {
// TODO: make all values optional to handle empty connections
pub id: String,
pub addr: String,
pub state: String,
@@ -96,6 +99,7 @@ pub struct ConnectInfo {
pub is_empty: bool,
pub last_msg: String,
pub last_status: String,
pub remote_node_id: String,
}
impl ConnectInfo {
@@ -109,7 +113,36 @@ impl ConnectInfo {
is_empty: bool,
last_msg: String,
last_status: String,
) -> ConnectInfo {
ConnectInfo { id, addr, state, parent, msg_log, is_empty, last_msg, last_status }
remote_node_id: String,
) -> Self {
Self { id, addr, state, parent, msg_log, is_empty, last_msg, last_status, remote_node_id }
}
}
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, Eq)]
pub struct LilithInfo {
pub id: String,
pub name: String,
pub urls: Vec<String>,
pub networks: Vec<NetworkInfo>,
}
impl LilithInfo {
pub fn new(id: String, name: String, urls: Vec<String>, networks: Vec<NetworkInfo>) -> Self {
Self { id, name, urls, networks }
}
}
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, Eq)]
pub struct NetworkInfo {
pub id: String,
pub name: String,
pub urls: Vec<String>,
pub nodes: Vec<String>,
}
impl NetworkInfo {
pub fn new(id: String, name: String, urls: Vec<String>, nodes: Vec<String>) -> Self {
Self { id, name, urls, nodes }
}
}

View File

@@ -1,43 +1,17 @@
use clap::{Arg, ArgMatches, Command};
use darkfi::cli_desc;
use darkfi::Result;
#[derive(clap::Parser)]
#[clap(name = "dnetview", about = cli_desc!(), version)]
pub struct Args {
#[clap(short, parse(from_occurrences))]
/// Increase verbosity (-vvv supported)
pub verbose: u8,
pub struct ProgramOptions {
pub log_path: Box<std::path::PathBuf>,
pub app: ArgMatches,
}
impl ProgramOptions {
pub fn load() -> Result<ProgramOptions> {
let app = Command::new("dnetview")
.version("0.1.0")
.author("lunar_mining")
.about("dnetview")
.arg(
Arg::new("LOG_PATH")
.long("log")
.value_name("LOG_PATH")
.help("Logfile path")
.takes_value(true),
)
.arg(
Arg::new("verbose")
.short('v')
.long("verbose")
.multiple_occurrences(true)
.help("Sets the level of verbosity"),
)
.get_matches();
let log_path = Box::new(
if let Some(log_path) = app.value_of("LOG_PATH") {
std::path::Path::new(log_path)
} else {
std::path::Path::new("/tmp/dnetview.log")
}
.to_path_buf(),
);
Ok(ProgramOptions { log_path, app })
}
/// Logfile path
#[clap(default_value = "~/.local/darkfi/dnetview.log")]
pub log_path: String,
/// Sets a custom config file
#[clap(short, long)]
pub config: Option<String>,
}

583
bin/dnetview/src/parser.rs Normal file
View File

@@ -0,0 +1,583 @@
use async_std::sync::Arc;
use std::collections::hash_map::Entry;
use log::{debug, error, info};
use serde_json::Value;
use smol::Executor;
use url::Url;
use darkfi::util::NanoTimestamp;
use crate::{
config::{DnvConfig, Node, NodeType},
error::{DnetViewError, DnetViewResult},
model::{
ConnectInfo, LilithInfo, Model, NetworkInfo, NodeInfo, SelectableObject, Session,
SessionInfo,
},
rpc::RpcConnect,
util::{is_empty_session, make_connect_id, make_empty_id, make_node_id, make_session_id},
};
/// Polls configured nodes over JSON-RPC and feeds the parsed results into
/// the shared `Model`.
pub struct DataParser {
// Shared application state written to by the parse_* methods.
model: Arc<Model>,
// Static list of nodes (and their types) to poll.
config: DnvConfig,
}
impl DataParser {
/// Wrap the shared model and node configuration in a reference-counted
/// parser, ready to be spawned onto an executor.
pub fn new(model: Arc<Model>, config: DnvConfig) -> Arc<Self> {
Arc::new(Self { model, config })
}
/// Spawn one detached `try_connect` task per configured node.
///
/// Returns immediately after spawning; each task loops forever on its own.
pub async fn start_connect_slots(self: Arc<Self>, ex: Arc<Executor<'_>>) -> DnetViewResult<()> {
debug!(target: "dnetview", "start_connect_slots() START");
for node in &self.config.nodes {
debug!(target: "dnetview", "attempting to spawn...");
// detach(): the task keeps running after the handle is dropped.
ex.clone().spawn(self.clone().try_connect(node.clone())).detach();
}
Ok(())
}
/// Endless connect-and-poll loop for a single node.
///
/// On any connection or poll failure the error is logged, the node is
/// marked offline in the model, and the loop retries after 2 seconds.
/// Only a malformed RPC URL or a failure inside `parse_offline` can
/// terminate the loop (via `?`).
async fn try_connect(self: Arc<Self>, node: Node) -> DnetViewResult<()> {
debug!(target: "dnetview", "try_connect() START");
loop {
info!("Attempting to poll {}, RPC URL: {}", node.name, node.rpc_url);
// Parse node config and execute poll.
// On any failure, sleep and retry.
match RpcConnect::new(Url::parse(&node.rpc_url)?, node.name.clone()).await {
Ok(client) => {
// poll() only returns on error; log it and fall through to retry.
if let Err(e) = self.poll(&node, client).await {
error!("Poll execution error: {:?}", e);
}
}
Err(e) => {
error!("RPC client creation error: {:?}", e);
}
}
// Reaching here means the node is unreachable: record it as offline.
self.parse_offline(node.name.clone()).await?;
crate::util::sleep(2000).await;
}
}
/// Poll one connected node forever at a 2-second interval.
///
/// Each iteration pings the node, fetches info via the method matching the
/// node type (`spawns` for LILITH, `get_info` for NORMAL), and dispatches
/// the reply to the matching parse_* method. Any ping, RPC, empty-reply or
/// parse error aborts the loop and is returned to `try_connect`.
async fn poll(&self, node: &Node, client: RpcConnect) -> DnetViewResult<()> {
loop {
// Ping the node to verify if its online.
if let Err(e) = client.ping().await {
return Err(DnetViewError::Darkfi(e))
}
// Retrieve node info, based on its type
let response = match &node.node_type {
NodeType::LILITH => client.lilith_spawns().await,
NodeType::NORMAL => client.get_info().await,
};
// Parse response
match response {
Ok(reply) => {
// A non-object or empty reply is treated as a hard error.
if reply.as_object().is_none() || reply.as_object().unwrap().is_empty() {
return Err(DnetViewError::EmptyRpcReply)
}
match &node.node_type {
NodeType::LILITH => {
self.parse_lilith_data(
reply.as_object().unwrap().clone(),
node.name.clone(),
)
.await?
}
NodeType::NORMAL => {
self.parse_data(reply.as_object().unwrap(), node.name.clone()).await?
}
};
}
Err(e) => return Err(e),
}
// Sleep until next poll
crate::util::sleep(2000).await;
}
}
/// Record an unreachable node in the model as an offline `NodeInfo`.
///
/// Builds a synthetic "Offline" session containing a single placeholder
/// connection (all fields the "Null" sentinel) and registers the node and
/// session via `update_selectables`.
async fn parse_offline(&self, node_name: String) -> DnetViewResult<()> {
let name = "Offline".to_string();
let session_type = Session::Offline;
// Ids are derived deterministically from the node name.
let node_id = make_node_id(&node_name)?;
let session_id = make_session_id(&node_id, &session_type)?;
let mut connects: Vec<ConnectInfo> = Vec::new();
let mut sessions: Vec<SessionInfo> = Vec::new();
// initialize with empty values
let id = make_empty_id(&node_id, &session_type, 0)?;
let addr = "Null".to_string();
let state = "Null".to_string();
let parent = node_id.clone();
let msg_log = Vec::new();
let is_empty = true;
let last_msg = "Null".to_string();
let last_status = "Null".to_string();
let remote_node_id = "Null".to_string();
let connect_info = ConnectInfo::new(
id,
addr,
state.clone(),
parent.clone(),
msg_log,
is_empty,
last_msg,
last_status,
remote_node_id,
);
connects.push(connect_info.clone());
let accept_addr = None;
let session_info =
SessionInfo::new(session_id, name, is_empty, parent, connects, accept_addr, None);
sessions.push(session_info);
// is_offline = true marks the node so the view can render it greyed out.
let node = NodeInfo::new(node_id, node_name, state, sessions.clone(), None, true);
self.update_selectables(sessions, node).await?;
Ok(())
}
/// Parse a NORMAL node's `get_info` reply and store the result.
///
/// Extracts the external address plus the inbound and outbound sessions
/// (manual sessions are currently disabled), assembles a `NodeInfo`, then
/// updates both the selectable-object map and the per-connection message
/// map in the model.
///
/// NOTE(review): `state.as_str().unwrap()` assumes the reply always carries
/// a string `state` field — confirm against the RPC schema.
async fn parse_data(
&self,
reply: &serde_json::Map<String, Value>,
node_name: String,
) -> DnetViewResult<()> {
let addr = &reply.get("external_addr");
let inbound = &reply["session_inbound"];
let _manual = &reply["session_manual"];
let outbound = &reply["session_outbound"];
let state = &reply["state"];
let mut sessions: Vec<SessionInfo> = Vec::new();
let node_id = make_node_id(&node_name)?;
let ext_addr = self.parse_external_addr(addr).await?;
let in_session = self.parse_inbound(inbound, &node_id).await?;
let out_session = self.parse_outbound(outbound, &node_id).await?;
//let man_session = self.parse_manual(manual, &node_id).await?;
sessions.push(in_session.clone());
sessions.push(out_session.clone());
//sessions.push(man_session.clone());
let node = NodeInfo::new(
node_id,
node_name,
state.as_str().unwrap().to_string(),
sessions.clone(),
ext_addr,
false,
);
self.update_selectables(sessions.clone(), node).await?;
self.update_msgs(sessions).await?;
//debug!("IDS: {:?}", self.model.ids.lock().await);
//debug!("INFOS: {:?}", self.model.nodes.lock().await);
Ok(())
}
/// Parse a LILITH node's `spawns` reply and register the lilith plus each
/// of its spawned networks as selectable objects in the model.
///
/// NOTE(review): the unwraps assume the reply always carries `urls` and
/// `spawns`, and each spawn carries `name`, `urls` and `hosts` of the
/// expected JSON types — a malformed reply panics. Confirm against the
/// lilith RPC schema.
async fn parse_lilith_data(
&self,
reply: serde_json::Map<String, Value>,
name: String,
) -> DnetViewResult<()> {
let urls: Vec<String> = serde_json::from_value(reply.get("urls").unwrap().clone()).unwrap();
let spawns: Vec<serde_json::Map<String, Value>> =
serde_json::from_value(reply.get("spawns").unwrap().clone()).unwrap();
let mut networks = vec![];
for spawn in spawns {
// Each spawn entry describes one network: its name, URLs and hosts.
let name = spawn.get("name").unwrap().as_str().unwrap().to_string();
let id = make_node_id(&name)?;
let urls: Vec<String> =
serde_json::from_value(spawn.get("urls").unwrap().clone()).unwrap();
let nodes: Vec<String> =
serde_json::from_value(spawn.get("hosts").unwrap().clone()).unwrap();
let network = NetworkInfo::new(id, name, urls, nodes);
networks.push(network);
}
let id = make_node_id(&name)?;
let lilith = LilithInfo::new(id.clone(), name, urls, networks);
// Insert the lilith itself, then each network, keyed by derived id.
let lilith_obj = SelectableObject::Lilith(lilith.clone());
self.model.selectables.lock().await.insert(id, lilith_obj);
for network in lilith.networks {
let network_obj = SelectableObject::Network(network.clone());
self.model.selectables.lock().await.insert(network.id, network_obj);
}
Ok(())
}
/// Merge each connection's message log into `model.msg_map`, keyed by
/// connection id: a new id seeds the map, a known id appends its messages.
///
/// The old implementation checked `contains_key` and inserted under two
/// separate `lock().await` calls, releasing the mutex between the check and
/// the insert (a check-then-act race) and doing a redundant second lookup.
/// A single `entry()` call under one lock does both atomically.
async fn update_msgs(&self, sessions: Vec<SessionInfo>) -> DnetViewResult<()> {
for session in sessions {
for connection in session.children {
// One lock acquisition per connection; Entry decides insert-vs-append.
match self.model.msg_map.lock().await.entry(connection.id) {
Entry::Vacant(e) => {
// New connection id: seed its log.
e.insert(connection.msg_log);
}
Entry::Occupied(mut e) => {
// Known connection id: append the new messages.
e.get_mut().extend(connection.msg_log);
}
}
}
}
Ok(())
}
/// Register a node and (when it is online) its non-empty sessions and their
/// connections in `model.selectables`, keyed by id.
///
/// The old implementation duplicated the node-insert statement verbatim in
/// both arms of the `is_offline` branch; the insert is now unconditional
/// and only the session/connection walk is gated on the node being online.
async fn update_selectables(
&self,
sessions: Vec<SessionInfo>,
node: NodeInfo,
) -> DnetViewResult<()> {
// The node itself is always selectable, online or offline.
let node_obj = SelectableObject::Node(node.clone());
self.model.selectables.lock().await.insert(node.id.clone(), node_obj);
if !node.is_offline {
// Only real (non-placeholder) sessions and their connections are
// exposed for selection.
for session in sessions {
if !session.is_empty {
let session_obj = SelectableObject::Session(session.clone());
self.model.selectables.lock().await.insert(session.id.clone(), session_obj);
for connect in session.children {
let connect_obj = SelectableObject::Connect(connect.clone());
self.model.selectables.lock().await.insert(connect.id.clone(), connect_obj);
}
}
}
}
Ok(())
}
/// Extract the node's external address as an owned `String`.
///
/// Returns `Ok(Some(_))` when the JSON value is a string, `Ok(None)` when
/// the value is present but not a string, and `Err(NoExternalAddr)` when
/// the key was absent from the RPC reply entirely.
async fn parse_external_addr(&self, addr: &Option<&Value>) -> DnetViewResult<Option<String>> {
    // Missing key is an error; a present-but-non-string value is merely None.
    let value = match addr {
        Some(v) => v,
        None => return Err(DnetViewError::NoExternalAddr),
    };
    Ok(value.as_str().map(|s| s.to_string()))
}
async fn parse_inbound(
&self,
inbound: &Value,
node_id: &String,
) -> DnetViewResult<SessionInfo> {
let name = "Inbound".to_string();
let session_type = Session::Inbound;
let parent = node_id.to_string();
let id = make_session_id(&parent, &session_type)?;
let mut connects: Vec<ConnectInfo> = Vec::new();
let connections = &inbound["connected"];
let mut connect_count = 0;
let mut accept_vec = Vec::new();
match connections.as_object() {
Some(connect) => {
match connect.is_empty() {
true => {
connect_count += 1;
// channel is empty. initialize with empty values
let id = make_empty_id(node_id, &session_type, connect_count)?;
let addr = "Null".to_string();
let state = "Null".to_string();
let parent = parent.clone();
let msg_log = Vec::new();
let is_empty = true;
let last_msg = "Null".to_string();
let last_status = "Null".to_string();
let remote_node_id = "Null".to_string();
let connect_info = ConnectInfo::new(
id,
addr,
state,
parent,
msg_log,
is_empty,
last_msg,
last_status,
remote_node_id,
);
connects.push(connect_info);
}
false => {
// channel is not empty. initialize with whole values
for k in connect.keys() {
let node = connect.get(k);
let addr = k.to_string();
let info = node.unwrap().as_array();
// get the accept address
let accept_addr = info.unwrap().get(0);
let acc_addr = accept_addr
.unwrap()
.get("accept_addr")
.unwrap()
.as_str()
.unwrap()
.to_string();
accept_vec.push(acc_addr);
let info2 = info.unwrap().get(1);
let id = info2.unwrap().get("random_id").unwrap().as_u64().unwrap();
let id = make_connect_id(&id)?;
let state = "state".to_string();
let parent = parent.clone();
let msg_values = info2.unwrap().get("log").unwrap().as_array().unwrap();
let mut msg_log: Vec<(NanoTimestamp, String, String)> = Vec::new();
for msg in msg_values {
let msg: (NanoTimestamp, String, String) =
serde_json::from_value(msg.clone())?;
msg_log.push(msg);
}
let is_empty = false;
let last_msg = info2
.unwrap()
.get("last_msg")
.unwrap()
.as_str()
.unwrap()
.to_string();
let last_status = info2
.unwrap()
.get("last_status")
.unwrap()
.as_str()
.unwrap()
.to_string();
let remote_node_id = info2
.unwrap()
.get("remote_node_id")
.unwrap()
.as_str()
.unwrap()
.to_string();
let r_node_id: String = match remote_node_id.is_empty() {
true => "no remote id".to_string(),
false => remote_node_id,
};
let connect_info = ConnectInfo::new(
id,
addr,
state,
parent,
msg_log,
is_empty,
last_msg,
last_status,
r_node_id,
);
connects.push(connect_info.clone());
}
}
}
let is_empty = is_empty_session(&connects);
// TODO: clean this up
if accept_vec.is_empty() {
let accept_addr = None;
let session_info =
SessionInfo::new(id, name, is_empty, parent, connects, accept_addr, None);
Ok(session_info)
} else {
let accept_addr = Some(accept_vec[0].clone());
let session_info =
SessionInfo::new(id, name, is_empty, parent, connects, accept_addr, None);
Ok(session_info)
}
}
None => Err(DnetViewError::ValueIsNotObject),
}
}
// TODO: placeholder for now
async fn _parse_manual(
&self,
_manual: &Value,
node_id: &String,
) -> DnetViewResult<SessionInfo> {
let name = "Manual".to_string();
let session_type = Session::Manual;
let mut connects: Vec<ConnectInfo> = Vec::new();
let parent = node_id.to_string();
let session_id = make_session_id(&parent, &session_type)?;
//let id: u64 = 0;
let connect_id = make_empty_id(node_id, &session_type, 0)?;
//let connect_id = make_connect_id(&id)?;
let addr = "Null".to_string();
let state = "Null".to_string();
let msg_log = Vec::new();
let is_empty = true;
let msg = "Null".to_string();
let status = "Null".to_string();
let remote_node_id = "Null".to_string();
let connect_info = ConnectInfo::new(
connect_id.clone(),
addr,
state,
parent,
msg_log,
is_empty,
msg,
status,
remote_node_id,
);
connects.push(connect_info);
let parent = connect_id;
let is_empty = is_empty_session(&connects);
let accept_addr = None;
let session_info = SessionInfo::new(
session_id,
name,
is_empty,
parent,
connects.clone(),
accept_addr,
None,
);
Ok(session_info)
}
async fn parse_outbound(
&self,
outbound: &Value,
node_id: &String,
) -> DnetViewResult<SessionInfo> {
let name = "Outbound".to_string();
let session_type = Session::Outbound;
let parent = node_id.to_string();
let id = make_session_id(&parent, &session_type)?;
let mut connects: Vec<ConnectInfo> = Vec::new();
let slots = &outbound["slots"];
let mut slot_count = 0;
let hosts = &outbound["hosts"];
match slots.as_array() {
Some(slots) => {
for slot in slots {
slot_count += 1;
match slot["channel"].is_null() {
true => {
// TODO: this is not actually empty
let id = make_empty_id(node_id, &session_type, slot_count)?;
let addr = "Null".to_string();
let state = &slot["state"];
let state = state.as_str().unwrap().to_string();
let parent = parent.clone();
let msg_log = Vec::new();
let is_empty = false;
let last_msg = "Null".to_string();
let last_status = "Null".to_string();
let remote_node_id = "Null".to_string();
let connect_info = ConnectInfo::new(
id,
addr,
state,
parent,
msg_log,
is_empty,
last_msg,
last_status,
remote_node_id,
);
connects.push(connect_info.clone());
}
false => {
// channel is not empty. initialize with whole values
let channel = &slot["channel"];
let id = channel["random_id"].as_u64().unwrap();
let id = make_connect_id(&id)?;
let addr = &slot["addr"];
let addr = addr.as_str().unwrap().to_string();
let state = &slot["state"];
let state = state.as_str().unwrap().to_string();
let parent = parent.clone();
let msg_values = channel["log"].as_array().unwrap();
let mut msg_log: Vec<(NanoTimestamp, String, String)> = Vec::new();
for msg in msg_values {
let msg: (NanoTimestamp, String, String) =
serde_json::from_value(msg.clone())?;
msg_log.push(msg);
}
let is_empty = false;
let last_msg = channel["last_msg"].as_str().unwrap().to_string();
let last_status = channel["last_status"].as_str().unwrap().to_string();
let remote_node_id =
channel["remote_node_id"].as_str().unwrap().to_string();
let r_node_id: String = match remote_node_id.is_empty() {
true => "no remote id".to_string(),
false => remote_node_id,
};
let connect_info = ConnectInfo::new(
id,
addr,
state,
parent,
msg_log,
is_empty,
last_msg,
last_status,
r_node_id,
);
connects.push(connect_info.clone());
}
}
}
let is_empty = is_empty_session(&connects);
let accept_addr = None;
match hosts.as_array() {
Some(hosts) => {
let hosts: Vec<String> =
hosts.iter().map(|addr| addr.as_str().unwrap().to_string()).collect();
let session_info = SessionInfo::new(
id,
name,
is_empty,
parent,
connects,
accept_addr,
Some(hosts),
);
Ok(session_info)
}
None => Err(DnetViewError::ValueIsNotObject),
}
}
None => Err(DnetViewError::ValueIsNotObject),
}
}
}

49
bin/dnetview/src/rpc.rs Normal file
View File

@@ -0,0 +1,49 @@
use darkfi::{
error::Result,
rpc::{client::RpcClient, jsonrpc::JsonRequest},
};
use serde_json::{json, Value};
use url::Url;
use crate::error::{DnetViewError, DnetViewResult};
/// A named JSON-RPC connection to a single dnet node.
pub struct RpcConnect {
// Human-readable node name, used for logging/identification.
pub name: String,
// Underlying darkfi JSON-RPC client.
pub rpc_client: RpcClient,
}
impl RpcConnect {
/// Open a JSON-RPC client to `url` and tag it with `name`.
pub async fn new(url: Url, name: String) -> Result<Self> {
let rpc_client = RpcClient::new(url).await?;
Ok(Self { name, rpc_client })
}
/// Liveness check: send `ping`, expecting `"pong"` back.
// --> {"jsonrpc": "2.0", "method": "ping", "params": [], "id": 42}
// <-- {"jsonrpc": "2.0", "result": "pong", "id": 42}
pub async fn ping(&self) -> Result<Value> {
let req = JsonRequest::new("ping", json!([]));
self.rpc_client.request(req).await
}
/// Fetch a NORMAL node's info, wrapping RPC failures in
/// `DnetViewError::Darkfi`.
// --> {"jsonrpc": "2.0", "method": "poll", "params": [], "id": 42}
// <-- {"jsonrpc": "2.0", "result": {"nodeID": [], "nodeinfo" [], "id": 42}
pub async fn get_info(&self) -> DnetViewResult<Value> {
let req = JsonRequest::new("get_info", json!([]));
match self.rpc_client.request(req).await {
Ok(req) => Ok(req),
Err(e) => Err(DnetViewError::Darkfi(e)),
}
}
// Returns all lilith node spawned networks names with their node addresses.
// --> {"jsonrpc": "2.0", "method": "spawns", "params": [], "id": 42}
// <-- {"jsonrpc": "2.0", "result": "{spawns}", "id": 42}
pub async fn lilith_spawns(&self) -> DnetViewResult<Value> {
let req = JsonRequest::new("spawns", json!([]));
match self.rpc_client.request(req).await {
Ok(req) => Ok(req),
Err(e) => Err(DnetViewError::Darkfi(e)),
}
}
}

View File

@@ -1,13 +1,17 @@
use crate::model::{ConnectInfo, Session};
use darkfi::{util::serial, Result};
use smol::Timer;
use std::time::Duration;
/// Sleep for any number of milliseconds.
pub async fn sleep(millis: u64) {
Timer::after(Duration::from_millis(millis)).await;
}
pub fn make_node_id(node_name: &String) -> Result<String> {
//match serial::serialize_hex(node_name).as_str() {
// e => {
// debug!("NODE ID {} IS DERIVED FROM NODE NAME {} ", e, node_name);
// }
//}
Ok(serial::serialize_hex(node_name))
let mut id = serial::serialize_hex(node_name);
id.insert_str(0, "NODE");
Ok(id)
}
pub fn make_session_id(node_id: &str, session: &Session) -> Result<String> {
@@ -28,21 +32,15 @@ pub fn make_session_id(node_id: &str, session: &Session) -> Result<String> {
num += i as u64
}
//match serial::serialize_hex(&num).as_str() {
// e => {
// debug!("SESSION ID {} IS DERIVED FROM NODE ID {} AND SESSION {:?}", e, node_id, session);
// }
//}
Ok(serial::serialize_hex(&num))
let mut id = serial::serialize_hex(&num);
id.insert_str(0, "SESSION");
Ok(id)
}
pub fn make_connect_id(id: &u64) -> Result<String> {
//match serial::serialize_hex(id).as_str() {
// e => {
// debug!("CONNECT ID {} IS DERIVED FROM ID {}", e, id);
// }
//}
Ok(serial::serialize_hex(id))
let mut id = serial::serialize_hex(id);
id.insert_str(0, "CONNECT");
Ok(id)
}
pub fn make_empty_id(node_id: &str, session: &Session, count: u64) -> Result<String> {
@@ -50,29 +48,62 @@ pub fn make_empty_id(node_id: &str, session: &Session, count: u64) -> Result<Str
let mut num = 0_u64;
let session_chars = match session {
Session::Inbound => vec!['i', 'n'],
Session::Outbound => vec!['o', 'u', 't'],
Session::Manual => vec!['m', 'a', 'n'],
Session::Offline => vec!['o', 'f', 'f'],
let id = match session {
Session::Inbound => {
let session_chars = vec!['i', 'n'];
for i in session_chars {
num += i as u64
}
for i in node_id.chars() {
num += i as u64
}
num += count;
let mut id = serial::serialize_hex(&num);
id.insert_str(0, "EMPTYIN");
id
}
Session::Outbound => {
let session_chars = vec!['o', 'u', 't'];
for i in session_chars {
num += i as u64
}
for i in node_id.chars() {
num += i as u64
}
num += count;
let mut id = serial::serialize_hex(&num);
id.insert_str(0, "EMPTYOUT");
id
}
Session::Manual => {
let session_chars = vec!['m', 'a', 'n'];
for i in session_chars {
num += i as u64
}
for i in node_id.chars() {
num += i as u64
}
num += count;
let mut id = serial::serialize_hex(&num);
id.insert_str(0, "EMPTYMAN");
id
}
Session::Offline => {
let session_chars = vec!['o', 'f', 'f'];
for i in session_chars {
num += i as u64
}
for i in node_id.chars() {
num += i as u64
}
num += count;
let mut id = serial::serialize_hex(&num);
id.insert_str(0, "EMPTYOFF");
id
}
};
for i in session_chars {
num += i as u64
}
for i in node_id.chars() {
num += i as u64
}
num += count;
//match serial::serialize_hex(&num).as_str() {
// e => {
// debug!("EMPTY ID {} IS DERIVED FROM NODE ID {} AND SESSION {:?} AND COUNT {}", e, node_id, session, count);
// }
//}
Ok(serial::serialize_hex(&num))
Ok(id)
}
pub fn is_empty_session(connects: &[ConnectInfo]) -> bool {

View File

@@ -1,13 +1,11 @@
//use darkfi::error::{Error, Result};
use fxhash::{FxHashMap, FxHashSet};
use tui::widgets::ListState;
use fxhash::FxHashMap;
use tui::{
backend::Backend,
layout::{Constraint, Direction, Layout, Rect},
style::{Color, Modifier, Style},
text::{Span, Spans},
widgets::{Block, Borders, List, ListItem, Paragraph},
widgets::{Block, Borders, List, ListItem, ListState, Paragraph},
Frame,
};
@@ -18,39 +16,49 @@ use crate::{
model::{NodeInfo, SelectableObject},
};
#[derive(Debug)]
//use log::debug;
type MsgLog = Vec<(NanoTimestamp, String, String)>;
type MsgMap = FxHashMap<String, MsgLog>;
#[derive(Debug, Clone)]
pub struct View {
pub nodes: NodeInfoView,
pub msg_log: FxHashMap<String, Vec<(NanoTimestamp, String, String)>>,
pub active_ids: IdListView,
pub id_menu: IdMenu,
pub msg_list: MsgList,
pub selectables: FxHashMap<String, SelectableObject>,
pub ordered_list: Vec<String>,
}
impl View {
pub fn new(
nodes: NodeInfoView,
msg_log: FxHashMap<String, Vec<(NanoTimestamp, String, String)>>,
active_ids: IdListView,
selectables: FxHashMap<String, SelectableObject>,
) -> View {
View { nodes, msg_log, active_ids, selectables }
impl Default for View {
fn default() -> Self {
Self::new()
}
}
impl<'a> View {
pub fn new() -> Self {
let msg_map = FxHashMap::default();
let msg_list = MsgList::new(msg_map, 0);
let selectables = FxHashMap::default();
let id_menu = IdMenu::new(Vec::new());
let ordered_list = Vec::new();
Self { id_menu, msg_list, selectables, ordered_list }
}
pub fn update(
&mut self,
nodes: FxHashMap<String, NodeInfo>,
msg_log: FxHashMap<String, Vec<(NanoTimestamp, String, String)>>,
selectables: FxHashMap<String, SelectableObject>,
) {
self.update_nodes(nodes);
self.update_selectable(selectables);
self.update_active_ids();
self.update_msg_log(msg_log);
pub fn update(&mut self, msg_map: MsgMap, selectables: FxHashMap<String, SelectableObject>) {
self.update_selectable(selectables.clone());
self.update_msg_list(msg_map);
self.update_id_menu(selectables);
self.update_msg_index();
self.make_ordered_list();
}
fn update_nodes(&mut self, nodes: FxHashMap<String, NodeInfo>) {
for (id, node) in nodes {
self.nodes.infos.insert(id, node);
fn update_id_menu(&mut self, selectables: FxHashMap<String, SelectableObject>) {
for id in selectables.keys() {
if !self.id_menu.ids.iter().any(|i| i == id) {
self.id_menu.ids.push(id.to_string());
}
}
}
@@ -60,24 +68,65 @@ impl View {
}
}
fn update_active_ids(&mut self) {
self.active_ids.ids.clear();
for info in self.nodes.infos.values() {
self.active_ids.ids.insert(info.id.to_string());
for session in &info.children {
if !session.is_empty {
self.active_ids.ids.insert(session.id.to_string());
for connect in &session.children {
self.active_ids.ids.insert(connect.id.to_string());
fn make_ordered_list(&mut self) {
for obj in self.selectables.values() {
match obj {
SelectableObject::Node(node) => {
if !self.ordered_list.iter().any(|i| i == &node.id) {
self.ordered_list.push(node.id.clone());
}
if !node.is_offline {
for session in &node.children {
if !session.is_empty {
if !self.ordered_list.iter().any(|i| i == &session.id) {
self.ordered_list.push(session.id.clone());
}
for connection in &session.children {
if !self.ordered_list.iter().any(|i| i == &connection.id) {
self.ordered_list.push(connection.id.clone());
}
}
}
}
}
}
SelectableObject::Lilith(lilith) => {
if !self.ordered_list.iter().any(|i| i == &lilith.id) {
self.ordered_list.push(lilith.id.clone());
}
for network in &lilith.networks {
if !self.ordered_list.iter().any(|i| i == &network.id) {
self.ordered_list.push(network.id.clone());
}
}
}
_ => (),
}
}
//debug!(target: "dnetview", "render_ids()::ordered_list: {:?}", self.ordered_list);
}
// TODO: this function is dynamically resizing the msgs index
// according to what set of msgs is selected.
// it's ugly. would prefer something more simple
fn update_msg_index(&mut self) {
match self.id_menu.state.selected() {
Some(i) => match self.ordered_list.get(i) {
Some(i) => match self.msg_list.msg_map.get(i) {
Some(i) => {
self.msg_list.index = i.len();
}
None => {}
},
None => {}
},
None => {}
}
}
fn update_msg_log(&mut self, msg_log: FxHashMap<String, Vec<(NanoTimestamp, String, String)>>) {
for (id, msg) in msg_log {
self.msg_log.insert(id, msg);
fn update_msg_list(&mut self, msg_map: MsgMap) {
for (id, msg) in msg_map {
self.msg_list.msg_map.insert(id, msg);
}
}
@@ -92,120 +141,147 @@ impl View {
.constraints(cnstrnts)
.split(f.size());
let mut id_list = self.render_id_list(f, slice.clone())?;
id_list.dedup();
if id_list.is_empty() {
self.render_ids(f, slice.clone())?;
if self.ordered_list.is_empty() {
// we have not received any data
Ok(())
} else {
// get the id at the current index
match self.active_ids.state.selected() {
Some(i) => match id_list.get(i) {
Some(i) => {
self.render_info(f, slice, i.to_string())?;
Ok(())
match self.id_menu.state.selected() {
Some(i) => {
//debug!(target: "dnetview", "render()::selected index: {}", i);
match self.ordered_list.get(i) {
Some(i) => {
let id = i.clone();
self.render_info(f, slice, id)?;
Ok(())
}
None => Err(DnetViewError::NoIdAtIndex),
}
None => Err(DnetViewError::NoIdAtIndex),
},
}
// nothing is selected right now
None => Ok(()),
}
}
}
fn render_id_list<B: Backend>(
fn render_ids<B: Backend>(
&mut self,
f: &mut Frame<'_, B>,
slice: Vec<Rect>,
) -> DnetViewResult<Vec<String>> {
) -> DnetViewResult<()> {
let style = Style::default();
let mut nodes = Vec::new();
let mut node_ids: Vec<String> = Vec::new();
let mut session_ids: Vec<String> = Vec::new();
let mut connect_ids: Vec<String> = Vec::new();
let mut ids: Vec<String> = Vec::new();
for info in self.nodes.infos.values() {
match info.is_offline {
true => {
let style = Style::default().fg(Color::Blue).add_modifier(Modifier::ITALIC);
let mut name = String::new();
name.push_str(&info.name);
name.push_str("(Offline)");
let name_span = Span::styled(name, style);
let lines = vec![Spans::from(name_span)];
let names = ListItem::new(lines);
nodes.push(names);
ids.push(info.id.clone());
node_ids.push(info.id.clone());
}
false => {
let name_span = Span::raw(&info.name);
let lines = vec![Spans::from(name_span)];
let names = ListItem::new(lines);
nodes.push(names);
ids.push(info.id.clone());
node_ids.push(info.id.clone());
for session in &info.children {
if !session.is_empty {
let name = Span::styled(format!(" {}", session.name), style);
let lines = vec![Spans::from(name)];
let names = ListItem::new(lines);
nodes.push(names);
session_ids.push(session.id.clone());
ids.push(session.id.clone());
for connection in &session.children {
let mut info = Vec::new();
let name =
Span::styled(format!(" {}", connection.addr), style);
info.push(name);
match connection.last_status.as_str() {
"recv" => {
let msg = Span::styled(
format!(
" [R: {}]",
connection.last_msg
),
style,
);
info.push(msg);
}
"sent" => {
let msg = Span::styled(
format!(
" [S: {}]",
connection.last_msg
),
style,
);
info.push(msg);
}
"Null" => {
// Empty msg log. Do nothing
}
data => {
return Err(DnetViewError::UnexpectedData(data.to_string()))
}
}
let lines = vec![Spans::from(info)];
for obj in self.selectables.values() {
match obj {
SelectableObject::Node(node) => {
if node.is_offline {
let style = Style::default().fg(Color::Blue).add_modifier(Modifier::ITALIC);
let mut name = String::new();
name.push_str(&node.name);
name.push_str("(Offline)");
let name_span = Span::styled(name, style);
let lines = vec![Spans::from(name_span)];
let names = ListItem::new(lines);
nodes.push(names);
} else {
let name_span = Span::raw(&node.name);
let lines = vec![Spans::from(name_span)];
let names = ListItem::new(lines);
nodes.push(names);
for session in &node.children {
if !session.is_empty {
let name = Span::styled(format!(" {}", session.name), style);
let lines = vec![Spans::from(name)];
let names = ListItem::new(lines);
nodes.push(names);
connect_ids.push(connection.id.clone());
ids.push(connection.id.clone());
for connection in &session.children {
let mut info = Vec::new();
match connection.addr.as_str() {
"Null" => {
let style = Style::default()
.fg(Color::Blue)
.add_modifier(Modifier::ITALIC);
let name = Span::styled(
format!(" {} ", connection.addr),
style,
);
info.push(name);
}
addr => {
let name = Span::styled(
format!(
" {} ({})",
addr, connection.remote_node_id
),
style,
);
info.push(name);
}
}
let lines = vec![Spans::from(info)];
let names = ListItem::new(lines);
nodes.push(names);
}
}
}
}
}
SelectableObject::Lilith(lilith) => {
let name_span = Span::raw(&lilith.name);
let lines = vec![Spans::from(name_span)];
let names = ListItem::new(lines);
nodes.push(names);
for network in &lilith.networks {
let name = Span::styled(format!(" {}", network.name), style);
let lines = vec![Spans::from(name)];
let names = ListItem::new(lines);
nodes.push(names);
}
}
_ => (),
}
}
let nodes =
List::new(nodes).block(Block::default().borders(Borders::ALL)).highlight_symbol(">> ");
f.render_stateful_widget(nodes, slice[0], &mut self.active_ids.state);
f.render_stateful_widget(nodes, slice[0], &mut self.id_menu.state);
Ok(ids)
Ok(())
}
fn parse_msg_list(&self, connect_id: String) -> DnetViewResult<List<'a>> {
let send_style = Style::default().fg(Color::LightCyan);
let recv_style = Style::default().fg(Color::DarkGray);
let mut texts = Vec::new();
let mut lines = Vec::new();
let log = self.msg_list.msg_map.get(&connect_id);
match log {
Some(values) => {
for (i, (t, k, v)) in values.iter().enumerate() {
lines.push(match k.as_str() {
"send" => {
Span::styled(format!("{} {} S: {}", i, t, v), send_style)
}
"recv" => {
Span::styled(format!("{} {} R: {}", i, t, v), recv_style)
}
data => return Err(DnetViewError::UnexpectedData(data.to_string())),
});
}
}
None => return Err(DnetViewError::CannotFindId),
}
for line in lines.clone() {
let text = ListItem::new(line);
texts.push(text);
}
let msg_list = List::new(texts).block(Block::default().borders(Borders::ALL));
Ok(msg_list)
}
fn render_info<B: Backend>(
@@ -222,45 +298,65 @@ impl View {
return Ok(())
} else {
let info = self.selectables.get(&selected);
//debug!(target: "dnetview", "render_info()::selected {}", selected);
match info {
Some(SelectableObject::Node(node)) => {
if node.external_addr.is_some() {
let node_info = Span::styled(
format!("External addr: {}", node.external_addr.as_ref().unwrap()),
style,
);
lines.push(Spans::from(node_info));
//debug!(target: "dnetview", "render_info()::SelectableObject::Node");
lines.push(Spans::from(Span::styled("Type: Normal", style)));
match &node.external_addr {
Some(addr) => {
let node_info = Span::styled(format!("External addr: {}", addr), style);
lines.push(Spans::from(node_info));
}
None => {
let node_info = Span::styled("External addr: Null".to_string(), style);
lines.push(Spans::from(node_info));
}
}
lines.push(Spans::from(Span::styled(
format!("P2P state: {}", node.state),
style,
)));
}
Some(SelectableObject::Session(session)) => {
//debug!(target: "dnetview", "render_info()::SelectableObject::Session");
if session.accept_addr.is_some() {
let session_info = Span::styled(
let accept_addr = Span::styled(
format!("Accept addr: {}", session.accept_addr.as_ref().unwrap()),
style,
);
lines.push(Spans::from(session_info));
lines.push(Spans::from(accept_addr));
}
if session.hosts.is_some() {
let hosts = Span::styled(format!("Hosts:"), style);
lines.push(Spans::from(hosts));
for host in session.hosts.as_ref().unwrap() {
let host = Span::styled(format!(" {}", host), style);
lines.push(Spans::from(host));
}
}
}
Some(SelectableObject::Connect(connect)) => {
let log = self.msg_log.get(&connect.id);
match log {
Some(values) => {
for (t, k, v) in values {
lines.push(Spans::from(match k.as_str() {
"send" => {
Span::styled(format!("{} S: {}", t, v), style)
}
"recv" => {
Span::styled(format!("{} R: {}", t, v), style)
}
data => {
return Err(DnetViewError::UnexpectedData(data.to_string()))
}
}));
}
}
None => return Err(DnetViewError::CannotFindId),
//debug!(target: "dnetview", "render_info()::SelectableObject::Connect");
let text = self.parse_msg_list(connect.id.clone())?;
f.render_stateful_widget(text, slice[1], &mut self.msg_list.state);
}
Some(SelectableObject::Lilith(lilith)) => {
lines.push(Spans::from(Span::styled("Type: Lilith", style)));
lines.push(Spans::from(Span::styled("URLs:", style)));
for url in &lilith.urls {
lines.push(Spans::from(Span::styled(format!(" {}", url), style)));
}
}
Some(SelectableObject::Network(network)) => {
lines.push(Spans::from(Span::styled("URLs:", style)));
for url in &network.urls {
lines.push(Spans::from(Span::styled(format!(" {}", url), style)));
}
lines.push(Spans::from(Span::styled("Hosts:", style)));
for node in &network.nodes {
lines.push(Spans::from(Span::styled(format!(" {}", node), style)));
}
}
None => return Err(DnetViewError::NotSelectableObject),
@@ -278,15 +374,16 @@ impl View {
}
#[derive(Debug, Clone)]
pub struct IdListView {
pub struct IdMenu {
pub state: ListState,
pub ids: FxHashSet<String>,
pub ids: Vec<String>,
}
impl IdListView {
pub fn new(ids: FxHashSet<String>) -> IdListView {
IdListView { state: ListState::default(), ids }
impl IdMenu {
pub fn new(ids: Vec<String>) -> IdMenu {
IdMenu { state: ListState::default(), ids }
}
pub fn next(&mut self) {
let i = match self.state.selected() {
Some(i) => {
@@ -320,6 +417,61 @@ impl IdListView {
}
}
#[derive(Debug, Clone)]
pub struct MsgList {
pub state: ListState,
pub msg_map: MsgMap,
pub index: usize,
}
impl MsgList {
pub fn new(msg_map: MsgMap, index: usize) -> MsgList {
MsgList { state: ListState::default(), msg_map, index }
}
// TODO: reimplement
//pub fn next(&mut self) {
// let i = match self.state.selected() {
// Some(i) => {
// if i >= self.msg_len - 1 {
// 0
// } else {
// i + 1
// }
// }
// None => 0,
// };
// self.state.select(Some(i));
//}
//pub fn previous(&mut self) {
// let i = match self.state.selected() {
// Some(i) => {
// if i == 0 {
// self.msg_len - 1
// } else {
// i - 1
// }
// }
// None => 0,
// };
// self.state.select(Some(i));
//}
pub fn scroll(&mut self) -> DnetViewResult<()> {
let i = match self.state.selected() {
Some(i) => i + self.index,
None => 0,
};
self.state.select(Some(i));
Ok(())
}
pub fn unselect(&mut self) {
self.state.select(None);
}
}
#[derive(Debug, Clone)]
pub struct NodeInfoView {
pub index: usize,
@@ -333,15 +485,15 @@ impl NodeInfoView {
NodeInfoView { index, infos }
}
pub fn next(&mut self) {
self.index = (self.index + 1) % self.infos.len();
}
//pub fn next(&mut self) {
// self.index = (self.index + 1) % self.infos.len();
//}
pub fn previous(&mut self) {
if self.index > 0 {
self.index -= 1;
} else {
self.index = self.infos.len() - 1;
}
}
//pub fn previous(&mut self) {
// if self.index > 0 {
// self.index -= 1;
// } else {
// self.index = self.infos.len() - 1;
// }
//}
}

View File

@@ -9,10 +9,11 @@ license = "AGPL-3.0-only"
edition = "2021"
[dependencies]
async-std = {version = "1.11.0", features = ["attributes"]}
clap = {version = "3.1.18", features = ["derive"]}
async-std = {version = "1.12.0", features = ["attributes"]}
clap = {version = "3.2.20", features = ["derive"]}
darkfi = {path = "../../", features = ["crypto", "util", "rpc"]}
log = "0.4.17"
serde_json = "1.0.81"
prettytable-rs = "0.9.0"
serde_json = "1.0.85"
simplelog = "0.12.0"
url = "2.2.2"

View File

@@ -1,6 +1,7 @@
use std::{process::exit, str::FromStr, time::Instant};
use clap::{Parser, Subcommand};
use prettytable::{format, row, Table};
use serde_json::json;
use simplelog::{ColorChoice, TermLogger, TerminalMode};
@@ -8,11 +9,11 @@ use url::Url;
use darkfi::{
cli_desc,
crypto::address::Address,
crypto::{address::Address, token_id},
rpc::{client::RpcClient, jsonrpc::JsonRequest},
util::{
cli::{get_log_config, get_log_level},
NetworkName,
cli::{get_log_config, get_log_level, progress_bar},
encode_base10, NetworkName,
},
Result,
};
@@ -51,6 +52,10 @@ enum DrkSubcommand {
/// f64 amount requested for airdrop
amount: f64,
/// Token ID to airdrop
#[clap(long)]
token_id: String,
},
/// Wallet operations
@@ -110,22 +115,41 @@ impl Drk {
Ok(())
}
async fn airdrop(&self, address: Option<Address>, endpoint: Url, amount: f64) -> Result<()> {
async fn airdrop(
&self,
address: Option<Address>,
endpoint: Url,
amount: f64,
token_id: String,
) -> Result<()> {
let addr = if address.is_some() {
address.unwrap()
} else {
let req = JsonRequest::new("wallet.get_key", json!([0_i64]));
let req = JsonRequest::new("wallet.get_addrs", json!([0_i64]));
let rep = self.rpc_client.request(req).await?;
Address::from_str(rep.as_array().unwrap()[0].as_str().unwrap())?
};
println!("Requesting airdrop for {}", addr);
let req = JsonRequest::new("airdrop", json!([json!(addr.to_string()), amount]));
let rpc_client = RpcClient::new(endpoint).await?;
let rep = rpc_client.request(req).await?;
rpc_client.close().await?;
// Check if token ID is valid base58
if token_id::parse_b58(&token_id).is_err() {
eprintln!("Error: Invalid Token ID passed as argument.");
exit(1);
}
println!("Success! Transaction ID: {}", rep);
let pb = progress_bar(&format!("Requesting airdrop for {}", addr));
let req = JsonRequest::new("airdrop", json!([json!(addr.to_string()), amount, token_id]));
let rpc_client = RpcClient::new(endpoint).await?;
let rep = match rpc_client.oneshot_request(req).await {
Ok(v) => v,
Err(e) => {
eprintln!("{}", e);
exit(1);
}
};
pb.finish();
println!("Transaction ID: {}", rep);
Ok(())
}
@@ -139,20 +163,43 @@ impl Drk {
async fn wallet_balance(&self) -> Result<()> {
let req = JsonRequest::new("wallet.get_balances", json!([]));
let rep = self.rpc_client.request(req).await?;
// TODO: Better representation
println!("Balances:\n{:#?}", rep);
if !rep.is_object() {
eprintln!("Invalid balance data received from darkfid RPC endpoint.");
exit(1);
}
let mut table = Table::new();
table.set_format(*format::consts::FORMAT_NO_BORDER_LINE_SEPARATOR);
table.set_titles(row!["Token ID", "Balance"]);
for i in rep.as_object().unwrap().keys() {
if let Some(balance) = rep[i].as_u64() {
table.add_row(row![i, encode_base10(balance, 8)]);
continue
}
eprintln!("Found invalid balance data for key \"{}\"", i);
}
if table.is_empty() {
println!("No balances.");
} else {
println!("{}", table);
}
Ok(())
}
async fn wallet_address(&self) -> Result<()> {
let req = JsonRequest::new("wallet.get_key", json!([0_i64]));
let req = JsonRequest::new("wallet.get_addrs", json!([0_i64]));
let rep = self.rpc_client.request(req).await?;
println!("Default wallet address: {}", rep);
Ok(())
}
async fn wallet_all_addresses(&self) -> Result<()> {
let req = JsonRequest::new("wallet.get_key", json!([-1]));
let req = JsonRequest::new("wallet.get_addrs", json!([-1]));
let rep = self.rpc_client.request(req).await?;
println!("Wallet addresses:\n{:#?}", rep);
Ok(())
@@ -193,8 +240,8 @@ async fn main() -> Result<()> {
match args.command {
DrkSubcommand::Ping => drk.ping().await,
DrkSubcommand::Airdrop { address, faucet_endpoint, amount } => {
drk.airdrop(address, faucet_endpoint, amount).await
DrkSubcommand::Airdrop { address, faucet_endpoint, amount, token_id } => {
drk.airdrop(address, faucet_endpoint, amount, token_id).await
}
DrkSubcommand::Wallet { keygen, balance, address, all_addresses } => {

View File

@@ -9,29 +9,28 @@ license = "AGPL-3.0-only"
edition = "2021"
[dependencies]
async-channel = "1.6.1"
async-channel = "1.7.1"
async-executor = "1.4.1"
async-std = "1.11.0"
async-trait = "0.1.53"
async-std = "1.12.0"
async-trait = "0.1.57"
blake3 = "1.3.1"
bs58 = "0.4.0"
chrono = "0.4.19"
ctrlc-async = {version = "3.2.2", default-features = false, features = ["async-std", "termination"]}
chrono = "0.4.22"
ctrlc = { version = "3.2.3", features = ["termination"] }
darkfi = {path = "../../", features = ["blockchain", "wallet", "rpc", "net", "node"]}
easy-parallel = "3.2.0"
futures-lite = "1.12.0"
hex = "0.4.3"
lazy-init = "0.5.0"
lazy-init = "0.5.1"
log = "0.4.17"
num-bigint = {version = "0.4.3", features = ["serde"]}
rand = "0.8.5"
serde_json = "1.0.81"
serde_json = "1.0.85"
simplelog = "0.12.0"
sled = "0.34.7"
url = "2.2.2"
# Argument parsing
serde = "1.0.137"
serde_derive = "1.0.137"
serde = "1.0.144"
serde_derive = "1.0.144"
structopt = "0.3.26"
structopt-toml = "0.5.0"
structopt-toml = "0.5.1"

View File

@@ -21,20 +21,26 @@
# JSON-RPC listen URL
#rpc_listen = "tcp://127.0.0.1:8340"
# P2P accept address for the syncing protocol
#sync_p2p_accept = "tls://127.0.0.1:8342"
# P2P accept addresses for the syncing protocol
#sync_p2p_accept = ["tls://127.0.0.1:9342"]
# P2P external address for the syncing protocol
#sync_p2p_external = "tls://127.0.0.1:8342"
# P2P external addresses for the syncing protocol
#sync_p2p_external = ["tls://127.0.0.1:9342"]
# Connection slots for the syncing protocol
#sync_slots = 8
# Seed nodes to connect to for the syncing protocol
#sync_seed = []
#sync_p2p_seed = []
# Peers to connect to for the syncing protocol
#sync_peer = []
#sync_p2p_peer = []
# Prefered transports of outbound connections for the syncing protocol
#sync_p2p_transports = ["tls", "tcp"]
# Enable localnet hosts
#localnet = true
# Airdrop timeout limit in seconds
#airdrop_timeout = 600

View File

@@ -10,9 +10,9 @@ pub enum RpcError {
fn to_tuple(e: RpcError) -> (i64, String) {
let msg = match e {
RpcError::AmountExceedsLimit => "Amount requested is higher than the faucet limit",
RpcError::TimeLimitReached => "Timeout not expired. Try again later",
RpcError::ParseError => "Parse error",
RpcError::AmountExceedsLimit => "Amount requested is higher than the faucet limit.",
RpcError::TimeLimitReached => "Timeout not expired. Try again later.",
RpcError::ParseError => "Parse error.",
};
(e as i64, msg.to_string())

View File

@@ -6,7 +6,6 @@ use async_trait::async_trait;
use chrono::Utc;
use futures_lite::future;
use log::{debug, error, info};
use num_bigint::BigUint;
use serde_derive::Deserialize;
use serde_json::{json, Value};
use structopt::StructOpt;
@@ -21,7 +20,7 @@ use darkfi::{
ValidatorState, ValidatorStatePtr, MAINNET_GENESIS_HASH_BYTES, MAINNET_GENESIS_TIMESTAMP,
TESTNET_GENESIS_HASH_BYTES, TESTNET_GENESIS_TIMESTAMP,
},
crypto::{address::Address, keypair::PublicKey, token_list::DrkTokenList},
crypto::{address::Address, keypair::PublicKey, token_id},
net,
net::P2pPtr,
node::Client,
@@ -37,7 +36,7 @@ use darkfi::{
decode_base10, expand_path,
path::get_config_path,
serial::serialize,
sleep, NetworkName,
sleep,
},
wallet::walletdb::init_wallet,
Error, Result,
@@ -73,17 +72,17 @@ struct Args {
/// Path to blockchain database
database: String,
#[structopt(long, default_value = "tcp://127.0.0.1:8340")]
#[structopt(long, default_value = "tcp://127.0.0.1:9340")]
/// JSON-RPC listen URL
rpc_listen: Url,
#[structopt(long)]
/// P2P accept address for the syncing protocol
sync_p2p_accept: Option<Url>,
/// P2P accept addresses for the syncing protocol
sync_p2p_accept: Vec<Url>,
#[structopt(long)]
/// P2P external address for the syncing protocol
sync_p2p_external: Option<Url>,
/// P2P external addresses for the syncing protocol
sync_p2p_external: Vec<Url>,
#[structopt(long, default_value = "8")]
/// Connection slots for the syncing protocol
@@ -97,6 +96,14 @@ struct Args {
/// Connect to peer for the syncing protocol (repeatable flag)
sync_p2p_peer: Vec<Url>,
#[structopt(long)]
/// Prefered transports of outbound connections for the syncing protocol (repeatable flag)
sync_p2p_transports: Vec<String>,
#[structopt(long)]
/// Enable localnet hosts
localnet: bool,
#[structopt(long)]
/// Whitelisted cashier address (repeatable flag)
cashier_pub: Vec<String>,
@@ -111,7 +118,7 @@ struct Args {
#[structopt(long, default_value = "10")]
/// Airdrop amount limit
airdrop_limit: String, // We convert this to biguint with decode_base10
airdrop_limit: String, // We convert this to u64 with decode_base10
#[structopt(short, parse(from_occurrences))]
/// Increase verbosity (-vvv supported)
@@ -124,7 +131,7 @@ pub struct Faucetd {
client: Arc<Client>,
validator_state: ValidatorStatePtr,
airdrop_timeout: i64,
airdrop_limit: BigUint,
airdrop_limit: u64,
airdrop_map: Arc<Mutex<HashMap<Address, i64>>>,
}
@@ -149,7 +156,7 @@ impl Faucetd {
validator_state: ValidatorStatePtr,
sync_p2p: P2pPtr,
timeout: i64,
limit: BigUint,
limit: u64,
) -> Result<Self> {
let client = validator_state.read().await.client.clone();
@@ -165,12 +172,16 @@ impl Faucetd {
}
// RPCAPI:
// Processes an airdrop request and airdrops requested amount to address.
// Processes an airdrop request and airdrops requested token and amount to address.
// Returns the transaction ID upon success.
// --> {"jsonrpc": "2.0", "method": "airdrop", "params": ["1DarkFi...", 1.42], "id": 1}
// --> {"jsonrpc": "2.0", "method": "airdrop", "params": ["1DarkFi...", 1.42, "1F00b4r..."], "id": 1}
// <-- {"jsonrpc": "2.0", "result": "txID", "id": 1}
async fn airdrop(&self, id: Value, params: &[Value]) -> JsonResult {
if params.len() != 2 || !params[0].is_string() || !params[1].is_f64() {
if params.len() != 3 ||
!params[0].is_string() ||
!params[1].is_f64() ||
!params[2].is_string()
{
return JsonError::new(InvalidParams, None, id).into()
}
@@ -208,6 +219,16 @@ impl Faucetd {
return server_error(RpcError::AmountExceedsLimit, id)
}
// Here we allow the faucet to mint arbitrary token IDs.
// TODO: Revert this to native token when we have contracts for minting tokens.
let token_id = match token_id::parse_b58(params[2].as_str().unwrap()) {
Ok(v) => v,
Err(_) => {
error!("airdrop(): Failed parsing token id from string");
return server_error(RpcError::ParseError, id)
}
};
// Check if there as a previous airdrop and the timeout has passed.
let now = Utc::now().timestamp();
let map = self.airdrop_map.lock().await;
@@ -218,24 +239,11 @@ impl Faucetd {
};
drop(map);
let token_id = self.client.tokenlist.by_net[&NetworkName::DarkFi]
.get("DRK".to_string())
.unwrap()
.drk_address;
let amnt: u64 = match amount.try_into() {
Ok(v) => v,
Err(e) => {
error!("airdrop(): Failed converting biguint to u64: {}", e);
return JsonError::new(InternalError, None, id).into()
}
};
let tx = match self
.client
.build_transaction(
pubkey,
amnt,
amount,
token_id,
true,
self.validator_state.read().await.state_machine.clone(),
@@ -299,8 +307,8 @@ async fn realmain(args: Args, ex: Arc<Executor<'_>>) -> Result<()> {
// tasks, and to catch a shutdown signal, where we can clean up and
// exit gracefully.
let (signal, shutdown) = async_channel::bounded::<()>(1);
ctrlc_async::set_async_handler(async move {
signal.send(()).await.unwrap();
ctrlc::set_handler(move || {
async_std::task::block_on(signal.send(())).unwrap();
})
.unwrap();
@@ -321,16 +329,9 @@ async fn realmain(args: Args, ex: Arc<Executor<'_>>) -> Result<()> {
}
};
let tokenlist = Arc::new(DrkTokenList::new(&[
("drk", include_bytes!("../../../contrib/token/darkfi_token_list.min.json")),
("btc", include_bytes!("../../../contrib/token/bitcoin_token_list.min.json")),
("eth", include_bytes!("../../../contrib/token/erc20_token_list.min.json")),
("sol", include_bytes!("../../../contrib/token/solana_token_list.min.json")),
])?);
// TODO: sqldb init cleanup
// Initialize client
let client = Arc::new(Client::new(wallet.clone(), tokenlist).await?);
let client = Arc::new(Client::new(wallet.clone()).await?);
// Parse cashier addresses
let mut cashier_pubkeys = vec![];
@@ -367,6 +368,8 @@ async fn realmain(args: Args, ex: Arc<Executor<'_>>) -> Result<()> {
external_addr: args.sync_p2p_external,
peers: args.sync_p2p_peer.clone(),
seeds: args.sync_p2p_seed.clone(),
outbound_transports: net::settings::get_outbound_transports(args.sync_p2p_transports),
localnet: args.localnet,
..Default::default()
};
@@ -416,7 +419,10 @@ async fn realmain(args: Args, ex: Arc<Executor<'_>>) -> Result<()> {
})
.detach();
match block_sync_task(sync_p2p.clone(), state.clone()).await {
info!("Waiting for sync P2P outbound connections");
sync_p2p.clone().wait_for_outbound(ex).await?;
match block_sync_task(sync_p2p, state.clone()).await {
Ok(()) => *faucetd.synced.lock().await = true,
Err(e) => error!("Failed syncing blockchain: {}", e),
}

View File

@@ -12,30 +12,30 @@ edition = "2021"
darkfi = {path = "../../", features = ["net", "rpc", "raft"]}
# Async
smol = "1.2.5"
futures = "0.3.21"
futures-rustls = "0.22.1"
async-std = "1.11.0"
async-trait = "0.1.53"
async-channel = "1.6.1"
futures = "0.3.24"
futures-rustls = "0.22.2"
async-std = "1.12.0"
async-trait = "0.1.57"
async-channel = "1.7.1"
async-executor = "1.4.1"
easy-parallel = "3.2.0"
# Crypto
crypto_box = "0.7.2"
crypto_box = "0.8.1"
rand = "0.8.5"
# Misc
clap = {version = "3.1.18", features = ["derive"]}
clap = {version = "3.2.20", features = ["derive"]}
log = "0.4.17"
simplelog = "0.12.0"
fxhash = "0.2.1"
ctrlc-async = {version= "3.2.2", default-features = false, features = ["async-std", "termination"]}
ctrlc = { version = "3.2.3", features = ["termination"] }
url = "2.2.2"
# Encoding and parsing
serde_json = "1.0.81"
serde = {version = "1.0.137", features = ["derive"]}
serde_json = "1.0.85"
serde = {version = "1.0.144", features = ["derive"]}
structopt = "0.3.26"
structopt-toml = "0.5.0"
structopt-toml = "0.5.1"
bs58 = "0.4.0"
toml = "0.5.9"

View File

@@ -9,14 +9,14 @@ datastore="~/.config/ircd-inbound"
## Raft net settings
[net]
## P2P accept address
inbound="127.0.0.1:11002"
## P2P accept addresses
inbound=["127.0.0.1:11002"]
## Connection slots
#outbound_connections=5
## P2P external address
external_addr="127.0.0.1:11004"
## P2P external addresses
external_addr=["127.0.0.1:11004"]
## Peers to connect to
#peers=["127.0.0.1:11003"]

View File

@@ -9,14 +9,14 @@ datastore="~/.config/ircd-outbound"
## Raft net settings
[net]
## P2P accept address
# inbound="127.0.0.1:11002"
## P2P accept addresses
# inbound=["127.0.0.1:11002"]
## Connection slots
outbound_connections=5
## P2P external address
#external_addr="127.0.0.1:11002"
## P2P external addresses
#external_addr=["127.0.0.1:11002"]
## Peers to connect to
#peers=["127.0.0.1:11003"]

View File

@@ -9,14 +9,14 @@ datastore="~/.config/ircd-seed"
## Raft net settings
[net]
## P2P accept address
inbound="127.0.0.1:11001"
## P2P accept addresses
inbound=["127.0.0.1:11001"]
## Connection slots
# outbound_connections=5
## P2P external address
# external_addr="127.0.0.1:11001"
## P2P external addresses
# external_addr=["127.0.0.1:11001"]
## Peers to connect to
# peers=["127.0.0.1:11001"]

View File

@@ -12,14 +12,14 @@ autojoin = ["#dev"]
## Raft net settings
[net]
## P2P accept address
#inbound="tls://127.0.0.1:11002"
## P2P accept addresses
#inbound=["tls://127.0.0.1:11002"]
## Connection slots
outbound_connections=5
## P2P external address
#external_addr="tls://127.0.0.1:11002"
## P2P external addresses
#external_addr=["tls://127.0.0.1:11002"]
## Peers to connect to
#peers=["tls://127.0.0.1:11003"]
@@ -27,6 +27,9 @@ outbound_connections=5
## Seed nodes to connect to
seeds=["tls://irc0.dark.fi:11001", "tls://irc1.dark.fi:11001"]
# Prefered transports for outbound connections
#transports = ["tls", "tcp"]
## these are the default configuration for the p2p network
#manual_attempt_limit=0
#seed_query_timeout_seconds=8

View File

@@ -169,8 +169,8 @@ async fn realmain(settings: Args, executor: Arc<Executor<'_>>) -> Result<()> {
let net_settings = settings.net;
let datastore_raft = datastore_path.join("ircd.db");
let mut raft = Raft::<Privmsg>::new(net_settings.inbound.clone(), datastore_raft)?;
let raft_sender = raft.get_broadcast();
let raft_receiver = raft.get_commits();
let raft_sender = raft.get_msgs_channel();
let raft_receiver = raft.get_commits_channel();
// P2p setup
let (p2p_send_channel, p2p_recv_channel) = async_channel::unbounded::<NetMsg>();
@@ -222,7 +222,7 @@ async fn realmain(settings: Args, executor: Arc<Executor<'_>>) -> Result<()> {
Ok((s, a)) => (s, a),
Err(e) => {
error!("Failed listening for connections: {}", e);
return Err(Error::ServiceStopped)
return Err(Error::NetworkServiceStopped)
}
};
@@ -244,13 +244,13 @@ async fn realmain(settings: Args, executor: Arc<Executor<'_>>) -> Result<()> {
// Run once receive exit signal
let (signal, shutdown) = async_channel::bounded::<()>(1);
ctrlc_async::set_async_handler(async move {
ctrlc::set_handler(move || {
warn!(target: "ircd", "ircd start Exit Signal");
// cleaning up tasks running in the background
signal.send(()).await.unwrap();
rpc_task.cancel().await;
irc_task.cancel().await;
p2p_run_task.cancel().await;
async_std::task::block_on(signal.send(())).unwrap();
async_std::task::block_on(rpc_task.cancel());
async_std::task::block_on(irc_task.cancel());
async_std::task::block_on(p2p_run_task.cancel());
})
.unwrap();

View File

@@ -173,7 +173,7 @@ impl IrcServerConnection {
}
"QUIT" => {
// Close the connection
return Err(Error::ServiceStopped)
return Err(Error::NetworkServiceStopped)
}
_ => {
warn!("Unimplemented `{}` command", command);

View File

@@ -1,42 +1,47 @@
[package]
name = "ircd"
version = "0.3.0"
homepage = "https://dark.fi"
description = "P2P IRC daemon"
authors = ["darkfi <dev@dark.fi>"]
repository = "https://github.com/darkrenaissance/darkfi"
license = "AGPL-3.0-only"
version = "0.4.0"
edition = "2021"
authors = ["darkfi <dev@dark.fi>"]
license = "AGPL-3.0-only"
homepage = "https://dark.fi"
repository = "https://github.com/darkrenaissance/darkfi"
keywords = []
categories = []
[dependencies]
darkfi = {path = "../../", features = ["net", "rpc"]}
# Async
smol = "1.2.5"
futures = "0.3.21"
futures-rustls = "0.22.1"
async-std = "1.11.0"
async-trait = "0.1.53"
async-channel = "1.6.1"
futures = "0.3.24"
futures-rustls = "0.22.2"
rustls-pemfile = "1.0.1"
async-std = "1.12.0"
async-trait = "0.1.57"
async-channel = "1.7.1"
async-executor = "1.4.1"
easy-parallel = "3.2.0"
# Crypto
crypto_box = "0.7.2"
crypto_box = "0.8.1"
rand = "0.8.5"
# Misc
clap = {version = "3.1.18", features = ["derive"]}
clap = {version = "3.2.20", features = ["derive"]}
log = "0.4.17"
simplelog = "0.12.0"
fxhash = "0.2.1"
ctrlc-async = {version= "3.2.2", default-features = false, features = ["async-std", "termination"]}
ctrlc = { version = "3.2.3", features = ["termination"] }
url = "2.2.2"
ringbuffer = "0.8.4"
chrono = "0.4.22"
ripemd = "0.1.1"
# Encoding and parsing
serde_json = "1.0.81"
serde = {version = "1.0.137", features = ["derive"]}
serde_json = "1.0.85"
serde = {version = "1.0.144", features = ["derive"]}
structopt = "0.3.26"
structopt-toml = "0.5.0"
structopt-toml = "0.5.1"
bs58 = "0.4.0"
toml = "0.5.9"
hex = "0.4.3"

View File

@@ -9,14 +9,14 @@ datastore="~/.config/ircd-inbound"
## Raft net settings
[net]
## P2P accept address
inbound="127.0.0.1:11002"
## P2P accept addresses
inbound=["127.0.0.1:11002"]
## Connection slots
#outbound_connections=5
## P2P external address
external_addr="127.0.0.1:11004"
## P2P external addresses
external_addr=["127.0.0.1:11004"]
## Peers to connect to
#peers=["127.0.0.1:11003"]

View File

@@ -9,14 +9,14 @@ datastore="~/.config/ircd-outbound"
## Raft net settings
[net]
## P2P accept address
# inbound="127.0.0.1:11002"
## P2P accept addresses
# inbound=["127.0.0.1:11002"]
## Connection slots
outbound_connections=5
## P2P external address
#external_addr="127.0.0.1:11002"
## P2P external addresses
#external_addr=["127.0.0.1:11002"]
## Peers to connect to
#peers=["127.0.0.1:11003"]

View File

@@ -9,14 +9,14 @@ datastore="~/.config/ircd-seed"
## Raft net settings
[net]
## P2P accept address
inbound="127.0.0.1:11001"
## P2P accept addresses
inbound=["127.0.0.1:11001"]
## Connection slots
# outbound_connections=5
## P2P external address
# external_addr="127.0.0.1:11001"
## P2P external addresses
# external_addr=["127.0.0.1:11001"]
## Peers to connect to
# peers=["127.0.0.1:11001"]

Some files were not shown because too many files have changed in this diff Show More