Compare commits

...

6 Commits

Author SHA1 Message Date
rymnc
b5e5be47db chore(rln): onchain tree poc 2023-11-30 12:25:33 +05:30
tyshko-rostyslav
b5760697bc Fix nightly build (#223)
* install libssl-dev and pkg-config

* fix

* move instal ssl

* attempt

* vendored

* old semaphore
2023-11-20 02:29:43 +03:00
tyshko-rostyslav
5c4e3fc13c Semaphore update (#220)
* update toml file

* update lock file
2023-11-13 13:27:47 +01:00
Sasha
a92d6428d6 feat: rln-wast 0.0.13(#222) 2023-11-08 20:09:43 +01:00
Richard Ramos
e6db05f27c feat: rln-wasm 0.0.12 2023-11-08 14:07:03 -04:00
tyshko-rostyslav
25f822e779 Refactoring (#219)
* public_api_tests module

* add tests to new module

* rm tests

* fmt

* rm redundunt code

* fmt
2023-10-30 09:37:17 +01:00
10 changed files with 1338 additions and 1057 deletions

Cargo.lock (generated): 221 changed lines
View File

@@ -48,6 +48,17 @@ dependencies = [
"memchr",
]
[[package]]
name = "alloy-rlp"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc0fac0fc16baf1f63f78b47c3d24718f3619b0714076f6a02957d808d52cbef"
dependencies = [
"arrayvec",
"bytes",
"smol_str",
]
[[package]]
name = "anes"
version = "0.1.6"
@@ -603,6 +614,12 @@ version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07"
[[package]]
name = "bitvec"
version = "1.0.1"
@@ -754,7 +771,7 @@ version = "2.34.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"textwrap 0.11.0",
"unicode-width",
]
@@ -765,7 +782,7 @@ version = "3.2.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"clap_lex 0.2.4",
"indexmap",
"textwrap 0.16.0",
@@ -790,7 +807,7 @@ checksum = "914c8c79fb560f238ef6429439a30023c862f7a28e688c58f7203f12b29970bd"
dependencies = [
"anstream",
"anstyle",
"bitflags",
"bitflags 1.3.2",
"clap_lex 0.4.1",
"once_cell",
"strsim",
@@ -893,18 +910,25 @@ dependencies = [
"wasm-bindgen",
]
[[package]]
name = "const-hex"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a5104de16b218eddf8e34ffe2f86f74bfa4e61e95a1b89732fccf6325efd0557"
dependencies = [
"cfg-if",
"cpufeatures",
"hex",
"proptest",
"serde",
]
[[package]]
name = "const-oid"
version = "0.9.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "520fbf3c07483f94e3e3ca9d0cfd913d7718ef2483d2cfd91c0d9e91474ab913"
[[package]]
name = "convert_case"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e"
[[package]]
name = "corosensei"
version = "0.1.3"
@@ -1326,10 +1350,8 @@ version = "0.99.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321"
dependencies = [
"convert_case",
"proc-macro2",
"quote",
"rustc_version 0.4.0",
"syn 1.0.109",
]
@@ -1376,9 +1398,9 @@ checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91"
[[package]]
name = "elliptic-curve"
version = "0.13.4"
version = "0.13.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75c71eaa367f2e5d556414a8eea812bc62985c879748d6403edabd9cb03f16e7"
checksum = "d97ca172ae9dc9f9b779a6e3a65d308f2af74e5b8c921299075bdb4a0370e914"
dependencies = [
"base16ct",
"crypto-bigint",
@@ -1517,7 +1539,7 @@ dependencies = [
"getrandom",
"hex",
"k256",
"num_enum",
"num_enum 0.6.1",
"open-fastrlp",
"rand",
"rlp",
@@ -1532,19 +1554,19 @@ dependencies = [
[[package]]
name = "ethers-core"
version = "2.0.8"
version = "2.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "60ca2514feb98918a0a31de7e1983c29f2267ebf61b2dc5d4294f91e5b866623"
checksum = "c0a17f0708692024db9956b31d7a20163607d2745953f5ae8125ab368ba280ad"
dependencies = [
"arrayvec",
"bytes",
"chrono",
"const-hex",
"elliptic-curve",
"ethabi",
"generic-array",
"hex",
"k256",
"num_enum",
"num_enum 0.7.1",
"open-fastrlp",
"rand",
"rlp",
@@ -1575,11 +1597,19 @@ checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7"
[[package]]
name = "fastrand"
version = "1.9.0"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be"
checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5"
[[package]]
name = "fastrlp"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "139834ddba373bbdd213dffe02c8d110508dcf1726c2be27e8d1f7d7e1856418"
dependencies = [
"instant",
"arrayvec",
"auto_impl",
"bytes",
]
[[package]]
@@ -1898,7 +1928,7 @@ checksum = "adcf93614601c8129ddf72e2d5633df827ba6551541c6d8c59520a371475be1f"
dependencies = [
"hermit-abi 0.3.1",
"io-lifetimes",
"rustix",
"rustix 0.37.19",
"windows-sys 0.48.0",
]
@@ -1962,9 +1992,9 @@ checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67"
[[package]]
name = "libc"
version = "0.2.144"
version = "0.2.150"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b00cc1c228a6782d0f076e7b232802e0c5689d41bb5df366f2a6b6621cfdfe1"
checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c"
[[package]]
name = "libloading"
@@ -1976,12 +2006,24 @@ dependencies = [
"winapi",
]
[[package]]
name = "libm"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058"
[[package]]
name = "linux-raw-sys"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ece97ea872ece730aed82664c424eb4c8291e1ff2480247ccf7409044bc6479f"
[[package]]
name = "linux-raw-sys"
version = "0.4.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da2479e8c062e40bf0066ffa0bc823de0a9368974af99c9f6df941d2c231e03f"
[[package]]
name = "lock_api"
version = "0.4.9"
@@ -2167,6 +2209,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd"
dependencies = [
"autocfg",
"libm",
]
[[package]]
@@ -2185,7 +2228,16 @@ version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a015b430d3c108a207fd776d2e2196aaf8b1cf8cf93253e3a097ff3085076a1"
dependencies = [
"num_enum_derive",
"num_enum_derive 0.6.1",
]
[[package]]
name = "num_enum"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "683751d591e6d81200c39fb0d1032608b77724f34114db54f571ff1317b337c0"
dependencies = [
"num_enum_derive 0.7.1",
]
[[package]]
@@ -2200,6 +2252,18 @@ dependencies = [
"syn 2.0.16",
]
[[package]]
name = "num_enum_derive"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6c11e44798ad209ccdd91fc192f0526a369a01234f7373e1b141c96d7cee4f0e"
dependencies = [
"proc-macro-crate",
"proc-macro2",
"quote",
"syn 2.0.16",
]
[[package]]
name = "object"
version = "0.28.4"
@@ -2481,13 +2545,29 @@ dependencies = [
[[package]]
name = "proc-macro2"
version = "1.0.56"
version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b63bdb0cd06f1f4dedf69b254734f9b45af66e4a031e42a7480257d9898b435"
checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da"
dependencies = [
"unicode-ident",
]
[[package]]
name = "proptest"
version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c003ac8c77cb07bb74f5f198bce836a689bcd5a42574612bf14d17bfd08c20e"
dependencies = [
"bitflags 2.4.1",
"lazy_static",
"num-traits",
"rand",
"rand_chacha",
"rand_xorshift",
"regex-syntax",
"unarray",
]
[[package]]
name = "ptr_meta"
version = "0.1.4"
@@ -2553,6 +2633,15 @@ dependencies = [
"getrandom",
]
[[package]]
name = "rand_xorshift"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f"
dependencies = [
"rand_core",
]
[[package]]
name = "rayon"
version = "1.7.0"
@@ -2581,16 +2670,16 @@ version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
dependencies = [
"bitflags",
"bitflags 1.3.2",
]
[[package]]
name = "redox_syscall"
version = "0.3.5"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29"
checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa"
dependencies = [
"bitflags",
"bitflags 1.3.2",
]
[[package]]
@@ -2639,7 +2728,7 @@ version = "3.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "76e189c2369884dce920945e2ddf79b3dff49e071a167dd1817fa9c4c00d512e"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"libc",
"mach",
"winapi",
@@ -2737,7 +2826,7 @@ dependencies = [
[[package]]
name = "rln-wasm"
version = "0.0.9"
version = "0.0.13"
dependencies = [
"console_error_panic_hook",
"getrandom",
@@ -2777,24 +2866,32 @@ dependencies = [
[[package]]
name = "ruint"
version = "1.8.0"
version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d470e29e933dac4101180fd6574971892315c414cf2961a192729089687cc9b"
checksum = "95294d6e3a6192f3aabf91c38f56505a625aa495533442744185a36d75a790c4"
dependencies = [
"alloy-rlp",
"ark-ff 0.3.0",
"derive_more",
"ark-ff 0.4.1",
"bytes",
"fastrlp",
"num-bigint",
"parity-scale-codec",
"primitive-types",
"proptest",
"rand",
"rlp",
"ruint-macro",
"rustc_version 0.4.0",
"serde",
"thiserror",
"valuable",
"zeroize",
]
[[package]]
name = "ruint-macro"
version = "1.0.2"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62cc5760263ea229d367e7dff3c0cbf09e4797a125bd87059a6c095804f3b2d1"
checksum = "e666a5496a0b2186dbcd0ff6106e29e093c15591bde62c20d3842007c6978a09"
[[package]]
name = "rustc-demangle"
@@ -2838,11 +2935,24 @@ version = "0.37.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "acf8729d8542766f1b2cf77eb034d52f40d375bb8b615d0b147089946e16613d"
dependencies = [
"bitflags",
"bitflags 1.3.2",
"errno",
"io-lifetimes",
"libc",
"linux-raw-sys",
"linux-raw-sys 0.3.7",
"windows-sys 0.48.0",
]
[[package]]
name = "rustix"
version = "0.38.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b426b0506e5d50a7d8dafcf2e81471400deb602392c7dd110815afb4eaf02a3"
dependencies = [
"bitflags 2.4.1",
"errno",
"libc",
"linux-raw-sys 0.4.10",
"windows-sys 0.48.0",
]
@@ -2963,7 +3073,7 @@ dependencies = [
"ark-relations 0.3.0",
"ark-std 0.3.0",
"color-eyre 0.6.2",
"ethers-core 2.0.8",
"ethers-core 2.0.10",
"once_cell",
"rand",
"rand_chacha",
@@ -3136,6 +3246,15 @@ version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
[[package]]
name = "smol_str"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "74212e6bbe9a4352329b2f68ba3130c15a3f26fe88ff22dbdc6cdd58fa85e99c"
dependencies = [
"serde",
]
[[package]]
name = "spki"
version = "0.7.2"
@@ -3250,15 +3369,15 @@ checksum = "fd1ba337640d60c3e96bc6f0638a939b9c9a7f2c316a1598c279828b3d1dc8c5"
[[package]]
name = "tempfile"
version = "3.5.0"
version = "3.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9fbec84f381d5795b08656e4912bec604d162bff9291d6189a78f4c8ab87998"
checksum = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5"
dependencies = [
"cfg-if",
"fastrand",
"redox_syscall 0.3.5",
"rustix",
"windows-sys 0.45.0",
"redox_syscall 0.4.1",
"rustix 0.38.21",
"windows-sys 0.48.0",
]
[[package]]
@@ -3457,6 +3576,12 @@ dependencies = [
"static_assertions",
]
[[package]]
name = "unarray"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94"
[[package]]
name = "unicode-bidi"
version = "0.3.13"

View File

@@ -1,6 +1,6 @@
[package]
name = "rln-wasm"
version = "0.0.9"
version = "0.0.13"
edition = "2021"
license = "MIT or Apache2"

View File

@@ -56,7 +56,7 @@ sled = "=0.34.7"
criterion = { version = "=0.4.0", features = ["html_reports"] }
[features]
default = ["parallel", "wasmer/sys-default", "pmtree-ft"]
default = ["parallel", "wasmer/sys-default"]
parallel = ["ark-ec/parallel", "ark-ff/parallel", "ark-std/parallel", "ark-groth16/parallel", "utils/parallel"]
wasm = ["wasmer/js", "wasmer/std"]
fullmerkletree = ["default"]

View File

@@ -7,6 +7,8 @@ pub mod pm_tree_adapter;
pub mod poseidon_tree;
pub mod protocol;
pub mod public;
#[cfg(test)]
pub mod public_api_tests;
pub mod utils;
#[cfg(not(target_arch = "wasm32"))]

File diff suppressed because it is too large

rln/src/public_api_tests.rs (new file): 995 added lines
View File

@@ -0,0 +1,995 @@
use crate::circuit::{Curve, Fr, TEST_RESOURCES_FOLDER, TEST_TREE_HEIGHT};
use crate::hashers::{hash_to_field, poseidon_hash as utils_poseidon_hash};
use crate::protocol::*;
use crate::public::RLN;
use crate::utils::*;
use ark_groth16::Proof as ArkProof;
use ark_serialize::{CanonicalDeserialize, Read};
use num_bigint::BigInt;
use std::io::Cursor;
use std::str::FromStr;
use utils::ZerokitMerkleTree;
use ark_std::{rand::thread_rng, UniformRand};
use rand::Rng;
use serde_json::{json, Value};
#[test]
// We test batch Merkle tree additions
fn test_merkle_operations() {
let tree_height = TEST_TREE_HEIGHT;
let no_of_leaves = 256;
// We generate a vector of random leaves
let mut leaves: Vec<Fr> = Vec::new();
let mut rng = thread_rng();
for _ in 0..no_of_leaves {
leaves.push(Fr::rand(&mut rng));
}
// We create a new tree
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
// We first add leaves one by one specifying the index
for (i, leaf) in leaves.iter().enumerate() {
// We check if the number of leaves set is consistent
assert_eq!(rln.tree.leaves_set(), i);
let mut buffer = Cursor::new(fr_to_bytes_le(&leaf));
rln.set_leaf(i, &mut buffer).unwrap();
}
// We get the root of the tree obtained by adding one leaf at a time
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_root(&mut buffer).unwrap();
let (root_single, _) = bytes_le_to_fr(&buffer.into_inner());
// We reset the tree to default
rln.set_tree(tree_height).unwrap();
// We add leaves one by one using the internal index (new leaves go in the next available position)
for leaf in &leaves {
let mut buffer = Cursor::new(fr_to_bytes_le(&leaf));
rln.set_next_leaf(&mut buffer).unwrap();
}
// We check if the number of leaves set is consistent
assert_eq!(rln.tree.leaves_set(), no_of_leaves);
// We get the root of the tree obtained adding leaves using the internal index
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_root(&mut buffer).unwrap();
let (root_next, _) = bytes_le_to_fr(&buffer.into_inner());
assert_eq!(root_single, root_next);
// We reset the tree to default
rln.set_tree(tree_height).unwrap();
// We add leaves in a batch into the tree
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
rln.init_tree_with_leaves(&mut buffer).unwrap();
// We check if number of leaves set is consistent
assert_eq!(rln.tree.leaves_set(), no_of_leaves);
// We get the root of the tree obtained adding leaves in batch
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_root(&mut buffer).unwrap();
let (root_batch, _) = bytes_le_to_fr(&buffer.into_inner());
assert_eq!(root_single, root_batch);
// We now delete all leaves set and check if the root corresponds to the empty tree root
// delete calls over indexes higher than no_of_leaves are ignored and will not increase self.tree.next_index
for i in 0..no_of_leaves {
rln.delete_leaf(i).unwrap();
}
// We check if number of leaves set is consistent
assert_eq!(rln.tree.leaves_set(), no_of_leaves);
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_root(&mut buffer).unwrap();
let (root_delete, _) = bytes_le_to_fr(&buffer.into_inner());
// We reset the tree to default
rln.set_tree(tree_height).unwrap();
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_root(&mut buffer).unwrap();
let (root_empty, _) = bytes_le_to_fr(&buffer.into_inner());
assert_eq!(root_delete, root_empty);
}
#[test]
// We test leaf setting with a custom index, to enable batch updates to the root
// Uses `set_leaves_from` to set leaves in a batch, from index `start_index`
fn test_leaf_setting_with_index() {
let tree_height = TEST_TREE_HEIGHT;
let no_of_leaves = 256;
// We generate a vector of random leaves
let mut leaves: Vec<Fr> = Vec::new();
let mut rng = thread_rng();
for _ in 0..no_of_leaves {
leaves.push(Fr::rand(&mut rng));
}
// set_index is the index from which we start setting leaves
// random number between 0..no_of_leaves
let set_index = rng.gen_range(0..no_of_leaves) as usize;
// We create a new tree
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
// We add leaves in a batch into the tree
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
rln.init_tree_with_leaves(&mut buffer).unwrap();
// We check if number of leaves set is consistent
assert_eq!(rln.tree.leaves_set(), no_of_leaves);
// We get the root of the tree obtained adding leaves in batch
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_root(&mut buffer).unwrap();
let (root_batch_with_init, _) = bytes_le_to_fr(&buffer.into_inner());
// `init_tree_with_leaves` resets the tree to the height it was initialized with, using `set_tree`
// We add leaves in a batch starting from index 0..set_index
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves[0..set_index]).unwrap());
rln.init_tree_with_leaves(&mut buffer).unwrap();
// We add the remaining leaves in a batch starting from index set_index
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves[set_index..]).unwrap());
rln.set_leaves_from(set_index, &mut buffer).unwrap();
// We check if number of leaves set is consistent
assert_eq!(rln.tree.leaves_set(), no_of_leaves);
// We get the root of the tree obtained adding leaves in batch
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_root(&mut buffer).unwrap();
let (root_batch_with_custom_index, _) = bytes_le_to_fr(&buffer.into_inner());
assert_eq!(root_batch_with_init, root_batch_with_custom_index);
// We reset the tree to default
rln.set_tree(tree_height).unwrap();
// We add leaves one by one using the internal index (new leaves go in the next available position)
for leaf in &leaves {
let mut buffer = Cursor::new(fr_to_bytes_le(&leaf));
rln.set_next_leaf(&mut buffer).unwrap();
}
// We check if the number of leaves set is consistent
assert_eq!(rln.tree.leaves_set(), no_of_leaves);
// We get the root of the tree obtained adding leaves using the internal index
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_root(&mut buffer).unwrap();
let (root_single_additions, _) = bytes_le_to_fr(&buffer.into_inner());
assert_eq!(root_batch_with_init, root_single_additions);
rln.flush().unwrap();
}
#[test]
// Tests the atomic_operation fn, which set_leaves_from uses internally
fn test_atomic_operation() {
let tree_height = TEST_TREE_HEIGHT;
let no_of_leaves = 256;
// We generate a vector of random leaves
let mut leaves: Vec<Fr> = Vec::new();
let mut rng = thread_rng();
for _ in 0..no_of_leaves {
leaves.push(Fr::rand(&mut rng));
}
// We create a new tree
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
// We add leaves in a batch into the tree
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
rln.init_tree_with_leaves(&mut buffer).unwrap();
// We check if number of leaves set is consistent
assert_eq!(rln.tree.leaves_set(), no_of_leaves);
// We get the root of the tree obtained adding leaves in batch
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_root(&mut buffer).unwrap();
let (root_after_insertion, _) = bytes_le_to_fr(&buffer.into_inner());
// We check if number of leaves set is consistent
assert_eq!(rln.tree.leaves_set(), no_of_leaves);
let last_leaf = leaves.last().unwrap();
let last_leaf_index = no_of_leaves - 1;
let indices = vec![last_leaf_index as u8];
let last_leaf = vec![*last_leaf];
let indices_buffer = Cursor::new(vec_u8_to_bytes_le(&indices).unwrap());
let leaves_buffer = Cursor::new(vec_fr_to_bytes_le(&last_leaf).unwrap());
rln.atomic_operation(last_leaf_index, leaves_buffer, indices_buffer)
.unwrap();
// We get the root of the tree obtained after a no-op
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_root(&mut buffer).unwrap();
let (root_after_noop, _) = bytes_le_to_fr(&buffer.into_inner());
assert_eq!(root_after_insertion, root_after_noop);
}
#[test]
fn test_atomic_operation_zero_indexed() {
// Test duplicated from https://github.com/waku-org/go-zerokit-rln/pull/12/files
let tree_height = TEST_TREE_HEIGHT;
let no_of_leaves = 256;
// We generate a vector of random leaves
let mut leaves: Vec<Fr> = Vec::new();
let mut rng = thread_rng();
for _ in 0..no_of_leaves {
leaves.push(Fr::rand(&mut rng));
}
// We create a new tree
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
// We add leaves in a batch into the tree
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
rln.init_tree_with_leaves(&mut buffer).unwrap();
// We check if number of leaves set is consistent
assert_eq!(rln.tree.leaves_set(), no_of_leaves);
// We get the root of the tree obtained adding leaves in batch
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_root(&mut buffer).unwrap();
let (root_after_insertion, _) = bytes_le_to_fr(&buffer.into_inner());
let zero_index = 0;
let indices = vec![zero_index as u8];
let zero_leaf: Vec<Fr> = vec![];
let indices_buffer = Cursor::new(vec_u8_to_bytes_le(&indices).unwrap());
let leaves_buffer = Cursor::new(vec_fr_to_bytes_le(&zero_leaf).unwrap());
rln.atomic_operation(0, leaves_buffer, indices_buffer)
.unwrap();
// We get the root of the tree obtained after a deletion
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_root(&mut buffer).unwrap();
let (root_after_deletion, _) = bytes_le_to_fr(&buffer.into_inner());
assert_ne!(root_after_insertion, root_after_deletion);
}
#[test]
fn test_atomic_operation_consistency() {
// Test duplicated from https://github.com/waku-org/go-zerokit-rln/pull/12/files
let tree_height = TEST_TREE_HEIGHT;
let no_of_leaves = 256;
// We generate a vector of random leaves
let mut leaves: Vec<Fr> = Vec::new();
let mut rng = thread_rng();
for _ in 0..no_of_leaves {
leaves.push(Fr::rand(&mut rng));
}
// We create a new tree
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
// We add leaves in a batch into the tree
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
rln.init_tree_with_leaves(&mut buffer).unwrap();
// We check if number of leaves set is consistent
assert_eq!(rln.tree.leaves_set(), no_of_leaves);
// We get the root of the tree obtained adding leaves in batch
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_root(&mut buffer).unwrap();
let (root_after_insertion, _) = bytes_le_to_fr(&buffer.into_inner());
let set_index = rng.gen_range(0..no_of_leaves) as usize;
let indices = vec![set_index as u8];
let zero_leaf: Vec<Fr> = vec![];
let indices_buffer = Cursor::new(vec_u8_to_bytes_le(&indices).unwrap());
let leaves_buffer = Cursor::new(vec_fr_to_bytes_le(&zero_leaf).unwrap());
rln.atomic_operation(0, leaves_buffer, indices_buffer)
.unwrap();
// We get the root of the tree obtained after a deletion
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_root(&mut buffer).unwrap();
let (root_after_deletion, _) = bytes_le_to_fr(&buffer.into_inner());
assert_ne!(root_after_insertion, root_after_deletion);
// We get the leaf
let mut output_buffer = Cursor::new(Vec::<u8>::new());
rln.get_leaf(set_index, &mut output_buffer).unwrap();
let (received_leaf, _) = bytes_le_to_fr(output_buffer.into_inner().as_ref());
assert_eq!(received_leaf, Fr::from(0));
}
#[allow(unused_must_use)]
#[test]
// This test checks if `set_leaves_from` returns an error when the index is out of bounds
fn test_set_leaves_bad_index() {
let tree_height = TEST_TREE_HEIGHT;
let no_of_leaves = 256;
// We generate a vector of random leaves
let mut leaves: Vec<Fr> = Vec::new();
let mut rng = thread_rng();
for _ in 0..no_of_leaves {
leaves.push(Fr::rand(&mut rng));
}
let bad_index = (1 << tree_height) - rng.gen_range(0..no_of_leaves) as usize;
// We create a new tree
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
// Get root of empty tree
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_root(&mut buffer).unwrap();
let (root_empty, _) = bytes_le_to_fr(&buffer.into_inner());
// We add leaves in a batch into the tree
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
rln.set_leaves_from(bad_index, &mut buffer)
.expect_err("Should throw an error");
// We check if number of leaves set is consistent
assert_eq!(rln.tree.leaves_set(), 0);
// Get the root of the tree
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_root(&mut buffer).unwrap();
let (root_after_bad_set, _) = bytes_le_to_fr(&buffer.into_inner());
assert_eq!(root_empty, root_after_bad_set);
}
fn fq_from_str(s: String) -> ark_bn254::Fq {
ark_bn254::Fq::from_str(&s).unwrap()
}
fn g1_from_str(g1: &[String]) -> ark_bn254::G1Affine {
let x = fq_from_str(g1[0].clone());
let y = fq_from_str(g1[1].clone());
let z = fq_from_str(g1[2].clone());
ark_bn254::G1Affine::from(ark_bn254::G1Projective::new(x, y, z))
}
fn g2_from_str(g2: &[Vec<String>]) -> ark_bn254::G2Affine {
let c0 = fq_from_str(g2[0][0].clone());
let c1 = fq_from_str(g2[0][1].clone());
let x = ark_bn254::Fq2::new(c0, c1);
let c0 = fq_from_str(g2[1][0].clone());
let c1 = fq_from_str(g2[1][1].clone());
let y = ark_bn254::Fq2::new(c0, c1);
let c0 = fq_from_str(g2[2][0].clone());
let c1 = fq_from_str(g2[2][1].clone());
let z = ark_bn254::Fq2::new(c0, c1);
ark_bn254::G2Affine::from(ark_bn254::G2Projective::new(x, y, z))
}
fn value_to_string_vec(value: &Value) -> Vec<String> {
value
.as_array()
.unwrap()
.into_iter()
.map(|val| val.as_str().unwrap().to_string())
.collect()
}
#[test]
fn test_groth16_proof_hardcoded() {
let tree_height = TEST_TREE_HEIGHT;
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let rln = RLN::new(tree_height, input_buffer).unwrap();
let valid_snarkjs_proof = json!({
"pi_a": [
"606446415626469993821291758185575230335423926365686267140465300918089871829",
"14881534001609371078663128199084130129622943308489025453376548677995646280161",
"1"
],
"pi_b": [
[
"18053812507994813734583839134426913715767914942522332114506614735770984570178",
"11219916332635123001710279198522635266707985651975761715977705052386984005181"
],
[
"17371289494006920912949790045699521359436706797224428511776122168520286372970",
"14038575727257298083893642903204723310279435927688342924358714639926373603890"
],
[
"1",
"0"
]
],
"pi_c": [
"17701377127561410274754535747274973758826089226897242202671882899370780845888",
"12608543716397255084418384146504333522628400182843246910626782513289789807030",
"1"
],
"protocol": "groth16",
"curve": "bn128"
});
let valid_ark_proof = ArkProof {
a: g1_from_str(&value_to_string_vec(&valid_snarkjs_proof["pi_a"])),
b: g2_from_str(
&valid_snarkjs_proof["pi_b"]
.as_array()
.unwrap()
.iter()
.map(|item| value_to_string_vec(item))
.collect::<Vec<Vec<String>>>(),
),
c: g1_from_str(&value_to_string_vec(&valid_snarkjs_proof["pi_c"])),
};
let valid_proof_values = RLNProofValues {
x: str_to_fr(
"20645213238265527935869146898028115621427162613172918400241870500502509785943",
10,
)
.unwrap(),
external_nullifier: str_to_fr(
"21074405743803627666274838159589343934394162804826017440941339048886754734203",
10,
)
.unwrap(),
y: str_to_fr(
"16401008481486069296141645075505218976370369489687327284155463920202585288271",
10,
)
.unwrap(),
root: str_to_fr(
"8502402278351299594663821509741133196466235670407051417832304486953898514733",
10,
)
.unwrap(),
nullifier: str_to_fr(
"9102791780887227194595604713537772536258726662792598131262022534710887343694",
10,
)
.unwrap(),
};
let verified = verify_proof(&rln.verification_key, &valid_ark_proof, &valid_proof_values);
assert!(verified.unwrap());
}
#[test]
// This test is similar to the one in lib, but uses only the public API
fn test_groth16_proof() {
let tree_height = TEST_TREE_HEIGHT;
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
// Note: we only test Groth16 proof generation, so we ignore setting the tree in the RLN object
let rln_witness = random_rln_witness(tree_height);
let proof_values = proof_values_from_witness(&rln_witness).unwrap();
// We compute a Groth16 proof
let mut input_buffer = Cursor::new(serialize_witness(&rln_witness).unwrap());
let mut output_buffer = Cursor::new(Vec::<u8>::new());
rln.prove(&mut input_buffer, &mut output_buffer).unwrap();
let serialized_proof = output_buffer.into_inner();
// Before checking public verify API, we check that the (deserialized) proof generated by prove is actually valid
let proof = ArkProof::deserialize_compressed(&mut Cursor::new(&serialized_proof)).unwrap();
let verified = verify_proof(&rln.verification_key, &proof, &proof_values);
// dbg!(verified.unwrap());
assert!(verified.unwrap());
// We prepare the input to the verify API, consisting of serialized_proof (compressed, 4*32 bytes) || serialized_proof_values (6*32 bytes)
let serialized_proof_values = serialize_proof_values(&proof_values);
let mut verify_data = Vec::<u8>::new();
verify_data.extend(&serialized_proof);
verify_data.extend(&serialized_proof_values);
let mut input_buffer = Cursor::new(verify_data);
// We verify the Groth16 proof against the provided proof values
let verified = rln.verify(&mut input_buffer).unwrap();
assert!(verified);
}
#[test]
fn test_rln_proof() {
let tree_height = TEST_TREE_HEIGHT;
let no_of_leaves = 256;
// We generate a vector of random leaves
let mut leaves: Vec<Fr> = Vec::new();
let mut rng = thread_rng();
for _ in 0..no_of_leaves {
let id_commitment = Fr::rand(&mut rng);
let rate_commitment = utils_poseidon_hash(&[id_commitment, Fr::from(100)]);
leaves.push(rate_commitment);
}
// We create a new RLN instance
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
// We add leaves in a batch into the tree
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
rln.init_tree_with_leaves(&mut buffer).unwrap();
// Generate identity pair
let (identity_secret_hash, id_commitment) = keygen();
// We set rate_commitment as the next leaf after storing its index
let identity_index = rln.tree.leaves_set();
let user_message_limit = Fr::from(65535);
let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit]);
let mut buffer = Cursor::new(fr_to_bytes_le(&rate_commitment));
rln.set_next_leaf(&mut buffer).unwrap();
// We generate a random signal
let mut rng = rand::thread_rng();
let signal: [u8; 32] = rng.gen();
// We generate a random epoch
let epoch = hash_to_field(b"test-epoch");
// We generate a random rln_identifier
let rln_identifier = hash_to_field(b"test-rln-identifier");
// We prepare input for generate_rln_proof API
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret_hash));
serialized.append(&mut normalize_usize(identity_index));
serialized.append(&mut fr_to_bytes_le(&user_message_limit));
serialized.append(&mut fr_to_bytes_le(&Fr::from(1)));
serialized.append(&mut fr_to_bytes_le(&utils_poseidon_hash(&[
epoch,
rln_identifier,
])));
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut signal.to_vec());
let mut input_buffer = Cursor::new(serialized);
let mut output_buffer = Cursor::new(Vec::<u8>::new());
rln.generate_rln_proof(&mut input_buffer, &mut output_buffer)
.unwrap();
// output_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> ]
let mut proof_data = output_buffer.into_inner();
// We prepare input for verify_rln_proof API
// input_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
// that is [ proof_data || signal_len<8> | signal<var> ]
proof_data.append(&mut normalize_usize(signal.len()));
proof_data.append(&mut signal.to_vec());
let mut input_buffer = Cursor::new(proof_data);
let verified = rln.verify_rln_proof(&mut input_buffer).unwrap();
assert!(verified);
}
#[test]
fn test_rln_with_witness() {
let tree_height = TEST_TREE_HEIGHT;
let no_of_leaves = 256;
// We generate a vector of random leaves
let mut leaves: Vec<Fr> = Vec::new();
let mut rng = thread_rng();
for _ in 0..no_of_leaves {
leaves.push(Fr::rand(&mut rng));
}
// We create a new RLN instance
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
// We add leaves in a batch into the tree
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
rln.init_tree_with_leaves(&mut buffer).unwrap();
// Generate identity pair
let (identity_secret_hash, id_commitment) = keygen();
// We set rate_commitment as the next leaf after storing its index
let identity_index = rln.tree.leaves_set();
let user_message_limit = Fr::from(100);
let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit]);
let mut buffer = Cursor::new(fr_to_bytes_le(&rate_commitment));
rln.set_next_leaf(&mut buffer).unwrap();
// We generate a random signal
let mut rng = rand::thread_rng();
let signal: [u8; 32] = rng.gen();
// We generate a random epoch
let epoch = hash_to_field(b"test-epoch");
// We generate a random rln_identifier
let rln_identifier = hash_to_field(b"test-rln-identifier");
// We prepare input for generate_rln_proof API
// input_data is [ identity_secret<32> | id_index<8> | user_message_limit<32> | message_id<32> | external_nullifier<32> | signal_len<8> | signal<var> ]
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret_hash));
serialized.append(&mut normalize_usize(identity_index));
serialized.append(&mut fr_to_bytes_le(&user_message_limit));
serialized.append(&mut fr_to_bytes_le(&Fr::from(1)));
serialized.append(&mut fr_to_bytes_le(&utils_poseidon_hash(&[
epoch,
rln_identifier,
])));
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut signal.to_vec());
let mut input_buffer = Cursor::new(serialized);
// We read the proof input, convert it into an RLN witness and serialize it
let mut witness_byte: Vec<u8> = Vec::new();
input_buffer.read_to_end(&mut witness_byte).unwrap();
let (rln_witness, _) = proof_inputs_to_rln_witness(&mut rln.tree, &witness_byte).unwrap();
let serialized_witness = serialize_witness(&rln_witness).unwrap();
// Calculate witness outside zerokit (simulating what JS is doing)
let inputs = inputs_for_witness_calculation(&rln_witness)
.unwrap()
.into_iter()
.map(|(name, values)| (name.to_string(), values));
let calculated_witness = rln
.witness_calculator
.lock()
.expect("witness_calculator mutex should not get poisoned")
.calculate_witness_element::<Curve, _>(inputs, false)
.map_err(ProofError::WitnessError)
.unwrap();
let calculated_witness_vec: Vec<BigInt> = calculated_witness
.into_iter()
.map(|v| to_bigint(&v).unwrap())
.collect();
// Generating the proof
let mut output_buffer = Cursor::new(Vec::<u8>::new());
rln.generate_rln_proof_with_witness(
calculated_witness_vec,
serialized_witness,
&mut output_buffer,
)
.unwrap();
// output_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> ]
let mut proof_data = output_buffer.into_inner();
// We prepare input for verify_rln_proof API
// input_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
// that is [ proof_data || signal_len<8> | signal<var> ]
proof_data.append(&mut normalize_usize(signal.len()));
proof_data.append(&mut signal.to_vec());
let mut input_buffer = Cursor::new(proof_data);
let verified = rln.verify_rln_proof(&mut input_buffer).unwrap();
assert!(verified);
}
#[test]
fn proof_verification_with_roots() {
// The first part is similar to test_rln_with_witness
let tree_height = TEST_TREE_HEIGHT;
let no_of_leaves = 256;
// We generate a vector of random leaves
let mut leaves: Vec<Fr> = Vec::new();
let mut rng = thread_rng();
for _ in 0..no_of_leaves {
leaves.push(Fr::rand(&mut rng));
}
// We create a new RLN instance
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
// We add leaves in a batch into the tree
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
rln.init_tree_with_leaves(&mut buffer).unwrap();
// Generate identity pair
let (identity_secret_hash, id_commitment) = keygen();
// We set rate_commitment as the next leaf after storing its index
let identity_index = rln.tree.leaves_set();
let user_message_limit = Fr::from(100);
let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit]);
let mut buffer = Cursor::new(fr_to_bytes_le(&rate_commitment));
rln.set_next_leaf(&mut buffer).unwrap();
// We generate a random signal
let mut rng = thread_rng();
let signal: [u8; 32] = rng.gen();
// We generate a random epoch
let epoch = hash_to_field(b"test-epoch");
// We generate a random rln_identifier
let rln_identifier = hash_to_field(b"test-rln-identifier");
let external_nullifier = utils_poseidon_hash(&[epoch, rln_identifier]);
// We prepare input for generate_rln_proof API
// input_data is [ identity_secret<32> | id_index<8> | user_message_limit<32> | message_id<32> | external_nullifier<32> | signal_len<8> | signal<var> ]
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret_hash));
serialized.append(&mut normalize_usize(identity_index));
serialized.append(&mut fr_to_bytes_le(&user_message_limit));
serialized.append(&mut fr_to_bytes_le(&Fr::from(1)));
serialized.append(&mut fr_to_bytes_le(&external_nullifier));
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut signal.to_vec());
let mut input_buffer = Cursor::new(serialized);
let mut output_buffer = Cursor::new(Vec::<u8>::new());
rln.generate_rln_proof(&mut input_buffer, &mut output_buffer)
.unwrap();
// output_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> ]
let mut proof_data = output_buffer.into_inner();
// We prepare input for verify_rln_proof API
// input_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
// that is [ proof_data || signal_len<8> | signal<var> ]
proof_data.append(&mut normalize_usize(signal.len()));
proof_data.append(&mut signal.to_vec());
let input_buffer = Cursor::new(proof_data);
// If no roots are provided, root validation is skipped and, if the remaining proof values are valid, the proof will be correctly verified
let mut roots_serialized: Vec<u8> = Vec::new();
let mut roots_buffer = Cursor::new(roots_serialized.clone());
let verified = rln
.verify_with_roots(&mut input_buffer.clone(), &mut roots_buffer)
.unwrap();
assert!(verified);
// We serialize some random values into the roots buffer and check that the proof is not verified, since the buffer does not contain the root the proof refers to
for _ in 0..5 {
roots_serialized.append(&mut fr_to_bytes_le(&Fr::rand(&mut rng)));
}
roots_buffer = Cursor::new(roots_serialized.clone());
let verified = rln
.verify_with_roots(&mut input_buffer.clone(), &mut roots_buffer)
.unwrap();
assert_eq!(verified, false);
// We get the current root of the tree
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_root(&mut buffer).unwrap();
let (root, _) = bytes_le_to_fr(&buffer.into_inner());
// We add the real root and check that the proof is now verified
roots_serialized.append(&mut fr_to_bytes_le(&root));
roots_buffer = Cursor::new(roots_serialized.clone());
let verified = rln
.verify_with_roots(&mut input_buffer.clone(), &mut roots_buffer)
.unwrap();
assert!(verified);
}
#[test]
fn test_recover_id_secret() {
let tree_height = TEST_TREE_HEIGHT;
// We create a new RLN instance
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
// Generate identity pair
let (identity_secret_hash, id_commitment) = keygen();
let user_message_limit = Fr::from(100);
let message_id = Fr::from(0);
let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit]);
// We set rate_commitment as the next leaf after storing its index
let identity_index = rln.tree.leaves_set();
let mut buffer = Cursor::new(fr_to_bytes_le(&rate_commitment));
rln.set_next_leaf(&mut buffer).unwrap();
// We generate two random signals
let mut rng = rand::thread_rng();
let signal1: [u8; 32] = rng.gen();
let signal2: [u8; 32] = rng.gen();
// We generate a random epoch
let epoch = hash_to_field(b"test-epoch");
// We generate a random rln_identifier
let rln_identifier = hash_to_field(b"test-rln-identifier");
let external_nullifier = utils_poseidon_hash(&[epoch, rln_identifier]);
// We generate two proofs using the same epoch but different signals.
// We prepare input for generate_rln_proof API
let mut serialized1: Vec<u8> = Vec::new();
serialized1.append(&mut fr_to_bytes_le(&identity_secret_hash));
serialized1.append(&mut normalize_usize(identity_index));
serialized1.append(&mut fr_to_bytes_le(&user_message_limit));
serialized1.append(&mut fr_to_bytes_le(&message_id));
serialized1.append(&mut fr_to_bytes_le(&external_nullifier));
// The first part is the same for both proof inputs, so we clone it
let mut serialized2 = serialized1.clone();
// We attach the first signal to the first proof input
serialized1.append(&mut normalize_usize(signal1.len()));
serialized1.append(&mut signal1.to_vec());
// We attach the second signal to the second proof input
serialized2.append(&mut normalize_usize(signal2.len()));
serialized2.append(&mut signal2.to_vec());
// We generate the first proof
let mut input_buffer = Cursor::new(serialized1);
let mut output_buffer = Cursor::new(Vec::<u8>::new());
rln.generate_rln_proof(&mut input_buffer, &mut output_buffer)
.unwrap();
let proof_data_1 = output_buffer.into_inner();
// We generate the second proof
let mut input_buffer = Cursor::new(serialized2);
let mut output_buffer = Cursor::new(Vec::<u8>::new());
rln.generate_rln_proof(&mut input_buffer, &mut output_buffer)
.unwrap();
let proof_data_2 = output_buffer.into_inner();
let mut input_proof_data_1 = Cursor::new(proof_data_1.clone());
let mut input_proof_data_2 = Cursor::new(proof_data_2);
let mut output_buffer = Cursor::new(Vec::<u8>::new());
rln.recover_id_secret(
&mut input_proof_data_1,
&mut input_proof_data_2,
&mut output_buffer,
)
.unwrap();
let serialized_identity_secret_hash = output_buffer.into_inner();
// We ensure that a non-empty value is written to output_buffer
assert!(!serialized_identity_secret_hash.is_empty());
// We check if the recovered identity secret hash corresponds to the original one
let (recovered_identity_secret_hash, _) = bytes_le_to_fr(&serialized_identity_secret_hash);
assert_eq!(recovered_identity_secret_hash, identity_secret_hash);
// We now test that computing identity_secret_hash is unsuccessful if shares computed from two different identity secret hashes but within the same epoch are passed
// We generate a new identity pair
let (identity_secret_hash_new, id_commitment_new) = keygen();
let rate_commitment_new = utils_poseidon_hash(&[id_commitment_new, user_message_limit]);
// We add it to the tree
let identity_index_new = rln.tree.leaves_set();
let mut buffer = Cursor::new(fr_to_bytes_le(&rate_commitment_new));
rln.set_next_leaf(&mut buffer).unwrap();
// We generate a random signal
let signal3: [u8; 32] = rng.gen();
// We prepare proof input. Note that epoch is the same as before
let mut serialized3: Vec<u8> = Vec::new();
serialized3.append(&mut fr_to_bytes_le(&identity_secret_hash_new));
serialized3.append(&mut normalize_usize(identity_index_new));
serialized3.append(&mut fr_to_bytes_le(&user_message_limit));
serialized3.append(&mut fr_to_bytes_le(&message_id));
serialized3.append(&mut fr_to_bytes_le(&external_nullifier));
serialized3.append(&mut normalize_usize(signal3.len()));
serialized3.append(&mut signal3.to_vec());
// We generate the proof
let mut input_buffer = Cursor::new(serialized3);
let mut output_buffer = Cursor::new(Vec::<u8>::new());
rln.generate_rln_proof(&mut input_buffer, &mut output_buffer)
.unwrap();
let proof_data_3 = output_buffer.into_inner();
// We attempt to recover the secret using share1 (coming from identity_secret_hash) and share3 (coming from identity_secret_hash_new)
let mut input_proof_data_1 = Cursor::new(proof_data_1.clone());
let mut input_proof_data_3 = Cursor::new(proof_data_3);
let mut output_buffer = Cursor::new(Vec::<u8>::new());
rln.recover_id_secret(
&mut input_proof_data_1,
&mut input_proof_data_3,
&mut output_buffer,
)
.unwrap();
let serialized_identity_secret_hash = output_buffer.into_inner();
let (recovered_identity_secret_hash_new, _) = bytes_le_to_fr(&serialized_identity_secret_hash);
// We ensure that the recovered secret does not match either of the
// secrets used in proof generation
assert_ne!(recovered_identity_secret_hash_new, identity_secret_hash_new);
}
#[test]
fn test_get_leaf() {
// We generate a random tree
let tree_height = 10;
let mut rng = thread_rng();
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
// We generate a random leaf
let leaf = Fr::rand(&mut rng);
// We generate a random index
let index = rng.gen_range(0..rln.tree.capacity());
// We add the leaf to the tree
let mut buffer = Cursor::new(fr_to_bytes_le(&leaf));
rln.set_leaf(index, &mut buffer).unwrap();
// We get the leaf
let mut output_buffer = Cursor::new(Vec::<u8>::new());
rln.get_leaf(index, &mut output_buffer).unwrap();
// We ensure that the leaf is the same as the one we added
let (received_leaf, _) = bytes_le_to_fr(output_buffer.into_inner().as_ref());
assert_eq!(received_leaf, leaf);
}
#[test]
fn test_metadata() {
let tree_height = TEST_TREE_HEIGHT;
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
let arbitrary_metadata: &[u8] = b"block_number:200000";
rln.set_metadata(arbitrary_metadata).unwrap();
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_metadata(&mut buffer).unwrap();
let received_metadata = buffer.into_inner();
assert_eq!(arbitrary_metadata, received_metadata);
}
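
Several of the tests above assemble the input for generate_rln_proof by hand. A hedged sketch of a helper that builds the same buffer is shown below; the helper is hypothetical and not part of this changeset, and it assumes the fr_to_bytes_le and normalize_usize utilities imported at the top of the file:

// Hypothetical helper mirroring the input layout used in the tests above:
// [ identity_secret<32> | id_index<8> | user_message_limit<32> |
//   message_id<32> | external_nullifier<32> | signal_len<8> | signal<var> ]
fn serialize_proof_input(
    identity_secret_hash: &Fr,
    identity_index: usize,
    user_message_limit: &Fr,
    message_id: &Fr,
    external_nullifier: &Fr,
    signal: &[u8],
) -> Vec<u8> {
    let mut serialized: Vec<u8> = Vec::new();
    serialized.append(&mut fr_to_bytes_le(identity_secret_hash));
    serialized.append(&mut normalize_usize(identity_index));
    serialized.append(&mut fr_to_bytes_le(user_message_limit));
    serialized.append(&mut fr_to_bytes_le(message_id));
    serialized.append(&mut fr_to_bytes_le(external_nullifier));
    serialized.append(&mut normalize_usize(signal.len()));
    serialized.extend_from_slice(signal);
    serialized
}

On why test_recover_id_secret works: each RLN proof publishes a share, a point (share_x, share_y) on a degree-1 polynomial whose constant term is the identity secret. Two shares produced from the same secret under the same external nullifier (and, in these tests, the same message id) lie on the same line, so the constant term can be interpolated. A minimal sketch of that arithmetic, again a hypothetical helper rather than part of the diff (field division on Fr is provided by arkworks):

// Recover the constant term a0 of y = a0 + a1 * x from two distinct points.
// For RLN shares, a0 is the identity secret; x1 must differ from x2.
fn recover_identity_secret(x1: Fr, y1: Fr, x2: Fr, y2: Fr) -> Fr {
    let a1 = (y2 - y1) / (x2 - x1); // slope of the share line
    y1 - a1 * x1 // constant term, i.e. the identity secret
}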

View File

@@ -107,6 +107,8 @@ mod test {
let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
let path_elements = merkle_proof.get_path_elements();
dbg!(&path_elements);
dbg!(poseidon_hash(&[rate_commitment, Fr::from(0)]));
let identity_path_index = merkle_proof.get_path_index();
// We check correct computation of the path and indexes
@@ -257,6 +259,118 @@ mod test {
assert!(verified.unwrap());
}
#[test]
fn test_compute_root_single_insertion() {
let tree_height = TEST_TREE_HEIGHT;
let leaf_index = 0;
let rate_commitment = str_to_fr(
"0x2A09A9FD93C590C26B91EFFBB2499F07E8F7AA12E2B4940A3AED2411CB65E11C",
16,
)
.unwrap();
let default_leaf = Fr::from(0);
let mut tree = PoseidonTree::new(
tree_height,
default_leaf,
ConfigOf::<PoseidonTree>::default(),
)
.unwrap();
tree.set(leaf_index, rate_commitment.into()).unwrap();
assert_eq!(
tree.root().to_string(),
"7919895337495550471953660523154055129542864206434083474237224229170626792564"
);
}
#[test]
fn test_compute_root_multiple_insertions() {
let tree_height = TEST_TREE_HEIGHT;
let start_index = 0;
let rate_commitments: Vec<Fr> = [
BigInt([
6344960399222479404,
8873735028955887118,
7344916015079734877,
3049769223023031486,
]),
BigInt([
5712098114927176582,
8940737845291386850,
13760702216785496874,
2705829225787818087,
]),
BigInt([
11095489423361569757,
3600059334404558726,
2596276295120316067,
1747990648971046346,
]),
BigInt([
6042348329893423557,
18258910608249868782,
15808282831752017379,
431669247253051424,
]),
BigInt([
10095207707778447201,
5682738389371124904,
13211310082780638286,
1315201582035914269,
]),
BigInt([
17025532269492512967,
1150892318682047614,
9382150527271933425,
3232654496558305327,
]),
BigInt([
12575250814731208081,
3588033008530583836,
14988210865591309718,
1882695786084137797,
]),
BigInt([
2907978739955320703,
8716018548752635030,
10462674785957232325,
2943370953425792650,
]),
BigInt([
5234706529109067247,
11231622334196983240,
1886386083208828393,
1690607978854820878,
]),
BigInt([
12359804935986041669,
10294673542421867784,
11783956311711833641,
2759017549080634703,
]),
]
.into_iter()
.map(|x| Fr::from(x))
.collect();
let default_leaf = Fr::from(0);
let mut tree = PoseidonTree::new(
tree_height,
default_leaf,
ConfigOf::<PoseidonTree>::default(),
)
.unwrap();
tree.set_range(start_index, rate_commitments).unwrap();
assert_eq!(
tree.root().to_string(),
"5210724218081541877101688952118136930297124697603087561558225712176057209122"
);
}
#[test]
// We test RLN proof generation and verification
fn test_end_to_end() {
@@ -332,6 +446,24 @@ mod test {
assert_eq!(proof_values, deser);
}
#[test]
fn test_poseidon_hash() {
let inputs = &[
str_to_fr(
"0x2A09A9FD93C590C26B91EFFBB2499F07E8F7AA12E2B4940A3AED2411CB65E11C",
16,
)
.unwrap(),
Fr::from(0),
];
let hash = poseidon_hash(inputs);
assert_eq!(
hash.to_string(),
"13164376930590487041313497514223288845711140604177161029957349518915056324115"
);
}
#[test]
// Tests seeded keygen
// Note that hardcoded values are only valid for Bn254

View File

@@ -17,25 +17,25 @@ ark-ec = { version = "0.3.0", default-features = false, features = ["parallel"]
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", features = ["parallel"] }
ark-relations = { version = "0.3.0", default-features = false }
ark-std = { version = "0.3.0", default-features = false, features = ["parallel"] }
color-eyre = "0.6.1"
once_cell = "1.8"
rand = "0.8.4"
color-eyre = "0.6.2"
once_cell = "1.17.1"
rand = "0.8.5"
semaphore = { git = "https://github.com/worldcoin/semaphore-rs", rev = "ee658c2"}
ethers-core = { version = "2.0.8", default-features = false }
ruint = { version = "1.2.0", features = [ "serde", "num-bigint", "ark-ff" ] }
ethers-core = { version = "2.0.10", default-features = false }
ruint = { version = "1.10.0", features = [ "serde", "num-bigint", "ark-ff" ] }
serde = "1.0"
thiserror = "1.0.0"
wasmer = { version = "2.0" }
thiserror = "1.0.39"
wasmer = { version = "2.3" }
[dev-dependencies]
rand_chacha = "0.3.1"
serde_json = "1.0.79"
serde_json = "1.0.96"
[build-dependencies]
color-eyre = "0.6.1"
wasmer = { version = "2.0" }
wasmer-engine-dylib = { version = "2.2.1", optional = true }
wasmer-compiler-cranelift = { version = "3.1.1", optional = true }
color-eyre = "0.6.2"
wasmer = { version = "2.3" }
wasmer-engine-dylib = { version = "2.3.0", optional = true }
wasmer-compiler-cranelift = { version = "3.3.0", optional = true }
[profile.release]
codegen-units = 1

View File

@@ -13,6 +13,7 @@
//! * Disk-based storage backend (using mmapped files should be easy)
//! * Implement serialization for tree and Merkle proof
use std::fmt::Debug;
use std::str::FromStr;
use color_eyre::Result;
@@ -21,7 +22,7 @@ use color_eyre::Result;
/// and the hash function used to initialize a Merkle Tree implementation
pub trait Hasher {
/// Type of the leaf and tree node
type Fr: Clone + Copy + Eq;
type Fr: Clone + Copy + Eq + Debug + ToString;
/// Returns the default tree leaf
fn default_leaf() -> Self::Fr;
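
The hunk above strengthens the bound on the hasher's field type to include Debug + ToString, which the logging added to the tree implementation below relies on. A minimal sketch of a toy implementation satisfying the new bound, assuming default_leaf and hash are the only required items (the hash signature is inferred from the H::hash(&[l, r]) call sites further down); the type name is hypothetical:

// Toy hasher: u64 is Clone + Copy + Eq + Debug + ToString, so it satisfies
// the strengthened `type Fr` bound. Illustration only, not a real hash.
struct XorHasher;

impl Hasher for XorHasher {
    type Fr = u64;

    fn default_leaf() -> Self::Fr {
        0
    }

    fn hash(inputs: &[Self::Fr]) -> Self::Fr {
        inputs.iter().fold(0, |acc, x| acc ^ x)
    }
}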

View File

@@ -113,6 +113,8 @@ where
if index >= self.capacity() {
return Err(Report::msg("index exceeds set size"));
}
let log = format!("inserting {} at {}[{index}]", leaf.to_string(), self.depth);
dbg!(log);
self.nodes.insert((self.depth, index), leaf);
self.recalculate_from(index)?;
self.next_index = max(self.next_index, index + 1);
@@ -192,6 +194,7 @@ where
if index >= self.capacity() {
return Err(Report::msg("index exceeds set size"));
}
dbg!("yomama");
let mut witness = Vec::<(H::Fr, u8)>::with_capacity(self.depth);
let mut i = index;
let mut depth = self.depth;
@@ -246,6 +249,13 @@ where
.nodes
.get(&(depth, index))
.unwrap_or_else(|| &self.cached_nodes[depth]);
let log = format!(
"depth: {}, index: {}, node at depth[index]: {}",
depth,
index,
&node.to_string()
);
dbg!(log);
node
}
@@ -255,10 +265,14 @@ where
fn hash_couple(&mut self, depth: usize, index: usize) -> H::Fr {
let b = index & !1;
H::hash(&[self.get_node(depth, b), self.get_node(depth, b + 1)])
let l = self.get_node(depth, b);
let r = self.get_node(depth, b + 1);
dbg!(l.to_string(), r.to_string());
H::hash(&[l, r])
}
fn recalculate_from(&mut self, index: usize) -> Result<()> {
dbg!("running recalc from");
let mut i = index;
let mut depth = self.depth;
loop {
@@ -266,6 +280,8 @@ where
i >>= 1;
depth -= 1;
self.nodes.insert((depth, i), h);
let log = format!("inserting {} at {depth}[{i}]", h.to_string());
dbg!(log);
if depth == 0 {
break;
}
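
The logging added in this file traces hash_couple and recalculate_from, which share the same (depth, index) arithmetic: a node is hashed with its sibling, the pair starting at index & !1, and the result is written to the parent at (depth - 1, index >> 1). A small standalone sketch of that arithmetic, not part of the diff:

// Standalone illustration of the index arithmetic used above.
fn sibling_pair_start(index: usize) -> usize {
    index & !1 // clear the lowest bit: 5 -> 4, 4 -> 4
}

fn parent(depth: usize, index: usize) -> (usize, usize) {
    (depth - 1, index >> 1) // one level up, at half the index
}

fn main() {
    assert_eq!(sibling_pair_start(5), 4);
    assert_eq!(sibling_pair_start(4), 4);
    assert_eq!(parent(3, 5), (2, 2));
}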