Mirror of https://github.com/vacp2p/zerokit.git (synced 2026-01-09 13:47:58 -05:00)

Compare commits: v0.4.4...zerokit_ut (8 commits)

| Author | SHA1 | Date |
|---|---|---|
|  | 31331f8a93 |  |
|  | 820240d8c0 |  |
|  | fe2b224981 |  |
|  | d3d85c3e3c |  |
|  | 0005b1d61f |  |
|  | 4931b25237 |  |
|  | 652cc3647e |  |
|  | 51939be4a8 |  |
2	.github/workflows/ci.yml (vendored)

@@ -123,8 +123,6 @@ jobs:
    steps:
      - name: Checkout sources
        uses: actions/checkout@v3
        with:
          submodules: true
      - uses: Swatinem/rust-cache@v2
      - uses: boa-dev/criterion-compare-action@v3
        with:
3	.gitmodules (vendored)

@@ -1,3 +0,0 @@
-[submodule "mopro"]
-	path = mopro
-	url = https://github.com/zkmopro/mopro.git
59	Cargo.lock (generated)

@@ -119,33 +119,6 @@ name = "ark-circom"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "295bb8e275f3e211b36a822469ba88deb028ecb3d7fe8684102598a9158a7350"
dependencies = [
 "ark-bn254",
 "ark-crypto-primitives",
 "ark-ec",
 "ark-ff",
 "ark-groth16",
 "ark-poly",
 "ark-relations",
 "ark-serialize",
 "ark-std",
 "byteorder",
 "cfg-if",
 "color-eyre",
 "criterion 0.3.6",
 "fnv",
 "hex",
 "num",
 "num-bigint",
 "num-traits",
 "thiserror",
 "wasmer",
]

[[package]]
name = "ark-circom"
version = "0.1.0"
source = "git+https://github.com/vimwitch/circom-compat.git#21c6d43132c062364c270147e876dbc00d505a1c"
dependencies = [
 "ark-bn254",
 "ark-crypto-primitives",

@@ -342,9 +315,11 @@ dependencies = [
[[package]]
name = "ark-zkey"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2974c9f59c1a071db6753ffa7848dbed39746b4e0b431f9c5709553b9433f989"
dependencies = [
 "ark-bn254",
- "ark-circom 0.1.0 (git+https://github.com/vimwitch/circom-compat.git)",
+ "ark-circom",
 "ark-ec",
 "ark-ff",
 "ark-groth16",

@@ -2041,15 +2016,6 @@ dependencies = [
 "plotters-backend",
]

[[package]]
name = "pmtree"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e054322ee96d2ccd86cd47b87797166682e45f5d67571c48eaa864668d26f510"
dependencies = [
 "rayon",
]

[[package]]
name = "ppv-lite86"
version = "0.2.17"

@@ -2323,10 +2289,10 @@ dependencies = [

[[package]]
name = "rln"
-version = "0.4.3"
+version = "0.5.0"
dependencies = [
 "ark-bn254",
- "ark-circom 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ark-circom",
 "ark-ec",
 "ark-ff",
 "ark-groth16",

@@ -3003,6 +2969,15 @@ version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4dad5567ad0cf5b760e5665964bec1b47dfd077ba8a2544b513f3556d3d239a2"

[[package]]
name = "vacp2p_pmtree"
version = "2.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "632293f506ca10d412dbe1d427295317b4c794fa9ddfd66fbd2fa971de88c1f6"
dependencies = [
 "rayon",
]

[[package]]
name = "valuable"
version = "0.1.0"

@@ -3589,17 +3564,19 @@ dependencies = [

[[package]]
name = "zerokit_utils"
-version = "0.4.3"
+version = "0.5.0"
dependencies = [
 "ark-bn254",
 "ark-ff",
 "color-eyre",
 "criterion 0.4.0",
 "hex",
 "hex-literal",
 "lazy_static 1.4.0",
 "num-bigint",
 "num-traits",
 "pmtree",
 "serde",
 "sled",
 "tiny-keccak",
 "vacp2p_pmtree",
]
1	mopro

Submodule mopro deleted from 3c8d734336
@@ -1,6 +1,6 @@
[package]
name = "rln"
-version = "0.4.3"
+version = "0.5.0"
edition = "2021"
license = "MIT OR Apache-2.0"
description = "APIs to manage, compute and verify zkSNARK proofs and RLN primitives"

@@ -32,7 +32,7 @@ ark-serialize = { version = "=0.4.1", default-features = false }
ark-circom = { version = "=0.1.0", default-features = false, features = [
    "circom-2",
] }
-ark-zkey = { path = "../mopro/ark-zkey", optional = true, default-features = false }
+ark-zkey = { version = "0.1.0", optional = true, default-features = false }

# WASM
wasmer = { version = "=2.3.0", default-features = false }

@@ -51,7 +51,7 @@ once_cell = "=1.17.1"
rand = "=0.8.5"
rand_chacha = "=0.3.1"
tiny-keccak = { version = "=2.0.2", features = ["keccak"] }
-utils = { package = "zerokit_utils", version = "=0.4.3", path = "../utils/", default-features = false }
+utils = { package = "zerokit_utils", version = "=0.5.0", path = "../utils/", default-features = false }

# serialization

@@ -87,3 +87,7 @@ harness = false
[[bench]]
name = "circuit_loading_benchmark"
harness = false
+
+[[bench]]
+name = "poseidon_tree_benchmark"
+harness = false
@@ -3,6 +3,7 @@

This module provides APIs to manage, compute and verify [RLN](https://rfc.vac.dev/spec/32/) zkSNARK proofs and RLN primitives.

## Pre-requisites

### Install dependencies and clone repo

```sh
@@ -14,6 +15,7 @@ cd zerokit/rln

### Build and Test

To build and test, run the following commands within the module folder

```bash
cargo make build
cargo make test
@@ -21,11 +23,11 @@ cargo make test

### Compile ZK circuits

-The `rln` (https://github.com/privacy-scaling-explorations/rln) repository, which contains the RLN circuit implementation is a submodule of zerokit RLN.
+The `rln` (https://github.com/rate-limiting-nullifier/circom-rln) repository, which contains the RLN circuit implementation, is a submodule of zerokit RLN.

To compile the RLN circuit

```sh
# Update submodules
git submodule update --init --recursive

@@ -52,10 +54,9 @@ include "./rln-base.circom";
component main {public [x, epoch, rln_identifier ]} = RLN(N);
```

However, if `N` is too big, this might require a bigger Powers of Tau ceremony than the one hardcoded in `./scripts/build-circuits.sh`, which is `2^14`.
In such a case, we refer to the official [Circom documentation](https://docs.circom.io/getting-started/proving-circuits/#powers-of-tau) for instructions on how to run an appropriate Powers of Tau ceremony and Phase 2 in order to compile the desired circuit.

Currently, the `rln` module comes with 2 [pre-compiled](https://github.com/vacp2p/zerokit/tree/master/rln/resources) RLN circuits having Merkle trees of height `20` and `32`, respectively.

## Getting started

@@ -73,7 +74,7 @@ rln = { git = "https://github.com/vacp2p/zerokit" }

First, we need to create a RLN object for a chosen input Merkle tree size.

-Note that we need to pass to the RLN object constructor the path where the circuit (`rln.wasm`, built for the input tree size), the corresponding proving key (`rln_final.zkey`) and verification key (`verification_key.json`, optional) are found.
+Note that we need to pass to the RLN object constructor the path where the circuit (`rln.wasm`, built for the input tree size), the corresponding proving key (`rln_final.zkey` or `rln_final.arkzkey`) and verification key (`verification_key.json`, optional) are found.

In the following we will use [cursors](https://doc.rust-lang.org/std/io/struct.Cursor.html) as readers/writers for interfacing with RLN public APIs.

@@ -82,14 +83,14 @@ use rln::protocol::*;
use rln::public::*;
use std::io::Cursor;

// We set the RLN parameters:
// - the tree height;
-// - the circuit resource folder (requires a trailing "/").
+// - the tree config; if it is not defined, the default value will be set
let tree_height = 20;
-let resources = Cursor::new("../zerokit/rln/resources/tree_height_20/");
+let input = Cursor::new(json!({}).to_string());

// We create a new RLN instance
-let mut rln = RLN::new(tree_height, resources);
+let mut rln = RLN::new(tree_height, input);
```

### Generate an identity keypair

@@ -121,33 +122,43 @@ rln.set_leaf(id_index, &mut buffer).unwrap();

Note that when tree leaves are not explicitly set by the user (in this example, all those with index less than or greater than `10`), their value is set to a hardcoded default (all-`0` bytes in the current implementation).

-### Set epoch
+### Set external nullifier

-The epoch, sometimes referred to as _external nullifier_, is used to identify messages received in a certain time frame. It usually corresponds to the current UNIX time but can also be set to a random value or generated by a seed, provided that it corresponds to a field element.
+The `external nullifier` is composed of two parameters.

The first one is `epoch`, which is used to identify messages received in a certain time frame. It usually corresponds to the current UNIX time but can also be set to a random value or generated by a seed, provided that it corresponds to a field element.

The second one is `rln_identifier`, which is used to prevent an RLN ZK proof generated for one application from being re-used in another one.

```rust
// We generate the epoch from a date seed and ensure it is
// mapped to a field element by hashing-to-field its content
let epoch = hash_to_field(b"Today at noon, this year");
// We generate the rln_identifier from a seed and ensure it is
// mapped to a field element by hashing-to-field its content
let rln_identifier = hash_to_field(b"test-rln-identifier");

let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);
```

### Set signal

The signal is the message for which we are computing an RLN proof.

```rust
// We set our signal
let signal = b"RLN is awesome";
```

### Generate a RLN proof

We prepare the input to the proof generation routine.

-Input buffer is serialized as `[ identity_key | id_index | epoch | rln_identifier | user_message_limit | message_id | signal_len | signal ]`.
+The input buffer is serialized as `[ identity_key | id_index | external_nullifier | user_message_limit | message_id | signal_len | signal ]`.

```rust
// We prepare input to the proof generation routine
-let proof_input = prepare_prove_input(identity_secret_hash, id_index, epoch, rln_identifier, user_message_limit, message_id, signal);
+let proof_input = prepare_prove_input(identity_secret_hash, id_index, external_nullifier, signal);
```
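For reference, the same buffer can also be assembled by hand. This is only a sketch, following the field layout documented for `generate_rln_proof` later in this change set (which places `external_nullifier` after `message_id`); `fr_to_bytes_le` and `normalize_usize` are the serialization helpers from `rln::utils`, and `user_message_limit` is assumed to be the limit registered for this member:

```rust
// [ identity_secret<32> | id_index<8> | user_message_limit<32> | message_id<32> |
//   external_nullifier<32> | signal_len<8> | signal<var> ]
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret_hash));
serialized.append(&mut normalize_usize(id_index));
serialized.append(&mut fr_to_bytes_le(&user_message_limit));
serialized.append(&mut fr_to_bytes_le(&Fr::from(1))); // message_id must stay within user_message_limit
serialized.append(&mut fr_to_bytes_le(&external_nullifier));
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut signal.to_vec());
```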
We are now ready to generate a RLN ZK proof along with the _public outputs_ of the ZK circuit evaluation.

@@ -164,12 +175,11 @@ rln.generate_rln_proof(&mut in_buffer, &mut out_buffer)
let proof_data = out_buffer.into_inner();
```

-The byte vector `proof_data` is serialized as `[ zk-proof | tree_root | epoch | share_x | share_y | nullifier | rln_identifier ]`.
+The byte vector `proof_data` is serialized as `[ zk-proof | tree_root | external_nullifier | share_x | share_y | nullifier ]`.
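If the individual public outputs are needed, they can be read back from `proof_data` after the 128-byte compressed proof. A minimal sketch, assuming `deserialize_proof_values` from `rln::protocol` (the helper used internally by `recover_id_secret` later in this change set):

```rust
// proof_data = [ zk-proof<128> | tree_root<32> | external_nullifier<32> | share_x<32> | share_y<32> | nullifier<32> ]
// Skip the compressed Groth16 proof, then parse the public values.
let (proof_values, _read) = deserialize_proof_values(&proof_data[128..]);
let proof_external_nullifier = proof_values.external_nullifier;
```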
### Verify a RLN proof

We prepare the input to the proof verification routine.

The input buffer is serialized as `[ proof_data | signal_len | signal ]`, where `proof_data` is the output obtained from `generate_rln_proof`.
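The construction of `verify_data` falls outside the visible hunk below; as a minimal sketch, it can be assembled by appending the signal length and the signal to `proof_data` (`normalize_usize` from `rln::utils` provides the 8-byte little-endian length):

```rust
// [ proof_data | signal_len<8> | signal<var> ]
let mut verify_data = proof_data.clone();
verify_data.append(&mut normalize_usize(signal.len()));
verify_data.append(&mut signal.to_vec());
```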
@@ -182,17 +192,21 @@ let mut in_buffer = Cursor::new(verify_data);
let verified = rln.verify(&mut in_buffer).unwrap();
```

We check if the proof verification was successful:

```rust
// We ensure the proof is valid
assert!(verified);
```

## Get involved!

Zerokit RLN public and FFI APIs allow interaction with many more features than what is briefly showcased above.

We invite you to check our API documentation by running

```sh
cargo doc --no-deps
```

and to look at the unit tests for a hint on how to interface with and use them.
@@ -1,12 +1,11 @@
use criterion::{criterion_group, criterion_main, Criterion};
-use rln::circuit::TEST_RESOURCES_FOLDER;

// Depending on the key type (enabled by the `--features arkzkey` flag)
// the upload speed from the `rln_final.zkey` or `rln_final.arkzkey` file is calculated
pub fn key_load_benchmark(c: &mut Criterion) {
    c.bench_function("zkey::upload_from_folder", |b| {
        b.iter(|| {
-            let _ = rln::circuit::zkey_from_folder(TEST_RESOURCES_FOLDER);
+            let _ = rln::circuit::zkey_from_folder();
        })
    });
}
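Both key-loading paths can be exercised by running this benchmark with and without the feature flag; a sketch using plain cargo invocations (the bench target name comes from the `[[bench]]` entry in Cargo.toml above):

```sh
# default path: loads rln_final.zkey
cargo bench --bench circuit_loading_benchmark

# arkzkey path: loads rln_final.arkzkey
cargo bench --bench circuit_loading_benchmark --features arkzkey
```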
@@ -37,6 +37,19 @@ pub fn pmtree_benchmark(c: &mut Criterion) {
            tree.get(0).unwrap();
        })
    });

+    // Check the intermediate node getter, which requires additional computation of the subtree root index
+    c.bench_function("Pmtree::get_subtree_root", |b| {
+        b.iter(|| {
+            tree.get_subtree_root(1, 0).unwrap();
+        })
+    });
+
+    c.bench_function("Pmtree::get_empty_leaves_indices", |b| {
+        b.iter(|| {
+            tree.get_empty_leaves_indices();
+        })
+    });
}

criterion_group!(benches, pmtree_benchmark);
79	rln/benches/poseidon_tree_benchmark.rs (Normal file)

@@ -0,0 +1,79 @@
|
||||
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
|
||||
use rln::{
|
||||
circuit::{Fr, TEST_TREE_HEIGHT},
|
||||
hashers::PoseidonHash,
|
||||
};
|
||||
use utils::{FullMerkleTree, OptimalMerkleTree, ZerokitMerkleTree};
|
||||
|
||||
pub fn get_leaves(n: u32) -> Vec<Fr> {
|
||||
(0..n).map(|s| Fr::from(s)).collect()
|
||||
}
|
||||
|
||||
pub fn optimal_merkle_tree_poseidon_benchmark(c: &mut Criterion) {
|
||||
c.bench_function("OptimalMerkleTree::<Poseidon>::full_height_gen", |b| {
|
||||
b.iter(|| {
|
||||
OptimalMerkleTree::<PoseidonHash>::default(TEST_TREE_HEIGHT).unwrap();
|
||||
})
|
||||
});
|
||||
|
||||
let mut group = c.benchmark_group("Set");
|
||||
for &n in [1u32, 10, 100].iter() {
|
||||
let leaves = get_leaves(n);
|
||||
|
||||
let mut tree = OptimalMerkleTree::<PoseidonHash>::default(TEST_TREE_HEIGHT).unwrap();
|
||||
group.bench_function(
|
||||
BenchmarkId::new("OptimalMerkleTree::<Poseidon>::set", n),
|
||||
|b| {
|
||||
b.iter(|| {
|
||||
for (i, l) in leaves.iter().enumerate() {
|
||||
let _ = tree.set(i, *l);
|
||||
}
|
||||
})
|
||||
},
|
||||
);
|
||||
|
||||
group.bench_function(
|
||||
BenchmarkId::new("OptimalMerkleTree::<Poseidon>::set_range", n),
|
||||
|b| b.iter(|| tree.set_range(0, leaves.iter().cloned())),
|
||||
);
|
||||
}
|
||||
group.finish();
|
||||
}
|
||||
|
||||
pub fn full_merkle_tree_poseidon_benchmark(c: &mut Criterion) {
|
||||
c.bench_function("FullMerkleTree::<Poseidon>::full_height_gen", |b| {
|
||||
b.iter(|| {
|
||||
FullMerkleTree::<PoseidonHash>::default(TEST_TREE_HEIGHT).unwrap();
|
||||
})
|
||||
});
|
||||
|
||||
let mut group = c.benchmark_group("Set");
|
||||
for &n in [1u32, 10, 100].iter() {
|
||||
let leaves = get_leaves(n);
|
||||
|
||||
let mut tree = FullMerkleTree::<PoseidonHash>::default(TEST_TREE_HEIGHT).unwrap();
|
||||
group.bench_function(
|
||||
BenchmarkId::new("FullMerkleTree::<Poseidon>::set", n),
|
||||
|b| {
|
||||
b.iter(|| {
|
||||
for (i, l) in leaves.iter().enumerate() {
|
||||
let _ = tree.set(i, *l);
|
||||
}
|
||||
})
|
||||
},
|
||||
);
|
||||
|
||||
group.bench_function(
|
||||
BenchmarkId::new("FullMerkleTree::<Poseidon>::set_range", n),
|
||||
|b| b.iter(|| tree.set_range(0, leaves.iter().cloned())),
|
||||
);
|
||||
}
|
||||
group.finish();
|
||||
}
|
||||
|
||||
criterion_group!(
|
||||
benches,
|
||||
optimal_merkle_tree_poseidon_benchmark,
|
||||
full_merkle_tree_poseidon_benchmark
|
||||
);
|
||||
criterion_main!(benches);
|
||||
@@ -26,7 +26,7 @@ cfg_if! {
|
||||
cfg_if! {
|
||||
if #[cfg(feature = "arkzkey")] {
|
||||
use ark_zkey::read_arkzkey_from_bytes;
|
||||
const ARKZKEY_FILENAME: &str = "rln_final.arkzkey";
|
||||
const ARKZKEY_FILENAME: &str = "tree_height_20/rln_final.arkzkey";
|
||||
|
||||
} else {
|
||||
use std::io::Cursor;
|
||||
@@ -34,12 +34,11 @@ cfg_if! {
|
||||
}
|
||||
}
|
||||
|
||||
const ZKEY_FILENAME: &str = "rln_final.zkey";
|
||||
const VK_FILENAME: &str = "verification_key.json";
|
||||
const WASM_FILENAME: &str = "rln.wasm";
|
||||
const ZKEY_FILENAME: &str = "tree_height_20/rln_final.zkey";
|
||||
const VK_FILENAME: &str = "tree_height_20/verification_key.json";
|
||||
const WASM_FILENAME: &str = "tree_height_20/rln.wasm";
|
||||
|
||||
pub const TEST_TREE_HEIGHT: usize = 20;
|
||||
pub const TEST_RESOURCES_FOLDER: &str = "tree_height_20";
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
static RESOURCES_DIR: Dir<'_> = include_dir!("$CARGO_MANIFEST_DIR/resources");
|
||||
@@ -75,13 +74,11 @@ pub fn zkey_from_raw(zkey_data: &Vec<u8>) -> Result<(ProvingKey<Curve>, Constrai
|
||||
|
||||
// Loads the proving key
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub fn zkey_from_folder(
|
||||
resources_folder: &str,
|
||||
) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
|
||||
pub fn zkey_from_folder() -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
|
||||
#[cfg(feature = "arkzkey")]
|
||||
let zkey = RESOURCES_DIR.get_file(Path::new(resources_folder).join(ARKZKEY_FILENAME));
|
||||
let zkey = RESOURCES_DIR.get_file(Path::new(ARKZKEY_FILENAME));
|
||||
#[cfg(not(feature = "arkzkey"))]
|
||||
let zkey = RESOURCES_DIR.get_file(Path::new(resources_folder).join(ZKEY_FILENAME));
|
||||
let zkey = RESOURCES_DIR.get_file(Path::new(ZKEY_FILENAME));
|
||||
|
||||
if let Some(zkey) = zkey {
|
||||
let proving_key_and_matrices = match () {
|
||||
@@ -117,9 +114,9 @@ pub fn vk_from_raw(vk_data: &[u8], zkey_data: &Vec<u8>) -> Result<VerifyingKey<C
|
||||
|
||||
// Loads the verification key
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub fn vk_from_folder(resources_folder: &str) -> Result<VerifyingKey<Curve>> {
|
||||
let vk = RESOURCES_DIR.get_file(Path::new(resources_folder).join(VK_FILENAME));
|
||||
let zkey = RESOURCES_DIR.get_file(Path::new(resources_folder).join(ZKEY_FILENAME));
|
||||
pub fn vk_from_folder() -> Result<VerifyingKey<Curve>> {
|
||||
let vk = RESOURCES_DIR.get_file(Path::new(VK_FILENAME));
|
||||
let zkey = RESOURCES_DIR.get_file(Path::new(ZKEY_FILENAME));
|
||||
|
||||
let verifying_key: VerifyingKey<Curve>;
|
||||
if let Some(vk) = vk {
|
||||
@@ -128,7 +125,7 @@ pub fn vk_from_folder(resources_folder: &str) -> Result<VerifyingKey<Curve>> {
|
||||
))?)?;
|
||||
Ok(verifying_key)
|
||||
} else if let Some(_zkey) = zkey {
|
||||
let (proving_key, _matrices) = zkey_from_folder(resources_folder)?;
|
||||
let (proving_key, _matrices) = zkey_from_folder()?;
|
||||
verifying_key = proving_key.vk;
|
||||
Ok(verifying_key)
|
||||
} else {
|
||||
@@ -152,9 +149,9 @@ pub fn circom_from_raw(wasm_buffer: Vec<u8>) -> Result<&'static Mutex<WitnessCal
|
||||
|
||||
// Initializes the witness calculator
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub fn circom_from_folder(resources_folder: &str) -> Result<&'static Mutex<WitnessCalculator>> {
|
||||
pub fn circom_from_folder() -> Result<&'static Mutex<WitnessCalculator>> {
|
||||
// We read the wasm file
|
||||
let wasm = RESOURCES_DIR.get_file(Path::new(resources_folder).join(WASM_FILENAME));
|
||||
let wasm = RESOURCES_DIR.get_file(Path::new(WASM_FILENAME));
|
||||
|
||||
if let Some(wasm) = wasm {
|
||||
let wasm_buffer = wasm.contents();
|
||||
@@ -277,11 +274,8 @@ fn vk_from_vector(vk: &[u8]) -> Result<VerifyingKey<Curve>> {
|
||||
|
||||
// Checks verification key to be correct with respect to proving key
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub fn check_vk_from_zkey(
|
||||
resources_folder: &str,
|
||||
verifying_key: VerifyingKey<Curve>,
|
||||
) -> Result<()> {
|
||||
let (proving_key, _matrices) = zkey_from_folder(resources_folder)?;
|
||||
pub fn check_vk_from_zkey(verifying_key: VerifyingKey<Curve>) -> Result<()> {
|
||||
let (proving_key, _matrices) = zkey_from_folder()?;
|
||||
if proving_key.vk == verifying_key {
|
||||
Ok(())
|
||||
} else {
|
||||
|
||||
@@ -5,6 +5,7 @@ use std::str::FromStr;
|
||||
use color_eyre::{Report, Result};
|
||||
use serde_json::Value;
|
||||
|
||||
use utils::pmtree::tree::Key;
|
||||
use utils::pmtree::{Database, Hasher};
|
||||
use utils::*;
|
||||
|
||||
@@ -16,6 +17,9 @@ const METADATA_KEY: [u8; 8] = *b"metadata";
|
||||
|
||||
pub struct PmTree {
|
||||
tree: pmtree::MerkleTree<SledDB, PoseidonHash>,
|
||||
/// Tracks which leaves up to next_index are set:
/// 0 if the leaf is empty, 1 otherwise.
|
||||
cached_leaves_indices: Vec<u8>,
|
||||
// metadata that an application may use to store additional information
|
||||
metadata: Vec<u8>,
|
||||
}
|
||||
@@ -143,6 +147,7 @@ impl ZerokitMerkleTree for PmTree {
|
||||
|
||||
Ok(PmTree {
|
||||
tree,
|
||||
cached_leaves_indices: vec![0; 1 << depth],
|
||||
metadata: Vec::new(),
|
||||
})
|
||||
}
|
||||
@@ -155,7 +160,7 @@ impl ZerokitMerkleTree for PmTree {
|
||||
self.tree.capacity()
|
||||
}
|
||||
|
||||
fn leaves_set(&mut self) -> usize {
|
||||
fn leaves_set(&self) -> usize {
|
||||
self.tree.leaves_set()
|
||||
}
|
||||
|
||||
@@ -170,7 +175,9 @@ impl ZerokitMerkleTree for PmTree {
|
||||
fn set(&mut self, index: usize, leaf: FrOf<Self::Hasher>) -> Result<()> {
|
||||
self.tree
|
||||
.set(index, leaf)
|
||||
.map_err(|e| Report::msg(e.to_string()))
|
||||
.map_err(|e| Report::msg(e.to_string()))?;
|
||||
self.cached_leaves_indices[index] = 1;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn set_range<I: IntoIterator<Item = FrOf<Self::Hasher>>>(
|
||||
@@ -178,15 +185,51 @@ impl ZerokitMerkleTree for PmTree {
|
||||
start: usize,
|
||||
values: I,
|
||||
) -> Result<()> {
|
||||
let v = values.into_iter().collect::<Vec<_>>();
|
||||
self.tree
|
||||
.set_range(start, values)
|
||||
.map_err(|e| Report::msg(e.to_string()))
|
||||
.set_range(start, v.clone().into_iter())
|
||||
.map_err(|e| Report::msg(e.to_string()))?;
|
||||
for i in start..v.len() {
|
||||
self.cached_leaves_indices[i] = 1
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get(&self, index: usize) -> Result<FrOf<Self::Hasher>> {
|
||||
self.tree.get(index).map_err(|e| Report::msg(e.to_string()))
|
||||
}
|
||||
|
||||
fn get_subtree_root(&self, n: usize, index: usize) -> Result<FrOf<Self::Hasher>> {
|
||||
if n > self.depth() {
|
||||
return Err(Report::msg("level exceeds depth size"));
|
||||
}
|
||||
if index >= self.capacity() {
|
||||
return Err(Report::msg("index exceeds set size"));
|
||||
}
|
||||
if n == 0 {
|
||||
Ok(self.root())
|
||||
} else if n == self.depth() {
|
||||
self.get(index)
|
||||
} else {
|
||||
let node = self
|
||||
.tree
|
||||
.get_elem(Key::new(n, index >> (self.depth() - n)))
|
||||
.unwrap();
|
||||
Ok(node)
|
||||
}
|
||||
}
|
||||
|
||||
fn get_empty_leaves_indices(&self) -> Vec<usize> {
|
||||
let next_idx = self.leaves_set();
|
||||
self.cached_leaves_indices
|
||||
.iter()
|
||||
.take(next_idx)
|
||||
.enumerate()
|
||||
.filter(|&(_, &v)| v == 0u8)
|
||||
.map(|(idx, _)| idx)
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn override_range<I: IntoIterator<Item = FrOf<Self::Hasher>>, J: IntoIterator<Item = usize>>(
|
||||
&mut self,
|
||||
start: usize,
|
||||
@@ -201,7 +244,7 @@ impl ZerokitMerkleTree for PmTree {
|
||||
(0, 0) => Err(Report::msg("no leaves or indices to be removed")),
|
||||
(1, 0) => self.set(start, leaves[0]),
|
||||
(0, 1) => self.delete(indices[0]),
|
||||
(_, 0) => self.set_range_with_leaves(start, leaves),
|
||||
(_, 0) => self.set_range(start, leaves),
|
||||
(0, _) => self.remove_indices(&indices),
|
||||
(_, _) => self.remove_indices_and_set_leaves(start, leaves, &indices),
|
||||
}
|
||||
@@ -216,7 +259,9 @@ impl ZerokitMerkleTree for PmTree {
|
||||
fn delete(&mut self, index: usize) -> Result<()> {
|
||||
self.tree
|
||||
.delete(index)
|
||||
.map_err(|e| Report::msg(e.to_string()))
|
||||
.map_err(|e| Report::msg(e.to_string()))?;
|
||||
self.cached_leaves_indices[index] = 0;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn proof(&self, index: usize) -> Result<Self::Proof> {
|
||||
@@ -261,12 +306,6 @@ type PmTreeHasher = <PmTree as ZerokitMerkleTree>::Hasher;
|
||||
type FrOfPmTreeHasher = FrOf<PmTreeHasher>;
|
||||
|
||||
impl PmTree {
|
||||
fn set_range_with_leaves(&mut self, start: usize, leaves: Vec<FrOfPmTreeHasher>) -> Result<()> {
|
||||
self.tree
|
||||
.set_range(start, leaves)
|
||||
.map_err(|e| Report::msg(e.to_string()))
|
||||
}
|
||||
|
||||
fn remove_indices(&mut self, indices: &[usize]) -> Result<()> {
|
||||
let start = indices[0];
|
||||
let end = indices.last().unwrap() + 1;
|
||||
@@ -275,7 +314,12 @@ impl PmTree {
|
||||
|
||||
self.tree
|
||||
.set_range(start, new_leaves)
|
||||
.map_err(|e| Report::msg(e.to_string()))
|
||||
.map_err(|e| Report::msg(e.to_string()))?;
|
||||
|
||||
for i in start..end {
|
||||
self.cached_leaves_indices[i] = 0
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn remove_indices_and_set_leaves(
|
||||
@@ -301,8 +345,17 @@ impl PmTree {
|
||||
}
|
||||
|
||||
self.tree
|
||||
.set_range(min_index, set_values)
|
||||
.map_err(|e| Report::msg(e.to_string()))
|
||||
.set_range(start, set_values)
|
||||
.map_err(|e| Report::msg(e.to_string()))?;
|
||||
|
||||
for i in indices {
|
||||
self.cached_leaves_indices[*i] = 0;
|
||||
}
|
||||
|
||||
for i in start..(max_index - min_index) {
|
||||
self.cached_leaves_indices[i] = 1
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -4,11 +4,13 @@ use ark_circom::{CircomReduction, WitnessCalculator};
|
||||
use ark_groth16::{prepare_verifying_key, Groth16, Proof as ArkProof, ProvingKey, VerifyingKey};
|
||||
use ark_relations::r1cs::ConstraintMatrices;
|
||||
use ark_relations::r1cs::SynthesisError;
|
||||
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
|
||||
use ark_std::{rand::thread_rng, UniformRand};
|
||||
use color_eyre::{Report, Result};
|
||||
use num_bigint::BigInt;
|
||||
use rand::{Rng, SeedableRng};
|
||||
use rand_chacha::ChaCha20Rng;
|
||||
use serde::{Deserialize, Serialize};
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use std::sync::Mutex;
|
||||
#[cfg(debug_assertions)]
|
||||
@@ -29,14 +31,20 @@ use utils::{ZerokitMerkleProof, ZerokitMerkleTree};
|
||||
// RLN Witness data structure and utility functions
|
||||
///////////////////////////////////////////////////////
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
#[derive(Debug, PartialEq, Serialize, Deserialize)]
|
||||
pub struct RLNWitnessInput {
|
||||
#[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
|
||||
identity_secret: Fr,
|
||||
#[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
|
||||
user_message_limit: Fr,
|
||||
#[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
|
||||
message_id: Fr,
|
||||
#[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
|
||||
path_elements: Vec<Fr>,
|
||||
identity_path_index: Vec<u8>,
|
||||
#[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
|
||||
x: Fr,
|
||||
#[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
|
||||
external_nullifier: Fr,
|
||||
}
|
||||
|
||||
@@ -162,7 +170,7 @@ pub fn deserialize_witness(serialized: &[u8]) -> Result<(RLNWitnessInput, usize)
|
||||
|
||||
// This function deserializes input for kilic's rln generate_proof public API
|
||||
// https://github.com/kilic/rln/blob/7ac74183f8b69b399e3bc96c1ae8ab61c026dc43/src/public.rs#L148
|
||||
// input_data is [ identity_secret<32> | id_index<8> | user_message_limit<32> | message_id<32> | signal_len<8> | signal<var> ]
|
||||
// input_data is [ identity_secret<32> | id_index<8> | user_message_limit<32> | message_id<32> | external_nullifier<32> | signal_len<8> | signal<var> ]
|
||||
// return value is a rln witness populated according to this information
|
||||
pub fn proof_inputs_to_rln_witness(
|
||||
tree: &mut PoseidonTree,
|
||||
@@ -214,55 +222,6 @@ pub fn proof_inputs_to_rln_witness(
|
||||
))
|
||||
}
|
||||
|
||||
/// Returns `RLNWitnessInput` given a file with JSON serialized values.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// Returns an error if `message_id` is not within `user_message_limit`.
|
||||
pub fn rln_witness_from_json(input_json_str: &str) -> Result<RLNWitnessInput> {
|
||||
let input_json: serde_json::Value =
|
||||
serde_json::from_str(input_json_str).expect("JSON was not well-formatted");
|
||||
|
||||
let user_message_limit = str_to_fr(&input_json["userMessageLimit"].to_string(), 10)?;
|
||||
|
||||
let message_id = str_to_fr(&input_json["messageId"].to_string(), 10)?;
|
||||
|
||||
message_id_range_check(&message_id, &user_message_limit)?;
|
||||
|
||||
let identity_secret = str_to_fr(&input_json["identitySecret"].to_string(), 10)?;
|
||||
|
||||
let path_elements = input_json["pathElements"]
|
||||
.as_array()
|
||||
.ok_or(Report::msg("not an array"))?
|
||||
.iter()
|
||||
.map(|v| str_to_fr(&v.to_string(), 10))
|
||||
.collect::<Result<_>>()?;
|
||||
|
||||
let identity_path_index_array = input_json["identityPathIndex"]
|
||||
.as_array()
|
||||
.ok_or(Report::msg("not an array"))?;
|
||||
|
||||
let mut identity_path_index: Vec<u8> = vec![];
|
||||
|
||||
for v in identity_path_index_array {
|
||||
identity_path_index.push(v.as_u64().ok_or(Report::msg("not a u64 value"))? as u8);
|
||||
}
|
||||
|
||||
let x = str_to_fr(&input_json["x"].to_string(), 10)?;
|
||||
|
||||
let external_nullifier = str_to_fr(&input_json["externalNullifier"].to_string(), 10)?;
|
||||
|
||||
Ok(RLNWitnessInput {
|
||||
identity_secret,
|
||||
path_elements,
|
||||
identity_path_index,
|
||||
x,
|
||||
external_nullifier,
|
||||
user_message_limit,
|
||||
message_id,
|
||||
})
|
||||
}
|
||||
|
||||
/// Creates `RLNWitnessInput` from it's fields.
|
||||
///
|
||||
/// # Errors
|
||||
@@ -759,40 +718,49 @@ pub fn verify_proof(
|
||||
Ok(verified)
|
||||
}
|
||||
|
||||
/// Get CIRCOM JSON inputs
|
||||
// Auxiliary function for serializing Fr via ark-serialize when converting to JSON
|
||||
fn ark_se<S, A: CanonicalSerialize>(a: &A, s: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
let mut bytes = vec![];
|
||||
a.serialize_compressed(&mut bytes)
|
||||
.map_err(serde::ser::Error::custom)?;
|
||||
s.serialize_bytes(&bytes)
|
||||
}
|
||||
|
||||
// Auxiliary function for deserializing Fr via ark-serialize when converting from JSON
|
||||
fn ark_de<'de, D, A: CanonicalDeserialize>(data: D) -> Result<A, D::Error>
|
||||
where
|
||||
D: serde::de::Deserializer<'de>,
|
||||
{
|
||||
let s: Vec<u8> = serde::de::Deserialize::deserialize(data)?;
|
||||
let a = A::deserialize_compressed_unchecked(s.as_slice());
|
||||
a.map_err(serde::de::Error::custom)
|
||||
}
|
||||
|
||||
/// Converts a [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object to the corresponding JSON serialization.
|
||||
///
|
||||
/// Returns a JSON object containing the inputs necessary to calculate
|
||||
/// the witness with CIRCOM on javascript
|
||||
/// # Errors
|
||||
///
|
||||
/// Returns an error if `message_id` is not within `user_message_limit`.
|
||||
pub fn rln_witness_from_json(input_json: serde_json::Value) -> Result<RLNWitnessInput> {
|
||||
let rln_witness: RLNWitnessInput = serde_json::from_value(input_json).unwrap();
|
||||
message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;
|
||||
|
||||
Ok(rln_witness)
|
||||
}
|
||||
|
||||
/// Converts a JSON value into [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// Returns an error if `rln_witness.message_id` is not within `rln_witness.user_message_limit`.
|
||||
pub fn get_json_inputs(rln_witness: &RLNWitnessInput) -> Result<serde_json::Value> {
|
||||
pub fn rln_witness_to_json(rln_witness: &RLNWitnessInput) -> Result<serde_json::Value> {
|
||||
message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;
|
||||
|
||||
let mut path_elements = Vec::new();
|
||||
|
||||
for v in rln_witness.path_elements.iter() {
|
||||
path_elements.push(to_bigint(v)?.to_str_radix(10));
|
||||
}
|
||||
|
||||
let mut identity_path_index = Vec::new();
|
||||
rln_witness
|
||||
.identity_path_index
|
||||
.iter()
|
||||
.for_each(|v| identity_path_index.push(BigInt::from(*v).to_str_radix(10)));
|
||||
|
||||
let inputs = serde_json::json!({
|
||||
"identitySecret": to_bigint(&rln_witness.identity_secret)?.to_str_radix(10),
|
||||
"userMessageLimit": to_bigint(&rln_witness.user_message_limit)?.to_str_radix(10),
|
||||
"messageId": to_bigint(&rln_witness.message_id)?.to_str_radix(10),
|
||||
"pathElements": path_elements,
|
||||
"identityPathIndex": identity_path_index,
|
||||
"x": to_bigint(&rln_witness.x)?.to_str_radix(10),
|
||||
"externalNullifier": to_bigint(&rln_witness.external_nullifier)?.to_str_radix(10),
|
||||
});
|
||||
|
||||
Ok(inputs)
|
||||
let rln_witness_json = serde_json::to_value(rln_witness)?;
|
||||
Ok(rln_witness_json)
|
||||
}
|
||||
|
||||
pub fn message_id_range_check(message_id: &Fr, user_message_limit: &Fr) -> Result<()> {
|
||||
|
||||
@@ -19,7 +19,7 @@ cfg_if! {
|
||||
if #[cfg(not(target_arch = "wasm32"))] {
|
||||
use std::default::Default;
|
||||
use std::sync::Mutex;
|
||||
use crate::circuit::{circom_from_folder, vk_from_folder, circom_from_raw, zkey_from_folder, TEST_RESOURCES_FOLDER, TEST_TREE_HEIGHT};
|
||||
use crate::circuit::{circom_from_folder, vk_from_folder, circom_from_raw, zkey_from_folder, TEST_TREE_HEIGHT};
|
||||
use ark_circom::WitnessCalculator;
|
||||
use serde_json::{json, Value};
|
||||
use utils::{Hasher};
|
||||
@@ -58,17 +58,16 @@ impl RLN<'_> {
|
||||
///
|
||||
/// Input parameters are
|
||||
/// - `tree_height`: the height of the internal Merkle tree
|
||||
/// - `input_data`: a reader for the string path of the resource folder containing the ZK circuit (`rln.wasm`), the proving key (`rln_final.zkey`) or (`rln_final.arkzkey`) and the verification key (`verification_key.json`).
|
||||
///
|
||||
/// - `input_data`: a reader for a string containing a JSON object with the Merkle tree configuration (`tree_config`); if it is not defined, the default configuration is used
|
||||
/// Example:
|
||||
/// ```
|
||||
/// use std::io::Cursor;
|
||||
///
|
||||
/// let tree_height = 20;
|
||||
/// let resources = Cursor::new(json!({"resources_folder": "tree_height_20"});
|
||||
/// let input = Cursor::new(json!({}).to_string());;
|
||||
///
|
||||
/// // We create a new RLN instance
|
||||
/// let mut rln = RLN::new(tree_height, resources);
|
||||
/// let mut rln = RLN::new(tree_height, input);
|
||||
/// ```
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub fn new<R: Read>(tree_height: usize, mut input_data: R) -> Result<RLN<'static>> {
|
||||
@@ -77,15 +76,12 @@ impl RLN<'_> {
|
||||
input_data.read_to_end(&mut input)?;
|
||||
|
||||
let rln_config: Value = serde_json::from_str(&String::from_utf8(input)?)?;
|
||||
let resources_folder = rln_config["resources_folder"]
|
||||
.as_str()
|
||||
.unwrap_or(TEST_RESOURCES_FOLDER);
|
||||
let tree_config = rln_config["tree_config"].to_string();
|
||||
|
||||
let witness_calculator = circom_from_folder(resources_folder)?;
|
||||
let proving_key = zkey_from_folder(resources_folder)?;
|
||||
let witness_calculator = circom_from_folder()?;
|
||||
let proving_key = zkey_from_folder()?;
|
||||
|
||||
let verification_key = vk_from_folder(resources_folder)?;
|
||||
let verification_key = vk_from_folder()?;
|
||||
|
||||
let tree_config: <PoseidonTree as ZerokitMerkleTree>::Config = if tree_config.is_empty() {
|
||||
<PoseidonTree as ZerokitMerkleTree>::Config::default()
|
||||
@@ -117,7 +113,7 @@ impl RLN<'_> {
|
||||
/// - `circom_vec`: a byte vector containing the ZK circuit (`rln.wasm`) as binary file
|
||||
/// - `zkey_vec`: a byte vector containing to the proving key (`rln_final.zkey`) or (`rln_final.arkzkey`) as binary file
|
||||
/// - `vk_vec`: a byte vector containing to the verification key (`verification_key.json`) as binary file
|
||||
/// - `tree_config`: a reader for a string containing a json with the merkle tree configuration
|
||||
/// - `tree_config_input`: a reader for a string containing a json with the merkle tree configuration
|
||||
///
|
||||
/// Example:
|
||||
/// ```
|
||||
@@ -136,7 +132,7 @@ impl RLN<'_> {
|
||||
/// file.read_exact(&mut buffer).expect("buffer overflow");
|
||||
/// resources.push(buffer);
|
||||
/// let tree_config = "{}".to_string();
|
||||
/// let tree_config_buffer = &Buffer::from(tree_config.as_bytes());
|
||||
/// let tree_config_input = &Buffer::from(tree_config.as_bytes());
|
||||
/// }
|
||||
///
|
||||
/// let mut rln = RLN::new_with_params(
|
||||
@@ -144,7 +140,7 @@ impl RLN<'_> {
|
||||
/// resources[0].clone(),
|
||||
/// resources[1].clone(),
|
||||
/// resources[2].clone(),
|
||||
/// tree_config_buffer,
|
||||
/// tree_config_input,
|
||||
/// );
|
||||
/// ```
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
@@ -265,6 +261,9 @@ impl RLN<'_> {
|
||||
/// Input values are:
|
||||
/// - `index`: the index of the leaf
|
||||
///
|
||||
/// Output values are:
|
||||
/// - `output_data`: a writer receiving the serialization of the metadata
|
||||
///
|
||||
/// Example:
|
||||
/// ```
|
||||
/// use crate::protocol::*;
|
||||
@@ -384,7 +383,7 @@ impl RLN<'_> {
|
||||
/// // We atomically add leaves and remove indices from the tree
|
||||
/// let mut leaves_buffer = Cursor::new(vec_fr_to_bytes_le(&leaves));
|
||||
/// let mut indices_buffer = Cursor::new(vec_u8_to_bytes_le(&indices));
|
||||
/// rln.set_leaves_from(index, &mut leaves_buffer, indices_buffer).unwrap();
|
||||
/// rln.atomic_operation(index, &mut leaves_buffer, indices_buffer).unwrap();
|
||||
/// ```
|
||||
pub fn atomic_operation<R: Read>(
|
||||
&mut self,
|
||||
@@ -545,6 +544,33 @@ impl RLN<'_> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Returns the root of subtree in the Merkle tree
|
||||
///
|
||||
/// Output values are:
|
||||
/// - `output_data`: a writer receiving the serialization of the node value (serialization done with [`rln::utils::fr_to_bytes_le`](crate::utils::fr_to_bytes_le))
|
||||
///
|
||||
/// Example
|
||||
/// ```
|
||||
/// use rln::utils::*;
|
||||
///
|
||||
/// let mut buffer = Cursor::new(Vec::<u8>::new());
|
||||
/// let level = 1;
|
||||
/// let index = 2;
|
||||
/// rln.get_subtree_root(level, index, &mut buffer).unwrap();
|
||||
/// let (subroot, _) = bytes_le_to_fr(&buffer.into_inner());
|
||||
/// ```
|
||||
pub fn get_subtree_root<W: Write>(
|
||||
&self,
|
||||
level: usize,
|
||||
index: usize,
|
||||
mut output_data: W,
|
||||
) -> Result<()> {
|
||||
let subroot = self.tree.get_subtree_root(level, index)?;
|
||||
output_data.write_all(&fr_to_bytes_le(&subroot))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Returns the Merkle proof of the leaf at position index
|
||||
///
|
||||
/// Input values are:
|
||||
@@ -577,6 +603,44 @@ impl RLN<'_> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Returns the indices of leaves in the tree that are set to zero (up to the final leaf that was set).
|
||||
///
|
||||
/// Output values are:
|
||||
/// - `output_data`: a writer receiving the serialization of the indices of leaves.
|
||||
///
|
||||
/// Example
|
||||
/// ```
|
||||
/// use rln::circuit::Fr;
|
||||
/// use rln::utils::*;
|
||||
///
|
||||
/// let start_index = 5;
|
||||
/// let no_of_leaves = 256;
|
||||
///
|
||||
/// // We generate a vector of random leaves
|
||||
/// let mut leaves: Vec<Fr> = Vec::new();
|
||||
/// let mut rng = thread_rng();
|
||||
/// for _ in 0..no_of_leaves {
|
||||
/// let (_, id_commitment) = keygen();
|
||||
/// let rate_commitment = poseidon_hash(&[id_commitment, 1.into()]);
|
||||
/// leaves.push(rate_commitment);
|
||||
/// }
|
||||
///
|
||||
/// // We add leaves in a batch into the tree
|
||||
/// let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves));
|
||||
/// rln.set_leaves_from(index, &mut buffer).unwrap();
|
||||
///
|
||||
/// // Get indices of first empty leaves upto start_index
|
||||
/// let mut buffer = Cursor::new(Vec::<u8>::new());
|
||||
/// rln.get_empty_leaves_indices(&mut buffer).unwrap();
|
||||
/// let idxs = bytes_le_to_vec_usize(&buffer.into_inner()).unwrap();
|
||||
/// assert_eq!(idxs, [0, 1, 2, 3, 4]);
|
||||
/// ```
|
||||
pub fn get_empty_leaves_indices<W: Write>(&self, mut output_data: W) -> Result<()> {
|
||||
let idxs = self.tree.get_empty_leaves_indices();
|
||||
idxs.serialize_compressed(&mut output_data)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////
|
||||
// zkSNARK APIs
|
||||
////////////////////////////////////////////////////////
|
||||
@@ -629,7 +693,8 @@ impl RLN<'_> {
|
||||
/// Verifies a zkSNARK RLN proof.
|
||||
///
|
||||
/// Input values are:
|
||||
/// - `input_data`: a reader for the serialization of the RLN zkSNARK proof concatenated with a serialization of the circuit output values, i.e. `[ proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> | nullifier<32> | rln_identifier<32> ]`, where <_> indicates the byte length.
|
||||
/// - `input_data`: a reader for the serialization of the RLN zkSNARK proof concatenated with a serialization of the circuit output values,
|
||||
/// i.e. `[ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32>]`, where <_> indicates the byte length.
|
||||
///
|
||||
/// The function returns true if the zkSNARK proof is valid with respect to the provided circuit output values, false otherwise.
|
||||
///
|
||||
@@ -664,7 +729,7 @@ impl RLN<'_> {
|
||||
pub fn verify<R: Read>(&self, mut input_data: R) -> Result<bool> {
|
||||
// Input data is serialized for Curve as:
|
||||
// serialized_proof (compressed, 4*32 bytes) || serialized_proof_values (6*32 bytes), i.e.
|
||||
// [ proof<128> | root<32> | external_nullifier<32> | share_x<32> | share_y<32> | nullifier<32> ]
|
||||
// [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]
|
||||
let mut input_byte: Vec<u8> = Vec::new();
|
||||
input_data.read_to_end(&mut input_byte)?;
|
||||
let proof = ArkProof::deserialize_compressed(&mut Cursor::new(&input_byte[..128]))?;
|
||||
@@ -676,18 +741,19 @@ impl RLN<'_> {
|
||||
Ok(verified)
|
||||
}
|
||||
|
||||
/// Computes a zkSNARK RLN proof from the identity secret, the Merkle tree index, the epoch and signal.
|
||||
/// Computes a zkSNARK RLN proof from the identity secret, the Merkle tree index, the user message limit, the message id, the external nullifier (which includes the epoch and rln identifier) and the signal.
|
||||
///
|
||||
/// Input values are:
|
||||
/// - `input_data`: a reader for the serialization of `[ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]`
|
||||
/// - `input_data`: a reader for the serialization of `[ identity_secret<32> | id_index<8> | user_message_limit<32> | message_id<32> | external_nullifier<32> | signal_len<8> | signal<var> ]`
|
||||
///
|
||||
/// Output values are:
|
||||
/// - `output_data`: a writer receiving the serialization of the zkSNARK proof and the circuit evaluations outputs, i.e. `[ proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> | nullifier<32> | rln_identifier<32> ]`
|
||||
/// - `output_data`: a writer receiving the serialization of the zkSNARK proof and the circuit evaluations outputs, i.e. `[ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32>]`
|
||||
///
|
||||
/// Example
|
||||
/// ```
|
||||
/// use rln::protocol::*:
|
||||
/// use rln::utils::*;
|
||||
/// use rln::hashers::*;
|
||||
///
|
||||
/// // Generate identity pair
|
||||
/// let (identity_secret_hash, id_commitment) = keygen();
|
||||
@@ -701,15 +767,21 @@ impl RLN<'_> {
|
||||
/// // We generate a random signal
|
||||
/// let mut rng = rand::thread_rng();
|
||||
/// let signal: [u8; 32] = rng.gen();
|
||||
///
|
||||
/// // We generate a random epoch
|
||||
/// let epoch = hash_to_field(b"test-epoch");
|
||||
/// // We generate a random rln_identifier
|
||||
/// let rln_identifier = hash_to_field(b"test-rln-identifier");
|
||||
/// let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);
|
||||
///
|
||||
/// // We prepare input for generate_rln_proof API
|
||||
/// // input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
|
||||
/// // input_data is [ identity_secret<32> | id_index<8> | user_message_limit<32> | message_id<32> | external_nullifier<32> | signal_len<8> | signal<var> ]
|
||||
/// let mut serialized: Vec<u8> = Vec::new();
|
||||
/// serialized.append(&mut fr_to_bytes_le(&identity_secret_hash));
|
||||
/// serialized.append(&mut normalize_usize(identity_index));
|
||||
/// serialized.append(&mut fr_to_bytes_le(&epoch));
|
||||
/// serialized.append(&mut fr_to_bytes_le(&user_message_limit));
|
||||
/// serialized.append(&mut fr_to_bytes_le(&Fr::from(1))); // message_id
|
||||
/// serialized.append(&mut fr_to_bytes_le(&external_nullifier));
|
||||
/// serialized.append(&mut normalize_usize(signal_len).resize(8,0));
|
||||
/// serialized.append(&mut signal.to_vec());
|
||||
///
|
||||
@@ -718,7 +790,7 @@ impl RLN<'_> {
|
||||
/// rln.generate_rln_proof(&mut input_buffer, &mut output_buffer)
|
||||
/// .unwrap();
|
||||
///
|
||||
/// // proof_data is [ proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> | nullifier<32> | rln_identifier<32> ]
|
||||
/// // proof_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32>]
|
||||
/// let mut proof_data = output_buffer.into_inner();
|
||||
/// ```
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
@@ -746,7 +818,7 @@ impl RLN<'_> {
|
||||
// TODO: this function seems to use redundant witness (as bigint and serialized) and should be refactored
|
||||
// Generate RLN Proof using a witness calculated from outside zerokit
|
||||
//
|
||||
// output_data is [ proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> | nullifier<32> | rln_identifier<32> ]
|
||||
// output_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32>]
|
||||
// we skip it from documentation for now
|
||||
#[doc(hidden)]
|
||||
pub fn generate_rln_proof_with_witness<W: Write>(
|
||||
@@ -770,7 +842,8 @@ impl RLN<'_> {
|
||||
/// Verifies a zkSNARK RLN proof against the provided proof values and the state of the internal Merkle tree.
|
||||
///
|
||||
/// Input values are:
|
||||
/// - `input_data`: a reader for the serialization of the RLN zkSNARK proof concatenated with a serialization of the circuit output values and the signal information, i.e. `[ proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> | nullifier<32> | rln_identifier<32> | signal_len<8> | signal<var> ]`
|
||||
/// - `input_data`: a reader for the serialization of the RLN zkSNARK proof concatenated with a serialization of the circuit output values and the signal information,
|
||||
/// i.e. `[ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> | signal_len<8> | signal<var>]`, where <_> indicates the byte length.
|
||||
///
|
||||
/// The function returns true if the zkSNARK proof is valid with respect to the provided circuit output values and signal. Returns false otherwise.
|
||||
///
|
||||
@@ -784,7 +857,7 @@ impl RLN<'_> {
|
||||
/// // proof_data is computed as in the example code snippet provided for rln::public::RLN::generate_rln_proof
|
||||
///
|
||||
/// // We prepare input for verify_rln_proof API
|
||||
/// // input_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
|
||||
/// // input_data is `[ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> | signal_len<8> | signal<var>]`
|
||||
/// // that is [ proof_data || signal_len<8> | signal<var> ]
|
||||
/// proof_data.append(&mut normalize_usize(signal_len));
|
||||
/// proof_data.append(&mut signal.to_vec());
|
||||
@@ -821,7 +894,7 @@ impl RLN<'_> {
|
||||
/// Verifies a zkSNARK RLN proof against the provided proof values and a set of allowed Merkle tree roots.
|
||||
///
|
||||
/// Input values are:
|
||||
/// - `input_data`: a reader for the serialization of the RLN zkSNARK proof concatenated with a serialization of the circuit output values and the signal information, i.e. `[ proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> | nullifier<32> | rln_identifier<32> | signal_len<8> | signal<var> ]`
|
||||
/// - `input_data`: a reader for the serialization of the RLN zkSNARK proof concatenated with a serialization of the circuit output values and the signal information, i.e. `[ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> | signal_len<8> | signal<var>]`
|
||||
/// - `roots_data`: a reader for the serialization of a vector of roots, i.e. `[ number_of_roots<8> | root_1<32> | ... | root_n<32> ]` (number_of_roots is a uint64 in little-endian, roots are serialized using `rln::utils::fr_to_bytes_le`))
|
||||
///
|
||||
/// The function returns true if the zkSNARK proof is valid with respect to the provided circuit output values, signal and roots. Returns false otherwise.
|
||||
@@ -1076,10 +1149,12 @@ impl RLN<'_> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Recovers the identity secret from two set of proof values computed for same secret in same epoch.
|
||||
/// Recovers the identity secret from two sets of proof values computed for the same secret, epoch and rln identifier.
|
||||
///
|
||||
/// Input values are:
|
||||
/// - `input_proof_data_1`: a reader for the serialization of a RLN zkSNARK proof concatenated with a serialization of the circuit output values and -optionally- the signal information, i.e. either `[ proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> | nullifier<32> | rln_identifier<32> ]` or `[ proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> | nullifier<32> | rln_identifier<32> | signal_len<8> | signal<var> ]` (to maintain compatibility with both output of [`generate_rln_proof`](crate::public::RLN::generate_rln_proof) and input of [`verify_rln_proof`](crate::public::RLN::verify_rln_proof))
|
||||
/// - `input_proof_data_1`: a reader for the serialization of a RLN zkSNARK proof concatenated with a serialization of the circuit output values and -optionally- the signal information,
|
||||
/// i.e. either `[proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32>]`
|
||||
/// or `[ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> | signal_len<8> | signal<var> ]` (to maintain compatibility with both output of [`generate_rln_proof`](crate::public::RLN::generate_rln_proof) and input of [`verify_rln_proof`](crate::public::RLN::verify_rln_proof))
|
||||
/// - `input_proof_data_2`: same as `input_proof_data_1`
|
||||
///
|
||||
/// Output values are:
|
||||
@@ -1087,7 +1162,7 @@ impl RLN<'_> {
|
||||
///
|
||||
/// Example
|
||||
/// ```
|
||||
/// // identity_secret_hash, proof_data_1 and proof_data_2 are computed as in the example code snippet provided for rln::public::RLN::generate_rln_proof using same identity secret and epoch (but not necessarily same signal)
|
||||
/// // identity_secret_hash, proof_data_1 and proof_data_2 are computed as in the example code snippet provided for rln::public::RLN::generate_rln_proof using same identity secret, epoch and rln identifier (but not necessarily same signal)
|
||||
///
|
||||
/// let mut input_proof_data_1 = Cursor::new(proof_data_1);
|
||||
/// let mut input_proof_data_2 = Cursor::new(proof_data_2);
|
||||
@@ -1127,8 +1202,8 @@ impl RLN<'_> {
|
||||
let (proof_values_2, _) = deserialize_proof_values(&serialized[128..]);
|
||||
let external_nullifier_2 = proof_values_2.external_nullifier;
|
||||
|
||||
// We continue only if the proof values are for the same epoch
|
||||
// The idea is that proof values that go as input to this function are verified first (with zk-proof verify), hence ensuring validity of epoch and other fields.
|
||||
// We continue only if the proof values are for the same external nullifier (which includes epoch and rln identifier)
|
||||
// The idea is that proof values that go as input to this function are verified first (with zk-proof verify), hence ensuring validity of external nullifier and other fields.
|
||||
// Only in case all fields are valid, an external_nullifier for the message will be stored (otherwise signal/proof will be simply discarded)
|
||||
// If the nullifier matches one already seen, we can recover the identity secret.
|
||||
if external_nullifier_1 == external_nullifier_2 {
|
||||
@@ -1150,10 +1225,10 @@ impl RLN<'_> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Returns the serialization of a [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) populated from the identity secret, the Merkle tree index, the epoch and signal.
|
||||
/// Returns the serialization of a [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) populated from the identity secret, the Merkle tree index, the user message limit, the message id, the external nullifier (which includes the epoch and rln identifier) and the signal.
|
||||
///
|
||||
/// Input values are:
|
||||
/// - `input_data`: a reader for the serialization of `[ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]`
|
||||
/// - `input_data`: a reader for the serialization of `[ identity_secret<32> | id_index<8> | user_message_limit<32> | message_id<32> | external_nullifier<32> | signal_len<8> | signal<var> ]`
|
||||
///
|
||||
/// The function returns the corresponding [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object serialized using [`rln::protocol::serialize_witness`](crate::protocol::serialize_witness)).
|
||||
pub fn get_serialized_rln_witness<R: Read>(&mut self, mut input_data: R) -> Result<Vec<u8>> {
|
||||
@@ -1173,7 +1248,7 @@ impl RLN<'_> {
|
||||
/// The function returns the corresponding JSON encoding of the input [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object.
|
||||
pub fn get_rln_witness_json(&mut self, serialized_witness: &[u8]) -> Result<serde_json::Value> {
|
||||
let (rln_witness, _) = deserialize_witness(serialized_witness)?;
|
||||
get_json_inputs(&rln_witness)
|
||||
rln_witness_to_json(&rln_witness)
|
||||
}
|
||||
|
||||
/// Closes the connection to the Merkle tree database.
|
||||
@@ -1189,7 +1264,7 @@ impl RLN<'_> {
|
||||
impl Default for RLN<'_> {
|
||||
fn default() -> Self {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let buffer = Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
|
||||
let buffer = Cursor::new(json!({}).to_string());
|
||||
Self::new(tree_height, buffer).unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use crate::circuit::{Curve, Fr, TEST_RESOURCES_FOLDER, TEST_TREE_HEIGHT};
|
||||
use crate::circuit::{Curve, Fr, TEST_TREE_HEIGHT};
|
||||
use crate::hashers::{hash_to_field, poseidon_hash as utils_poseidon_hash};
|
||||
use crate::protocol::*;
|
||||
use crate::public::RLN;
|
||||
@@ -28,9 +28,7 @@ fn test_merkle_operations() {
}

// We create a new tree
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
let mut rln = RLN::new(tree_height, generate_input_buffer()).unwrap();

// We first add leaves one by one specifying the index
for (i, leaf) in leaves.iter().enumerate() {
@@ -124,9 +122,7 @@ fn test_leaf_setting_with_index() {
let set_index = rng.gen_range(0..no_of_leaves) as usize;

// We create a new tree
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
let mut rln = RLN::new(tree_height, generate_input_buffer()).unwrap();

// We add leaves in a batch into the tree
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
@@ -196,9 +192,7 @@ fn test_atomic_operation() {
}

// We create a new tree
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
let mut rln = RLN::new(tree_height, generate_input_buffer()).unwrap();

// We add leaves in a batch into the tree
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
@@ -247,9 +241,7 @@ fn test_atomic_operation_zero_indexed() {
}

// We create a new tree
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
let mut rln = RLN::new(tree_height, generate_input_buffer()).unwrap();

// We add leaves in a batch into the tree
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
@@ -293,9 +285,7 @@ fn test_atomic_operation_consistency() {
}

// We create a new tree
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
let mut rln = RLN::new(tree_height, generate_input_buffer()).unwrap();

// We add leaves in a batch into the tree
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
@@ -348,9 +338,7 @@ fn test_set_leaves_bad_index() {
let bad_index = (1 << tree_height) - rng.gen_range(0..no_of_leaves) as usize;

// We create a new tree
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
let mut rln = RLN::new(tree_height, generate_input_buffer()).unwrap();

// Get root of empty tree
let mut buffer = Cursor::new(Vec::<u8>::new());
@@ -413,9 +401,7 @@ fn value_to_string_vec(value: &Value) -> Vec<String> {
fn test_groth16_proof_hardcoded() {
let tree_height = TEST_TREE_HEIGHT;

let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let rln = RLN::new(tree_height, input_buffer).unwrap();
let rln = RLN::new(tree_height, generate_input_buffer()).unwrap();

let valid_snarkjs_proof = json!({
"pi_a": [
@@ -495,9 +481,7 @@ fn test_groth16_proof_hardcoded() {
fn test_groth16_proof() {
let tree_height = TEST_TREE_HEIGHT;

let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
let mut rln = RLN::new(tree_height, generate_input_buffer()).unwrap();

// Note: we only test Groth16 proof generation, so we ignore setting the tree in the RLN object
let rln_witness = random_rln_witness(tree_height);
@@ -543,9 +527,7 @@ fn test_rln_proof() {
}

// We create a new RLN instance
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
let mut rln = RLN::new(tree_height, generate_input_buffer()).unwrap();

// We add leaves in a batch into the tree
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
@@ -588,11 +570,11 @@ fn test_rln_proof() {
rln.generate_rln_proof(&mut input_buffer, &mut output_buffer)
.unwrap();

// output_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> ]
// output_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]
let mut proof_data = output_buffer.into_inner();

// We prepare input for verify_rln_proof API
// input_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
// input_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> | signal_len<8> | signal<var> ]
// that is [ proof_data || signal_len<8> | signal<var> ]
proof_data.append(&mut normalize_usize(signal.len()));
proof_data.append(&mut signal.to_vec());
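Per the new layout in the comments above, the output splits at fixed offsets: 128 bytes of Groth16 proof followed by five 32-byte little-endian field elements. A sketch with an illustrative helper name:

// Slices `proof_data` according to the layout documented above:
// [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]
fn split_proof_data(proof_data: &[u8]) -> (&[u8], [&[u8]; 5]) {
    let (zk_proof, values) = proof_data.split_at(128);
    (
        zk_proof,
        [
            &values[0..32],    // root
            &values[32..64],   // external_nullifier
            &values[64..96],   // x
            &values[96..128],  // y
            &values[128..160], // nullifier
        ],
    )
}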
@@ -616,9 +598,7 @@ fn test_rln_with_witness() {
}

// We create a new RLN instance
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
let mut rln = RLN::new(tree_height, generate_input_buffer()).unwrap();

// We add leaves in a batch into the tree
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
@@ -693,11 +673,11 @@ fn test_rln_with_witness() {
)
.unwrap();

// output_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> ]
// output_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]
let mut proof_data = output_buffer.into_inner();

// We prepare input for verify_rln_proof API
// input_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
// input_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> | signal_len<8> | signal<var> ]
// that is [ proof_data || signal_len<8> | signal<var> ]
proof_data.append(&mut normalize_usize(signal.len()));
proof_data.append(&mut signal.to_vec());
@@ -722,9 +702,7 @@ fn proof_verification_with_roots() {
}

// We create a new RLN instance
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
let mut rln = RLN::new(tree_height, generate_input_buffer()).unwrap();

// We add leaves in a batch into the tree
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
@@ -751,7 +729,7 @@ fn proof_verification_with_roots() {
let external_nullifier = utils_poseidon_hash(&[epoch, rln_identifier]);

// We prepare input for generate_rln_proof API
// input_data is [ identity_secret<32> | id_index<8> | epoch<32> | rln_identifier<32> | user_message_limit<32> | message_id<32> | signal_len<8> | signal<var> ]
// input_data is [ identity_secret<32> | id_index<8> | external_nullifier<32> | user_message_limit<32> | message_id<32> | signal_len<8> | signal<var> ]
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret_hash));
serialized.append(&mut normalize_usize(identity_index));
@@ -766,11 +744,11 @@ fn proof_verification_with_roots() {
rln.generate_rln_proof(&mut input_buffer, &mut output_buffer)
.unwrap();

// output_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> ]
// output_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]
let mut proof_data = output_buffer.into_inner();

// We prepare input for verify_rln_proof API
// input_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
// input_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> | signal_len<8> | signal<var> ]
// that is [ proof_data || signal_len<8> | signal<var> ]
proof_data.append(&mut normalize_usize(signal.len()));
proof_data.append(&mut signal.to_vec());
@@ -816,9 +794,7 @@ fn test_recover_id_secret() {
let tree_height = TEST_TREE_HEIGHT;

// We create a new RLN instance
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
let mut rln = RLN::new(tree_height, generate_input_buffer()).unwrap();

// Generate identity pair
let (identity_secret_hash, id_commitment) = keygen();
@@ -953,9 +929,7 @@ fn test_get_leaf() {
// We generate a random tree
let tree_height = 10;
let mut rng = thread_rng();
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
let mut rln = RLN::new(tree_height, generate_input_buffer()).unwrap();

// We generate a random leaf
let leaf = Fr::rand(&mut rng);
@@ -980,9 +954,7 @@ fn test_get_leaf() {
fn test_valid_metadata() {
let tree_height = TEST_TREE_HEIGHT;

let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
let mut rln = RLN::new(tree_height, generate_input_buffer()).unwrap();

let arbitrary_metadata: &[u8] = b"block_number:200000";
rln.set_metadata(arbitrary_metadata).unwrap();
@@ -998,9 +970,7 @@ fn test_valid_metadata() {
fn test_empty_metadata() {
let tree_height = TEST_TREE_HEIGHT;

let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let rln = RLN::new(tree_height, input_buffer).unwrap();
let rln = RLN::new(tree_height, generate_input_buffer()).unwrap();

let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_metadata(&mut buffer).unwrap();
@@ -5,6 +5,8 @@ use ark_ff::PrimeField;
use color_eyre::{Report, Result};
use num_bigint::{BigInt, BigUint};
use num_traits::Num;
use serde_json::json;
use std::io::Cursor;
use std::iter::Extend;

pub fn to_bigint(el: &Fr) -> Result<BigInt> {
@@ -179,6 +181,24 @@ pub fn normalize_usize(input: usize) -> Vec<u8> {
normalized_usize
}

pub fn bytes_le_to_vec_usize(input: &[u8]) -> Result<Vec<usize>> {
let nof_elem = usize::try_from(u64::from_le_bytes(input[0..8].try_into()?))?;
if nof_elem == 0 {
Ok(vec![])
} else {
let elements: Vec<usize> = input[8..]
.chunks(8)
.map(|ch| usize::from_le_bytes(ch[0..8].try_into().unwrap()))
.collect();
Ok(elements)
}
}
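A quick round-trip for the new helper, assuming the layout it reads (an 8-byte little-endian element count followed by 8-byte values); the test name is illustrative:

#[test]
fn bytes_le_to_vec_usize_round_trip() {
    // 8-byte LE count, then one 8-byte LE word per element.
    let mut input: Vec<u8> = Vec::new();
    input.extend_from_slice(&2u64.to_le_bytes()); // two elements
    input.extend_from_slice(&3u64.to_le_bytes());
    input.extend_from_slice(&7u64.to_le_bytes());
    assert_eq!(bytes_le_to_vec_usize(&input).unwrap(), vec![3usize, 7usize]);
}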
// using for test
pub fn generate_input_buffer() -> Cursor<String> {
Cursor::new(json!({}).to_string())
}

/* Old conversion utilities between different libraries data types

// Conversion Utilities between poseidon-rs Field and arkworks Fr (in order to call directly poseidon-rs' poseidon_hash)
681
rln/tests/ffi.rs
@@ -14,26 +14,67 @@ mod test {
use std::mem::MaybeUninit;
use std::time::{Duration, Instant};

const NO_OF_LEAVES: usize = 256;

fn create_rln_instance() -> &'static mut RLN<'static> {
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_config = json!({}).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let success = new(TEST_TREE_HEIGHT, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
unsafe { &mut *rln_pointer.assume_init() }
}

fn set_leaves_init(rln_pointer: &mut RLN, leaves: &[Fr]) {
let leaves_ser = vec_fr_to_bytes_le(&leaves).unwrap();
let input_buffer = &Buffer::from(leaves_ser.as_ref());
let success = init_tree_with_leaves(rln_pointer, input_buffer);
assert!(success, "init tree with leaves call failed");
assert_eq!(rln_pointer.leaves_set(), leaves.len());
}

fn get_random_leaves() -> Vec<Fr> {
let mut rng = thread_rng();
(0..NO_OF_LEAVES).map(|_| Fr::rand(&mut rng)).collect()
}

fn get_tree_root(rln_pointer: &mut RLN) -> Fr {
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
assert!(success, "get root call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (root, _) = bytes_le_to_fr(&result_data);
root
}

fn identity_pair_gen(rln_pointer: &mut RLN) -> (Fr, Fr) {
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = key_gen(rln_pointer, output_buffer.as_mut_ptr());
assert!(success, "key gen call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (identity_secret_hash, read) = bytes_le_to_fr(&result_data);
let (id_commitment, _) = bytes_le_to_fr(&result_data[read..].to_vec());
(identity_secret_hash, id_commitment)
}

fn rln_proof_gen(rln_pointer: &mut RLN, serialized: &[u8]) -> Vec<u8> {
let input_buffer = &Buffer::from(serialized);
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = generate_rln_proof(rln_pointer, input_buffer, output_buffer.as_mut_ptr());
assert!(success, "generate rln proof call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
<&[u8]>::from(&output_buffer).to_vec()
}
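The helpers above factor the repeated FFI boilerplate out of the tests that follow; a minimal sketch of how they compose inside a test body (names as defined above, shown for orientation only):

let rln_pointer = create_rln_instance();       // RLN over TEST_TREE_HEIGHT with the default config
let leaves = get_random_leaves();              // NO_OF_LEAVES random field elements
set_leaves_init(rln_pointer, &leaves);         // batch-initialize the Merkle tree
let root = get_tree_root(rln_pointer);         // read back the current root
let (identity_secret_hash, id_commitment) = identity_pair_gen(rln_pointer);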
#[test]
|
||||
// We test merkle batch Merkle tree additions
|
||||
fn test_merkle_operations_ffi() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let no_of_leaves = 256;
|
||||
|
||||
// We generate a vector of random leaves
|
||||
let mut leaves: Vec<Fr> = Vec::new();
|
||||
let mut rng = thread_rng();
|
||||
for _ in 0..no_of_leaves {
|
||||
leaves.push(Fr::rand(&mut rng));
|
||||
}
|
||||
|
||||
let leaves = get_random_leaves();
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resource_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
let rln_pointer = create_rln_instance();
|
||||
|
||||
// We first add leaves one by one specifying the index
|
||||
for (i, leaf) in leaves.iter().enumerate() {
|
||||
@@ -45,15 +86,10 @@ mod test {
|
||||
}
|
||||
|
||||
// We get the root of the tree obtained adding one leaf per time
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "get root call failed");
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
let result_data = <&[u8]>::from(&output_buffer).to_vec();
|
||||
let (root_single, _) = bytes_le_to_fr(&result_data);
|
||||
let root_single = get_tree_root(rln_pointer);
|
||||
|
||||
// We reset the tree to default
|
||||
let success = set_tree(rln_pointer, tree_height);
|
||||
let success = set_tree(rln_pointer, TEST_TREE_HEIGHT);
|
||||
assert!(success, "set tree call failed");
|
||||
|
||||
// We add leaves one by one using the internal index (new leaves goes in next available position)
|
||||
@@ -65,63 +101,40 @@ mod test {
|
||||
}
|
||||
|
||||
// We get the root of the tree obtained adding leaves using the internal index
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "get root call failed");
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
let result_data = <&[u8]>::from(&output_buffer).to_vec();
|
||||
let (root_next, _) = bytes_le_to_fr(&result_data);
|
||||
let root_next = get_tree_root(rln_pointer);
|
||||
|
||||
// We check if roots are the same
|
||||
assert_eq!(root_single, root_next);
|
||||
|
||||
// We reset the tree to default
|
||||
let success = set_tree(rln_pointer, tree_height);
|
||||
let success = set_tree(rln_pointer, TEST_TREE_HEIGHT);
|
||||
assert!(success, "set tree call failed");
|
||||
|
||||
// We add leaves in a batch into the tree
|
||||
let leaves_ser = vec_fr_to_bytes_le(&leaves).unwrap();
|
||||
let input_buffer = &Buffer::from(leaves_ser.as_ref());
|
||||
let success = init_tree_with_leaves(rln_pointer, input_buffer);
|
||||
assert!(success, "init tree with leaves call failed");
|
||||
set_leaves_init(rln_pointer, &leaves);
|
||||
|
||||
// We get the root of the tree obtained adding leaves in batch
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "get root call failed");
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
let result_data = <&[u8]>::from(&output_buffer).to_vec();
|
||||
let (root_batch, _) = bytes_le_to_fr(&result_data);
|
||||
let root_batch = get_tree_root(rln_pointer);
|
||||
|
||||
// We check if roots are the same
|
||||
assert_eq!(root_single, root_batch);
|
||||
|
||||
// We now delete all leaves set and check if the root corresponds to the empty tree root
|
||||
// delete calls over indexes higher than no_of_leaves are ignored and will not increase self.tree.next_index
|
||||
for i in 0..no_of_leaves {
|
||||
for i in 0..NO_OF_LEAVES {
|
||||
let success = delete_leaf(rln_pointer, i);
|
||||
assert!(success, "delete leaf call failed");
|
||||
}
|
||||
|
||||
// We get the root of the tree obtained deleting all leaves
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "get root call failed");
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
let result_data = <&[u8]>::from(&output_buffer).to_vec();
|
||||
let (root_delete, _) = bytes_le_to_fr(&result_data);
|
||||
let root_delete = get_tree_root(rln_pointer);
|
||||
|
||||
// We reset the tree to default
|
||||
let success = set_tree(rln_pointer, tree_height);
|
||||
let success = set_tree(rln_pointer, TEST_TREE_HEIGHT);
|
||||
assert!(success, "set tree call failed");
|
||||
|
||||
// We get the root of the empty tree
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "get root call failed");
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
let result_data = <&[u8]>::from(&output_buffer).to_vec();
|
||||
let (root_empty, _) = bytes_le_to_fr(&result_data);
|
||||
let root_empty = get_tree_root(rln_pointer);
|
||||
|
||||
// We check if roots are the same
|
||||
assert_eq!(root_delete, root_empty);
|
||||
@@ -131,54 +144,28 @@ mod test {
|
||||
// This test is similar to the one in public.rs but it uses the RLN object as a pointer
|
||||
// Uses `set_leaves_from` to set leaves in a batch
|
||||
fn test_leaf_setting_with_index_ffi() {
|
||||
// We create a new tree
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let no_of_leaves = 256;
|
||||
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
|
||||
let rln_pointer = create_rln_instance();
|
||||
assert_eq!(rln_pointer.leaves_set(), 0);
|
||||
|
||||
// We generate a vector of random leaves
|
||||
let mut leaves: Vec<Fr> = Vec::new();
|
||||
let mut rng = thread_rng();
|
||||
for _ in 0..no_of_leaves {
|
||||
leaves.push(Fr::rand(&mut rng));
|
||||
}
|
||||
let leaves = get_random_leaves();
|
||||
|
||||
// set_index is the index from which we start setting leaves
|
||||
// random number between 0..no_of_leaves
|
||||
let set_index = rng.gen_range(0..no_of_leaves) as usize;
|
||||
let mut rng = thread_rng();
|
||||
let set_index = rng.gen_range(0..NO_OF_LEAVES) as usize;
|
||||
|
||||
// We add leaves in a batch into the tree
|
||||
let leaves_ser = vec_fr_to_bytes_le(&leaves).unwrap();
|
||||
let input_buffer = &Buffer::from(leaves_ser.as_ref());
|
||||
let success = init_tree_with_leaves(rln_pointer, input_buffer);
|
||||
assert!(success, "init tree with leaves call failed");
|
||||
assert_eq!(rln_pointer.leaves_set(), no_of_leaves);
|
||||
set_leaves_init(rln_pointer, &leaves);
|
||||
|
||||
// We get the root of the tree obtained adding leaves in batch
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "get root call failed");
|
||||
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
let result_data = <&[u8]>::from(&output_buffer).to_vec();
|
||||
let (root_batch_with_init, _) = bytes_le_to_fr(&result_data);
|
||||
let root_batch_with_init = get_tree_root(rln_pointer);
|
||||
|
||||
// `init_tree_with_leaves` resets the tree to the height it was initialized with, using `set_tree`
|
||||
|
||||
// We add leaves in a batch starting from index 0..set_index
|
||||
let leaves_m = vec_fr_to_bytes_le(&leaves[0..set_index]).unwrap();
|
||||
let buffer = &Buffer::from(leaves_m.as_ref());
|
||||
let success = init_tree_with_leaves(rln_pointer, buffer);
|
||||
assert!(success, "init tree with leaves call failed");
|
||||
set_leaves_init(rln_pointer, &leaves[0..set_index]);
|
||||
|
||||
// We add the remaining n leaves in a batch starting from index set_index
|
||||
let leaves_n = vec_fr_to_bytes_le(&leaves[set_index..]).unwrap();
|
||||
@@ -187,18 +174,11 @@ mod test {
|
||||
assert!(success, "set leaves from call failed");
|
||||
|
||||
// We get the root of the tree obtained adding leaves in batch
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "get root call failed");
|
||||
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
let result_data = <&[u8]>::from(&output_buffer).to_vec();
|
||||
let (root_batch_with_custom_index, _) = bytes_le_to_fr(&result_data);
|
||||
|
||||
let root_batch_with_custom_index = get_tree_root(rln_pointer);
|
||||
assert_eq!(root_batch_with_init, root_batch_with_custom_index);
|
||||
|
||||
// We reset the tree to default
|
||||
let success = set_tree(rln_pointer, tree_height);
|
||||
let success = set_tree(rln_pointer, TEST_TREE_HEIGHT);
|
||||
assert!(success, "set tree call failed");
|
||||
|
||||
// We add leaves one by one using the internal index (new leaves goes in next available position)
|
||||
@@ -210,55 +190,26 @@ mod test {
|
||||
}
|
||||
|
||||
// We get the root of the tree obtained adding leaves using the internal index
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "get root call failed");
|
||||
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
let result_data = <&[u8]>::from(&output_buffer).to_vec();
|
||||
let (root_single_additions, _) = bytes_le_to_fr(&result_data);
|
||||
|
||||
let root_single_additions = get_tree_root(rln_pointer);
|
||||
assert_eq!(root_batch_with_init, root_single_additions);
|
||||
}
|
||||
|
||||
#[test]
|
||||
// This test is similar to the one in public.rs but it uses the RLN object as a pointer
|
||||
fn test_atomic_operation_ffi() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let no_of_leaves = 256;
|
||||
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
|
||||
// We generate a vector of random leaves
|
||||
let mut leaves: Vec<Fr> = Vec::new();
|
||||
let mut rng = thread_rng();
|
||||
for _ in 0..no_of_leaves {
|
||||
leaves.push(Fr::rand(&mut rng));
|
||||
}
|
||||
let leaves = get_random_leaves();
|
||||
// We create a RLN instance
|
||||
let rln_pointer = create_rln_instance();
|
||||
|
||||
// We add leaves in a batch into the tree
|
||||
let leaves_ser = vec_fr_to_bytes_le(&leaves).unwrap();
|
||||
let input_buffer = &Buffer::from(leaves_ser.as_ref());
|
||||
let success = init_tree_with_leaves(rln_pointer, input_buffer);
|
||||
assert!(success, "init tree with leaves call failed");
|
||||
set_leaves_init(rln_pointer, &leaves);
|
||||
|
||||
// We get the root of the tree obtained adding leaves in batch
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "get root call failed");
|
||||
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
let result_data = <&[u8]>::from(&output_buffer).to_vec();
|
||||
let (root_after_insertion, _) = bytes_le_to_fr(&result_data);
|
||||
let root_after_insertion = get_tree_root(rln_pointer);
|
||||
|
||||
let last_leaf = leaves.last().unwrap();
|
||||
let last_leaf_index = no_of_leaves - 1;
|
||||
let last_leaf_index = NO_OF_LEAVES - 1;
|
||||
let indices = vec![last_leaf_index as u8];
|
||||
let last_leaf = vec![*last_leaf];
|
||||
let indices = vec_u8_to_bytes_le(&indices).unwrap();
|
||||
@@ -275,48 +226,23 @@ mod test {
|
||||
assert!(success, "atomic operation call failed");
|
||||
|
||||
// We get the root of the tree obtained after a no-op
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "get root call failed");
|
||||
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
let result_data = <&[u8]>::from(&output_buffer).to_vec();
|
||||
let (root_after_noop, _) = bytes_le_to_fr(&result_data);
|
||||
|
||||
let root_after_noop = get_tree_root(rln_pointer);
|
||||
assert_eq!(root_after_insertion, root_after_noop);
|
||||
}
|
||||
|
||||
#[test]
|
||||
// This test is similar to the one in public.rs but it uses the RLN object as a pointer
|
||||
fn test_set_leaves_bad_index_ffi() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let no_of_leaves = 256;
|
||||
|
||||
// We generate a vector of random leaves
|
||||
let mut leaves: Vec<Fr> = Vec::new();
|
||||
let mut rng = thread_rng();
|
||||
for _ in 0..no_of_leaves {
|
||||
leaves.push(Fr::rand(&mut rng));
|
||||
}
|
||||
|
||||
let bad_index = (1 << tree_height) - rng.gen_range(0..no_of_leaves) as usize;
|
||||
|
||||
let leaves = get_random_leaves();
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
let rln_pointer = create_rln_instance();
|
||||
|
||||
let mut rng = thread_rng();
|
||||
let bad_index = (1 << TEST_TREE_HEIGHT) - rng.gen_range(0..NO_OF_LEAVES) as usize;
|
||||
|
||||
// Get root of empty tree
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "get root call failed");
|
||||
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
let result_data = <&[u8]>::from(&output_buffer).to_vec();
|
||||
let (root_empty, _) = bytes_le_to_fr(&result_data);
|
||||
let root_empty = get_tree_root(rln_pointer);
|
||||
|
||||
// We add leaves in a batch into the tree
|
||||
let leaves = vec_fr_to_bytes_le(&leaves).unwrap();
|
||||
@@ -325,30 +251,16 @@ mod test {
|
||||
assert!(!success, "set leaves from call succeeded");
|
||||
|
||||
// Get root of tree after attempted set
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "get root call failed");
|
||||
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
let result_data = <&[u8]>::from(&output_buffer).to_vec();
|
||||
let (root_after_bad_set, _) = bytes_le_to_fr(&result_data);
|
||||
|
||||
let root_after_bad_set = get_tree_root(rln_pointer);
|
||||
assert_eq!(root_empty, root_after_bad_set);
|
||||
}
|
||||
|
||||
#[test]
|
||||
// This test is similar to the one in lib, but uses only public C API
|
||||
fn test_merkle_proof_ffi() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let leaf_index = 3;
|
||||
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
let rln_pointer = create_rln_instance();
|
||||
|
||||
// generate identity
|
||||
let identity_secret_hash = hash_to_field(b"test-merkle-proof");
|
||||
@@ -363,27 +275,19 @@ mod test {
|
||||
assert!(success, "set leaf call failed");
|
||||
|
||||
// We obtain the Merkle tree root
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "get root call failed");
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
let result_data = <&[u8]>::from(&output_buffer).to_vec();
|
||||
let (root, _) = bytes_le_to_fr(&result_data);
|
||||
let root = get_tree_root(rln_pointer);
|
||||
|
||||
use ark_ff::BigInt;
|
||||
|
||||
if TEST_TREE_HEIGHT == 20 {
|
||||
assert_eq!(
|
||||
root,
|
||||
BigInt([
|
||||
4939322235247991215,
|
||||
5110804094006647505,
|
||||
4427606543677101242,
|
||||
910933464535675827
|
||||
])
|
||||
.into()
|
||||
);
|
||||
}
|
||||
assert_eq!(
|
||||
root,
|
||||
BigInt([
|
||||
4939322235247991215,
|
||||
5110804094006647505,
|
||||
4427606543677101242,
|
||||
910933464535675827
|
||||
])
|
||||
.into()
|
||||
);
|
||||
|
||||
// We obtain the Merkle tree root
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
@@ -396,121 +300,33 @@ mod test {
|
||||
let (identity_path_index, _) = bytes_le_to_vec_u8(&result_data[read..].to_vec()).unwrap();
|
||||
|
||||
// We check correct computation of the path and indexes
|
||||
let mut expected_path_elements = vec![
|
||||
str_to_fr(
|
||||
"0x0000000000000000000000000000000000000000000000000000000000000000",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
];
|
||||
let expected_path_elements: Vec<Fr> = [
|
||||
"0x0000000000000000000000000000000000000000000000000000000000000000",
|
||||
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
|
||||
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
|
||||
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
|
||||
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
|
||||
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
|
||||
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
|
||||
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
|
||||
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
|
||||
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
|
||||
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
|
||||
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
|
||||
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
|
||||
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
|
||||
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
|
||||
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
|
||||
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
|
||||
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
|
||||
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
|
||||
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
|
||||
]
|
||||
.map(|e| str_to_fr(e, 16).unwrap())
|
||||
.to_vec();
|
||||
|
||||
let mut expected_identity_path_index: Vec<u8> =
|
||||
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
|
||||
|
||||
if TEST_TREE_HEIGHT == 20 {
|
||||
expected_path_elements.append(&mut vec![
|
||||
str_to_fr(
|
||||
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
]);
|
||||
expected_identity_path_index.append(&mut vec![0, 0, 0, 0]);
|
||||
}
|
||||
|
||||
if TEST_TREE_HEIGHT == 20 {
|
||||
expected_path_elements.append(&mut vec![str_to_fr(
|
||||
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
|
||||
16,
|
||||
)
|
||||
.unwrap()]);
|
||||
expected_identity_path_index.append(&mut vec![0]);
|
||||
}
|
||||
let expected_identity_path_index: Vec<u8> =
|
||||
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
|
||||
|
||||
assert_eq!(path_elements, expected_path_elements);
|
||||
assert_eq!(identity_path_index, expected_identity_path_index);
|
||||
@@ -529,15 +345,8 @@ mod test {
|
||||
#[test]
|
||||
// Benchmarks proof generation and verification
|
||||
fn test_groth16_proofs_performance_ffi() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
let rln_pointer = create_rln_instance();
|
||||
|
||||
// We compute some benchmarks regarding proof and verify API calls
|
||||
// Note that circuit loading requires some initial overhead.
|
||||
@@ -548,7 +357,7 @@ mod test {
|
||||
|
||||
for _ in 0..sample_size {
|
||||
// We generate random witness instances and relative proof values
|
||||
let rln_witness = random_rln_witness(tree_height);
|
||||
let rln_witness = random_rln_witness(TEST_TREE_HEIGHT);
|
||||
let proof_values = proof_values_from_witness(&rln_witness).unwrap();
|
||||
|
||||
// We prepare id_commitment and we set the leaf at provided index
|
||||
@@ -592,36 +401,25 @@ mod test {
|
||||
#[test]
|
||||
// Creating a RLN with raw data should generate same results as using a path to resources
|
||||
fn test_rln_raw_ffi() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
|
||||
// We create a RLN instance using a resource folder path
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
// We create a RLN instance
|
||||
let rln_pointer = create_rln_instance();
|
||||
|
||||
// We obtain the root from the RLN instance
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "get root call failed");
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
let result_data = <&[u8]>::from(&output_buffer).to_vec();
|
||||
let (root_rln_folder, _) = bytes_le_to_fr(&result_data);
|
||||
let root_rln_folder = get_tree_root(rln_pointer);
|
||||
|
||||
// Reading the raw data from the files required for instantiating a RLN instance using raw data
|
||||
let circom_path = format!("./resources/tree_height_{TEST_TREE_HEIGHT}/rln.wasm");
|
||||
let circom_path = "./resources/tree_height_20/rln.wasm";
|
||||
let mut circom_file = File::open(&circom_path).expect("no file found");
|
||||
let metadata = std::fs::metadata(&circom_path).expect("unable to read metadata");
|
||||
let mut circom_buffer = vec![0; metadata.len() as usize];
|
||||
circom_file
|
||||
.read_exact(&mut circom_buffer)
|
||||
.expect("buffer overflow");
|
||||
|
||||
#[cfg(feature = "arkzkey")]
|
||||
let zkey_path = format!("./resources/tree_height_{TEST_TREE_HEIGHT}/rln_final.arkzkey");
|
||||
let zkey_path = "./resources/tree_height_20/rln_final.arkzkey";
|
||||
#[cfg(not(feature = "arkzkey"))]
|
||||
let zkey_path = format!("./resources/tree_height_{TEST_TREE_HEIGHT}/rln_final.zkey");
|
||||
let zkey_path = "./resources/tree_height_20/rln_final.zkey";
|
||||
let mut zkey_file = File::open(&zkey_path).expect("no file found");
|
||||
let metadata = std::fs::metadata(&zkey_path).expect("unable to read metadata");
|
||||
let mut zkey_buffer = vec![0; metadata.len() as usize];
|
||||
@@ -629,8 +427,7 @@ mod test {
|
||||
.read_exact(&mut zkey_buffer)
|
||||
.expect("buffer overflow");
|
||||
|
||||
let vk_path = format!("./resources/tree_height_{TEST_TREE_HEIGHT}/verification_key.json");
|
||||
|
||||
let vk_path = "./resources/tree_height_20/verification_key.json";
|
||||
let mut vk_file = File::open(&vk_path).expect("no file found");
|
||||
let metadata = std::fs::metadata(&vk_path).expect("unable to read metadata");
|
||||
let mut vk_buffer = vec![0; metadata.len() as usize];
|
||||
@@ -645,7 +442,7 @@ mod test {
|
||||
let tree_config = "".to_string();
|
||||
let tree_config_buffer = &Buffer::from(tree_config.as_bytes());
|
||||
let success = new_with_params(
|
||||
tree_height,
|
||||
TEST_TREE_HEIGHT,
|
||||
circom_data,
|
||||
zkey_data,
|
||||
vk_data,
|
||||
@@ -656,57 +453,31 @@ mod test {
|
||||
let rln_pointer2 = unsafe { &mut *rln_pointer_raw_bytes.assume_init() };
|
||||
|
||||
// We obtain the root from the RLN instance containing raw data
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = get_root(rln_pointer2, output_buffer.as_mut_ptr());
|
||||
assert!(success, "get root call failed");
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
let result_data = <&[u8]>::from(&output_buffer).to_vec();
|
||||
let (root_rln_raw, _) = bytes_le_to_fr(&result_data);
|
||||
|
||||
// And compare that the same root was generated
|
||||
let root_rln_raw = get_tree_root(rln_pointer2);
|
||||
assert_eq!(root_rln_folder, root_rln_raw);
|
||||
}
|
||||
|
||||
#[test]
|
||||
// Computes and verifies an RLN ZK proof using FFI APIs
|
||||
fn test_rln_proof_ffi() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let no_of_leaves = 256;
|
||||
let user_message_limit = Fr::from(100);
|
||||
|
||||
// We generate a vector of random leaves
|
||||
let mut leaves: Vec<Fr> = Vec::new();
|
||||
let mut rng = thread_rng();
|
||||
for _ in 0..no_of_leaves {
|
||||
let id_commitment = Fr::rand(&mut rng);
|
||||
let rate_commitment = utils_poseidon_hash(&[id_commitment, Fr::from(100)]);
|
||||
leaves.push(rate_commitment);
|
||||
}
|
||||
let leaves: Vec<Fr> = (0..NO_OF_LEAVES)
|
||||
.map(|_| utils_poseidon_hash(&[Fr::rand(&mut rng), Fr::from(100)]))
|
||||
.collect();
|
||||
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
let rln_pointer = create_rln_instance();
|
||||
|
||||
// We add leaves in a batch into the tree
|
||||
let leaves_ser = vec_fr_to_bytes_le(&leaves).unwrap();
|
||||
let input_buffer = &Buffer::from(leaves_ser.as_ref());
|
||||
let success = init_tree_with_leaves(rln_pointer, input_buffer);
|
||||
assert!(success, "init tree with leaves call failed");
|
||||
set_leaves_init(rln_pointer, &leaves);
|
||||
|
||||
// We generate a new identity pair
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = key_gen(rln_pointer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "key gen call failed");
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
let result_data = <&[u8]>::from(&output_buffer).to_vec();
|
||||
let (identity_secret_hash, read) = bytes_le_to_fr(&result_data);
|
||||
let (id_commitment, _) = bytes_le_to_fr(&result_data[read..].to_vec());
|
||||
|
||||
let identity_index: usize = no_of_leaves;
|
||||
let (identity_secret_hash, id_commitment) = identity_pair_gen(rln_pointer);
|
||||
let identity_index: usize = NO_OF_LEAVES;
|
||||
|
||||
// We generate a random signal
|
||||
let mut rng = rand::thread_rng();
|
||||
@@ -738,16 +509,11 @@ mod test {
|
||||
serialized.append(&mut signal.to_vec());
|
||||
|
||||
// We call generate_rln_proof
|
||||
let input_buffer = &Buffer::from(serialized.as_ref());
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = generate_rln_proof(rln_pointer, input_buffer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "generate rln proof call failed");
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
// result_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> ]
|
||||
let mut proof_data = <&[u8]>::from(&output_buffer).to_vec();
|
||||
// result_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]
|
||||
let mut proof_data = rln_proof_gen(rln_pointer, serialized.as_ref());
|
||||
|
||||
// We prepare input for verify_rln_proof API
|
||||
// input_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
|
||||
// input_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> | signal_len<8> | signal<var> ]
|
||||
// that is [ proof_data | signal_len<8> | signal<var> ]
|
||||
proof_data.append(&mut normalize_usize(signal.len()));
|
||||
proof_data.append(&mut signal.to_vec());
|
||||
@@ -765,42 +531,20 @@ mod test {
|
||||
// Computes and verifies an RLN ZK proof by checking proof's root against an input roots buffer
|
||||
fn test_verify_with_roots() {
|
||||
// First part similar to test_rln_proof_ffi
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let no_of_leaves = 256;
|
||||
let user_message_limit = Fr::from(100);
|
||||
|
||||
// We generate a vector of random leaves
|
||||
let mut leaves: Vec<Fr> = Vec::new();
|
||||
let mut rng = thread_rng();
|
||||
for _ in 0..no_of_leaves {
|
||||
leaves.push(Fr::rand(&mut rng));
|
||||
}
|
||||
|
||||
let leaves = get_random_leaves();
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
let rln_pointer = create_rln_instance();
|
||||
|
||||
// We add leaves in a batch into the tree
|
||||
let leaves_ser = vec_fr_to_bytes_le(&leaves).unwrap();
|
||||
let input_buffer = &Buffer::from(leaves_ser.as_ref());
|
||||
let success = init_tree_with_leaves(rln_pointer, input_buffer);
|
||||
assert!(success, "set leaves call failed");
|
||||
set_leaves_init(rln_pointer, &leaves);
|
||||
|
||||
// We generate a new identity pair
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = key_gen(rln_pointer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "key gen call failed");
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
let result_data = <&[u8]>::from(&output_buffer).to_vec();
|
||||
let (identity_secret_hash, read) = bytes_le_to_fr(&result_data);
|
||||
let (id_commitment, _) = bytes_le_to_fr(&result_data[read..].to_vec());
|
||||
let (identity_secret_hash, id_commitment) = identity_pair_gen(rln_pointer);
|
||||
let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit]);
|
||||
|
||||
let identity_index: usize = no_of_leaves;
|
||||
let identity_index: usize = NO_OF_LEAVES;
|
||||
|
||||
// We generate a random signal
|
||||
let mut rng = rand::thread_rng();
|
||||
@@ -832,16 +576,11 @@ mod test {
|
||||
serialized.append(&mut signal.to_vec());
|
||||
|
||||
// We call generate_rln_proof
|
||||
let input_buffer = &Buffer::from(serialized.as_ref());
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = generate_rln_proof(rln_pointer, input_buffer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "generate rln proof call failed");
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
// result_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> ]
|
||||
let mut proof_data = <&[u8]>::from(&output_buffer).to_vec();
|
||||
// result_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]
|
||||
let mut proof_data = rln_proof_gen(rln_pointer, serialized.as_ref());
|
||||
|
||||
// We prepare input for verify_rln_proof API
|
||||
// input_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
|
||||
// input_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> | signal_len<8> | signal<var> ]
|
||||
// that is [ proof_data | signal_len<8> | signal<var> ]
|
||||
proof_data.append(&mut normalize_usize(signal.len()));
|
||||
proof_data.append(&mut signal.to_vec());
|
||||
@@ -878,12 +617,7 @@ mod test {
|
||||
|
||||
// We finally include the correct root
|
||||
// We get the root of the tree obtained adding one leaf per time
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "get root call failed");
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
let result_data = <&[u8]>::from(&output_buffer).to_vec();
|
||||
let (root, _) = bytes_le_to_fr(&result_data);
|
||||
let root = get_tree_root(rln_pointer);
|
||||
|
||||
// We include the root and verify the proof
|
||||
roots_data.append(&mut fr_to_bytes_le(&root));
|
||||
@@ -901,24 +635,11 @@ mod test {
|
||||
#[test]
|
||||
// Computes and verifies an RLN ZK proof using FFI APIs
|
||||
fn test_recover_id_secret_ffi() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
let rln_pointer = create_rln_instance();
|
||||
|
||||
// We generate a new identity pair
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = key_gen(rln_pointer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "key gen call failed");
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
let result_data = <&[u8]>::from(&output_buffer).to_vec();
|
||||
let (identity_secret_hash, read) = bytes_le_to_fr(&result_data);
|
||||
let (id_commitment, _) = bytes_le_to_fr(&result_data[read..].to_vec());
|
||||
let (identity_secret_hash, id_commitment) = identity_pair_gen(rln_pointer);
|
||||
|
||||
let user_message_limit = Fr::from(100);
|
||||
let message_id = Fr::from(0);
|
||||
@@ -967,22 +688,12 @@ mod test {
|
||||
serialized2.append(&mut signal2.to_vec());
|
||||
|
||||
// We call generate_rln_proof for first proof values
|
||||
let input_buffer = &Buffer::from(serialized1.as_ref());
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = generate_rln_proof(rln_pointer, input_buffer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "generate rln proof call failed");
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
// result_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> ]
|
||||
let proof_data_1 = <&[u8]>::from(&output_buffer).to_vec();
|
||||
// result_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]
|
||||
let proof_data_1 = rln_proof_gen(rln_pointer, serialized1.as_ref());
|
||||
|
||||
// We call generate_rln_proof
|
||||
let input_buffer = &Buffer::from(serialized2.as_ref());
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = generate_rln_proof(rln_pointer, input_buffer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "generate rln proof call failed");
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
// result_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> ]
|
||||
let proof_data_2 = <&[u8]>::from(&output_buffer).to_vec();
|
||||
// result_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]
|
||||
let proof_data_2 = rln_proof_gen(rln_pointer, serialized2.as_ref());
|
||||
|
||||
let input_proof_buffer_1 = &Buffer::from(proof_data_1.as_ref());
|
||||
let input_proof_buffer_2 = &Buffer::from(proof_data_2.as_ref());
|
||||
@@ -1008,13 +719,7 @@ mod test {
|
||||
// We now test that computing identity_secret_hash is unsuccessful if shares computed from two different identity secret hashes but within same epoch are passed
|
||||
|
||||
// We generate a new identity pair
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = key_gen(rln_pointer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "key gen call failed");
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
let result_data = <&[u8]>::from(&output_buffer).to_vec();
|
||||
let (identity_secret_hash_new, read) = bytes_le_to_fr(&result_data);
|
||||
let (id_commitment_new, _) = bytes_le_to_fr(&result_data[read..].to_vec());
|
||||
let (identity_secret_hash_new, id_commitment_new) = identity_pair_gen(rln_pointer);
|
||||
let rate_commitment_new = utils_poseidon_hash(&[id_commitment_new, user_message_limit]);
|
||||
|
||||
// We set as leaf id_commitment, its index would be equal to 1 since at 0 there is id_commitment
|
||||
@@ -1041,13 +746,8 @@ mod test {
|
||||
serialized.append(&mut signal3.to_vec());
|
||||
|
||||
// We call generate_rln_proof
|
||||
let input_buffer = &Buffer::from(serialized.as_ref());
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = generate_rln_proof(rln_pointer, input_buffer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "generate rln proof call failed");
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
// result_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> ]
|
||||
let proof_data_3 = <&[u8]>::from(&output_buffer).to_vec();
|
||||
// result_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]
|
||||
let proof_data_3 = rln_proof_gen(rln_pointer, serialized.as_ref());
|
||||
|
||||
// We attempt to recover the secret using share1 (coming from identity_secret_hash) and share3 (coming from identity_secret_hash_new)
|
||||
|
||||
@@ -1074,15 +774,8 @@ mod test {
|
||||
#[test]
|
||||
// Tests hash to field using FFI APIs
|
||||
fn test_seeded_keygen_ffi() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
let rln_pointer = create_rln_instance();
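// A minimal sketch (not shown in this diff) of what a helper like `create_rln_instance`
// could look like, assuming it wraps the `new` FFI constructor with the same test
// configuration used inline above; the actual helper and its resource handling may differ.
fn create_rln_instance() -> &'static mut RLN {
    let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
    let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
    let input_buffer = &Buffer::from(input_config.as_bytes());
    let success = new(TEST_TREE_HEIGHT, input_buffer, rln_pointer.as_mut_ptr());
    assert!(success, "RLN object creation failed");
    unsafe { &mut *rln_pointer.assume_init() }
}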
|
||||
|
||||
// We generate a new identity pair from an input seed
|
||||
let seed_bytes: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
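// A hedged sketch of the seeded key generation call this test exercises, assuming
// the `seeded_key_gen` FFI function: the same seed should always yield the same
// (identity_secret_hash, id_commitment) pair.
let input_buffer = &Buffer::from(seed_bytes);
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = seeded_key_gen(rln_pointer, input_buffer, output_buffer.as_mut_ptr());
assert!(success, "seeded key gen call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (identity_secret_hash, read) = bytes_le_to_fr(&result_data);
let (id_commitment, _) = bytes_le_to_fr(&result_data[read..].to_vec());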
|
||||
@@ -1115,14 +808,8 @@ mod test {
|
||||
#[test]
|
||||
// Tests seeded extended keygen using FFI APIs
|
||||
fn test_seeded_extended_keygen_ffi() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
let rln_pointer = create_rln_instance();
|
||||
|
||||
// We generate a new identity tuple from an input seed
|
||||
let seed_bytes: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
|
||||
@@ -1220,15 +907,10 @@ mod test {
|
||||
#[test]
|
||||
fn test_get_leaf() {
|
||||
// We create a RLN instance
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let no_of_leaves = 1 << TEST_TREE_HEIGHT;
|
||||
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
// We create a RLN instance
|
||||
let rln_pointer = create_rln_instance();
|
||||
|
||||
// We generate a new identity tuple from an input seed
|
||||
let seed_bytes: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
|
||||
@@ -1265,14 +947,7 @@ mod test {
|
||||
#[test]
|
||||
fn test_valid_metadata() {
|
||||
// We create a RLN instance
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
let rln_pointer = create_rln_instance();
|
||||
|
||||
let seed_bytes: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
|
||||
let input_buffer = &Buffer::from(seed_bytes);
|
||||
@@ -1293,21 +968,13 @@ mod test {
|
||||
#[test]
|
||||
fn test_empty_metadata() {
|
||||
// We create a RLN instance
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
let rln_pointer = create_rln_instance();
|
||||
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = get_metadata(rln_pointer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "get_metadata call failed");
|
||||
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
|
||||
assert_eq!(output_buffer.len, 0);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,48 +4,25 @@
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use rln::circuit::*;
|
||||
use rln::hashers::PoseidonHash;
|
||||
use rln::hashers::{poseidon_hash, PoseidonHash};
|
||||
use rln::{circuit::*, poseidon_tree::PoseidonTree};
|
||||
use utils::{FullMerkleTree, OptimalMerkleTree, ZerokitMerkleProof, ZerokitMerkleTree};
|
||||
|
||||
#[test]
|
||||
/// A basic performance comparison between the two supported Merkle Tree implementations
|
||||
fn test_zerokit_merkle_implementations_performances() {
|
||||
use std::time::{Duration, Instant};
|
||||
|
||||
let tree_height = 20;
|
||||
// The test checks correctness of `FullMerkleTree` and `OptimalMerkleTree` with the Poseidon hash
|
||||
fn test_zerokit_merkle_implementations() {
|
||||
let sample_size = 100;
|
||||
|
||||
let leaves: Vec<Fr> = (0..sample_size).map(|s| Fr::from(s)).collect();
|
||||
|
||||
let mut gen_time_full: u128 = 0;
|
||||
let mut upd_time_full: u128 = 0;
|
||||
let mut gen_time_opt: u128 = 0;
|
||||
let mut upd_time_opt: u128 = 0;
|
||||
|
||||
for _ in 0..sample_size.try_into().unwrap() {
|
||||
let now = Instant::now();
|
||||
FullMerkleTree::<PoseidonHash>::default(tree_height).unwrap();
|
||||
gen_time_full += now.elapsed().as_nanos();
|
||||
|
||||
let now = Instant::now();
|
||||
OptimalMerkleTree::<PoseidonHash>::default(tree_height).unwrap();
|
||||
gen_time_opt += now.elapsed().as_nanos();
|
||||
}
|
||||
|
||||
let mut tree_full = FullMerkleTree::<PoseidonHash>::default(tree_height).unwrap();
|
||||
let mut tree_opt = OptimalMerkleTree::<PoseidonHash>::default(tree_height).unwrap();
|
||||
let mut tree_full = FullMerkleTree::<PoseidonHash>::default(TEST_TREE_HEIGHT).unwrap();
|
||||
let mut tree_opt = OptimalMerkleTree::<PoseidonHash>::default(TEST_TREE_HEIGHT).unwrap();
|
||||
|
||||
for i in 0..sample_size.try_into().unwrap() {
|
||||
let now = Instant::now();
|
||||
tree_full.set(i, leaves[i]).unwrap();
|
||||
upd_time_full += now.elapsed().as_nanos();
|
||||
let proof = tree_full.proof(i).expect("index should be set");
|
||||
assert_eq!(proof.leaf_index(), i);
|
||||
|
||||
let now = Instant::now();
|
||||
tree_opt.set(i, leaves[i]).unwrap();
|
||||
upd_time_opt += now.elapsed().as_nanos();
|
||||
let proof = tree_opt.proof(i).expect("index should be set");
|
||||
assert_eq!(proof.leaf_index(), i);
|
||||
}
|
||||
@@ -55,26 +32,108 @@ mod test {
|
||||
let tree_opt_root = tree_opt.root();
|
||||
|
||||
assert_eq!(tree_full_root, tree_opt_root);
|
||||
}
|
||||
|
||||
println!(" Average tree generation time:");
|
||||
println!(
|
||||
" - Full Merkle Tree: {:?}",
|
||||
Duration::from_nanos((gen_time_full / sample_size).try_into().unwrap())
|
||||
);
|
||||
println!(
|
||||
" - Optimal Merkle Tree: {:?}",
|
||||
Duration::from_nanos((gen_time_opt / sample_size).try_into().unwrap())
|
||||
#[test]
|
||||
fn test_subtree_root() {
|
||||
const DEPTH: usize = 3;
|
||||
const LEAVES_LEN: usize = 6;
|
||||
|
||||
let mut tree = PoseidonTree::default(DEPTH).unwrap();
|
||||
let leaves: Vec<Fr> = (0..LEAVES_LEN).map(|s| Fr::from(s as i32)).collect();
|
||||
let _ = tree.set_range(0, leaves);
|
||||
|
||||
for i in 0..LEAVES_LEN {
|
||||
// check leaves
|
||||
assert_eq!(
|
||||
tree.get(i).unwrap(),
|
||||
tree.get_subtree_root(DEPTH, i).unwrap()
|
||||
);
|
||||
// check root
|
||||
assert_eq!(tree.root(), tree.get_subtree_root(0, i).unwrap());
|
||||
}
|
||||
|
||||
// check intermediate nodes
|
||||
for n in (1..=DEPTH).rev() {
|
||||
for i in (0..(1 << n)).step_by(2) {
|
||||
let idx_l = i * (1 << (DEPTH - n));
|
||||
let idx_r = (i + 1) * (1 << (DEPTH - n));
|
||||
let idx_sr = idx_l;
|
||||
|
||||
let prev_l = tree.get_subtree_root(n, idx_l).unwrap();
|
||||
let prev_r = tree.get_subtree_root(n, idx_r).unwrap();
|
||||
let subroot = tree.get_subtree_root(n - 1, idx_sr).unwrap();
|
||||
|
||||
assert_eq!(poseidon_hash(&[prev_l, prev_r]), subroot);
|
||||
}
|
||||
}
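// Worked example of the index arithmetic above (DEPTH = 3, n = 2, i = 2):
// each level-2 node spans 1 << (DEPTH - n) = 2 leaves, so idx_l = 2 * 2 = 4 and
// idx_r = 3 * 2 = 6 address the nodes over leaves 4..6 and 6..8. Their parent at
// level 1 is addressed by idx_sr = 4, and its value must equal
// poseidon_hash(&[left_child, right_child]), which is what the assertion checks.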
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_empty_leaves_indices() {
|
||||
let depth = 4;
|
||||
let nof_leaves: usize = 1 << (depth - 1);
|
||||
|
||||
let mut tree = PoseidonTree::default(depth).unwrap();
|
||||
let leaves: Vec<Fr> = (0..nof_leaves).map(|s| Fr::from(s as i32)).collect();
|
||||
|
||||
// check set_range
|
||||
let _ = tree.set_range(0, leaves.clone());
|
||||
assert!(tree.get_empty_leaves_indices().is_empty());
|
||||
|
||||
let mut vec_idxs = Vec::new();
|
||||
// check delete function
|
||||
for i in 0..nof_leaves {
|
||||
vec_idxs.push(i);
|
||||
let _ = tree.delete(i);
|
||||
assert_eq!(tree.get_empty_leaves_indices(), vec_idxs);
|
||||
}
|
||||
// check set function
|
||||
for i in (0..nof_leaves).rev() {
|
||||
vec_idxs.pop();
|
||||
let _ = tree.set(i, leaves[i]);
|
||||
assert_eq!(tree.get_empty_leaves_indices(), vec_idxs);
|
||||
}
|
||||
|
||||
// check remove_indices_and_set_leaves inside override_range function
|
||||
assert!(tree.get_empty_leaves_indices().is_empty());
|
||||
let leaves_2: Vec<Fr> = (0..2).map(|s| Fr::from(s as i32)).collect();
|
||||
tree.override_range(0, leaves_2.clone(), [0, 1, 2, 3])
|
||||
.unwrap();
|
||||
assert_eq!(tree.get_empty_leaves_indices(), vec![2, 3]);
|
||||
|
||||
// check remove_indices inside override_range function
|
||||
tree.override_range(0, [], [0, 1]).unwrap();
|
||||
assert_eq!(tree.get_empty_leaves_indices(), vec![0, 1, 2, 3]);
|
||||
|
||||
// check set_range inside override_range function
|
||||
tree.override_range(0, leaves_2.clone(), []).unwrap();
|
||||
assert_eq!(tree.get_empty_leaves_indices(), vec![2, 3]);
|
||||
|
||||
let leaves_4: Vec<Fr> = (0..4).map(|s| Fr::from(s as i32)).collect();
|
||||
// check if the indexes for write and delete are the same
|
||||
tree.override_range(0, leaves_4.clone(), [0, 1, 2, 3])
|
||||
.unwrap();
|
||||
assert!(tree.get_empty_leaves_indices().is_empty());
|
||||
|
||||
// check if indexes for deletion are before indexes for overwriting
|
||||
tree.override_range(4, leaves_4.clone(), [0, 1, 2, 3])
|
||||
.unwrap();
|
||||
// This is the expected result because pmtree's set_range advances next_index
// by the size of the union of the deletion and insertion indices, not just by
// the number of inserted elements.
|
||||
assert_eq!(
|
||||
tree.get_empty_leaves_indices(),
|
||||
vec![0, 1, 2, 3, 8, 9, 10, 11]
|
||||
);
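// Worked example of the arithmetic behind the expected indices above (a sketch of
// the behaviour described in the comment): the union of the deleted {0..4} and
// written {4..8} indices has 8 entries, so next_index ends up at 4 + 8 = 12 rather
// than 8. Within 0..12 the empty leaves are then 0..4 (deleted) and 8..12 (never
// written), i.e. [0, 1, 2, 3, 8, 9, 10, 11].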
|
||||
|
||||
println!(" Average update_next execution time:");
|
||||
println!(
|
||||
" - Full Merkle Tree: {:?}",
|
||||
Duration::from_nanos((upd_time_full / sample_size).try_into().unwrap())
|
||||
);
|
||||
|
||||
println!(
|
||||
" - Optimal Merkle Tree: {:?}",
|
||||
Duration::from_nanos((upd_time_opt / sample_size).try_into().unwrap())
|
||||
);
|
||||
// check if the indices for write and delete do not overlap completely
|
||||
tree.override_range(2, leaves_4.clone(), [0, 1, 2, 3])
|
||||
.unwrap();
|
||||
// Again, next_index is advanced by the size of the union of the deletion and
// insertion indices (and never decreases), and leaves 6 and 7 were already set
// in the previous step, so only indices 0, 1 and 8..12 remain empty.
|
||||
assert_eq!(tree.get_empty_leaves_indices(), vec![0, 1, 8, 9, 10, 11]);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,9 +2,7 @@
|
||||
mod test {
|
||||
use ark_ff::BigInt;
|
||||
use rln::circuit::zkey_from_folder;
|
||||
use rln::circuit::{
|
||||
circom_from_folder, vk_from_folder, Fr, TEST_RESOURCES_FOLDER, TEST_TREE_HEIGHT,
|
||||
};
|
||||
use rln::circuit::{circom_from_folder, vk_from_folder, Fr, TEST_TREE_HEIGHT};
|
||||
use rln::hashers::{hash_to_field, poseidon_hash};
|
||||
use rln::poseidon_tree::PoseidonTree;
|
||||
use rln::protocol::*;
|
||||
@@ -13,65 +11,9 @@ mod test {
|
||||
|
||||
type ConfigOf<T> = <T as ZerokitMerkleTree>::Config;
|
||||
|
||||
// Input generated with https://github.com/oskarth/zk-kit/commit/b6a872f7160c7c14e10a0ea40acab99cbb23c9a8
|
||||
const WITNESS_JSON_20: &str = r#"
|
||||
{
|
||||
"externalNullifier": "21074405743803627666274838159589343934394162804826017440941339048886754734203",
|
||||
"identityPathIndex": [
|
||||
1,
|
||||
1,
|
||||
1,
|
||||
0,
|
||||
1,
|
||||
0,
|
||||
1,
|
||||
0,
|
||||
1,
|
||||
0,
|
||||
1,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
1,
|
||||
1,
|
||||
1,
|
||||
0
|
||||
],
|
||||
"identitySecret": "2301650865650889795878889082892690584512243988708213561328369865554257051708",
|
||||
"messageId": "1",
|
||||
"pathElements": [
|
||||
"14082964758224722211945379872337797638951236517417253447686770846170014042825",
|
||||
"6628418579821163687428454604867534487917867918886059133241840211975892987309",
|
||||
"12745863228198753394445659605634840709296716381893463421165313830643281758511",
|
||||
"56118267389743063830320351452083247040583061493621478539311100137113963555",
|
||||
"3648731943306935051357703221473866306053186513730785325303257057776816073765",
|
||||
"10548621390442503192989374711060717107954536293658152583621924810330521179016",
|
||||
"11741160669079729961275351458682156164905457324981803454515784688429276743441",
|
||||
"17165464309215350864730477596846156251863702878546777829650812432906796008534",
|
||||
"18947162586829418653666557598416458949428989734998924978331450666032720066913",
|
||||
"8809427088917589399897132358419395928548406347152047718919154153577297139202",
|
||||
"6261460226929242970747566981077801929281729646713842579109271945192964422300",
|
||||
"13871468675790284383809887052382100311103716176061564908030808887079542722597",
|
||||
"10413964486611723004584705484327518190402370933255450052832412709168190985805",
|
||||
"3978387560092078849178760154060822400741873818692524912249877867958842934383",
|
||||
"14014915591348694328771517896715085647041518432952027841088176673715002508448",
|
||||
"17680675606519345547327984724173632294904524423937145835611954334756161077843",
|
||||
"17107175244885276119916848057745382329169223109661217238296871427531065458152",
|
||||
"18326186549441826262593357123467931475982067066825042001499291800252145875109",
|
||||
"7043961192177345916232559778383741091053414803377017307095275172896944935996",
|
||||
"2807630271073553218355393059254209097448243975722083008310815929736065268921"
|
||||
],
|
||||
"userMessageLimit": "100",
|
||||
"x": "20645213238265527935869146898028115621427162613172918400241870500502509785943"
|
||||
}
|
||||
"#;
|
||||
|
||||
#[test]
|
||||
// We test Merkle tree generation, proofs and verification
|
||||
fn test_merkle_proof() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let leaf_index = 3;
|
||||
|
||||
// generate identity
|
||||
@@ -82,7 +24,7 @@ mod test {
|
||||
// generate merkle tree
|
||||
let default_leaf = Fr::from(0);
|
||||
let mut tree = PoseidonTree::new(
|
||||
tree_height,
|
||||
TEST_TREE_HEIGHT,
|
||||
default_leaf,
|
||||
ConfigOf::<PoseidonTree>::default(),
|
||||
)
|
||||
@@ -92,141 +34,49 @@ mod test {
|
||||
// We check correct computation of the root
|
||||
let root = tree.root();
|
||||
|
||||
if TEST_TREE_HEIGHT == 20 {
|
||||
assert_eq!(
|
||||
root,
|
||||
BigInt([
|
||||
4939322235247991215,
|
||||
5110804094006647505,
|
||||
4427606543677101242,
|
||||
910933464535675827
|
||||
])
|
||||
.into()
|
||||
);
|
||||
}
|
||||
assert_eq!(
|
||||
root,
|
||||
BigInt([
|
||||
4939322235247991215,
|
||||
5110804094006647505,
|
||||
4427606543677101242,
|
||||
910933464535675827
|
||||
])
|
||||
.into()
|
||||
);
|
||||
|
||||
let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
|
||||
let path_elements = merkle_proof.get_path_elements();
|
||||
let identity_path_index = merkle_proof.get_path_index();
|
||||
|
||||
// We check correct computation of the path and indexes
|
||||
// These values refer to TEST_TREE_HEIGHT == 16
|
||||
let mut expected_path_elements = vec![
|
||||
str_to_fr(
|
||||
"0x0000000000000000000000000000000000000000000000000000000000000000",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
];
|
||||
let expected_path_elements: Vec<Fr> = [
|
||||
"0x0000000000000000000000000000000000000000000000000000000000000000",
|
||||
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
|
||||
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
|
||||
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
|
||||
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
|
||||
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
|
||||
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
|
||||
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
|
||||
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
|
||||
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
|
||||
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
|
||||
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
|
||||
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
|
||||
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
|
||||
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
|
||||
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
|
||||
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
|
||||
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
|
||||
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
|
||||
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
|
||||
]
|
||||
.map(|e| str_to_fr(e, 16).unwrap())
|
||||
.to_vec();
|
||||
|
||||
let mut expected_identity_path_index: Vec<u8> =
|
||||
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
|
||||
|
||||
// We add the remaining elements for the case TEST_TREE_HEIGHT = 20
|
||||
if TEST_TREE_HEIGHT == 20 {
|
||||
expected_path_elements.append(&mut vec![
|
||||
str_to_fr(
|
||||
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
]);
|
||||
expected_identity_path_index.append(&mut vec![0, 0, 0, 0]);
|
||||
}
|
||||
|
||||
if TEST_TREE_HEIGHT == 20 {
|
||||
expected_path_elements.append(&mut vec![str_to_fr(
|
||||
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
|
||||
16,
|
||||
)
|
||||
.unwrap()]);
|
||||
expected_identity_path_index.append(&mut vec![0]);
|
||||
}
|
||||
let expected_identity_path_index: Vec<u8> =
|
||||
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
|
||||
|
||||
assert_eq!(path_elements, expected_path_elements);
|
||||
assert_eq!(identity_path_index, expected_identity_path_index);
|
||||
@@ -235,34 +85,8 @@ mod test {
|
||||
assert!(tree.verify(&rate_commitment, &merkle_proof).unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
// We test RLN proof generation and verification
|
||||
fn test_witness_from_json() {
|
||||
// We generate all relevant keys
|
||||
let proving_key = zkey_from_folder(TEST_RESOURCES_FOLDER).unwrap();
|
||||
let verification_key = vk_from_folder(TEST_RESOURCES_FOLDER).unwrap();
|
||||
let builder = circom_from_folder(TEST_RESOURCES_FOLDER).unwrap();
|
||||
|
||||
// We compute witness from the json input example
|
||||
let witness_json = WITNESS_JSON_20;
|
||||
let rln_witness = rln_witness_from_json(witness_json).unwrap();
|
||||
|
||||
// Let's generate a zkSNARK proof
|
||||
let proof = generate_proof(builder, &proving_key, &rln_witness).unwrap();
|
||||
let proof_values = proof_values_from_witness(&rln_witness).unwrap();
|
||||
|
||||
// Let's verify the proof
|
||||
let verified = verify_proof(&verification_key, &proof, &proof_values);
|
||||
|
||||
assert!(verified.unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
// We test RLN proof generation and verification
|
||||
fn test_end_to_end() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
fn get_test_witness() -> RLNWitnessInput {
|
||||
let leaf_index = 3;
|
||||
|
||||
// Generate identity pair
|
||||
let (identity_secret_hash, id_commitment) = keygen();
|
||||
let user_message_limit = Fr::from(100);
|
||||
@@ -271,7 +95,7 @@ mod test {
|
||||
//// generate merkle tree
|
||||
let default_leaf = Fr::from(0);
|
||||
let mut tree = PoseidonTree::new(
|
||||
tree_height,
|
||||
TEST_TREE_HEIGHT,
|
||||
default_leaf,
|
||||
ConfigOf::<PoseidonTree>::default(),
|
||||
)
|
||||
@@ -288,7 +112,7 @@ mod test {
|
||||
let rln_identifier = hash_to_field(b"test-rln-identifier");
|
||||
let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);
|
||||
|
||||
let rln_witness: RLNWitnessInput = rln_witness_from_values(
|
||||
rln_witness_from_values(
|
||||
identity_secret_hash,
|
||||
&merkle_proof,
|
||||
x,
|
||||
@@ -296,17 +120,50 @@ mod test {
|
||||
user_message_limit,
|
||||
Fr::from(1),
|
||||
)
|
||||
.unwrap();
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
#[test]
|
||||
// We test RLN proof generation and verification
|
||||
fn test_witness_from_json() {
|
||||
// We generate all relevant keys
|
||||
let proving_key = zkey_from_folder(TEST_RESOURCES_FOLDER).unwrap();
|
||||
let verification_key = vk_from_folder(TEST_RESOURCES_FOLDER).unwrap();
|
||||
let builder = circom_from_folder(TEST_RESOURCES_FOLDER).unwrap();
|
||||
let proving_key = zkey_from_folder().unwrap();
|
||||
let verification_key = vk_from_folder().unwrap();
|
||||
let builder = circom_from_folder().unwrap();
|
||||
|
||||
// We compute witness from the json input
|
||||
let rln_witness = get_test_witness();
|
||||
let rln_witness_json = rln_witness_to_json(&rln_witness).unwrap();
|
||||
let rln_witness_deser = rln_witness_from_json(rln_witness_json).unwrap();
|
||||
assert_eq!(rln_witness_deser, rln_witness);
|
||||
|
||||
// Let's generate a zkSNARK proof
|
||||
let proof = generate_proof(builder, &proving_key, &rln_witness).unwrap();
|
||||
let proof = generate_proof(builder, &proving_key, &rln_witness_deser).unwrap();
|
||||
let proof_values = proof_values_from_witness(&rln_witness_deser).unwrap();
|
||||
|
||||
let proof_values = proof_values_from_witness(&rln_witness).unwrap();
|
||||
// Let's verify the proof
|
||||
let verified = verify_proof(&verification_key, &proof, &proof_values);
|
||||
|
||||
assert!(verified.unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
// We test RLN proof generation and verification
|
||||
fn test_end_to_end() {
|
||||
let rln_witness = get_test_witness();
|
||||
let rln_witness_json = rln_witness_to_json(&rln_witness).unwrap();
|
||||
let rln_witness_deser = rln_witness_from_json(rln_witness_json).unwrap();
|
||||
assert_eq!(rln_witness_deser, rln_witness);
|
||||
|
||||
// We generate all relevant keys
|
||||
let proving_key = zkey_from_folder().unwrap();
|
||||
let verification_key = vk_from_folder().unwrap();
|
||||
let builder = circom_from_folder().unwrap();
|
||||
|
||||
// Let's generate a zkSNARK proof
|
||||
let proof = generate_proof(builder, &proving_key, &rln_witness_deser).unwrap();
|
||||
|
||||
let proof_values = proof_values_from_witness(&rln_witness_deser).unwrap();
|
||||
|
||||
// Let's verify the proof
|
||||
let success = verify_proof(&verification_key, &proof, &proof_values).unwrap();
|
||||
@@ -316,11 +173,13 @@ mod test {
|
||||
|
||||
#[test]
|
||||
fn test_witness_serialization() {
|
||||
// We test witness JSON serialization
|
||||
let rln_witness = get_test_witness();
|
||||
let rln_witness_json = rln_witness_to_json(&rln_witness).unwrap();
|
||||
let rln_witness_deser = rln_witness_from_json(rln_witness_json).unwrap();
|
||||
assert_eq!(rln_witness_deser, rln_witness);
|
||||
|
||||
// We test witness serialization
|
||||
let witness_json: &str = WITNESS_JSON_20;
|
||||
|
||||
let rln_witness = rln_witness_from_json(witness_json).unwrap();
|
||||
|
||||
let ser = serialize_witness(&rln_witness).unwrap();
|
||||
let (deser, _) = deserialize_witness(&ser).unwrap();
|
||||
assert_eq!(rln_witness, deser);
|
||||
|
||||
@@ -3,50 +3,56 @@ mod test {
|
||||
use ark_ff::BigInt;
|
||||
use ark_std::{rand::thread_rng, UniformRand};
|
||||
use rand::Rng;
|
||||
use rln::circuit::{Fr, TEST_RESOURCES_FOLDER, TEST_TREE_HEIGHT};
|
||||
use rln::circuit::{Fr, TEST_TREE_HEIGHT};
|
||||
use rln::hashers::{hash_to_field, poseidon_hash as utils_poseidon_hash, ROUND_PARAMS};
|
||||
use rln::protocol::{compute_tree_root, deserialize_identity_tuple};
|
||||
use rln::public::{hash as public_hash, poseidon_hash as public_poseidon_hash, RLN};
|
||||
use rln::utils::*;
|
||||
use serde_json::json;
|
||||
use std::io::Cursor;
|
||||
|
||||
#[test]
|
||||
// This test is similar to the one in lib, but uses only public API
|
||||
fn test_merkle_proof() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let leaf_index = 3;
|
||||
let user_message_limit = 1;
|
||||
|
||||
let input_buffer =
|
||||
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
|
||||
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
|
||||
let mut rln = RLN::new(TEST_TREE_HEIGHT, generate_input_buffer()).unwrap();
|
||||
|
||||
// generate identity
|
||||
let identity_secret_hash = hash_to_field(b"test-merkle-proof");
|
||||
let id_commitment = utils_poseidon_hash(&vec![identity_secret_hash]);
|
||||
let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit.into()]);
|
||||
|
||||
// check that the list of empty leaf indices is initially empty
|
||||
let mut buffer = Cursor::new(Vec::<u8>::new());
|
||||
rln.get_empty_leaves_indices(&mut buffer).unwrap();
|
||||
let idxs = bytes_le_to_vec_usize(&buffer.into_inner()).unwrap();
|
||||
assert!(idxs.is_empty());
|
||||
|
||||
// We pass rate_commitment as Read buffer to RLN's set_leaf
|
||||
let mut buffer = Cursor::new(fr_to_bytes_le(&rate_commitment));
|
||||
rln.set_leaf(leaf_index, &mut buffer).unwrap();
|
||||
|
||||
// check that the leaves before leaf_index are still set to zero (reported as empty)
|
||||
let mut buffer = Cursor::new(Vec::<u8>::new());
|
||||
rln.get_empty_leaves_indices(&mut buffer).unwrap();
|
||||
let idxs = bytes_le_to_vec_usize(&buffer.into_inner()).unwrap();
|
||||
assert_eq!(idxs, [0, 1, 2]);
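// Setting leaf 3 advances next_index to 4, so the never-written indices 0, 1 and 2
// are reported as empty.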
|
||||
|
||||
// We check correct computation of the root
|
||||
let mut buffer = Cursor::new(Vec::<u8>::new());
|
||||
rln.get_root(&mut buffer).unwrap();
|
||||
let (root, _) = bytes_le_to_fr(&buffer.into_inner());
|
||||
|
||||
if TEST_TREE_HEIGHT == 20 {
|
||||
assert_eq!(
|
||||
root,
|
||||
Fr::from(BigInt([
|
||||
17110646155607829651,
|
||||
5040045984242729823,
|
||||
6965416728592533086,
|
||||
2328960363755461975
|
||||
]))
|
||||
);
|
||||
}
|
||||
assert_eq!(
|
||||
root,
|
||||
Fr::from(BigInt([
|
||||
17110646155607829651,
|
||||
5040045984242729823,
|
||||
6965416728592533086,
|
||||
2328960363755461975
|
||||
]))
|
||||
);
|
||||
|
||||
// We check correct computation of merkle proof
|
||||
let mut buffer = Cursor::new(Vec::<u8>::new());
|
||||
@@ -57,126 +63,60 @@ mod test {
|
||||
let (identity_path_index, _) = bytes_le_to_vec_u8(&buffer_inner[read..].to_vec()).unwrap();
|
||||
|
||||
// We check correct computation of the path and indexes
|
||||
let mut expected_path_elements = vec![
|
||||
str_to_fr(
|
||||
"0x0000000000000000000000000000000000000000000000000000000000000000",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
];
|
||||
let expected_path_elements: Vec<Fr> = [
|
||||
"0x0000000000000000000000000000000000000000000000000000000000000000",
|
||||
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
|
||||
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
|
||||
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
|
||||
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
|
||||
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
|
||||
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
|
||||
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
|
||||
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
|
||||
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
|
||||
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
|
||||
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
|
||||
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
|
||||
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
|
||||
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
|
||||
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
|
||||
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
|
||||
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
|
||||
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
|
||||
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
|
||||
]
|
||||
.map(|e| str_to_fr(e, 16).unwrap())
|
||||
.to_vec();
|
||||
|
||||
let mut expected_identity_path_index: Vec<u8> =
|
||||
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
|
||||
|
||||
// We add the remaining elements for the case TEST_TREE_HEIGHT = 20
|
||||
if TEST_TREE_HEIGHT == 20 {
|
||||
expected_path_elements.append(&mut vec![
|
||||
str_to_fr(
|
||||
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
]);
|
||||
expected_identity_path_index.append(&mut vec![0, 0, 0, 0]);
|
||||
}
|
||||
|
||||
if TEST_TREE_HEIGHT == 20 {
|
||||
expected_path_elements.append(&mut vec![str_to_fr(
|
||||
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
|
||||
16,
|
||||
)
|
||||
.unwrap()]);
|
||||
expected_identity_path_index.append(&mut vec![0]);
|
||||
}
|
||||
let expected_identity_path_index: Vec<u8> =
|
||||
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
|
||||
|
||||
assert_eq!(path_elements, expected_path_elements);
|
||||
assert_eq!(identity_path_index, expected_identity_path_index);
|
||||
|
||||
// check subtree root computation for leaf 0 at every level up to the root
|
||||
let l_idx = 0;
|
||||
for n in (1..=TEST_TREE_HEIGHT).rev() {
|
||||
let idx_l = l_idx * (1 << (TEST_TREE_HEIGHT - n));
|
||||
let idx_r = (l_idx + 1) * (1 << (TEST_TREE_HEIGHT - n));
|
||||
let idx_sr = idx_l;
|
||||
|
||||
let mut buffer = Cursor::new(Vec::<u8>::new());
|
||||
rln.get_subtree_root(n, idx_l, &mut buffer).unwrap();
|
||||
let (prev_l, _) = bytes_le_to_fr(&buffer.into_inner());
|
||||
|
||||
let mut buffer = Cursor::new(Vec::<u8>::new());
|
||||
rln.get_subtree_root(n, idx_r, &mut buffer).unwrap();
|
||||
let (prev_r, _) = bytes_le_to_fr(&buffer.into_inner());
|
||||
|
||||
let mut buffer = Cursor::new(Vec::<u8>::new());
|
||||
rln.get_subtree_root(n - 1, idx_sr, &mut buffer).unwrap();
|
||||
let (subroot, _) = bytes_le_to_fr(&buffer.into_inner());
|
||||
|
||||
let res = utils_poseidon_hash(&[prev_l, prev_r]);
|
||||
assert_eq!(res, subroot);
|
||||
}
|
||||
|
||||
// We double-check that the proof computed from the public API is correct
|
||||
let root_from_proof = compute_tree_root(
|
||||
&identity_secret_hash,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "zerokit_utils"
|
||||
version = "0.4.3"
|
||||
version = "0.5.0"
|
||||
edition = "2021"
|
||||
license = "MIT OR Apache-2.0"
|
||||
description = "Various utilities for Zerokit"
|
||||
@@ -15,9 +15,11 @@ bench = false
|
||||
ark-ff = { version = "=0.4.1", default-features = false, features = ["asm"] }
|
||||
num-bigint = { version = "=0.4.3", default-features = false, features = ["rand"] }
|
||||
color-eyre = "=0.6.2"
|
||||
pmtree = { package = "pmtree", version = "=2.0.0", optional = true}
|
||||
pmtree = { package = "vacp2p_pmtree", version = "=2.0.2", optional = true}
|
||||
sled = "=0.34.7"
|
||||
serde = "=1.0.163"
|
||||
lazy_static = "1.4.0"
|
||||
hex = "0.4"
|
||||
|
||||
[dev-dependencies]
|
||||
ark-bn254 = "=0.4.0"
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
use criterion::{criterion_group, criterion_main, Criterion};
|
||||
use hex_literal::hex;
|
||||
use lazy_static::lazy_static;
|
||||
use std::{fmt::Display, str::FromStr};
|
||||
use tiny_keccak::{Hasher as _, Keccak};
|
||||
use zerokit_utils::{
|
||||
@@ -45,22 +46,24 @@ impl FromStr for TestFr {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn optimal_merkle_tree_benchmark(c: &mut Criterion) {
|
||||
let mut tree =
|
||||
OptimalMerkleTree::<Keccak256>::new(2, TestFr([0; 32]), OptimalMerkleConfig::default())
|
||||
.unwrap();
|
||||
|
||||
let leaves = [
|
||||
lazy_static! {
|
||||
static ref LEAVES: [TestFr; 4] = [
|
||||
hex!("0000000000000000000000000000000000000000000000000000000000000001"),
|
||||
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
|
||||
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
|
||||
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
|
||||
]
|
||||
.map(|x| TestFr(x));
|
||||
.map(TestFr);
|
||||
}
|
||||
|
||||
pub fn optimal_merkle_tree_benchmark(c: &mut Criterion) {
|
||||
let mut tree =
|
||||
OptimalMerkleTree::<Keccak256>::new(2, TestFr([0; 32]), OptimalMerkleConfig::default())
|
||||
.unwrap();
|
||||
|
||||
c.bench_function("OptimalMerkleTree::set", |b| {
|
||||
b.iter(|| {
|
||||
tree.set(0, leaves[0]).unwrap();
|
||||
tree.set(0, LEAVES[0]).unwrap();
|
||||
})
|
||||
});
|
||||
|
||||
@@ -72,7 +75,7 @@ pub fn optimal_merkle_tree_benchmark(c: &mut Criterion) {
|
||||
|
||||
c.bench_function("OptimalMerkleTree::override_range", |b| {
|
||||
b.iter(|| {
|
||||
tree.override_range(0, leaves, [0, 1, 2, 3]).unwrap();
|
||||
tree.override_range(0, *LEAVES, [0, 1, 2, 3]).unwrap();
|
||||
})
|
||||
});
|
||||
|
||||
@@ -87,23 +90,28 @@ pub fn optimal_merkle_tree_benchmark(c: &mut Criterion) {
|
||||
tree.get(0).unwrap();
|
||||
})
|
||||
});
|
||||
|
||||
// check the intermediate node getter, which requires additional computation of the subtree root index
|
||||
c.bench_function("OptimalMerkleTree::get_subtree_root", |b| {
|
||||
b.iter(|| {
|
||||
tree.get_subtree_root(1, 0).unwrap();
|
||||
})
|
||||
});
|
||||
|
||||
c.bench_function("OptimalMerkleTree::get_empty_leaves_indices", |b| {
|
||||
b.iter(|| {
|
||||
tree.get_empty_leaves_indices();
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
pub fn full_merkle_tree_benchmark(c: &mut Criterion) {
|
||||
let mut tree =
|
||||
FullMerkleTree::<Keccak256>::new(2, TestFr([0; 32]), FullMerkleConfig::default()).unwrap();
|
||||
|
||||
let leaves = [
|
||||
hex!("0000000000000000000000000000000000000000000000000000000000000001"),
|
||||
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
|
||||
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
|
||||
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
|
||||
]
|
||||
.map(|x| TestFr(x));
|
||||
|
||||
c.bench_function("FullMerkleTree::set", |b| {
|
||||
b.iter(|| {
|
||||
tree.set(0, leaves[0]).unwrap();
|
||||
tree.set(0, LEAVES[0]).unwrap();
|
||||
})
|
||||
});
|
||||
|
||||
@@ -115,7 +123,7 @@ pub fn full_merkle_tree_benchmark(c: &mut Criterion) {
|
||||
|
||||
c.bench_function("FullMerkleTree::override_range", |b| {
|
||||
b.iter(|| {
|
||||
tree.override_range(0, leaves, [0, 1, 2, 3]).unwrap();
|
||||
tree.override_range(0, *LEAVES, [0, 1, 2, 3]).unwrap();
|
||||
})
|
||||
});
|
||||
|
||||
@@ -130,6 +138,19 @@ pub fn full_merkle_tree_benchmark(c: &mut Criterion) {
|
||||
tree.get(0).unwrap();
|
||||
})
|
||||
});
|
||||
|
||||
// check the intermediate node getter, which requires additional computation of the subtree root index
|
||||
c.bench_function("FullMerkleTree::get_subtree_root", |b| {
|
||||
b.iter(|| {
|
||||
tree.get_subtree_root(1, 0).unwrap();
|
||||
})
|
||||
});
|
||||
|
||||
c.bench_function("FullMerkleTree::get_empty_leaves_indices", |b| {
|
||||
b.iter(|| {
|
||||
tree.get_empty_leaves_indices();
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
criterion_group!(
|
||||
|
||||
@@ -26,6 +26,10 @@ pub struct FullMerkleTree<H: Hasher> {
|
||||
/// The tree nodes
|
||||
nodes: Vec<H::Fr>,
|
||||
|
||||
/// Per-leaf flags for leaves up to next_index:
/// 0 if the leaf is empty, 1 if it has been set.
|
||||
cached_leaves_indices: Vec<u8>,
|
||||
|
||||
// The next available (i.e., never used) tree index. Equivalently, the number of leaves added to the tree
|
||||
// (deletions leave next_index unchanged)
|
||||
next_index: usize,
|
||||
@@ -96,6 +100,7 @@ where
|
||||
depth,
|
||||
cached_nodes,
|
||||
nodes,
|
||||
cached_leaves_indices: vec![0; 1 << depth],
|
||||
next_index,
|
||||
metadata: Vec::new(),
|
||||
})
|
||||
@@ -116,7 +121,7 @@ where
|
||||
}
|
||||
|
||||
// Returns the total number of leaves set
|
||||
fn leaves_set(&mut self) -> usize {
|
||||
fn leaves_set(&self) -> usize {
|
||||
self.next_index
|
||||
}
|
||||
|
||||
@@ -141,6 +146,42 @@ where
|
||||
Ok(self.nodes[self.capacity() + leaf - 1])
|
||||
}
|
||||
|
||||
fn get_subtree_root(&self, n: usize, index: usize) -> Result<H::Fr> {
|
||||
if n > self.depth() {
|
||||
return Err(Report::msg("level exceeds depth size"));
|
||||
}
|
||||
if index >= self.capacity() {
|
||||
return Err(Report::msg("index exceeds set size"));
|
||||
}
|
||||
if n == 0 {
|
||||
Ok(self.root())
|
||||
} else if n == self.depth {
|
||||
self.get(index)
|
||||
} else {
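// Walk up the flat node array from the leaf's slot, one parent at a time,
// until level n is reached and that node's value is returned.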
|
||||
let mut idx = self.capacity() + index - 1;
|
||||
let mut nd = self.depth;
|
||||
loop {
|
||||
let parent = self.parent(idx).unwrap();
|
||||
nd -= 1;
|
||||
if nd == n {
|
||||
return Ok(self.nodes[parent]);
|
||||
} else {
|
||||
idx = parent;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
fn get_empty_leaves_indices(&self) -> Vec<usize> {
|
||||
self.cached_leaves_indices
|
||||
.iter()
|
||||
.take(self.next_index)
|
||||
.enumerate()
|
||||
.filter(|&(_, &v)| v == 0u8)
|
||||
.map(|(idx, _)| idx)
|
||||
.collect()
|
||||
}
|
||||
|
||||
// Sets tree nodes, starting from start index
|
||||
// Function specific to the FullMerkleTree implementation
|
||||
fn set_range<I: IntoIterator<Item = FrOf<Self::Hasher>>>(
|
||||
@@ -158,6 +199,7 @@ where
|
||||
}
|
||||
hashes.into_iter().for_each(|hash| {
|
||||
self.nodes[index + count] = hash;
|
||||
self.cached_leaves_indices[start + count] = 1;
|
||||
count += 1;
|
||||
});
|
||||
if count != 0 {
|
||||
@@ -167,37 +209,36 @@ where
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn override_range<I, J>(&mut self, start: usize, leaves: I, to_remove_indices: J) -> Result<()>
|
||||
fn override_range<I, J>(&mut self, start: usize, leaves: I, indices: J) -> Result<()>
|
||||
where
|
||||
I: IntoIterator<Item = FrOf<Self::Hasher>>,
|
||||
J: IntoIterator<Item = usize>,
|
||||
{
|
||||
let index = self.capacity() + start - 1;
|
||||
let mut count = 0;
|
||||
let leaves = leaves.into_iter().collect::<Vec<_>>();
|
||||
let to_remove_indices = to_remove_indices.into_iter().collect::<Vec<_>>();
|
||||
// first count number of hashes, and check that they fit in the tree
|
||||
// then insert into the tree
|
||||
if leaves.len() + start - to_remove_indices.len() > self.capacity() {
|
||||
return Err(Report::msg("provided hashes do not fit in the tree"));
|
||||
let indices = indices.into_iter().collect::<Vec<_>>();
|
||||
let min_index = *indices.first().unwrap();
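// Note: this assumes `indices` is non-empty and sorted ascending, so its first
// entry is the lowest index touched by this call.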
|
||||
let leaves_vec = leaves.into_iter().collect::<Vec<_>>();
|
||||
|
||||
let max_index = start + leaves_vec.len();
|
||||
|
||||
let mut set_values = vec![Self::Hasher::default_leaf(); max_index - min_index];
|
||||
|
||||
for i in min_index..start {
|
||||
if !indices.contains(&i) {
|
||||
let value = self.get(i)?;
|
||||
set_values[i - min_index] = value;
|
||||
}
|
||||
}
|
||||
|
||||
// remove leaves
|
||||
for i in &to_remove_indices {
|
||||
self.delete(*i)?;
|
||||
for i in 0..leaves_vec.len() {
|
||||
set_values[start - min_index + i] = leaves_vec[i];
|
||||
}
|
||||
|
||||
// insert new leaves
|
||||
for hash in leaves {
|
||||
self.nodes[index + count] = hash;
|
||||
count += 1;
|
||||
for i in indices {
|
||||
self.cached_leaves_indices[i] = 0;
|
||||
}
|
||||
|
||||
if count != 0 {
|
||||
self.update_nodes(index, index + (count - 1))?;
|
||||
self.next_index = max(self.next_index, start + count - to_remove_indices.len());
|
||||
}
|
||||
Ok(())
|
||||
self.set_range(start, set_values)
|
||||
.map_err(|e| Report::msg(e.to_string()))
|
||||
}
|
||||
|
||||
// Sets a leaf at the next available index
|
||||
@@ -211,6 +252,7 @@ where
|
||||
// We reset the leaf only if we previously set a leaf at that index
|
||||
if index < self.next_index {
|
||||
self.set(index, H::default_leaf())?;
|
||||
self.cached_leaves_indices[index] = 0;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -47,14 +47,16 @@ pub trait ZerokitMerkleTree {
|
||||
Self: Sized;
|
||||
fn depth(&self) -> usize;
|
||||
fn capacity(&self) -> usize;
|
||||
fn leaves_set(&mut self) -> usize;
|
||||
fn leaves_set(&self) -> usize;
|
||||
fn root(&self) -> FrOf<Self::Hasher>;
|
||||
fn compute_root(&mut self) -> Result<FrOf<Self::Hasher>>;
|
||||
fn get_subtree_root(&self, n: usize, index: usize) -> Result<FrOf<Self::Hasher>>;
|
||||
fn set(&mut self, index: usize, leaf: FrOf<Self::Hasher>) -> Result<()>;
|
||||
fn set_range<I>(&mut self, start: usize, leaves: I) -> Result<()>
|
||||
where
|
||||
I: IntoIterator<Item = FrOf<Self::Hasher>>;
|
||||
fn get(&self, index: usize) -> Result<FrOf<Self::Hasher>>;
|
||||
fn get_empty_leaves_indices(&self) -> Vec<usize>;
|
||||
fn override_range<I, J>(&mut self, start: usize, leaves: I, to_remove_indices: J) -> Result<()>
|
||||
where
|
||||
I: IntoIterator<Item = FrOf<Self::Hasher>>,
|
||||
|
||||
@@ -27,6 +27,10 @@ where
|
||||
/// The tree nodes
|
||||
nodes: HashMap<(usize, usize), H::Fr>,
|
||||
|
||||
/// Per-leaf flags for leaves up to next_index:
/// 0 if the leaf is empty, 1 if it has been set.
|
||||
cached_leaves_indices: Vec<u8>,
|
||||
|
||||
// The next available (i.e., never used) tree index. Equivalently, the number of leaves added to the tree
|
||||
// (deletions leave next_index unchanged)
|
||||
next_index: usize,
|
||||
@@ -78,6 +82,7 @@ where
|
||||
cached_nodes: cached_nodes.clone(),
|
||||
depth,
|
||||
nodes: HashMap::new(),
|
||||
cached_leaves_indices: vec![0; 1 << depth],
|
||||
next_index: 0,
|
||||
metadata: Vec::new(),
|
||||
})
|
||||
@@ -98,7 +103,7 @@ where
|
||||
}
|
||||
|
||||
// Returns the total number of leaves set
|
||||
fn leaves_set(&mut self) -> usize {
|
||||
fn leaves_set(&self) -> usize {
|
||||
self.next_index
|
||||
}
|
||||
|
||||
@@ -108,6 +113,22 @@ where
|
||||
self.get_node(0, 0)
|
||||
}
|
||||
|
||||
fn get_subtree_root(&self, n: usize, index: usize) -> Result<H::Fr> {
|
||||
if n > self.depth() {
|
||||
return Err(Report::msg("level exceeds depth size"));
|
||||
}
|
||||
if index >= self.capacity() {
|
||||
return Err(Report::msg("index exceeds set size"));
|
||||
}
|
||||
if n == 0 {
|
||||
Ok(self.root())
|
||||
} else if n == self.depth {
|
||||
self.get(index)
|
||||
} else {
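// Shifting a leaf index right by (depth - n) bits gives the index of its
// ancestor at level n in the sparse node map.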
|
||||
Ok(self.get_node(n, index >> (self.depth - n)))
|
||||
}
|
||||
}
|
||||
|
||||
// Sets a leaf at the specified tree index
|
||||
fn set(&mut self, index: usize, leaf: H::Fr) -> Result<()> {
|
||||
if index >= self.capacity() {
|
||||
@@ -116,6 +137,7 @@ where
|
||||
self.nodes.insert((self.depth, index), leaf);
|
||||
self.recalculate_from(index)?;
|
||||
self.next_index = max(self.next_index, index + 1);
|
||||
self.cached_leaves_indices[index] = 1;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -127,6 +149,16 @@ where
|
||||
Ok(self.get_node(self.depth, index))
|
||||
}
|
||||
|
||||
fn get_empty_leaves_indices(&self) -> Vec<usize> {
|
||||
self.cached_leaves_indices
|
||||
.iter()
|
||||
.take(self.next_index)
|
||||
.enumerate()
|
||||
.filter(|&(_, &v)| v == 0u8)
|
||||
.map(|(idx, _)| idx)
|
||||
.collect()
|
||||
}
|
||||
|
||||
// Sets multiple leaves from the specified tree index
|
||||
fn set_range<I: IntoIterator<Item = H::Fr>>(&mut self, start: usize, leaves: I) -> Result<()> {
|
||||
let leaves = leaves.into_iter().collect::<Vec<_>>();
|
||||
@@ -136,40 +168,43 @@ where
|
||||
}
|
||||
for (i, leaf) in leaves.iter().enumerate() {
|
||||
self.nodes.insert((self.depth, start + i), *leaf);
|
||||
self.cached_leaves_indices[start + i] = 1;
|
||||
self.recalculate_from(start + i)?;
|
||||
}
|
||||
self.next_index = max(self.next_index, start + leaves.len());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn override_range<I, J>(&mut self, start: usize, leaves: I, to_remove_indices: J) -> Result<()>
|
||||
fn override_range<I, J>(&mut self, start: usize, leaves: I, indices: J) -> Result<()>
|
||||
where
|
||||
I: IntoIterator<Item = FrOf<Self::Hasher>>,
|
||||
J: IntoIterator<Item = usize>,
|
||||
{
|
||||
let leaves = leaves.into_iter().collect::<Vec<_>>();
|
||||
let to_remove_indices = to_remove_indices.into_iter().collect::<Vec<_>>();
|
||||
// check if the range is valid
|
||||
if leaves.len() + start - to_remove_indices.len() > self.capacity() {
|
||||
return Err(Report::msg("provided range exceeds set size"));
|
||||
let indices = indices.into_iter().collect::<Vec<_>>();
|
||||
let min_index = *indices.first().unwrap();
|
||||
let leaves_vec = leaves.into_iter().collect::<Vec<_>>();
|
||||
|
||||
let max_index = start + leaves_vec.len();
|
||||
|
||||
let mut set_values = vec![Self::Hasher::default_leaf(); max_index - min_index];
|
||||
|
||||
for i in min_index..start {
|
||||
if !indices.contains(&i) {
|
||||
let value = self.get_leaf(i);
|
||||
set_values[i - min_index] = value;
|
||||
}
|
||||
}
|
||||
|
||||
// remove leaves
|
||||
for i in &to_remove_indices {
|
||||
self.delete(*i)?;
|
||||
for i in 0..leaves_vec.len() {
|
||||
set_values[start - min_index + i] = leaves_vec[i];
|
||||
}
|
||||
|
||||
// add leaves
|
||||
for (i, leaf) in leaves.iter().enumerate() {
|
||||
self.nodes.insert((self.depth, start + i), *leaf);
|
||||
self.recalculate_from(start + i)?;
|
||||
for i in indices {
|
||||
self.cached_leaves_indices[i] = 0;
|
||||
}
|
||||
|
||||
self.next_index = max(
|
||||
self.next_index,
|
||||
start + leaves.len() - to_remove_indices.len(),
|
||||
);
|
||||
Ok(())
|
||||
self.set_range(start, set_values)
|
||||
.map_err(|e| Report::msg(e.to_string()))
|
||||
}
|
||||
|
||||
// Sets a leaf at the next available index
|
||||
@@ -183,6 +218,7 @@ where
|
||||
// We reset the leaf only if we previously set a leaf at that index
|
||||
if index < self.next_index {
|
||||
self.set(index, H::default_leaf())?;
|
||||
self.cached_leaves_indices[index] = 0;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
@@ -266,6 +302,7 @@ where
|
||||
i >>= 1;
|
||||
depth -= 1;
|
||||
self.nodes.insert((depth, i), h);
|
||||
self.cached_leaves_indices[index] = 1;
|
||||
if depth == 0 {
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
// Tests adapted from https://github.com/worldcoin/semaphore-rs/blob/d462a4372f1fd9c27610f2acfe4841fab1d396aa/src/merkle_tree.rs
|
||||
#[cfg(test)]
|
||||
pub mod test {
|
||||
use std::{fmt::Display, str::FromStr};
|
||||
|
||||
use hex_literal::hex;
|
||||
use std::{fmt::Display, str::FromStr};
|
||||
use tiny_keccak::{Hasher as _, Keccak};
|
||||
use zerokit_utils::{
|
||||
FullMerkleConfig, FullMerkleTree, Hasher, OptimalMerkleConfig, OptimalMerkleTree,
|
||||
@@ -35,7 +34,7 @@ pub mod test {
|
||||
|
||||
impl Display for TestFr {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", String::from_utf8_lossy(self.0.as_slice()))
|
||||
write!(f, "{}", hex::encode(self.0.as_slice()))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -47,16 +46,28 @@ pub mod test {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<u32> for TestFr {
|
||||
fn from(value: u32) -> Self {
|
||||
let mut bytes: Vec<u8> = vec![0; 28];
|
||||
bytes.extend_from_slice(&value.to_be_bytes());
|
||||
TestFr(bytes.as_slice().try_into().unwrap())
|
||||
}
|
||||
}
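// For example, TestFr::from(1u32) produces the 32-byte big-endian value
// 0x00..01, matching the hex-literal leaves used elsewhere in these tests.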
|
||||
|
||||
const DEFAULT_DEPTH: usize = 2;
|
||||
|
||||
fn default_full_merkle_tree(depth: usize) -> FullMerkleTree<Keccak256> {
|
||||
FullMerkleTree::<Keccak256>::new(depth, TestFr([0; 32]), FullMerkleConfig::default())
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
fn default_optimal_merkle_tree(depth: usize) -> OptimalMerkleTree<Keccak256> {
|
||||
OptimalMerkleTree::<Keccak256>::new(depth, TestFr([0; 32]), OptimalMerkleConfig::default())
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_root() {
|
||||
let leaves = [
|
||||
hex!("0000000000000000000000000000000000000000000000000000000000000001"),
|
||||
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
|
||||
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
|
||||
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
|
||||
]
|
||||
.map(|x| TestFr(x));
|
||||
|
||||
let default_tree_root = TestFr(hex!(
|
||||
"b4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30"
|
||||
));
|
||||
@@ -67,42 +78,190 @@ pub mod test {
|
||||
hex!("222ff5e0b5877792c2bc1670e2ccd0c2c97cd7bb1672a57d598db05092d3d72c"),
|
||||
hex!("a9bb8c3f1f12e9aa903a50c47f314b57610a3ab32f2d463293f58836def38d36"),
|
||||
]
|
||||
.map(|x| TestFr(x));
|
||||
.map(TestFr);
|
||||
|
||||
let mut tree =
|
||||
FullMerkleTree::<Keccak256>::new(2, TestFr([0; 32]), FullMerkleConfig::default())
|
||||
.unwrap();
|
||||
let nof_leaves = 4;
|
||||
let leaves: Vec<TestFr> = (1..=nof_leaves as u32).map(TestFr::from).collect();
|
||||
|
||||
let mut tree = default_full_merkle_tree(DEFAULT_DEPTH);
|
||||
assert_eq!(tree.root(), default_tree_root);
|
||||
for i in 0..leaves.len() {
|
||||
for i in 0..nof_leaves {
|
||||
tree.set(i, leaves[i]).unwrap();
|
||||
assert_eq!(tree.root(), roots[i]);
|
||||
}
|
||||
|
||||
let mut tree =
|
||||
OptimalMerkleTree::<Keccak256>::new(2, TestFr([0; 32]), OptimalMerkleConfig::default())
|
||||
.unwrap();
|
||||
let mut tree = default_optimal_merkle_tree(DEFAULT_DEPTH);
|
||||
assert_eq!(tree.root(), default_tree_root);
|
||||
for i in 0..leaves.len() {
|
||||
for i in 0..nof_leaves {
|
||||
tree.set(i, leaves[i]).unwrap();
|
||||
assert_eq!(tree.root(), roots[i]);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_empty_leaves_indices() {
|
||||
let depth = 4;
|
||||
let nof_leaves: usize = 1 << (depth - 1);
|
||||
let leaves: Vec<TestFr> = (0..nof_leaves as u32).map(TestFr::from).collect();
|
||||
let leaves_2: Vec<TestFr> = (0u32..2).map(TestFr::from).collect();
|
||||
let leaves_4: Vec<TestFr> = (0u32..4).map(TestFr::from).collect();
|
||||
|
||||
let mut tree_full = default_full_merkle_tree(depth);
|
||||
let _ = tree_full.set_range(0, leaves.clone());
|
||||
assert!(tree_full.get_empty_leaves_indices().is_empty());
|
||||
|
||||
let mut vec_idxs = Vec::new();
|
||||
for i in 0..nof_leaves {
|
||||
vec_idxs.push(i);
|
||||
let _ = tree_full.delete(i);
|
||||
assert_eq!(tree_full.get_empty_leaves_indices(), vec_idxs);
|
||||
}
|
||||
|
||||
for i in (0..nof_leaves).rev() {
|
||||
vec_idxs.pop();
|
||||
let _ = tree_full.set(i, leaves[i]);
|
||||
assert_eq!(tree_full.get_empty_leaves_indices(), vec_idxs);
|
||||
}
|
||||
|
||||
// Check situation when the number of items to insert is less than the number of items to delete
|
||||
tree_full
|
||||
.override_range(0, leaves_2.clone(), [0, 1, 2, 3])
|
||||
.unwrap();
|
||||
|
||||
// check if the indexes for write and delete are the same
|
||||
tree_full
|
||||
.override_range(0, leaves_4.clone(), [0, 1, 2, 3])
|
||||
.unwrap();
|
||||
assert_eq!(tree_full.get_empty_leaves_indices(), vec![]);
|
||||
|
||||
// check if indexes for deletion are before indexes for overwriting
|
||||
tree_full
|
||||
.override_range(4, leaves_4.clone(), [0, 1, 2, 3])
|
||||
.unwrap();
|
||||
assert_eq!(tree_full.get_empty_leaves_indices(), vec![0, 1, 2, 3]);
|
||||
|
||||
// check if the indices for write and delete do not overlap completely
|
||||
tree_full
|
||||
.override_range(2, leaves_4.clone(), [0, 1, 2, 3])
|
||||
.unwrap();
|
||||
assert_eq!(tree_full.get_empty_leaves_indices(), vec![0, 1]);
|
||||
|
||||
//// Optimal Merkle Tree test
|
||||
|
||||
let mut tree_opt = default_optimal_merkle_tree(depth);
|
||||
let _ = tree_opt.set_range(0, leaves.clone());
|
||||
assert!(tree_opt.get_empty_leaves_indices().is_empty());
|
||||
|
||||
let mut vec_idxs = Vec::new();
|
||||
for i in 0..nof_leaves {
|
||||
vec_idxs.push(i);
|
||||
let _ = tree_opt.delete(i);
|
||||
assert_eq!(tree_opt.get_empty_leaves_indices(), vec_idxs);
|
||||
}
|
||||
for i in (0..nof_leaves).rev() {
|
||||
vec_idxs.pop();
|
||||
let _ = tree_opt.set(i, leaves[i]);
|
||||
assert_eq!(tree_opt.get_empty_leaves_indices(), vec_idxs);
|
||||
}
|
||||
|
||||
// Check situation when the number of items to insert is less than the number of items to delete
|
||||
tree_opt
|
||||
.override_range(0, leaves_2.clone(), [0, 1, 2, 3])
|
||||
.unwrap();
|
||||
|
||||
// check if the indexes for write and delete are the same
|
||||
tree_opt
|
||||
.override_range(0, leaves_4.clone(), [0, 1, 2, 3])
|
||||
.unwrap();
|
||||
assert_eq!(tree_opt.get_empty_leaves_indices(), vec![]);
|
||||
|
||||
// check if indexes for deletion are before indexes for overwriting
|
||||
tree_opt
|
||||
.override_range(4, leaves_4.clone(), [0, 1, 2, 3])
|
||||
.unwrap();
|
||||
assert_eq!(tree_opt.get_empty_leaves_indices(), vec![0, 1, 2, 3]);
|
||||
|
||||
// check if the indices for write and delete do not overlap completely
|
||||
tree_opt
|
||||
.override_range(2, leaves_4.clone(), [0, 1, 2, 3])
|
||||
.unwrap();
|
||||
assert_eq!(tree_opt.get_empty_leaves_indices(), vec![0, 1]);
|
||||
}
|
||||
|
||||
#[test]
fn test_subtree_root() {
let depth = 3;
let nof_leaves: usize = 6;
let leaves: Vec<TestFr> = (0..nof_leaves as u32).map(TestFr::from).collect();

let mut tree_full = default_optimal_merkle_tree(depth);
let _ = tree_full.set_range(0, leaves.iter().cloned());

for i in 0..nof_leaves {
// check leaves
assert_eq!(
tree_full.get(i).unwrap(),
tree_full.get_subtree_root(depth, i).unwrap()
);

// check root
assert_eq!(tree_full.root(), tree_full.get_subtree_root(0, i).unwrap());
}

// check intermediate nodes
for n in (1..=depth).rev() {
for i in (0..(1 << n)).step_by(2) {
let idx_l = i * (1 << (depth - n));
let idx_r = (i + 1) * (1 << (depth - n));
let idx_sr = idx_l;

let prev_l = tree_full.get_subtree_root(n, idx_l).unwrap();
let prev_r = tree_full.get_subtree_root(n, idx_r).unwrap();
let subroot = tree_full.get_subtree_root(n - 1, idx_sr).unwrap();

// check intermediate nodes
assert_eq!(Keccak256::hash(&[prev_l, prev_r]), subroot);
}
}

let mut tree_opt = default_full_merkle_tree(depth);
let _ = tree_opt.set_range(0, leaves.iter().cloned());

for i in 0..nof_leaves {
// check leaves
assert_eq!(
tree_opt.get(i).unwrap(),
tree_opt.get_subtree_root(depth, i).unwrap()
);
// check root
assert_eq!(tree_opt.root(), tree_opt.get_subtree_root(0, i).unwrap());
}

// check intermediate nodes
for n in (1..=depth).rev() {
for i in (0..(1 << n)).step_by(2) {
let idx_l = i * (1 << (depth - n));
let idx_r = (i + 1) * (1 << (depth - n));
let idx_sr = idx_l;

let prev_l = tree_opt.get_subtree_root(n, idx_l).unwrap();
let prev_r = tree_opt.get_subtree_root(n, idx_r).unwrap();
let subroot = tree_opt.get_subtree_root(n - 1, idx_sr).unwrap();

// check intermediate nodes
assert_eq!(Keccak256::hash(&[prev_l, prev_r]), subroot);
}
}
}

#[test]
fn test_proof() {
let leaves = [
hex!("0000000000000000000000000000000000000000000000000000000000000001"),
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
]
.map(|x| TestFr(x));
let nof_leaves = 4;
let leaves: Vec<TestFr> = (0..nof_leaves as u32).map(TestFr::from).collect();

// We test the FullMerkleTree implementation
let mut tree =
FullMerkleTree::<Keccak256>::new(2, TestFr([0; 32]), FullMerkleConfig::default())
.unwrap();
for i in 0..leaves.len() {
let mut tree = default_full_merkle_tree(DEFAULT_DEPTH);
for i in 0..nof_leaves {
// We set the leaves
tree.set(i, leaves[i]).unwrap();

@@ -119,16 +278,12 @@ pub mod test {
assert_eq!(proof.compute_root_from(&leaves[i]), tree.root());

// We check that the proof is not valid for another leaf
assert!(!tree
.verify(&leaves[(i + 1) % leaves.len()], &proof)
.unwrap());
assert!(!tree.verify(&leaves[(i + 1) % nof_leaves], &proof).unwrap());
}

// We test the OptimalMerkleTree implementation
let mut tree =
OptimalMerkleTree::<Keccak256>::new(2, TestFr([0; 32]), OptimalMerkleConfig::default())
.unwrap();
for i in 0..leaves.len() {
let mut tree = default_optimal_merkle_tree(DEFAULT_DEPTH);
for i in 0..nof_leaves {
// We set the leaves
tree.set(i, leaves[i]).unwrap();

@@ -145,34 +300,25 @@ pub mod test {
assert_eq!(proof.compute_root_from(&leaves[i]), tree.root());

// We check that the proof is not valid for another leaf
assert!(!tree
.verify(&leaves[(i + 1) % leaves.len()], &proof)
.unwrap());
assert!(!tree.verify(&leaves[(i + 1) % nof_leaves], &proof).unwrap());
}
}

#[test]
fn test_override_range() {
let initial_leaves = [
hex!("0000000000000000000000000000000000000000000000000000000000000001"),
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
]
.map(|x| TestFr(x));
let nof_leaves = 4;
let leaves: Vec<TestFr> = (0..nof_leaves as u32).map(TestFr::from).collect();

let mut tree =
OptimalMerkleTree::<Keccak256>::new(2, TestFr([0; 32]), OptimalMerkleConfig::default())
.unwrap();
let mut tree = default_optimal_merkle_tree(DEFAULT_DEPTH);

// We set the leaves
tree.set_range(0, initial_leaves.iter().cloned()).unwrap();
tree.set_range(0, leaves.iter().cloned()).unwrap();

let new_leaves = [
hex!("0000000000000000000000000000000000000000000000000000000000000005"),
hex!("0000000000000000000000000000000000000000000000000000000000000006"),
]
.map(|x| TestFr(x));
.map(TestFr);

let to_delete_indices: [usize; 2] = [0, 1];

@@ -185,8 +331,8 @@ pub mod test {
.unwrap();

// ensure that the leaves are set correctly
for i in 0..new_leaves.len() {
assert_eq!(tree.get_leaf(i), new_leaves[i]);
for (i, &new_leaf) in new_leaves.iter().enumerate() {
assert_eq!(tree.get_leaf(i), new_leaf);
}
}
}

@@ -25,16 +25,10 @@ mod test {
input_clean = input_clean.trim().to_string();

if radix == 10 {
BigUint::from_str_radix(&input_clean, radix)
.unwrap()
.try_into()
.unwrap()
BigUint::from_str_radix(&input_clean, radix).unwrap().into()
} else {
input_clean = input_clean.replace("0x", "");
BigUint::from_str_radix(&input_clean, radix)
.unwrap()
.try_into()
.unwrap()
BigUint::from_str_radix(&input_clean, radix).unwrap().into()
}
}
// The following constants were taken from https://github.com/arnaucube/poseidon-rs/blob/233027d6075a637c29ad84a8a44f5653b81f0410/src/constants.rs
@@ -3500,21 +3494,21 @@ mod test {
fn load_constants() -> (Vec<Vec<Fr>>, Vec<Vec<Vec<Fr>>>) {
let (c_str, m_str) = constants();
let mut c: Vec<Vec<Fr>> = Vec::new();
for i in 0..c_str.len() {
let mut cci: Vec<Fr> = Vec::new();
for j in 0..c_str[i].len() {
let b: Fr = str_to_fr(c_str[i][j], 10);
cci.push(b);
for c_i in c_str {
let mut ci: Vec<Fr> = Vec::new();
for c_i_j in c_i {
let b: Fr = str_to_fr(c_i_j, 10);
ci.push(b);
}
c.push(cci);
c.push(ci);
}
let mut m: Vec<Vec<Vec<Fr>>> = Vec::new();
for i in 0..m_str.len() {
for m_i in m_str {
let mut mi: Vec<Vec<Fr>> = Vec::new();
for j in 0..m_str[i].len() {
for m_i_j in m_i {
let mut mij: Vec<Fr> = Vec::new();
for k in 0..m_str[i][j].len() {
let b: Fr = str_to_fr(m_str[i][j][k], 10);
for m_i_j_k in m_i_j {
let b: Fr = str_to_fr(m_i_j_k, 10);
mij.push(b);
}
mi.push(mij);
@@ -3542,7 +3536,7 @@ mod test {
assert_eq!(loaded_m[i], poseidon_parameters[i].m);
}
} else {
assert!(false);
unreachable!();
}
}
}