Mirror of https://github.com/vacp2p/zerokit.git (synced 2026-01-09 13:47:58 -05:00)

Compare commits: v0.4.3 ... optimize-w (4 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 1045e056d1 | |
| | e22535dce7 | |
| | 9a22bca6f0 | |
| | fd2facc34f | |
49  .github/workflows/ci.yml  (vendored)

@@ -3,20 +3,27 @@ on:
    branches:
      - master
    paths-ignore:
      - "**.md"
      - "!.github/workflows/*.yml"
      - "!rln-wasm/**"
      - "!rln/src/**"
      - "!rln/resources/**"
      - "!utils/src/**"
      - '**.md'
      - '!.github/workflows/*.yml'
      - '!multiplier/src/**'
      - '!private-settlement/src/**'
      - '!rln-wasm/**'
      - '!rln/src/**'
      - '!rln/resources/**'
      - '!semaphore/src/**'
      - '!utils/src/**'
  pull_request:
    paths-ignore:
      - "**.md"
      - "!.github/workflows/*.yml"
      - "!rln-wasm/**"
      - "!rln/src/**"
      - "!rln/resources/**"
      - "!utils/src/**"
      - '**.md'
      - '!.github/workflows/*.yml'
      - '!multiplier/src/**'
      - '!private-settlement/src/**'
      - '!rln-wasm/**'
      - '!rln/src/**'
      - '!rln/resources/**'
      - '!semaphore/src/**'
      - '!utils/src/**'

name: Tests

@@ -25,10 +32,10 @@ jobs:
    strategy:
      matrix:
        platform: [ubuntu-latest, macos-latest]
        crate: [rln, utils]
        crate: [multiplier, semaphore, rln, utils]
    runs-on: ${{ matrix.platform }}
    timeout-minutes: 60

    name: test - ${{ matrix.crate }} - ${{ matrix.platform }}
    steps:
      - name: Checkout sources
@@ -44,16 +51,16 @@ jobs:
        run: make installdeps
      - name: cargo-make test
        run: |
          cargo make test --release
        working-directory: ${{ matrix.crate }}

  rln-wasm:
    strategy:
      matrix:
        platform: [ubuntu-latest, macos-latest]
    runs-on: ${{ matrix.platform }}
    timeout-minutes: 60

    name: test - rln-wasm - ${{ matrix.platform }}
    steps:
      - uses: actions/checkout@v3
@@ -78,11 +85,11 @@ jobs:
      matrix:
        # we run lint tests only on ubuntu
        platform: [ubuntu-latest]
        crate: [rln, utils]
        crate: [multiplier, semaphore, rln, utils]
    runs-on: ${{ matrix.platform }}
    timeout-minutes: 60

    name: lint - ${{ matrix.crate }} - ${{ matrix.platform }}
    steps:
      - name: Checkout sources
        uses: actions/checkout@v3
@@ -103,7 +110,7 @@ jobs:
      - name: cargo clippy
        if: success() || failure()
        run: |
          cargo clippy --release -- -D warnings
        working-directory: ${{ matrix.crate }}
      # We skip clippy on rln-wasm, since wasm target is managed by cargo make
      # Currently not treating warnings as error, too noisy
@@ -127,4 +134,4 @@ jobs:
      - uses: boa-dev/criterion-compare-action@v3
        with:
          branchName: ${{ github.base_ref }}
          cwd: ${{ matrix.crate }}
4  .gitmodules  (vendored, new file)

@@ -0,0 +1,4 @@
[submodule "semaphore/vendor/semaphore"]
	path = semaphore/vendor/semaphore
	ignore = dirty
	url = https://github.com/appliedzkp/semaphore.git
1776  Cargo.lock  (generated)

File diff suppressed because it is too large. Load Diff
13  Cargo.toml

@@ -1,5 +1,13 @@
[workspace]
members = ["rln", "rln-cli", "rln-wasm", "utils"]
members = [
    "multiplier",
    "private-settlement",
    "semaphore",
    "rln",
    "rln-cli",
    "rln-wasm",
    "utils",
]
resolver = "2"

# Compilation profile for any non-workspace member.
@@ -11,3 +19,6 @@ opt-level = 3
[profile.release.package."rln-wasm"]
# Tell `rustc` to optimize for small code size.
opt-level = "s"

[profile.release.package."semaphore"]
codegen-units = 1

@@ -29,7 +29,4 @@ image = "ghcr.io/cross-rs/mips64-unknown-linux-gnuabi64:latest"
image = "ghcr.io/cross-rs/mips64el-unknown-linux-gnuabi64:latest"

[target.mipsel-unknown-linux-gnu]
image = "ghcr.io/cross-rs/mipsel-unknown-linux-gnu:latest"

[target.aarch64-linux-android]
image = "ghcr.io/cross-rs/aarch64-linux-android:edge"
image = "ghcr.io/cross-rs/mipsel-unknown-linux-gnu:latest"
32  multiplier/Cargo.toml  (new file)

@@ -0,0 +1,32 @@
[package]
name = "multiplier"
version = "0.3.0"
edition = "2018"
license = "MIT OR Apache-2.0"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]

# WASM operations
# wasmer = { version = "2.0" }
# fnv = { version = "1.0.3", default-features = false }
# num = { version = "0.4.0" }
# num-traits = { version = "0.2.0", default-features = false }

# ZKP Generation
# ark-ff = { version = "0.3.0", default-features = false, features = ["parallel", "asm"] }
ark-std = { version = "0.3.0", default-features = false, features = ["parallel"] }
ark-bn254 = { version = "0.3.0" }
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", features = ["parallel"] }
# ark-poly = { version = "^0.3.0", default-features = false, features = ["parallel"] }
ark-serialize = { version = "0.3.0", default-features = false }

ark-circom = { git = "https://github.com/gakonst/ark-circom", features = ["circom-2"], rev = "35ce5a9" }

# error handling
color-eyre = "0.6.1"

# decoding of data
# hex = "0.4.3"
# byteorder = "1.4.3"
7  multiplier/Makefile.toml  (new file)

@@ -0,0 +1,7 @@
[tasks.build]
command = "cargo"
args = ["build", "--release"]

[tasks.test]
command = "cargo"
args = ["test", "--release"]
21  multiplier/README.md  (new file)

@@ -0,0 +1,21 @@
# Multiplier example

Example wrapper around a basic Circom circuit to test Circom 2 integration
through ark-circom and FFI.

## Build and Test

To build and test, run the following commands within the module folder:

```bash
cargo make build
cargo make test
```

## FFI

To generate C or Nim bindings from the Rust FFI, use `cbindgen` or `nbindgen`:

```
cbindgen . -o target/multiplier.h
nbindgen . -o target/multiplier.nim
```
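The prove/verify round trip that this example wraps can be sketched against the `Multiplier` API added in `multiplier/src/public.rs` further down in this diff. This is illustrative only and assumes the resource paths and method signatures shown there:

```rust
use color_eyre::Result;
use multiplier::public::Multiplier;

fn round_trip() -> Result<()> {
    // Loads ./resources/circom2_multiplier2.{wasm,r1cs} and runs the Groth16 setup.
    let mul = Multiplier::new()?;

    // prove() writes an ark-serialize encoded Groth16 proof into any `Write` sink.
    let mut proof_bytes: Vec<u8> = Vec::new();
    mul.prove(&mut proof_bytes)?;

    // verify() reads the proof back from any `Read` source and checks it
    // against the circuit's public inputs.
    assert!(mul.verify(&proof_bytes[..])?);
    Ok(())
}
```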
BIN  multiplier/resources/circom2_multiplier2.r1cs  (new file, binary file not shown)
BIN  multiplier/resources/circom2_multiplier2.wasm  (new file, binary file not shown)
77  multiplier/src/ffi.rs  (new file)

@@ -0,0 +1,77 @@
use crate::public::Multiplier;
use std::slice;

/// Buffer struct is taken from
/// https://github.com/celo-org/celo-threshold-bls-rs/blob/master/crates/threshold-bls-ffi/src/ffi.rs
///
/// Also heavily inspired by https://github.com/kilic/rln/blob/master/src/ffi.rs

#[repr(C)]
#[derive(Clone, Debug, PartialEq)]
pub struct Buffer {
    pub ptr: *const u8,
    pub len: usize,
}

impl From<&[u8]> for Buffer {
    fn from(src: &[u8]) -> Self {
        Self {
            ptr: &src[0] as *const u8,
            len: src.len(),
        }
    }
}

impl<'a> From<&Buffer> for &'a [u8] {
    fn from(src: &Buffer) -> &'a [u8] {
        unsafe { slice::from_raw_parts(src.ptr, src.len) }
    }
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn new_circuit(ctx: *mut *mut Multiplier) -> bool {
    if let Ok(mul) = Multiplier::new() {
        unsafe { *ctx = Box::into_raw(Box::new(mul)) };
        true
    } else {
        false
    }
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn prove(ctx: *const Multiplier, output_buffer: *mut Buffer) -> bool {
    println!("multiplier ffi: prove");
    let mul = unsafe { &*ctx };
    let mut output_data: Vec<u8> = Vec::new();

    match mul.prove(&mut output_data) {
        Ok(proof_data) => proof_data,
        Err(_) => return false,
    };
    unsafe { *output_buffer = Buffer::from(&output_data[..]) };
    std::mem::forget(output_data);
    true
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn verify(
    ctx: *const Multiplier,
    proof_buffer: *const Buffer,
    result_ptr: *mut u32,
) -> bool {
    println!("multiplier ffi: verify");
    let mul = unsafe { &*ctx };
    let proof_data = <&[u8]>::from(unsafe { &*proof_buffer });
    if match mul.verify(proof_data) {
        Ok(verified) => verified,
        Err(_) => return false,
    } {
        unsafe { *result_ptr = 0 };
    } else {
        unsafe { *result_ptr = 1 };
    };
    true
}
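A minimal sketch of how a caller might drive this C API from Rust. The ownership reading (the `Vec` backing the output `Buffer` is leaked with `mem::forget`, so the bytes stay valid for the caller) is an interpretation of the code above, not a contract stated in the PR:

```rust
use multiplier::ffi::{new_circuit, prove, verify, Buffer};
use multiplier::public::Multiplier;

fn ffi_round_trip() {
    // new_circuit writes a heap-allocated Multiplier behind the context pointer.
    let mut ctx: *mut Multiplier = std::ptr::null_mut();
    assert!(new_circuit(&mut ctx));

    // prove fills the Buffer with a pointer/length pair the caller now owns.
    let mut out = Buffer { ptr: std::ptr::null(), len: 0 };
    assert!(prove(ctx, &mut out));
    let proof: &[u8] = <&[u8]>::from(&out);

    // verify reports its outcome through result_ptr: 0 means the proof checked out.
    let mut result: u32 = u32::MAX;
    assert!(verify(ctx, &Buffer::from(proof), &mut result));
    assert_eq!(result, 0);
}
```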
2  multiplier/src/lib.rs  (new file)

@@ -0,0 +1,2 @@
pub mod ffi;
pub mod public;
49  multiplier/src/main.rs  (new file)

@@ -0,0 +1,49 @@
use ark_circom::{CircomBuilder, CircomConfig};
use ark_std::rand::thread_rng;
use color_eyre::{Report, Result};

use ark_bn254::Bn254;
use ark_groth16::{
    create_random_proof as prove, generate_random_parameters, prepare_verifying_key, verify_proof,
};

fn groth16_proof_example() -> Result<()> {
    let cfg = CircomConfig::<Bn254>::new(
        "./resources/circom2_multiplier2.wasm",
        "./resources/circom2_multiplier2.r1cs",
    )?;

    let mut builder = CircomBuilder::new(cfg);
    builder.push_input("a", 3);
    builder.push_input("b", 11);

    // create an empty instance for setting it up
    let circom = builder.setup();

    let mut rng = thread_rng();
    let params = generate_random_parameters::<Bn254, _, _>(circom, &mut rng)?;

    let circom = builder.build()?;

    let inputs = circom
        .get_public_inputs()
        .ok_or(Report::msg("no public inputs"))?;

    let proof = prove(circom, &params, &mut rng)?;

    let pvk = prepare_verifying_key(&params.vk);

    match verify_proof(&pvk, &proof, &inputs) {
        Ok(_) => Ok(()),
        Err(_) => Err(Report::msg("not verified")),
    }
}

fn main() {
    println!("Hello, world!");

    match groth16_proof_example() {
        Ok(_) => println!("Success"),
        Err(_) => println!("Error"),
    }
}
79  multiplier/src/public.rs  (new file)

@@ -0,0 +1,79 @@
use ark_circom::{CircomBuilder, CircomCircuit, CircomConfig};
use ark_std::rand::thread_rng;

use ark_bn254::Bn254;
use ark_groth16::{
    create_random_proof as prove, generate_random_parameters, prepare_verifying_key, verify_proof,
    Proof, ProvingKey,
};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use color_eyre::{Report, Result};
use std::io::{Read, Write};

pub struct Multiplier {
    circom: CircomCircuit<Bn254>,
    params: ProvingKey<Bn254>,
}

impl Multiplier {
    // TODO Break this apart here
    pub fn new() -> Result<Multiplier> {
        let cfg = CircomConfig::<Bn254>::new(
            "./resources/circom2_multiplier2.wasm",
            "./resources/circom2_multiplier2.r1cs",
        )?;

        let mut builder = CircomBuilder::new(cfg);
        builder.push_input("a", 3);
        builder.push_input("b", 11);

        // create an empty instance for setting it up
        let circom = builder.setup();

        let mut rng = thread_rng();

        let params = generate_random_parameters::<Bn254, _, _>(circom, &mut rng)?;

        let circom = builder.build()?;

        Ok(Multiplier { circom, params })
    }

    // TODO Input Read
    pub fn prove<W: Write>(&self, result_data: W) -> Result<()> {
        let mut rng = thread_rng();

        // XXX: There's probably a better way to do this
        let circom = self.circom.clone();
        let params = self.params.clone();

        let proof = prove(circom, &params, &mut rng)?;

        // XXX: Unclear if this is different from other serialization(s)
        proof.serialize(result_data)?;

        Ok(())
    }

    pub fn verify<R: Read>(&self, input_data: R) -> Result<bool> {
        let proof = Proof::deserialize(input_data)?;

        let pvk = prepare_verifying_key(&self.params.vk);

        // XXX Part of input data?
        let inputs = self
            .circom
            .get_public_inputs()
            .ok_or(Report::msg("no public inputs"))?;

        let verified = verify_proof(&pvk, &proof, &inputs)?;

        Ok(verified)
    }
}

impl Default for Multiplier {
    fn default() -> Self {
        Self::new().unwrap()
    }
}
21  multiplier/tests/public.rs  (new file)

@@ -0,0 +1,21 @@
#[cfg(test)]
mod tests {
    use multiplier::public::Multiplier;

    #[test]
    fn multiplier_proof() {
        let mul = Multiplier::new().unwrap();

        let mut output_data: Vec<u8> = Vec::new();
        let _ = mul.prove(&mut output_data);

        let proof_data = &output_data[..];

        // XXX Pass as arg?
        //let pvk = prepare_verifying_key(&mul.params.vk);

        let verified = mul.verify(proof_data).unwrap();

        assert!(verified);
    }
}
9  private-settlement/Cargo.toml  (new file)

@@ -0,0 +1,9 @@
[package]
name = "private-settlement"
version = "0.3.0"
edition = "2021"
license = "MIT OR Apache-2.0"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
7  private-settlement/Makefile.toml  (new file)

@@ -0,0 +1,7 @@
[tasks.build]
command = "cargo"
args = ["build", "--release"]

[tasks.test]
command = "cargo"
args = ["test", "--release"]
11  private-settlement/README.md  (new file)

@@ -0,0 +1,11 @@
# Private Settlement Module

This module provides APIs to manage, compute and verify [Private Settlement](https://rfc.vac.dev/spec/44/) zkSNARK proofs and primitives.

## Build and Test

To build and test, run the following commands within the module folder:

```bash
cargo make build
cargo make test
```
1  private-settlement/src/lib.rs  (new file)

@@ -0,0 +1 @@

11  private-settlement/tests/private-settlement.rs  (new file)

@@ -0,0 +1,11 @@
#[cfg(test)]
mod tests {
    #[cfg(test)]
    mod tests {
        #[test]
        fn it_works() {
            let result = 2 + 2;
            assert_eq!(result, 4);
        }
    }
}
@@ -7,6 +7,7 @@ use std::vec::Vec;
use js_sys::{BigInt as JsBigInt, Object, Uint8Array};
use num_bigint::BigInt;
use rln::circuit::{default_vk, default_zkey};
use rln::public::{hash, poseidon_hash, RLN};
use wasm_bindgen::prelude::*;

@@ -181,13 +182,10 @@ impl<'a> ProcessArg for &'a [u8] {

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = newRLN)]
pub fn wasm_new(
    tree_height: usize,
    zkey: Uint8Array,
    vk: Uint8Array,
) -> Result<*mut RLNWrapper, String> {
    let instance = RLN::new_with_params(tree_height, zkey.to_vec(), vk.to_vec())
        .map_err(|err| format!("{:#?}", err))?;
pub fn wasm_new() -> Result<*mut RLNWrapper, String> {
    let zkey = default_zkey().map_err(|err| format!("{:#?}", err))?;
    let vk = default_vk().map_err(|err| format!("{:#?}", err))?;
    let instance = RLN::new_with_params(zkey, vk).map_err(|err| format!("{:#?}", err))?;
    let wrapper = RLNWrapper { instance };
    Ok(Box::into_raw(Box::new(wrapper)))
}

@@ -276,6 +274,12 @@ pub fn rln_witness_to_json(
    Object::from_entries(&js_value).map_err(|err| format!("{:#?}", err))
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateMembershipKey)]
pub fn wasm_key_gen(ctx: *const RLNWrapper) -> Result<Uint8Array, String> {
    call_with_output_and_error_msg!(ctx, key_gen, "could not generate membership keys")
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen]
pub fn generate_rln_proof_with_witness(
@@ -305,12 +309,6 @@ pub fn generate_rln_proof_with_witness(
    )
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateMembershipKey)]
pub fn wasm_key_gen(ctx: *const RLNWrapper) -> Result<Uint8Array, String> {
    call_with_output_and_error_msg!(ctx, key_gen, "could not generate membership keys")
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateExtendedMembershipKey)]
pub fn wasm_extended_key_gen(ctx: *const RLNWrapper) -> Result<Uint8Array, String> {
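In short, the constructor no longer takes a tree height or key material; the circuit artifacts come from the embedded defaults in `rln/src/circuit.rs` further down this diff. A sketch of the new call pattern, assuming the exports shown above; the full flow is exercised by the test diff that follows:

```rust
#[wasm_bindgen_test]
pub fn new_constructor_sketch() {
    // No arguments: zkey and verification key come from the embedded defaults.
    let rln_instance = wasm_new().unwrap();

    // Membership key generation works on the returned wrapper pointer as before.
    let mem_keys = wasm_key_gen(rln_instance).unwrap();
    assert!(mem_keys.length() > 0);
}
```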
@@ -3,7 +3,7 @@
#[cfg(test)]
mod tests {
    use js_sys::{BigInt as JsBigInt, Object, Uint8Array};
    use rln::circuit::{Fr, TEST_TREE_HEIGHT};
    use rln::circuit::Fr;
    use rln::hashers::{hash_to_field, poseidon_hash};
    use rln::utils::{bytes_le_to_fr, fr_to_bytes_le, normalize_usize};
    use rln_wasm::*;
@@ -21,16 +21,9 @@ mod tests {

    #[wasm_bindgen_test]
    pub async fn test_basic_flow() {
        let tree_height = TEST_TREE_HEIGHT;
        let circom_path = format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/rln.wasm");
        let zkey_path = format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/rln_final.zkey");
        let vk_path =
            format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/verification_key.json");
        let zkey = read_file(&zkey_path).unwrap();
        let vk = read_file(&vk_path).unwrap();

        // Creating an instance of RLN
        let rln_instance = wasm_new(tree_height, zkey, vk).unwrap();
        let circom_path = format!("../rln/resources/tree_height_20/rln.wasm");
        let rln_instance = wasm_new().unwrap();

        // Creating membership key
        let mem_keys = wasm_key_gen(rln_instance).unwrap();
@@ -123,18 +116,10 @@ mod tests {
        let is_proof_valid = wasm_verify_with_roots(rln_instance, proof_with_signal, roots);
        assert!(is_proof_valid.unwrap(), "verifying proof with roots failed");
    }

    #[wasm_bindgen_test]
    fn test_metadata() {
        let tree_height = TEST_TREE_HEIGHT;
        let zkey_path = format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/rln_final.zkey");
        let vk_path =
            format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/verification_key.json");
        let zkey = read_file(&zkey_path).unwrap();
        let vk = read_file(&vk_path).unwrap();

        // Creating an instance of RLN
        let rln_instance = wasm_new(tree_height, zkey, vk).unwrap();
        let rln_instance = wasm_new().unwrap();

        let test_metadata = Uint8Array::new(&JsValue::from_str("test"));
        // Inserting random metadata
@@ -1,6 +1,6 @@
[package]
name = "rln"
version = "0.4.3"
version = "0.4.1"
edition = "2021"
license = "MIT OR Apache-2.0"
description = "APIs to manage, compute and verify zkSNARK proofs and RLN primitives"
@@ -19,19 +19,13 @@ doctest = false
[dependencies]
# ZKP Generation
ark-ec = { version = "=0.4.1", default-features = false }
ark-ff = { version = "=0.4.1", default-features = false, features = ["asm"] }
ark-ff = { version = "=0.4.1", default-features = false, features = [ "asm"] }
ark-std = { version = "=0.4.0", default-features = false }
ark-bn254 = { version = "=0.4.0" }
ark-groth16 = { version = "=0.4.0", features = [
    "parallel",
], default-features = false }
ark-relations = { version = "=0.4.0", default-features = false, features = [
    "std",
] }
ark-groth16 = { version = "=0.4.0", features = ["parallel"], default-features = false }
ark-relations = { version = "=0.4.0", default-features = false, features = [ "std" ] }
ark-serialize = { version = "=0.4.1", default-features = false }
ark-circom = { version = "=0.1.0", default-features = false, features = [
    "circom-2",
] }
ark-circom = { version = "=0.1.0", default-features = false, features = ["circom-2"] }

# WASM
wasmer = { version = "=2.3.0", default-features = false }
@@ -42,15 +36,13 @@ thiserror = "=1.0.39"

# utilities
cfg-if = "=1.0"
num-bigint = { version = "=0.4.3", default-features = false, features = [
    "rand",
] }
num-bigint = { version = "=0.4.3", default-features = false, features = ["rand"] }
num-traits = "=0.2.15"
once_cell = "=1.17.1"
rand = "=0.8.5"
rand_chacha = "=0.3.1"
tiny-keccak = { version = "=2.0.2", features = ["keccak"] }
utils = { package = "zerokit_utils", version = "=0.4.3", path = "../utils/", default-features = false }
utils = { package = "zerokit_utils", version = "=0.4.1", path = "../utils/", default-features = false }

# serialization
@@ -65,13 +57,7 @@ criterion = { version = "=0.4.0", features = ["html_reports"] }

[features]
default = ["parallel", "wasmer/sys-default", "pmtree-ft"]
parallel = [
    "ark-ec/parallel",
    "ark-ff/parallel",
    "ark-std/parallel",
    "ark-groth16/parallel",
    "utils/parallel",
]
parallel = ["ark-ec/parallel", "ark-ff/parallel", "ark-std/parallel", "ark-groth16/parallel", "utils/parallel"]
wasm = ["wasmer/js", "wasmer/std"]
fullmerkletree = ["default"]
Binary file not shown.
Binary file not shown.

@@ -1,114 +0,0 @@
{
  "protocol": "groth16",
  "curve": "bn128",
  "nPublic": 5,
  "vk_alpha_1": [
    "20491192805390485299153009773594534940189261866228447918068658471970481763042",
    "9383485363053290200918347156157836566562967994039712273449902621266178545958",
    "1"
  ],
  "vk_beta_2": [
    ["6375614351688725206403948262868962793625744043794305715222011528459656738731",
     "4252822878758300859123897981450591353533073413197771768651442665752259397132"],
    ["10505242626370262277552901082094356697409835680220590971873171140371331206856",
     "21847035105528745403288232691147584728191162732299865338377159692350059136679"],
    ["1", "0"]
  ],
  "vk_gamma_2": [
    ["10857046999023057135944570762232829481370756359578518086990519993285655852781",
     "11559732032986387107991004021392285783925812861821192530917403151452391805634"],
    ["8495653923123431417604973247489272438418190587263600148770280649306958101930",
     "4082367875863433681332203403145435568316851327593401208105741076214120093531"],
    ["1", "0"]
  ],
  "vk_delta_2": [
    ["3689226096868373144622340732612563195789744807442014147637039988348252818659",
     "18947459102520510468597269280688700807407684209892273827108603062925288762423"],
    ["5816405977664254142436796931495067997250259145480168934320978750042633353708",
     "14555486789839131710516067578112557185806110684461247253491378577062852578892"],
    ["1", "0"]
  ],
  "vk_alphabeta_12": [
    [["2029413683389138792403550203267699914886160938906632433982220835551125967885",
      "21072700047562757817161031222997517981543347628379360635925549008442030252106"],
     ["5940354580057074848093997050200682056184807770593307860589430076672439820312",
      "12156638873931618554171829126792193045421052652279363021382169897324752428276"],
     ["7898200236362823042373859371574133993780991612861777490112507062703164551277",
      "7074218545237549455313236346927434013100842096812539264420499035217050630853"]],
    [["7077479683546002997211712695946002074877511277312570035766170199895071832130",
      "10093483419865920389913245021038182291233451549023025229112148274109565435465"],
     ["4595479056700221319381530156280926371456704509942304414423590385166031118820",
      "19831328484489333784475432780421641293929726139240675179672856274388269393268"],
     ["11934129596455521040620786944827826205713621633706285934057045369193958244500",
      "8037395052364110730298837004334506829870972346962140206007064471173334027475"]]
  ],
  "IC": [
    ["5412646265162057015134786739992128493053406364679846617542694915593022919217",
     "9665511386935901867415947590751330959748921059696950821222365265700369811120",
     "1"],
    ["4294362651275803035824711662252687124584574009834787359330648404293309808795",
     "1861758671717754835450145961645465880215655915164196594175485865489885224285",
     "1"],
    ["1911114017568107170522785254288953144010421698038439931935418407428234018676",
     "13761363892532562822351086117281964648116890138564516558345965908415019790129",
     "1"],
    ["16312980235585837964428386585067529342038135099260965575497230302984635878053",
     "20286500347141875536561618770383759234192052027362539966911091298688849002783",
     "1"],
    ["21038649368092225315431823433752123495654049075935052064397443455654061176031",
     "6976971039866104284556300526186000690370678593992968176463280189048347216392",
     "1"],
    ["971745799362951123575710699973701411260115357326598060711339429906895409324",
     "12959821343398475313407440786226277845673045139874184400082186049649123071798",
     "1"]
  ]
}
@@ -5,39 +5,31 @@ use ark_bn254::{
    G1Projective as ArkG1Projective, G2Affine as ArkG2Affine, G2Projective as ArkG2Projective,
};
use ark_circom::read_zkey;
use ark_circom::WitnessCalculator;
use ark_groth16::{ProvingKey, VerifyingKey};
use ark_relations::r1cs::ConstraintMatrices;
use cfg_if::cfg_if;
use color_eyre::{Report, Result};
use num_bigint::BigUint;
use serde_json::Value;
use std::io::Cursor;
use std::str::FromStr;
use wasmer::{Module, Store};

use cfg_if::cfg_if;

cfg_if! {
    if #[cfg(not(target_arch = "wasm32"))] {
        use ark_circom::{WitnessCalculator};
        use once_cell::sync::OnceCell;
        use std::sync::Mutex;
        use wasmer::{Module, Store};
        use include_dir::{include_dir, Dir};
        use std::path::Path;
    }
}

const ZKEY_FILENAME: &str = "rln_final.zkey";
const VK_FILENAME: &str = "verification_key.json";
const WASM_FILENAME: &str = "rln.wasm";

// These parameters are used for tests
// Note that the circuit and keys in TEST_RESOURCES_FOLDER are compiled for Merkle trees of height 20 & 32
// Changing these parameters to other values than these defaults will cause zkSNARK proof verification to fail
pub const TEST_PARAMETERS_INDEX: usize = 0;
pub const TEST_TREE_HEIGHT: usize = [20, 32][TEST_PARAMETERS_INDEX];
pub const TEST_RESOURCES_FOLDER: &str = ["tree_height_20", "tree_height_32"][TEST_PARAMETERS_INDEX];
const ZKEY_BYTES: &[u8] = include_bytes!("../resources/tree_height_20/rln_final.zkey");

#[cfg(not(target_arch = "wasm32"))]
static RESOURCES_DIR: Dir<'_> = include_dir!("$CARGO_MANIFEST_DIR/resources");
const VK_BYTES: &[u8] = include_bytes!("../resources/tree_height_20/verification_key.json");

pub const WASM_BYTES: &[u8] = include_bytes!("../resources/tree_height_20/rln.wasm");

// The following types define the pairing friendly elliptic curve, the underlying finite fields and groups default to this module
// Note that proofs are serialized assuming Fr to be 4x8 = 32 bytes in size. Hence, changing to a curve with different encoding will make proof verification to fail
@@ -62,22 +54,14 @@ pub fn zkey_from_raw(zkey_data: &Vec<u8>) -> Result<(ProvingKey<Curve>, Constrai
}

// Loads the proving key
#[cfg(not(target_arch = "wasm32"))]
pub fn zkey_from_folder(
    resources_folder: &str,
) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
    let zkey = RESOURCES_DIR.get_file(Path::new(resources_folder).join(ZKEY_FILENAME));
    if let Some(zkey) = zkey {
        let mut c = Cursor::new(zkey.contents());
        let proving_key_and_matrices = read_zkey(&mut c)?;
        Ok(proving_key_and_matrices)
    } else {
        Err(Report::msg("No proving key found!"))
    }
pub fn default_zkey() -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
    let mut c = Cursor::new(ZKEY_BYTES);
    let proving_key_and_matrices = read_zkey(&mut c)?;
    Ok(proving_key_and_matrices)
}

// Loads the verification key from a bytes vector
pub fn vk_from_raw(vk_data: &[u8], zkey_data: &Vec<u8>) -> Result<VerifyingKey<Curve>> {
pub fn vk_from_raw(vk_data: &Vec<u8>, zkey_data: &Vec<u8>) -> Result<VerifyingKey<Curve>> {
    let verifying_key: VerifyingKey<Curve>;

    if !vk_data.is_empty() {
@@ -93,24 +77,8 @@ pub fn vk_from_raw(vk_data: &[u8], zkey_data: &Vec<u8>) -> Result<VerifyingKey<C
}

// Loads the verification key
#[cfg(not(target_arch = "wasm32"))]
pub fn vk_from_folder(resources_folder: &str) -> Result<VerifyingKey<Curve>> {
    let vk = RESOURCES_DIR.get_file(Path::new(resources_folder).join(VK_FILENAME));
    let zkey = RESOURCES_DIR.get_file(Path::new(resources_folder).join(ZKEY_FILENAME));

    let verifying_key: VerifyingKey<Curve>;
    if let Some(vk) = vk {
        verifying_key = vk_from_json(vk.contents_utf8().ok_or(Report::msg(
            "Could not read verification key from JSON file!",
        ))?)?;
        Ok(verifying_key)
    } else if let Some(_zkey) = zkey {
        let (proving_key, _matrices) = zkey_from_folder(resources_folder)?;
        verifying_key = proving_key.vk;
        Ok(verifying_key)
    } else {
        Err(Report::msg("No proving/verification key found!"))
    }
pub fn default_vk() -> Result<VerifyingKey<Curve>> {
    vk_from_vector(VK_BYTES)
}

#[cfg(not(target_arch = "wasm32"))]
@@ -127,25 +95,32 @@ pub fn circom_from_raw(wasm_buffer: Vec<u8>) -> Result<&'static Mutex<WitnessCal
    })
}

#[cfg(target_arch = "wasm32")]
pub fn circom_from_raw(wasm_buffer: Vec<u8>) -> Result<WitnessCalculator> {
    let store = Store::default();
    let module = Module::new(&store, wasm_buffer)?;
    let witness_calculator = WitnessCalculator::from_module(module)?;
    Ok(witness_calculator)
}

// Initializes the witness calculator
#[cfg(not(target_arch = "wasm32"))]
pub fn circom_from_folder(resources_folder: &str) -> Result<&'static Mutex<WitnessCalculator>> {
pub fn default_circom() -> Result<&'static Mutex<WitnessCalculator>> {
    // We read the wasm file
    let wasm = RESOURCES_DIR.get_file(Path::new(resources_folder).join(WASM_FILENAME));
    circom_from_raw(WASM_BYTES.into())
}

    if let Some(wasm) = wasm {
        let wasm_buffer = wasm.contents();
        circom_from_raw(wasm_buffer.to_vec())
    } else {
        Err(Report::msg("No wasm file found!"))
    }
#[cfg(target_arch = "wasm32")]
pub fn default_circom() -> Result<WitnessCalculator> {
    // We read the wasm file
    circom_from_raw(WASM_BYTES.into())
}

// The following function implementations are taken/adapted from https://github.com/gakonst/ark-circom/blob/1732e15d6313fe176b0b1abb858ac9e095d0dbd7/src/zkey.rs

// Utilities to convert a json verification key in a groth16::VerificationKey
fn fq_from_str(s: &str) -> Result<Fq> {
    Ok(Fq::from(BigUint::from_str(s)?))
    Ok(Fq::try_from(BigUint::from_str(s)?)?)
}

// Extracts the element in G1 corresponding to its JSON serialization
@@ -253,12 +228,8 @@ fn vk_from_vector(vk: &[u8]) -> Result<VerifyingKey<Curve>> {
}

// Checks verification key to be correct with respect to proving key
#[cfg(not(target_arch = "wasm32"))]
pub fn check_vk_from_zkey(
    resources_folder: &str,
    verifying_key: VerifyingKey<Curve>,
) -> Result<()> {
    let (proving_key, _matrices) = zkey_from_folder(resources_folder)?;
pub fn check_vk_from_zkey(verifying_key: VerifyingKey<Curve>) -> Result<()> {
    let (proving_key, _matrices) = default_zkey()?;
    if proving_key.vk == verifying_key {
        Ok(())
    } else {
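The folder-based loaders are replaced by `include_bytes!` defaults, while the raw loaders remain for callers that ship their own artifacts. A sketch of the two paths, assuming only the signatures shown in the hunks above; nothing here is prescribed by the PR itself:

```rust
use ark_groth16::ProvingKey;
use ark_relations::r1cs::ConstraintMatrices;
use color_eyre::Result;
use rln::circuit::{default_vk, default_zkey, zkey_from_raw, Curve, Fr};

fn load_proving_key(custom_zkey: Option<Vec<u8>>) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
    match custom_zkey {
        // Bring-your-own zkey bytes read at runtime via the surviving raw loader.
        Some(bytes) => zkey_from_raw(&bytes),
        // Or the tree_height_20 artifacts embedded at compile time with include_bytes!.
        None => {
            let _vk = default_vk()?; // embedded verification_key.json
            default_zkey()
        }
    }
}
```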
@@ -193,8 +193,8 @@ impl<'a> From<&Buffer> for &'a [u8] {

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn new(tree_height: usize, input_buffer: *const Buffer, ctx: *mut *mut RLN) -> bool {
    match RLN::new(tree_height, input_buffer.process()) {
pub extern "C" fn new(input_buffer: *const Buffer, ctx: *mut *mut RLN) -> bool {
    match RLN::new(input_buffer.process()) {
        Ok(rln) => {
            unsafe { *ctx = Box::into_raw(Box::new(rln)) };
            true
@@ -246,8 +246,7 @@ impl ZerokitMerkleTree for PmTree {
        let data = self.tree.db.get(METADATA_KEY)?;

        if data.is_none() {
            // send empty Metadata
            return Ok(Vec::new());
            return Err(Report::msg("metadata does not exist"));
        }
        Ok(data.unwrap())
    }
@@ -1,8 +1,9 @@
use crate::circuit::{vk_from_raw, zkey_from_raw, Curve, Fr};
use crate::circuit::{Curve, Fr};
use crate::hashers::{hash_to_field, poseidon_hash as utils_poseidon_hash};
use crate::poseidon_tree::PoseidonTree;
use crate::protocol::*;
use crate::utils::*;
use ark_circom::WitnessCalculator;
/// This is the main public API for RLN module. It is used by the FFI, and should be
/// used by tests etc as well
use ark_groth16::Proof as ArkProof;
@@ -13,14 +14,14 @@ use cfg_if::cfg_if;
use color_eyre::{Report, Result};
use num_bigint::BigInt;
use std::io::Cursor;

use utils::{ZerokitMerkleProof, ZerokitMerkleTree};

cfg_if! {
    if #[cfg(not(target_arch = "wasm32"))] {
        use std::default::Default;
        use std::sync::Mutex;
        use crate::circuit::{circom_from_folder, vk_from_folder, circom_from_raw, zkey_from_folder, TEST_RESOURCES_FOLDER, TEST_TREE_HEIGHT};
        use ark_circom::WitnessCalculator;
        use crate::circuit::{zkey_from_raw, circom_from_raw, vk_from_raw, default_zkey, default_vk, default_circom};
        use serde_json::{json, Value};
        use utils::{Hasher};
        use std::str::FromStr;
@@ -71,21 +72,18 @@ impl RLN<'_> {
    /// let mut rln = RLN::new(tree_height, resources);
    /// ```
    #[cfg(not(target_arch = "wasm32"))]
    pub fn new<R: Read>(tree_height: usize, mut input_data: R) -> Result<RLN<'static>> {
    pub fn new<R: Read>(mut input_data: R) -> Result<RLN<'static>> {
        // We read input
        let mut input: Vec<u8> = Vec::new();
        input_data.read_to_end(&mut input)?;

        let rln_config: Value = serde_json::from_str(&String::from_utf8(input)?)?;
        let resources_folder = rln_config["resources_folder"]
            .as_str()
            .unwrap_or(TEST_RESOURCES_FOLDER);
        let tree_config = rln_config["tree_config"].to_string();

        let witness_calculator = circom_from_folder(resources_folder)?;
        let witness_calculator = default_circom()?;

        let proving_key = zkey_from_folder(resources_folder)?;
        let verification_key = vk_from_folder(resources_folder)?;
        let proving_key = default_zkey()?;
        let verification_key = default_vk()?;

        let tree_config: <PoseidonTree as ZerokitMerkleTree>::Config = if tree_config.is_empty() {
            <PoseidonTree as ZerokitMerkleTree>::Config::default()
@@ -95,7 +93,7 @@ impl RLN<'_> {

        // We compute a default empty tree
        let tree = PoseidonTree::new(
            tree_height,
            20,
            <PoseidonTree as ZerokitMerkleTree>::Hasher::default_leaf(),
            tree_config,
        )?;
@@ -188,22 +186,15 @@ impl RLN<'_> {

    #[cfg(target_arch = "wasm32")]
    pub fn new_with_params(
        tree_height: usize,
        zkey_vec: Vec<u8>,
        vk_vec: Vec<u8>,
        zkey: (ProvingKey<Curve>, ConstraintMatrices<Fr>),
        vk: VerifyingKey<Curve>,
    ) -> Result<RLN<'static>> {
        #[cfg(not(target_arch = "wasm32"))]
        let witness_calculator = circom_from_raw(circom_vec)?;

        let proving_key = zkey_from_raw(&zkey_vec)?;
        let verification_key = vk_from_raw(&vk_vec, &zkey_vec)?;

        // We compute a default empty tree
        let tree = PoseidonTree::default(tree_height)?;
        let tree = PoseidonTree::default(20)?;

        Ok(RLN {
            proving_key,
            verification_key,
            proving_key: zkey,
            verification_key: vk,
            tree,
            _marker: PhantomData,
        })
@@ -743,6 +734,29 @@ impl RLN<'_> {
        Ok(())
    }

    #[cfg(target_arch = "wasm32")]
    pub fn generate_rln_proof<R: Read, W: Write>(
        &mut self,
        witness_calculator: &mut WitnessCalculator,
        mut input_data: R,
        mut output_data: W,
    ) -> Result<()> {
        // We read input RLN witness and we serialize_compressed it
        let mut witness_byte: Vec<u8> = Vec::new();
        input_data.read_to_end(&mut witness_byte)?;
        let (rln_witness, _) = proof_inputs_to_rln_witness(&mut self.tree, &witness_byte)?;
        let proof_values = proof_values_from_witness(&rln_witness)?;

        let proof = generate_proof(witness_calculator, &self.proving_key, &rln_witness)?;

        // Note: we export a serialization of ark-groth16::Proof not semaphore::Proof
        // This proof is compressed, i.e. 128 bytes long
        proof.serialize_compressed(&mut output_data)?;
        output_data.write_all(&serialize_proof_values(&proof_values))?;

        Ok(())
    }

    // TODO: this function seems to use redundant witness (as bigint and serialized) and should be refactored
    // Generate RLN Proof using a witness calculated from outside zerokit
    //
@@ -1188,9 +1202,8 @@ impl RLN<'_> {
#[cfg(not(target_arch = "wasm32"))]
impl Default for RLN<'_> {
    fn default() -> Self {
        let tree_height = TEST_TREE_HEIGHT;
        let buffer = Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
        Self::new(tree_height, buffer).unwrap()
        let buffer = Cursor::new(json!({}).to_string());
        Self::new(buffer).unwrap()
    }
}
@@ -1,4 +1,4 @@
use crate::circuit::{Curve, Fr, TEST_RESOURCES_FOLDER, TEST_TREE_HEIGHT};
use crate::circuit::{Curve, Fr};
use crate::hashers::{hash_to_field, poseidon_hash as utils_poseidon_hash};
use crate::protocol::*;
use crate::public::RLN;
@@ -14,10 +14,11 @@ use ark_std::{rand::thread_rng, UniformRand};
use rand::Rng;
use serde_json::{json, Value};

const tree_height: usize = 20;

#[test]
// We test merkle batch Merkle tree additions
fn test_merkle_operations() {
    let tree_height = TEST_TREE_HEIGHT;
    let no_of_leaves = 256;

    // We generate a vector of random leaves
@@ -28,9 +29,8 @@ fn test_merkle_operations() {
    }

    // We create a new tree
    let input_buffer =
        Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
    let mut rln = RLN::new(tree_height, input_buffer).unwrap();
    let input_buffer = Cursor::new(json!({}).to_string());
    let mut rln = RLN::new(input_buffer).unwrap();

    // We first add leaves one by one specifying the index
    for (i, leaf) in leaves.iter().enumerate() {
@@ -109,7 +109,6 @@ fn test_merkle_operations() {
// We test leaf setting with a custom index, to enable batch updates to the root
// Uses `set_leaves_from` to set leaves in a batch, from index `start_index`
fn test_leaf_setting_with_index() {
    let tree_height = TEST_TREE_HEIGHT;
    let no_of_leaves = 256;

    // We generate a vector of random leaves
@@ -124,9 +123,8 @@ fn test_leaf_setting_with_index() {
    let set_index = rng.gen_range(0..no_of_leaves) as usize;

    // We create a new tree
    let input_buffer =
        Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
    let mut rln = RLN::new(tree_height, input_buffer).unwrap();
    let input_buffer = Cursor::new(json!({}).to_string());
    let mut rln = RLN::new(input_buffer).unwrap();

    // We add leaves in a batch into the tree
    let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
@@ -185,7 +183,6 @@ fn test_leaf_setting_with_index() {
#[test]
// Tests the atomic_operation fn, which set_leaves_from uses internally
fn test_atomic_operation() {
    let tree_height = TEST_TREE_HEIGHT;
    let no_of_leaves = 256;

    // We generate a vector of random leaves
@@ -196,9 +193,8 @@ fn test_atomic_operation() {
    }

    // We create a new tree
    let input_buffer =
        Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
    let mut rln = RLN::new(tree_height, input_buffer).unwrap();
    let input_buffer = Cursor::new(json!({}).to_string());
    let mut rln = RLN::new(input_buffer).unwrap();

    // We add leaves in a batch into the tree
    let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
@@ -236,7 +232,6 @@ fn test_atomic_operation() {
#[test]
fn test_atomic_operation_zero_indexed() {
    // Test duplicated from https://github.com/waku-org/go-zerokit-rln/pull/12/files
    let tree_height = TEST_TREE_HEIGHT;
    let no_of_leaves = 256;

    // We generate a vector of random leaves
@@ -247,9 +242,8 @@ fn test_atomic_operation_zero_indexed() {
    }

    // We create a new tree
    let input_buffer =
        Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
    let mut rln = RLN::new(tree_height, input_buffer).unwrap();
    let input_buffer = Cursor::new(json!({}).to_string());
    let mut rln = RLN::new(input_buffer).unwrap();

    // We add leaves in a batch into the tree
    let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
@@ -282,7 +276,6 @@ fn test_atomic_operation_zero_indexed() {
#[test]
fn test_atomic_operation_consistency() {
    // Test duplicated from https://github.com/waku-org/go-zerokit-rln/pull/12/files
    let tree_height = TEST_TREE_HEIGHT;
    let no_of_leaves = 256;

    // We generate a vector of random leaves
@@ -293,9 +286,8 @@ fn test_atomic_operation_consistency() {
    }

    // We create a new tree
    let input_buffer =
        Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
    let mut rln = RLN::new(tree_height, input_buffer).unwrap();
    let input_buffer = Cursor::new(json!({}).to_string());
    let mut rln = RLN::new(input_buffer).unwrap();

    // We add leaves in a batch into the tree
    let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
@@ -336,7 +328,6 @@ fn test_atomic_operation_consistency() {
#[test]
// This test checks if `set_leaves_from` throws an error when the index is out of bounds
fn test_set_leaves_bad_index() {
    let tree_height = TEST_TREE_HEIGHT;
    let no_of_leaves = 256;

    // We generate a vector of random leaves
@@ -348,9 +339,8 @@ fn test_set_leaves_bad_index() {
    let bad_index = (1 << tree_height) - rng.gen_range(0..no_of_leaves) as usize;

    // We create a new tree
    let input_buffer =
        Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
    let mut rln = RLN::new(tree_height, input_buffer).unwrap();
    let input_buffer = Cursor::new(json!({}).to_string());
    let mut rln = RLN::new(input_buffer).unwrap();

    // Get root of empty tree
    let mut buffer = Cursor::new(Vec::<u8>::new());
@@ -411,11 +401,8 @@ fn value_to_string_vec(value: &Value) -> Vec<String> {

#[test]
fn test_groth16_proof_hardcoded() {
    let tree_height = TEST_TREE_HEIGHT;

    let input_buffer =
        Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
    let rln = RLN::new(tree_height, input_buffer).unwrap();
    let input_buffer = Cursor::new(json!({}).to_string());
    let rln = RLN::new(input_buffer).unwrap();

    let valid_snarkjs_proof = json!({
        "pi_a": [
@@ -493,14 +480,11 @@ fn test_groth16_proof_hardcoded() {
#[test]
// This test is similar to the one in lib, but uses only public API
fn test_groth16_proof() {
    let tree_height = TEST_TREE_HEIGHT;

    let input_buffer =
        Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
    let mut rln = RLN::new(tree_height, input_buffer).unwrap();
    let input_buffer = Cursor::new(json!({}).to_string());
    let mut rln = RLN::new(input_buffer).unwrap();

    // Note: we only test Groth16 proof generation, so we ignore setting the tree in the RLN object
    let rln_witness = random_rln_witness(tree_height);
    let rln_witness = random_rln_witness(tree_height.into());
    let proof_values = proof_values_from_witness(&rln_witness).unwrap();

    // We compute a Groth16 proof
@@ -530,7 +514,6 @@ fn test_groth16_proof() {

#[test]
fn test_rln_proof() {
    let tree_height = TEST_TREE_HEIGHT;
    let no_of_leaves = 256;

    // We generate a vector of random leaves
@@ -543,9 +526,8 @@ fn test_rln_proof() {
    }

    // We create a new RLN instance
    let input_buffer =
        Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
    let mut rln = RLN::new(tree_height, input_buffer).unwrap();
    let input_buffer = Cursor::new(json!({}).to_string());
    let mut rln = RLN::new(input_buffer).unwrap();

    // We add leaves in a batch into the tree
    let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
@@ -605,7 +587,6 @@ fn test_rln_proof() {

#[test]
fn test_rln_with_witness() {
    let tree_height = TEST_TREE_HEIGHT;
    let no_of_leaves = 256;

    // We generate a vector of random leaves
@@ -616,9 +597,8 @@ fn test_rln_with_witness() {
    }

    // We create a new RLN instance
    let input_buffer =
        Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
    let mut rln = RLN::new(tree_height, input_buffer).unwrap();
    let input_buffer = Cursor::new(json!({}).to_string());
    let mut rln = RLN::new(input_buffer).unwrap();

    // We add leaves in a batch into the tree
    let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
@@ -711,7 +691,6 @@ fn test_rln_with_witness() {
#[test]
fn proof_verification_with_roots() {
    // The first part is similar to test_rln_with_witness
    let tree_height = TEST_TREE_HEIGHT;
    let no_of_leaves = 256;

    // We generate a vector of random leaves
@@ -722,9 +701,8 @@ fn proof_verification_with_roots() {
    }

    // We create a new RLN instance
    let input_buffer =
        Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
    let mut rln = RLN::new(tree_height, input_buffer).unwrap();
    let input_buffer = Cursor::new(json!({}).to_string());
    let mut rln = RLN::new(input_buffer).unwrap();

    // We add leaves in a batch into the tree
    let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
@@ -813,12 +791,9 @@ fn proof_verification_with_roots() {

#[test]
fn test_recover_id_secret() {
    let tree_height = TEST_TREE_HEIGHT;

    // We create a new RLN instance
    let input_buffer =
        Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
    let mut rln = RLN::new(tree_height, input_buffer).unwrap();
    let input_buffer = Cursor::new(json!({}).to_string());
    let mut rln = RLN::new(input_buffer).unwrap();

    // Generate identity pair
    let (identity_secret_hash, id_commitment) = keygen();
@@ -951,11 +926,9 @@ fn test_recover_id_secret() {
#[test]
fn test_get_leaf() {
    // We generate a random tree
    let tree_height = 10;
    let mut rng = thread_rng();
    let input_buffer =
        Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
    let mut rln = RLN::new(tree_height, input_buffer).unwrap();
    let input_buffer = Cursor::new(json!({}).to_string());
    let mut rln = RLN::new(input_buffer).unwrap();

    // We generate a random leaf
    let leaf = Fr::rand(&mut rng);
@@ -977,12 +950,9 @@ fn test_get_leaf() {
}

#[test]
fn test_valid_metadata() {
    let tree_height = TEST_TREE_HEIGHT;

    let input_buffer =
        Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
    let mut rln = RLN::new(tree_height, input_buffer).unwrap();
fn test_metadata() {
    let input_buffer = Cursor::new(json!({}).to_string());
    let mut rln = RLN::new(input_buffer).unwrap();

    let arbitrary_metadata: &[u8] = b"block_number:200000";
    rln.set_metadata(arbitrary_metadata).unwrap();
@@ -993,18 +963,3 @@ fn test_valid_metadata() {

    assert_eq!(arbitrary_metadata, received_metadata);
}

#[test]
fn test_empty_metadata() {
    let tree_height = TEST_TREE_HEIGHT;

    let input_buffer =
        Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
    let rln = RLN::new(tree_height, input_buffer).unwrap();

    let mut buffer = Cursor::new(Vec::<u8>::new());
    rln.get_metadata(&mut buffer).unwrap();
    let received_metadata = buffer.into_inner();

    assert_eq!(received_metadata.len(), 0);
}

@@ -8,7 +8,7 @@ use num_traits::Num;
use std::iter::Extend;

pub fn to_bigint(el: &Fr) -> Result<BigInt> {
    let res: BigUint = (*el).into();
    let res: BigUint = (*el).try_into()?;
    Ok(res.into())
}

@@ -28,10 +28,10 @@ pub fn str_to_fr(input: &str, radix: u32) -> Result<Fr> {
    input_clean = input_clean.trim().to_string();

    if radix == 10 {
        Ok(BigUint::from_str_radix(&input_clean, radix)?.into())
        Ok(BigUint::from_str_radix(&input_clean, radix)?.try_into()?)
    } else {
        input_clean = input_clean.replace("0x", "");
        Ok(BigUint::from_str_radix(&input_clean, radix)?.into())
        Ok(BigUint::from_str_radix(&input_clean, radix)?.try_into()?)
    }
}
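A small illustration of the parsing behaviour implied by the hunk above: decimal strings and 0x-prefixed hex strings land on the same field element, and the BigUint-to-Fr conversion is now fallible, so a bad input propagates an error through `?` instead of converting unconditionally. This assumes these helpers are the public ones in `rln/src/utils.rs`; the assertion below is illustrative, not taken from the PR:

```rust
use color_eyre::Result;
use rln::circuit::Fr;
use rln::utils::str_to_fr;

fn parse_examples() -> Result<()> {
    // 11 in decimal and 0x0b in hex parse to the same Fr element.
    let a: Fr = str_to_fr("11", 10)?;
    let b: Fr = str_to_fr("0x0b", 16)?;
    assert_eq!(a, b);
    Ok(())
}
```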
198
rln/tests/ffi.rs
198
rln/tests/ffi.rs
@@ -1,5 +1,7 @@
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
const tree_height: usize = 20;
|
||||
|
||||
use ark_std::{rand::thread_rng, UniformRand};
|
||||
use rand::Rng;
|
||||
use rln::circuit::*;
|
||||
@@ -17,7 +19,6 @@ mod test {
|
||||
#[test]
|
||||
// We test merkle batch Merkle tree additions
|
||||
fn test_merkle_operations_ffi() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let no_of_leaves = 256;
|
||||
|
||||
// We generate a vector of random leaves
|
||||
@@ -29,9 +30,9 @@ mod test {
|
||||
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resource_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_config = json!({}).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
let success = new(input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
|
||||
@@ -132,14 +133,13 @@ mod test {
|
||||
// Uses `set_leaves_from` to set leaves in a batch
|
||||
fn test_leaf_setting_with_index_ffi() {
|
||||
// We create a new tree
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let no_of_leaves = 256;
|
||||
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_config = json!({}).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
let success = new(input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
|
||||
@@ -224,14 +224,13 @@ mod test {
|
||||
#[test]
|
||||
// This test is similar to the one in public.rs but it uses the RLN object as a pointer
|
||||
fn test_atomic_operation_ffi() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let no_of_leaves = 256;
|
||||
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_config = json!({}).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
let success = new(input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
|
||||
@@ -289,7 +288,6 @@ mod test {
|
||||
#[test]
|
||||
// This test is similar to the one in public.rs but it uses the RLN object as a pointer
|
||||
fn test_set_leaves_bad_index_ffi() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let no_of_leaves = 256;
|
||||
|
||||
// We generate a vector of random leaves
|
||||
@@ -303,9 +301,9 @@ mod test {
|
||||
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_config = json!({}).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
let success = new(input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
|
||||
@@ -339,14 +337,14 @@ mod test {
|
||||
#[test]
|
||||
// This test is similar to the one in lib, but uses only public C API
|
||||
fn test_merkle_proof_ffi() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let leaf_index = 3;
|
||||
let user_message_limit = 1;
|
||||
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_config = json!({}).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
let success = new(input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
|
||||
@@ -372,18 +370,16 @@ mod test {
|
||||
|
||||
use ark_ff::BigInt;
|
||||
|
||||
if TEST_TREE_HEIGHT == 20 || TEST_TREE_HEIGHT == 32 {
|
||||
assert_eq!(
|
||||
root,
|
||||
BigInt([
|
||||
4939322235247991215,
|
||||
5110804094006647505,
|
||||
4427606543677101242,
|
||||
910933464535675827
|
||||
])
|
||||
.into()
|
||||
);
|
||||
}
|
||||
assert_eq!(
|
||||
root,
|
||||
BigInt([
|
||||
4939322235247991215,
|
||||
5110804094006647505,
|
||||
4427606543677101242,
|
||||
910933464535675827
|
||||
])
|
||||
.into()
|
||||
);
|
||||
|
||||
// We obtain the Merkle tree root
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
@@ -477,40 +473,36 @@ mod test {
|
||||
let mut expected_identity_path_index: Vec<u8> =
|
||||
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
|
||||
|
||||
if TEST_TREE_HEIGHT == 20 {
|
||||
expected_path_elements.append(&mut vec![
|
||||
str_to_fr(
|
||||
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
]);
|
||||
expected_identity_path_index.append(&mut vec![0, 0, 0, 0]);
|
||||
}
|
||||
|
||||
if TEST_TREE_HEIGHT == 20 {
|
||||
expected_path_elements.append(&mut vec![str_to_fr(
|
||||
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
|
||||
expected_path_elements.append(&mut vec![
|
||||
str_to_fr(
|
||||
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
|
||||
16,
|
||||
)
|
||||
.unwrap()]);
|
||||
expected_identity_path_index.append(&mut vec![0]);
|
||||
}
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
]);
|
||||
expected_identity_path_index.append(&mut vec![0, 0, 0, 0]);
|
||||
|
||||
expected_path_elements.append(&mut vec![str_to_fr(
|
||||
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
|
||||
16,
|
||||
)
|
||||
.unwrap()]);
|
||||
expected_identity_path_index.append(&mut vec![0]);
|
||||
|
||||
assert_eq!(path_elements, expected_path_elements);
|
||||
assert_eq!(identity_path_index, expected_identity_path_index);
|
||||
@@ -529,13 +521,11 @@ mod test {
|
||||
#[test]
|
||||
// Benchmarks proof generation and verification
|
||||
fn test_groth16_proofs_performance_ffi() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_config = json!({}).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
let success = new(input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
|
||||
@@ -592,13 +582,11 @@ mod test {
|
||||
#[test]
|
||||
// Creating a RLN with raw data should generate same results as using a path to resources
|
||||
fn test_rln_raw_ffi() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
|
||||
// We create a RLN instance using a resource folder path
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_config = json!({}).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
let success = new(input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
|
||||
@@ -611,7 +599,7 @@ mod test {
|
||||
let (root_rln_folder, _) = bytes_le_to_fr(&result_data);
|
||||
|
||||
// Reading the raw data from the files required for instantiating a RLN instance using raw data
|
||||
let circom_path = format!("./resources/tree_height_{TEST_TREE_HEIGHT}/rln.wasm");
|
||||
let circom_path = format!("./resources/tree_height_20/rln.wasm");
|
||||
let mut circom_file = File::open(&circom_path).expect("no file found");
|
||||
let metadata = std::fs::metadata(&circom_path).expect("unable to read metadata");
|
||||
let mut circom_buffer = vec![0; metadata.len() as usize];
|
||||
@@ -619,7 +607,7 @@ mod test {
|
||||
.read_exact(&mut circom_buffer)
|
||||
.expect("buffer overflow");
|
||||
|
||||
let zkey_path = format!("./resources/tree_height_{TEST_TREE_HEIGHT}/rln_final.zkey");
|
||||
let zkey_path = format!("./resources/tree_height_20/rln_final.zkey");
|
||||
let mut zkey_file = File::open(&zkey_path).expect("no file found");
|
||||
let metadata = std::fs::metadata(&zkey_path).expect("unable to read metadata");
|
||||
let mut zkey_buffer = vec![0; metadata.len() as usize];
|
||||
@@ -627,7 +615,7 @@ mod test {
|
||||
.read_exact(&mut zkey_buffer)
|
||||
.expect("buffer overflow");
|
||||
|
||||
let vk_path = format!("./resources/tree_height_{TEST_TREE_HEIGHT}/verification_key.json");
|
||||
let vk_path = format!("./resources/tree_height_20/verification_key.json");
|
||||
|
||||
let mut vk_file = File::open(&vk_path).expect("no file found");
|
||||
let metadata = std::fs::metadata(&vk_path).expect("unable to read metadata");
|
||||
@@ -668,9 +656,8 @@ mod test {
|
||||
#[test]
|
||||
// Computes and verifies an RLN ZK proof using FFI APIs
|
||||
fn test_rln_proof_ffi() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let no_of_leaves = 256;
|
||||
let user_message_limit = Fr::from(100);
|
||||
let user_message_limit = Fr::from(65535);
|
||||
|
||||
// We generate a vector of random leaves
|
||||
let mut leaves: Vec<Fr> = Vec::new();
|
||||
@@ -683,9 +670,9 @@ mod test {
|
||||
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_config = json!({}).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
let success = new(input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
|
||||
@@ -703,6 +690,7 @@ mod test {
|
||||
let result_data = <&[u8]>::from(&output_buffer).to_vec();
|
||||
let (identity_secret_hash, read) = bytes_le_to_fr(&result_data);
|
||||
let (id_commitment, _) = bytes_le_to_fr(&result_data[read..].to_vec());
|
||||
let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit]);
|
||||
|
||||
let identity_index: usize = no_of_leaves;
|
||||
|
||||
@@ -715,6 +703,7 @@ mod test {
|
||||
let rln_identifier = hash_to_field(b"test-rln-identifier");
|
||||
let external_nullifier = utils_poseidon_hash(&[epoch, rln_identifier]);
|
||||
|
||||
let user_message_limit = Fr::from(100);
|
||||
let message_id = Fr::from(0);
|
||||
let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit]);
|
||||
|
||||
@@ -724,6 +713,9 @@ mod test {
|
||||
let success = set_next_leaf(rln_pointer, input_buffer);
|
||||
assert!(success, "set next leaf call failed");
|
||||
|
||||
// We generate a random rln_identifier
|
||||
let rln_identifier = hash_to_field(b"test-rln-identifier");
|
||||
|
||||
// We prepare input for generate_rln_proof API
|
||||
// input_data is [ identity_secret<32> | id_index<8> | user_message_limit<32> | message_id<32> | external_nullifier<32> | signal_len<8> | signal<var> ]
|
||||
let mut serialized: Vec<u8> = Vec::new();
|
||||
@@ -763,7 +755,6 @@ mod test {
|
||||
// Computes and verifies an RLN ZK proof by checking proof's root against an input roots buffer
|
||||
fn test_verify_with_roots() {
|
||||
// First part similar to test_rln_proof_ffi
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let no_of_leaves = 256;
|
||||
let user_message_limit = Fr::from(100);
|
||||
|
||||
@@ -776,9 +767,9 @@ mod test {
|
||||
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_config = json!({}).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
let success = new(input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
|
||||
@@ -813,6 +804,7 @@ mod test {
|
||||
let message_id = Fr::from(0);
|
||||
|
||||
// We set as leaf rate_commitment, its index would be equal to no_of_leaves
|
||||
let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit]);
|
||||
let leaf_ser = fr_to_bytes_le(&rate_commitment);
|
||||
let input_buffer = &Buffer::from(leaf_ser.as_ref());
|
||||
let success = set_next_leaf(rln_pointer, input_buffer);
|
||||
@@ -899,13 +891,11 @@ mod test {
|
||||
#[test]
|
||||
// Computes and verifies an RLN ZK proof using FFI APIs
|
||||
fn test_recover_id_secret_ffi() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_config = json!({}).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
let success = new(input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
|
||||
@@ -1072,13 +1062,11 @@ mod test {
|
||||
#[test]
|
||||
// Tests hash to field using FFI APIs
|
||||
fn test_seeded_keygen_ffi() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_config = json!({}).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
let success = new(input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
|
||||
@@ -1113,12 +1101,11 @@ mod test {
|
||||
#[test]
|
||||
// Tests hash to field using FFI APIs
|
||||
fn test_seeded_extended_keygen_ffi() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_config = json!({}).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
let success = new(input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
|
||||
@@ -1218,13 +1205,12 @@ mod test {
|
||||
#[test]
|
||||
fn test_get_leaf() {
|
||||
// We create a RLN instance
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let no_of_leaves = 1 << TEST_TREE_HEIGHT;
|
||||
let no_of_leaves = 1 << tree_height;
|
||||
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_config = json!({}).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
let success = new(input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
|
||||
@@ -1261,14 +1247,13 @@ mod test {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_valid_metadata() {
|
||||
fn test_metadata() {
|
||||
// We create a RLN instance
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_config = json!({}).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
let success = new(input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
|
||||
@@ -1287,25 +1272,4 @@ mod test {
|
||||
|
||||
assert_eq!(result_data, seed_bytes.to_vec());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_empty_metadata() {
|
||||
// We create a RLN instance
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = get_metadata(rln_pointer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "get_metadata call failed");
|
||||
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
|
||||
assert_eq!(output_buffer.len, 0);
|
||||
}
|
||||
}
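Taken together, the tests above all migrate to the same instantiation pattern. The sketch below is a non-authoritative summary of that pattern, assuming the revised `new` signature that drops the explicit tree-height argument and the same FFI imports the test module already uses.

```rust
use std::mem::MaybeUninit;

use rln::ffi::{new, Buffer, RLN}; // assumed export paths, as used by the tests above
use serde_json::json;

fn create_rln_instance() -> &'static mut RLN {
    let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
    // An empty JSON config now selects the default (embedded) resources,
    // replacing the old `resources_folder` setting.
    let input_config = json!({}).to_string();
    let input_buffer = Buffer::from(input_config.as_bytes());
    let success = new(&input_buffer, rln_pointer.as_mut_ptr());
    assert!(success, "RLN object creation failed");
    unsafe { &mut *rln_pointer.assume_init() }
}
```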
|
||||
|
||||
@@ -1,10 +1,7 @@
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use ark_ff::BigInt;
|
||||
use rln::circuit::{
|
||||
circom_from_folder, vk_from_folder, zkey_from_folder, Fr, TEST_RESOURCES_FOLDER,
|
||||
TEST_TREE_HEIGHT,
|
||||
};
|
||||
use rln::circuit::{default_circom, default_vk, default_zkey, Fr};
|
||||
use rln::hashers::{hash_to_field, poseidon_hash};
|
||||
use rln::poseidon_tree::PoseidonTree;
|
||||
use rln::protocol::*;
|
||||
@@ -71,7 +68,6 @@ mod test {
|
||||
#[test]
|
||||
// We test Merkle tree generation, proofs and verification
|
||||
fn test_merkle_proof() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let leaf_index = 3;
|
||||
|
||||
// generate identity
|
||||
@@ -81,36 +77,29 @@ mod test {
|
||||
|
||||
// generate merkle tree
|
||||
let default_leaf = Fr::from(0);
|
||||
let mut tree = PoseidonTree::new(
|
||||
tree_height,
|
||||
default_leaf,
|
||||
ConfigOf::<PoseidonTree>::default(),
|
||||
)
|
||||
.unwrap();
|
||||
let mut tree =
|
||||
PoseidonTree::new(20, default_leaf, ConfigOf::<PoseidonTree>::default()).unwrap();
|
||||
tree.set(leaf_index, rate_commitment.into()).unwrap();
|
||||
|
||||
// We check correct computation of the root
|
||||
let root = tree.root();
|
||||
|
||||
if TEST_TREE_HEIGHT == 20 || TEST_TREE_HEIGHT == 32 {
|
||||
assert_eq!(
|
||||
root,
|
||||
BigInt([
|
||||
4939322235247991215,
|
||||
5110804094006647505,
|
||||
4427606543677101242,
|
||||
910933464535675827
|
||||
])
|
||||
.into()
|
||||
);
|
||||
}
|
||||
assert_eq!(
|
||||
root,
|
||||
BigInt([
|
||||
4939322235247991215,
|
||||
5110804094006647505,
|
||||
4427606543677101242,
|
||||
910933464535675827
|
||||
])
|
||||
.into()
|
||||
);
|
||||
|
||||
let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
|
||||
let path_elements = merkle_proof.get_path_elements();
|
||||
let identity_path_index = merkle_proof.get_path_index();
|
||||
|
||||
// We check correct computation of the path and indexes
|
||||
// These values refer to TEST_TREE_HEIGHT == 16
|
||||
let mut expected_path_elements = vec![
|
||||
str_to_fr(
|
||||
"0x0000000000000000000000000000000000000000000000000000000000000000",
|
||||
@@ -193,40 +182,36 @@ mod test {
|
||||
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
|
||||
|
||||
// We add the remaining elements for the case TEST_TREE_HEIGHT = 20
|
||||
if TEST_TREE_HEIGHT == 20 {
|
||||
expected_path_elements.append(&mut vec![
|
||||
str_to_fr(
|
||||
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
]);
|
||||
expected_identity_path_index.append(&mut vec![0, 0, 0, 0]);
|
||||
}
|
||||
|
||||
if TEST_TREE_HEIGHT == 20 {
|
||||
expected_path_elements.append(&mut vec![str_to_fr(
|
||||
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
|
||||
expected_path_elements.append(&mut vec![
|
||||
str_to_fr(
|
||||
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
|
||||
16,
|
||||
)
|
||||
.unwrap()]);
|
||||
expected_identity_path_index.append(&mut vec![0]);
|
||||
}
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
]);
|
||||
expected_identity_path_index.append(&mut vec![0, 0, 0, 0]);
|
||||
|
||||
expected_path_elements.append(&mut vec![str_to_fr(
|
||||
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
|
||||
16,
|
||||
)
|
||||
.unwrap()]);
|
||||
expected_identity_path_index.append(&mut vec![0]);
|
||||
|
||||
assert_eq!(path_elements, expected_path_elements);
|
||||
assert_eq!(identity_path_index, expected_identity_path_index);
|
||||
@@ -239,9 +224,9 @@ mod test {
|
||||
// We test a RLN proof generation and verification
|
||||
fn test_witness_from_json() {
|
||||
// We generate all relevant keys
|
||||
let proving_key = zkey_from_folder(TEST_RESOURCES_FOLDER).unwrap();
|
||||
let verification_key = vk_from_folder(TEST_RESOURCES_FOLDER).unwrap();
|
||||
let builder = circom_from_folder(TEST_RESOURCES_FOLDER).unwrap();
|
||||
let proving_key = default_zkey().unwrap();
|
||||
let verification_key = default_vk().unwrap();
|
||||
let builder = default_circom().unwrap();
|
||||
|
||||
// We compute witness from the json input example
|
||||
let witness_json = WITNESS_JSON_20;
|
||||
@@ -260,7 +245,6 @@ mod test {
|
||||
#[test]
|
||||
// We test a RLN proof generation and verification
|
||||
fn test_end_to_end() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let leaf_index = 3;
|
||||
|
||||
// Generate identity pair
|
||||
@@ -270,12 +254,8 @@ mod test {
|
||||
|
||||
//// generate merkle tree
|
||||
let default_leaf = Fr::from(0);
|
||||
let mut tree = PoseidonTree::new(
|
||||
tree_height,
|
||||
default_leaf,
|
||||
ConfigOf::<PoseidonTree>::default(),
|
||||
)
|
||||
.unwrap();
|
||||
let mut tree =
|
||||
PoseidonTree::new(20, default_leaf, ConfigOf::<PoseidonTree>::default()).unwrap();
|
||||
tree.set(leaf_index, rate_commitment.into()).unwrap();
|
||||
|
||||
let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
|
||||
@@ -299,9 +279,9 @@ mod test {
|
||||
.unwrap();
|
||||
|
||||
// We generate all relevant keys
|
||||
let proving_key = zkey_from_folder(TEST_RESOURCES_FOLDER).unwrap();
|
||||
let verification_key = vk_from_folder(TEST_RESOURCES_FOLDER).unwrap();
|
||||
let builder = circom_from_folder(TEST_RESOURCES_FOLDER).unwrap();
|
||||
let proving_key = default_zkey().unwrap();
|
||||
let verification_key = default_vk().unwrap();
|
||||
let builder = default_circom().unwrap();
|
||||
|
||||
// Let's generate a zkSNARK proof
|
||||
let proof = generate_proof(builder, &proving_key, &rln_witness).unwrap();
|
||||
|
||||
@@ -3,7 +3,7 @@ mod test {
|
||||
use ark_ff::BigInt;
|
||||
use ark_std::{rand::thread_rng, UniformRand};
|
||||
use rand::Rng;
|
||||
use rln::circuit::{Fr, TEST_RESOURCES_FOLDER, TEST_TREE_HEIGHT};
|
||||
use rln::circuit::Fr;
|
||||
use rln::hashers::{hash_to_field, poseidon_hash as utils_poseidon_hash, ROUND_PARAMS};
|
||||
use rln::protocol::{compute_tree_root, deserialize_identity_tuple};
|
||||
use rln::public::{hash as public_hash, poseidon_hash as public_poseidon_hash, RLN};
|
||||
@@ -14,13 +14,11 @@ mod test {
|
||||
#[test]
|
||||
// This test is similar to the one in lib, but uses only public API
|
||||
fn test_merkle_proof() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let leaf_index = 3;
|
||||
let user_message_limit = 1;
|
||||
|
||||
let input_buffer =
|
||||
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
|
||||
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
|
||||
let input_buffer = Cursor::new(json!({}).to_string());
|
||||
let mut rln = RLN::new(input_buffer).unwrap();
|
||||
|
||||
// generate identity
|
||||
let identity_secret_hash = hash_to_field(b"test-merkle-proof");
|
||||
@@ -36,26 +34,15 @@ mod test {
|
||||
rln.get_root(&mut buffer).unwrap();
|
||||
let (root, _) = bytes_le_to_fr(&buffer.into_inner());
|
||||
|
||||
if TEST_TREE_HEIGHT == 20 {
|
||||
assert_eq!(
|
||||
root,
|
||||
Fr::from(BigInt([
|
||||
17110646155607829651,
|
||||
5040045984242729823,
|
||||
6965416728592533086,
|
||||
2328960363755461975
|
||||
]))
|
||||
);
|
||||
} else if TEST_TREE_HEIGHT == 32 {
|
||||
assert_eq!(
|
||||
root,
|
||||
str_to_fr(
|
||||
"0x21947ffd0bce0c385f876e7c97d6a42eec5b1fe935aab2f01c1f8a8cbcc356d2",
|
||||
16
|
||||
)
|
||||
.unwrap()
|
||||
);
|
||||
}
|
||||
assert_eq!(
|
||||
root,
|
||||
Fr::from(BigInt([
|
||||
17110646155607829651,
|
||||
5040045984242729823,
|
||||
6965416728592533086,
|
||||
2328960363755461975
|
||||
]))
|
||||
);
|
||||
|
||||
// We check correct computation of merkle proof
|
||||
let mut buffer = Cursor::new(Vec::<u8>::new());
|
||||
@@ -148,40 +135,36 @@ mod test {
|
||||
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
|
||||
|
||||
// We add the remaining elements for the case TEST_TREE_HEIGHT = 20
|
||||
if TEST_TREE_HEIGHT == 20 || TEST_TREE_HEIGHT == 32 {
|
||||
expected_path_elements.append(&mut vec![
|
||||
str_to_fr(
|
||||
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
]);
|
||||
expected_identity_path_index.append(&mut vec![0, 0, 0, 0]);
|
||||
}
|
||||
|
||||
if TEST_TREE_HEIGHT == 20 {
|
||||
expected_path_elements.append(&mut vec![str_to_fr(
|
||||
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
|
||||
expected_path_elements.append(&mut vec![
|
||||
str_to_fr(
|
||||
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
|
||||
16,
|
||||
)
|
||||
.unwrap()]);
|
||||
expected_identity_path_index.append(&mut vec![0]);
|
||||
}
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
]);
|
||||
expected_identity_path_index.append(&mut vec![0, 0, 0, 0]);
|
||||
|
||||
expected_path_elements.append(&mut vec![str_to_fr(
|
||||
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
|
||||
16,
|
||||
)
|
||||
.unwrap()]);
|
||||
expected_identity_path_index.append(&mut vec![0]);
|
||||
|
||||
assert_eq!(path_elements, expected_path_elements);
|
||||
assert_eq!(identity_path_index, expected_identity_path_index);
50  semaphore/Cargo.toml  (new file)
@@ -0,0 +1,50 @@
[package]
name = "semaphore-wrapper"
version = "0.3.0"
edition = "2021"
license = "MIT OR Apache-2.0"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[features]
default = []
dylib = [ "wasmer/dylib", "wasmer-engine-dylib", "wasmer-compiler-cranelift" ]

[dependencies]
ark-bn254 = { version = "0.3.0" }
ark-circom = { git = "https://github.com/gakonst/ark-circom", features=["circom-2"], rev = "35ce5a9" }
ark-ec = { version = "0.3.0", default-features = false, features = ["parallel"] }
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", features = ["parallel"] }
ark-relations = { version = "0.3.0", default-features = false }
ark-std = { version = "0.3.0", default-features = false, features = ["parallel"] }
color-eyre = "0.6.2"
once_cell = "1.17.1"
rand = "0.8.5"
semaphore = { git = "https://github.com/worldcoin/semaphore-rs", rev = "ee658c2"}
ethers-core = { version = "2.0.10", default-features = false }
ruint = { version = "1.10.0", features = [ "serde", "num-bigint", "ark-ff" ] }
serde = "1.0"
thiserror = "1.0.39"
wasmer = { version = "2.3" }

[dev-dependencies]
rand_chacha = "0.3.1"
serde_json = "1.0.96"

[build-dependencies]
color-eyre = "0.6.2"
wasmer = { version = "2.3" }
wasmer-engine-dylib = { version = "2.3.0", optional = true }
wasmer-compiler-cranelift = { version = "3.3.0", optional = true }

[profile.release]
codegen-units = 1
lto = true
panic = "abort"
opt-level = 3

# Compilation profile for any non-workspace member.
# Dependencies are optimized, even in a dev build. This improves dev performance
# while having negligible impact on incremental build times.
[profile.dev.package."*"]
opt-level = 3
7  semaphore/Makefile.toml  (new file)
@@ -0,0 +1,7 @@
[tasks.build]
command = "cargo"
args = ["build", "--release"]

[tasks.test]
command = "cargo"
args = ["test", "--release"]
18  semaphore/README.md  (new file)
@@ -0,0 +1,18 @@
# Semaphore example package

This is essentially a wrapper around (and partial copy of)
https://github.com/worldcoin/semaphore-rs, included to illustrate how a package
such as RLN can be structured.

A further goal is to provide a basic FFI around protocol.rs, which is currently
out of scope for the upstream project.

See that project for more information.

## Build and Test

To build and test, run the following commands within the module folder:

```bash
cargo make build
cargo make test
```
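Not stated in this README, but implied by the vendored submodule and the build script added below: the circuit sources must be present before the first build. A hedged sketch of the likely first-time setup, assuming npm/Node.js are installed as build.rs requires:

```bash
# Fetch the vendored semaphore-rs sources referenced by the submodule added in this change.
git submodule update --init --recursive

# Then build and test as described above; build.rs compiles the circuit artifacts on first build.
cargo make build
cargo make test
```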
111  semaphore/build.rs  (new file)
@@ -0,0 +1,111 @@
|
||||
// Adapted from semaphore-rs/build.rs
|
||||
use color_eyre::eyre::{eyre, Result};
|
||||
use std::{
|
||||
path::{Component, Path, PathBuf},
|
||||
process::Command,
|
||||
};
|
||||
|
||||
const ZKEY_FILE: &str = "./vendor/semaphore/build/snark/semaphore_final.zkey";
|
||||
const WASM_FILE: &str = "./vendor/semaphore/build/snark/semaphore.wasm";
|
||||
|
||||
// See <https://internals.rust-lang.org/t/path-to-lexical-absolute/14940>
|
||||
fn absolute(path: &str) -> Result<PathBuf> {
|
||||
let path = Path::new(path);
|
||||
let mut absolute = if path.is_absolute() {
|
||||
PathBuf::new()
|
||||
} else {
|
||||
std::env::current_dir()?
|
||||
};
|
||||
for component in path.components() {
|
||||
match component {
|
||||
Component::CurDir => {}
|
||||
Component::ParentDir => {
|
||||
absolute.pop();
|
||||
}
|
||||
component => absolute.push(component.as_os_str()),
|
||||
}
|
||||
}
|
||||
Ok(absolute)
|
||||
}
|
||||
|
||||
fn build_circuit() -> Result<()> {
|
||||
println!("cargo:rerun-if-changed=./vendor/semaphore");
|
||||
let run = |cmd: &[&str]| -> Result<()> {
|
||||
// TODO: Use ExitCode::exit_ok() when stable.
|
||||
Command::new(cmd[0])
|
||||
.args(cmd[1..].iter())
|
||||
.current_dir("./vendor/semaphore")
|
||||
.status()?
|
||||
.success()
|
||||
.then_some(())
|
||||
.ok_or(eyre!("procees returned failure"))?;
|
||||
Ok(())
|
||||
};
|
||||
|
||||
// Compute absolute paths
|
||||
let zkey_file = absolute(ZKEY_FILE)?;
|
||||
let wasm_file = absolute(WASM_FILE)?;
|
||||
|
||||
// Build circuits if not exists
|
||||
// TODO: This does not rebuild if the semaphore submodule is changed.
|
||||
// NOTE: This requires npm / nodejs to be installed.
|
||||
if !(zkey_file.exists() && wasm_file.exists()) {
|
||||
run(&["npm", "install"])?;
|
||||
run(&["npm", "exec", "ts-node", "./scripts/compile-circuits.ts"])?;
|
||||
}
|
||||
assert!(zkey_file.exists());
|
||||
assert!(wasm_file.exists());
|
||||
|
||||
// Export generated paths
|
||||
println!("cargo:rustc-env=BUILD_RS_ZKEY_FILE={}", zkey_file.display());
|
||||
println!("cargo:rustc-env=BUILD_RS_WASM_FILE={}", wasm_file.display());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(feature = "dylib")]
|
||||
fn build_dylib() -> Result<()> {
|
||||
use enumset::enum_set;
|
||||
use std::{env, str::FromStr};
|
||||
use wasmer::{Module, Store, Target, Triple};
|
||||
use wasmer_compiler_cranelift::Cranelift;
|
||||
use wasmer_engine_dylib::Dylib;
|
||||
|
||||
let wasm_file = absolute(WASM_FILE)?;
|
||||
assert!(wasm_file.exists());
|
||||
|
||||
let out_dir = env::var("OUT_DIR")?;
|
||||
let out_dir = Path::new(&out_dir).to_path_buf();
|
||||
let dylib_file = out_dir.join("semaphore.dylib");
|
||||
println!(
|
||||
"cargo:rustc-env=CIRCUIT_WASM_DYLIB={}",
|
||||
dylib_file.display()
|
||||
);
|
||||
|
||||
if dylib_file.exists() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Create a WASM engine for the target that can compile
|
||||
let triple = Triple::from_str(&env::var("TARGET")?).map_err(|e| eyre!(e))?;
|
||||
let cpu_features = enum_set!();
|
||||
let target = Target::new(triple, cpu_features);
|
||||
let compiler_config = Cranelift::default();
|
||||
let engine = Dylib::new(compiler_config).target(target).engine();
|
||||
|
||||
// Compile the WASM module
|
||||
let store = Store::new(&engine);
|
||||
let module = Module::from_file(&store, &wasm_file)?;
|
||||
module.serialize_to_file(&dylib_file)?;
|
||||
assert!(dylib_file.exists());
|
||||
println!("cargo:warning=Circuit dylib is in {}", dylib_file.display());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn main() -> Result<()> {
|
||||
build_circuit()?;
|
||||
#[cfg(feature = "dylib")]
|
||||
build_dylib()?;
|
||||
Ok(())
|
||||
}
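The build script above embeds the circuit by default and can optionally pre-compile it to a native library. A hedged usage sketch, with the feature name taken from the Cargo.toml above and `CIRCUIT_WASM_DYLIB` being the variable build.rs exports and the circuit module below reads:

```bash
# Default build: the wasm witness calculator is embedded and compiled at runtime.
cargo build --release

# Optional: pre-compile the circuit with the dylib feature; build.rs writes the
# artifact into OUT_DIR and points CIRCUIT_WASM_DYLIB at it.
cargo build --release --features dylib
```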
79  semaphore/src/circuit.rs  (new file)
@@ -0,0 +1,79 @@
|
||||
// Adapted from semaphore-rs/src/circuit.rs
|
||||
use ark_bn254::{Bn254, Fr};
|
||||
use ark_circom::{read_zkey, WitnessCalculator};
|
||||
use ark_groth16::ProvingKey;
|
||||
use ark_relations::r1cs::ConstraintMatrices;
|
||||
use core::include_bytes;
|
||||
use once_cell::sync::{Lazy, OnceCell};
|
||||
use std::{io::Cursor, sync::Mutex};
|
||||
use wasmer::{Module, Store};
|
||||
|
||||
#[cfg(feature = "dylib")]
|
||||
use std::{env, path::Path};
|
||||
#[cfg(feature = "dylib")]
|
||||
use wasmer::Dylib;
|
||||
|
||||
const ZKEY_BYTES: &[u8] = include_bytes!(env!("BUILD_RS_ZKEY_FILE"));
|
||||
|
||||
#[cfg(not(feature = "dylib"))]
|
||||
const WASM: &[u8] = include_bytes!(env!("BUILD_RS_WASM_FILE"));
|
||||
|
||||
static ZKEY: Lazy<(ProvingKey<Bn254>, ConstraintMatrices<Fr>)> = Lazy::new(|| {
|
||||
let mut reader = Cursor::new(ZKEY_BYTES);
|
||||
read_zkey(&mut reader).expect("zkey should be valid")
|
||||
});
|
||||
|
||||
static WITNESS_CALCULATOR: OnceCell<Mutex<WitnessCalculator>> = OnceCell::new();
|
||||
|
||||
/// Initialize the library.
|
||||
#[cfg(feature = "dylib")]
|
||||
pub fn initialize(dylib_path: &Path) {
|
||||
WITNESS_CALCULATOR
|
||||
.set(from_dylib(dylib_path))
|
||||
.expect("Failed to initialize witness calculator");
|
||||
|
||||
// Force init of ZKEY
|
||||
Lazy::force(&ZKEY);
|
||||
}
|
||||
|
||||
#[cfg(feature = "dylib")]
|
||||
fn from_dylib(path: &Path) -> Mutex<WitnessCalculator> {
|
||||
let store = Store::new(&Dylib::headless().engine());
|
||||
// The module must be exported using [`Module::serialize`].
|
||||
let module = unsafe {
|
||||
Module::deserialize_from_file(&store, path).expect("Failed to load wasm dylib module")
|
||||
};
|
||||
let result =
|
||||
WitnessCalculator::from_module(module).expect("Failed to create witness calculator");
|
||||
Mutex::new(result)
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
pub fn zkey() -> &'static (ProvingKey<Bn254>, ConstraintMatrices<Fr>) {
|
||||
&ZKEY
|
||||
}
|
||||
|
||||
#[cfg(feature = "dylib")]
|
||||
#[must_use]
|
||||
pub fn witness_calculator() -> &'static Mutex<WitnessCalculator> {
|
||||
WITNESS_CALCULATOR.get_or_init(|| {
|
||||
let path = env::var("CIRCUIT_WASM_DYLIB").expect(
|
||||
"Semaphore-rs is not initialized. The library needs to be initialized before use when \
|
||||
built with the `dylib` feature. You can initialize by calling `initialize` or \
setting the `CIRCUIT_WASM_DYLIB` environment variable.",
|
||||
);
|
||||
from_dylib(Path::new(&path))
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "dylib"))]
|
||||
#[must_use]
|
||||
pub fn witness_calculator() -> &'static Mutex<WitnessCalculator> {
|
||||
WITNESS_CALCULATOR.get_or_init(|| {
|
||||
let store = Store::default();
|
||||
let module = Module::from_binary(&store, WASM).expect("wasm should be valid");
|
||||
let result =
|
||||
WitnessCalculator::from_module(module).expect("Failed to create witness calculator");
|
||||
Mutex::new(result)
|
||||
})
|
||||
}
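For orientation, a minimal sketch of how these two accessors are consumed, mirroring the calls made in protocol.rs below; the crate name `semaphore_wrapper` is taken from the Cargo.toml above.

```rust
use semaphore_wrapper::circuit::{witness_calculator, zkey};

fn inspect_circuit() {
    // zkey() lazily parses the embedded proving key and constraint matrices.
    let (_proving_key, matrices) = zkey();
    println!("constraints: {}", matrices.num_constraints);

    // witness_calculator() lazily instantiates the embedded wasm module
    // (or the pre-compiled dylib when the `dylib` feature is enabled).
    let _calculator = witness_calculator().lock().expect("mutex not poisoned");
}
```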
7  semaphore/src/lib.rs  (new file)
@@ -0,0 +1,7 @@
#![allow(clippy::multiple_crate_versions)]

pub mod circuit;
pub mod protocol;

#[cfg(feature = "dylib")]
pub use circuit::initialize;
215  semaphore/src/protocol.rs  (new file)
@@ -0,0 +1,215 @@
|
||||
// Adapted from semaphore-rs/src/protocol.rs
|
||||
// For illustration purposes only as an example protocol
|
||||
|
||||
// Private module
|
||||
use crate::circuit::{witness_calculator, zkey};
|
||||
|
||||
use ark_bn254::{Bn254, Parameters};
|
||||
use ark_circom::CircomReduction;
|
||||
use ark_ec::bn::Bn;
|
||||
use ark_groth16::{
|
||||
create_proof_with_reduction_and_matrices, prepare_verifying_key, Proof as ArkProof,
|
||||
};
|
||||
use ark_relations::r1cs::SynthesisError;
|
||||
use ark_std::UniformRand;
|
||||
use color_eyre::{Report, Result};
|
||||
use ethers_core::types::U256;
|
||||
use rand::{thread_rng, Rng};
|
||||
use semaphore::{
|
||||
identity::Identity,
|
||||
merkle_tree::{self, Branch},
|
||||
poseidon,
|
||||
poseidon_tree::PoseidonHash,
|
||||
Field,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::time::Instant;
|
||||
use thiserror::Error;
|
||||
|
||||
// Matches the private G1Tup type in ark-circom.
|
||||
pub type G1 = (U256, U256);
|
||||
|
||||
// Matches the private G2Tup type in ark-circom.
|
||||
pub type G2 = ([U256; 2], [U256; 2]);
|
||||
|
||||
/// Wrap a proof object so we have serde support
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct Proof(G1, G2, G1);
|
||||
|
||||
impl From<ArkProof<Bn<Parameters>>> for Proof {
|
||||
fn from(proof: ArkProof<Bn<Parameters>>) -> Self {
|
||||
let proof = ark_circom::ethereum::Proof::from(proof);
|
||||
let (a, b, c) = proof.as_tuple();
|
||||
Self(a, b, c)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Proof> for ArkProof<Bn<Parameters>> {
|
||||
fn from(proof: Proof) -> Self {
|
||||
let eth_proof = ark_circom::ethereum::Proof {
|
||||
a: ark_circom::ethereum::G1 {
|
||||
x: proof.0 .0,
|
||||
y: proof.0 .1,
|
||||
},
|
||||
#[rustfmt::skip] // Rustfmt inserts some confusing spaces
|
||||
b: ark_circom::ethereum::G2 {
|
||||
// The order of coefficients is flipped.
|
||||
x: [proof.1.0[1], proof.1.0[0]],
|
||||
y: [proof.1.1[1], proof.1.1[0]],
|
||||
},
|
||||
c: ark_circom::ethereum::G1 {
|
||||
x: proof.2 .0,
|
||||
y: proof.2 .1,
|
||||
},
|
||||
};
|
||||
eth_proof.into()
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper to convert a merkle proof into a bigint vector
|
||||
/// TODO: we should create a From trait for this
|
||||
fn merkle_proof_to_vec(proof: &merkle_tree::Proof<PoseidonHash>) -> Vec<Field> {
|
||||
proof
|
||||
.0
|
||||
.iter()
|
||||
.map(|x| match x {
|
||||
Branch::Left(value) | Branch::Right(value) => *value,
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Generates the nullifier hash
|
||||
#[must_use]
|
||||
pub fn generate_nullifier_hash(identity: &Identity, external_nullifier: Field) -> Field {
|
||||
poseidon::hash2(external_nullifier, identity.nullifier)
|
||||
}
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
pub enum ProofError {
|
||||
#[error("Error reading circuit key: {0}")]
|
||||
CircuitKeyError(#[from] std::io::Error),
|
||||
#[error("Error producing witness: {0}")]
|
||||
WitnessError(Report),
|
||||
#[error("Error producing proof: {0}")]
|
||||
SynthesisError(#[from] SynthesisError),
|
||||
#[error("Error converting public input: {0}")]
|
||||
ToFieldError(#[from] ruint::ToFieldError),
|
||||
}
|
||||
|
||||
/// Generates a semaphore proof
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// Returns a [`ProofError`] if proving fails.
|
||||
pub fn generate_proof(
|
||||
identity: &Identity,
|
||||
merkle_proof: &merkle_tree::Proof<PoseidonHash>,
|
||||
external_nullifier_hash: Field,
|
||||
signal_hash: Field,
|
||||
) -> Result<Proof, ProofError> {
|
||||
generate_proof_rng(
|
||||
identity,
|
||||
merkle_proof,
|
||||
external_nullifier_hash,
|
||||
signal_hash,
|
||||
&mut thread_rng(),
|
||||
)
|
||||
}
|
||||
|
||||
/// Generates a semaphore proof from entropy
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// Returns a [`ProofError`] if proving fails.
|
||||
pub fn generate_proof_rng(
|
||||
identity: &Identity,
|
||||
merkle_proof: &merkle_tree::Proof<PoseidonHash>,
|
||||
external_nullifier_hash: Field,
|
||||
signal_hash: Field,
|
||||
rng: &mut impl Rng,
|
||||
) -> Result<Proof, ProofError> {
|
||||
generate_proof_rs(
|
||||
identity,
|
||||
merkle_proof,
|
||||
external_nullifier_hash,
|
||||
signal_hash,
|
||||
ark_bn254::Fr::rand(rng),
|
||||
ark_bn254::Fr::rand(rng),
|
||||
)
|
||||
}
|
||||
|
||||
fn generate_proof_rs(
|
||||
identity: &Identity,
|
||||
merkle_proof: &merkle_tree::Proof<PoseidonHash>,
|
||||
external_nullifier_hash: Field,
|
||||
signal_hash: Field,
|
||||
r: ark_bn254::Fr,
|
||||
s: ark_bn254::Fr,
|
||||
) -> Result<Proof, ProofError> {
|
||||
let inputs = [
|
||||
("identityNullifier", vec![identity.nullifier]),
|
||||
("identityTrapdoor", vec![identity.trapdoor]),
|
||||
("treePathIndices", merkle_proof.path_index()),
|
||||
("treeSiblings", merkle_proof_to_vec(merkle_proof)),
|
||||
("externalNullifier", vec![external_nullifier_hash]),
|
||||
("signalHash", vec![signal_hash]),
|
||||
];
|
||||
let inputs = inputs.into_iter().map(|(name, values)| {
|
||||
(
|
||||
name.to_string(),
|
||||
values.iter().copied().map(Into::into).collect::<Vec<_>>(),
|
||||
)
|
||||
});
|
||||
|
||||
let now = Instant::now();
|
||||
|
||||
let full_assignment = witness_calculator()
|
||||
.lock()
|
||||
.expect("witness_calculator mutex should not get poisoned")
|
||||
.calculate_witness_element::<Bn254, _>(inputs, false)
|
||||
.map_err(ProofError::WitnessError)?;
|
||||
|
||||
println!("witness generation took: {:.2?}", now.elapsed());
|
||||
|
||||
let now = Instant::now();
|
||||
let zkey = zkey();
|
||||
let ark_proof = create_proof_with_reduction_and_matrices::<_, CircomReduction>(
|
||||
&zkey.0,
|
||||
r,
|
||||
s,
|
||||
&zkey.1,
|
||||
zkey.1.num_instance_variables,
|
||||
zkey.1.num_constraints,
|
||||
full_assignment.as_slice(),
|
||||
)?;
|
||||
let proof = ark_proof.into();
|
||||
println!("proof generation took: {:.2?}", now.elapsed());
|
||||
|
||||
Ok(proof)
|
||||
}
|
||||
|
||||
/// Verifies a given semaphore proof
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// Returns a [`ProofError`] if verifying fails. Verification failure does not
|
||||
/// necessarily mean the proof is incorrect.
|
||||
pub fn verify_proof(
|
||||
root: Field,
|
||||
nullifier_hash: Field,
|
||||
signal_hash: Field,
|
||||
external_nullifier_hash: Field,
|
||||
proof: &Proof,
|
||||
) -> Result<bool, ProofError> {
|
||||
let zkey = zkey();
|
||||
let pvk = prepare_verifying_key(&zkey.0.vk);
|
||||
|
||||
let public_inputs = [root, nullifier_hash, signal_hash, external_nullifier_hash]
|
||||
.iter()
|
||||
.map(ark_bn254::Fr::try_from)
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
let ark_proof = (*proof).into();
|
||||
let result = ark_groth16::verify_proof(&pvk, &ark_proof, &public_inputs[..])?;
|
||||
Ok(result)
|
||||
}
115  semaphore/tests/protocol.rs  (new file)
@@ -0,0 +1,115 @@
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use ark_bn254::Parameters;
|
||||
use ark_ec::bn::Bn;
|
||||
use ark_groth16::Proof as ArkProof;
|
||||
use rand::{Rng, SeedableRng as _};
|
||||
use rand_chacha::ChaChaRng;
|
||||
use semaphore::{hash_to_field, identity::Identity, poseidon_tree::PoseidonTree, Field};
|
||||
use semaphore_wrapper::protocol::{
|
||||
generate_nullifier_hash, generate_proof, generate_proof_rng, verify_proof, Proof,
|
||||
};
|
||||
use serde_json::json;
|
||||
|
||||
#[test]
|
||||
fn test_semaphore() {
|
||||
// generate identity
|
||||
let id = Identity::from_seed(b"secret");
|
||||
|
||||
// generate merkle tree
|
||||
let leaf = Field::from(0);
|
||||
let mut tree = PoseidonTree::new(21, leaf);
|
||||
tree.set(0, id.commitment());
|
||||
|
||||
let merkle_proof = tree.proof(0).expect("proof should exist");
|
||||
let root = tree.root().into();
|
||||
|
||||
// change signal and external_nullifier here
|
||||
let signal_hash = hash_to_field(b"xxx");
|
||||
let external_nullifier_hash = hash_to_field(b"appId");
|
||||
|
||||
let nullifier_hash = generate_nullifier_hash(&id, external_nullifier_hash);
|
||||
|
||||
let proof =
|
||||
generate_proof(&id, &merkle_proof, external_nullifier_hash, signal_hash).unwrap();
|
||||
|
||||
let success = verify_proof(
|
||||
root,
|
||||
nullifier_hash,
|
||||
signal_hash,
|
||||
external_nullifier_hash,
|
||||
&proof,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
assert!(success);
|
||||
}
|
||||
|
||||
fn arb_proof(seed: u64) -> Proof {
|
||||
// Deterministic randomness for testing
|
||||
let mut rng = ChaChaRng::seed_from_u64(seed);
|
||||
|
||||
// generate identity
|
||||
let seed: [u8; 16] = rng.gen();
|
||||
let id = Identity::from_seed(&seed);
|
||||
|
||||
// generate merkle tree
|
||||
let leaf = Field::from(0);
|
||||
let mut tree = PoseidonTree::new(21, leaf);
|
||||
tree.set(0, id.commitment());
|
||||
|
||||
let merkle_proof = tree.proof(0).expect("proof should exist");
|
||||
|
||||
let external_nullifier: [u8; 16] = rng.gen();
|
||||
let external_nullifier_hash = hash_to_field(&external_nullifier);
|
||||
|
||||
let signal: [u8; 16] = rng.gen();
|
||||
let signal_hash = hash_to_field(&signal);
|
||||
|
||||
generate_proof_rng(
|
||||
&id,
|
||||
&merkle_proof,
|
||||
external_nullifier_hash,
|
||||
signal_hash,
|
||||
&mut rng,
|
||||
)
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_proof_cast_roundtrip() {
|
||||
let proof = arb_proof(123);
|
||||
let ark_proof: ArkProof<Bn<Parameters>> = proof.into();
|
||||
let result: Proof = ark_proof.into();
|
||||
assert_eq!(proof, result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_proof_serialize() {
|
||||
let proof = arb_proof(456);
|
||||
let json = serde_json::to_value(&proof).unwrap();
|
||||
assert_eq!(
|
||||
json,
|
||||
json!([
|
||||
[
|
||||
"0x249ae469686987ee9368da60dd177a8c42891c02f5760e955e590c79d55cfab2",
|
||||
"0xf22e25870f49388459d388afb24dcf6ec11bb2d4def1e2ec26d6e42f373aad8"
|
||||
],
|
||||
[
|
||||
[
|
||||
"0x17bd25dbd7436c30ea5b8a3a47aadf11ed646c4b25cc14a84ff8cbe0252ff1f8",
|
||||
"0x1c140668c56688367416534d57b4a14e5a825efdd5e121a6a2099f6dc4cd277b"
|
||||
],
|
||||
[
|
||||
"0x26a8524759d969ea0682a092cf7a551697d81962d6c998f543f81e52d83e05e1",
|
||||
"0x273eb3f796fd1807b9df9c6d769d983e3dabdc61677b75d48bb7691303b2c8dd"
|
||||
]
|
||||
],
|
||||
[
|
||||
"0x62715c53a0eb4c46dbb5f73f1fd7449b9c63d37c1ece65debc39b472065a90f",
|
||||
"0x114f7becc66f1cd7a8b01c89db8233622372fc0b6fc037c4313bca41e2377fd9"
|
||||
]
|
||||
])
|
||||
);
|
||||
}
|
||||
}
1  semaphore/vendor/semaphore  (vendored submodule)
Submodule semaphore/vendor/semaphore added at 5186a940ff
@@ -1,6 +1,6 @@
[package]
name = "zerokit_utils"
version = "0.4.3"
version = "0.4.1"
edition = "2021"
license = "MIT OR Apache-2.0"
description = "Various utilities for Zerokit"