Compare commits

..

5 Commits

Author              SHA1        Message                                                        Date
rymnc               7dd019b0a5  chore: Release                                                 2024-03-07 01:41:06 +05:30
Aaryamann Challani  78bec51fba  feat(rln-v1): return empty metadata if it doesnt exist (#231)  2024-03-07 01:37:01 +05:30
rymnc               b0be32943e  chore: Release                                                 2024-01-23 09:30:38 -04:00
Richard Ramos       09da5304c1  fix: add support to aarch64-linux-android                      2024-01-23 09:30:38 -04:00
Alvaro Revuelta     d5e09524d7  Expose generate proof with witness ffi (#227)                  2024-01-23 16:49:54 +05:30
                                  * Expose generate proof with witness ffi
                                  * Force run CI
                                  * Update ci.yml
                                  * Fix wasm with_witness
                                  * Remove forced CI run
78 changed files with 5246 additions and 4409 deletions

.github/workflows/ci.yml

@@ -3,32 +3,39 @@ on:
branches:
- master
paths-ignore:
- "**.md"
- "!.github/workflows/*.yml"
- "!rln-wasm/**"
- "!rln/src/**"
- "!rln/resources/**"
- "!utils/src/**"
- '**.md'
- '!.github/workflows/*.yml'
- '!multiplier/src/**'
- '!private-settlement/src/**'
- '!rln-wasm/**'
- '!rln/src/**'
- '!rln/resources/**'
- '!semaphore/src/**'
- '!utils/src/**'
pull_request:
paths-ignore:
- "**.md"
- "!.github/workflows/*.yml"
- "!rln-wasm/**"
- "!rln/src/**"
- "!rln/resources/**"
- "!utils/src/**"
- '**.md'
- '!.github/workflows/*.yml'
- '!multiplier/src/**'
- '!private-settlement/src/**'
- '!rln-wasm/**'
- '!rln/src/**'
- '!rln/resources/**'
- '!semaphore/src/**'
- '!utils/src/**'
name: Tests
jobs:
utils-test:
test:
strategy:
matrix:
platform: [ ubuntu-latest, macos-latest ]
crate: [ utils ]
platform: [ubuntu-latest, macos-latest]
crate: [multiplier, semaphore, rln, utils]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: test - ${{ matrix.crate }} - ${{ matrix.platform }}
steps:
- name: Checkout sources
@@ -44,43 +51,16 @@ jobs:
run: make installdeps
- name: cargo-make test
run: |
cargo make test --release
working-directory: ${{ matrix.crate }}
rln-test:
strategy:
matrix:
platform: [ ubuntu-latest, macos-latest ]
crate: [ rln ]
feature: [ "default", "arkzkey", "stateless" ]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: test - ${{ matrix.crate }} - ${{ matrix.platform }} - ${{ matrix.feature }}
steps:
- name: Checkout sources
uses: actions/checkout@v3
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
run: make installdeps
- name: cargo-make test
run: |
cargo make test_${{ matrix.feature }} --release
working-directory: ${{ matrix.crate }}
rln-wasm:
strategy:
matrix:
platform: [ ubuntu-latest, macos-latest ]
platform: [ubuntu-latest, macos-latest]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: test - rln-wasm - ${{ matrix.platform }}
steps:
- uses: actions/checkout@v3
@@ -88,8 +68,7 @@ jobs:
uses: actions-rs/toolchain@v1
with:
profile: minimal
# TODO: Update to stable once wasmer supports it
toolchain: 1.82.0
toolchain: stable
override: true
- uses: Swatinem/rust-cache@v2
- name: Install Dependencies
@@ -105,12 +84,12 @@ jobs:
strategy:
matrix:
# we run lint tests only on ubuntu
platform: [ ubuntu-latest ]
crate: [ rln, utils ]
platform: [ubuntu-latest]
crate: [multiplier, semaphore, rln, utils]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: lint - ${{ matrix.crate }} - ${{ matrix.platform }}
steps:
- name: Checkout sources
uses: actions/checkout@v3
@@ -131,20 +110,19 @@ jobs:
- name: cargo clippy
if: success() || failure()
run: |
cargo clippy --release -- -D warnings
working-directory: ${{ matrix.crate }}
# We skip clippy on rln-wasm, since wasm target is managed by cargo make
# Currently not treating warnings as error, too noisy
# -- -D warnings
benchmark-utils:
benchmark:
# run only in pull requests
if: github.event_name == 'pull_request'
strategy:
matrix:
# we run benchmark tests only on ubuntu
platform: [ ubuntu-latest ]
crate: [ utils ]
platform: [ubuntu-latest]
crate: [rln, utils]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
@@ -157,26 +135,3 @@ jobs:
with:
branchName: ${{ github.base_ref }}
cwd: ${{ matrix.crate }}
benchmark-rln:
# run only in pull requests
if: github.event_name == 'pull_request'
strategy:
matrix:
# we run benchmark tests only on ubuntu
platform: [ ubuntu-latest ]
crate: [ rln ]
feature: [ "default", "arkzkey" ]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: benchmark - ${{ matrix.platform }} - ${{ matrix.crate }} - ${{ matrix.feature }}
steps:
- name: Checkout sources
uses: actions/checkout@v3
- uses: Swatinem/rust-cache@v2
- uses: boa-dev/criterion-compare-action@v3
with:
branchName: ${{ github.base_ref }}
cwd: ${{ matrix.crate }}
features: ${{ matrix.feature }}


@@ -8,14 +8,10 @@ jobs:
linux:
strategy:
matrix:
feature: [ "default", "arkzkey", "stateless" ]
target:
- x86_64-unknown-linux-gnu
- aarch64-unknown-linux-gnu
- i686-unknown-linux-gnu
include:
- feature: stateless
cargo_args: --exclude rln-cli
name: Linux build
runs-on: ubuntu-latest
steps:
@@ -33,16 +29,16 @@ jobs:
run: make installdeps
- name: cross build
run: |
cross build --release --target ${{ matrix.target }} --features ${{ matrix.feature }} --workspace --exclude rln-wasm ${{ matrix.cargo_args }}
cross build --release --target ${{ matrix.target }} --workspace --exclude rln-wasm
mkdir release
cp target/${{ matrix.target }}/release/librln* release/
tar -czvf ${{ matrix.target }}-${{ matrix.feature }}-rln.tar.gz release/
tar -czvf ${{ matrix.target }}-rln.tar.gz release/
- name: Upload archive artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v2
with:
name: ${{ matrix.target }}-${{ matrix.feature }}-archive
path: ${{ matrix.target }}-${{ matrix.feature }}-rln.tar.gz
name: ${{ matrix.target }}-archive
path: ${{ matrix.target }}-rln.tar.gz
retention-days: 2
macos:
@@ -50,13 +46,9 @@ jobs:
runs-on: macos-latest
strategy:
matrix:
feature: [ "default", "arkzkey", "stateless" ]
target:
- x86_64-apple-darwin
- aarch64-apple-darwin
include:
- feature: stateless
cargo_args: --exclude rln-cli
steps:
- name: Checkout sources
uses: actions/checkout@v3
@@ -72,18 +64,18 @@ jobs:
run: make installdeps
- name: cross build
run: |
cross build --release --target ${{ matrix.target }} --features ${{ matrix.feature }} --workspace --exclude rln-wasm ${{ matrix.cargo_args }}
cross build --release --target ${{ matrix.target }} --workspace --exclude rln-wasm
mkdir release
cp target/${{ matrix.target }}/release/librln* release/
tar -czvf ${{ matrix.target }}-${{ matrix.feature }}-rln.tar.gz release/
tar -czvf ${{ matrix.target }}-rln.tar.gz release/
- name: Upload archive artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v2
with:
name: ${{ matrix.target }}-${{ matrix.feature }}-archive
path: ${{ matrix.target }}-${{ matrix.feature }}-rln.tar.gz
name: ${{ matrix.target }}-archive
path: ${{ matrix.target }}-rln.tar.gz
retention-days: 2
browser-rln-wasm:
name: Browser build (RLN WASM)
runs-on: ubuntu-latest
@@ -94,8 +86,7 @@ jobs:
uses: actions-rs/toolchain@v1
with:
profile: minimal
# TODO: Update to stable once wasmer supports it
toolchain: 1.82.0
toolchain: stable
override: true
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
@@ -111,15 +102,16 @@ jobs:
working-directory: rln-wasm
- name: Upload archive artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v2
with:
name: browser-rln-wasm-archive
path: rln-wasm/browser-rln-wasm.tar.gz
retention-days: 2
prepare-prerelease:
name: Prepare pre-release
needs: [ linux, macos, browser-rln-wasm ]
needs: [linux, macos, browser-rln-wasm]
runs-on: ubuntu-latest
steps:
- name: Checkout code
@@ -127,8 +119,8 @@ jobs:
with:
ref: master
- name: Download artifacts
uses: actions/download-artifact@v4
uses: actions/download-artifact@v2
- name: Delete tag
uses: dev-drprasad/delete-tag-and-release@v0.2.1
with:
@@ -150,7 +142,7 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Delete artifacts
uses: geekyeggo/delete-artifact@v5
uses: geekyeggo/delete-artifact@v1
with:
failOnError: false
name: |

.gitmodules (vendored, new file, +8)

@@ -0,0 +1,8 @@
[submodule "rln/vendor/rln"]
path = rln/vendor/rln
ignore = dirty
url = https://github.com/Rate-Limiting-Nullifier/rln_circuits.git
[submodule "semaphore/vendor/semaphore"]
path = semaphore/vendor/semaphore
ignore = dirty
url = https://github.com/appliedzkp/semaphore.git

Cargo.lock (generated, 1469 lines changed; diff suppressed because it is too large)

Cargo.toml

@@ -1,6 +1,13 @@
[workspace]
members = ["rln", "rln-cli", "rln-wasm", "utils"]
default-members = ["rln", "rln-cli", "utils"]
members = [
"multiplier",
"private-settlement",
"semaphore",
"rln",
"rln-cli",
"rln-wasm",
"utils",
]
resolver = "2"
# Compilation profile for any non-workspace member.
@@ -12,3 +19,6 @@ opt-level = 3
[profile.release.package."rln-wasm"]
# Tell `rustc` to optimize for small code size.
opt-level = "s"
[profile.release.package."semaphore"]
codegen-units = 1

Makefile

@@ -13,19 +13,15 @@ endif
installdeps: .pre-build
ifeq ($(shell uname),Darwin)
# commented due to https://github.com/orgs/Homebrew/discussions/4612
# @brew update
@brew install cmake ninja
else ifeq ($(shell uname),Linux)
@sudo apt-get update
@sudo apt-get install -y cmake ninja-build
endif
@git -C "wabt" pull || git clone --recursive https://github.com/WebAssembly/wabt.git "wabt"
@cd wabt && mkdir -p build && cd build && cmake .. -GNinja && ninja && sudo ninja install
@which wasm-pack || cargo install wasm-pack
# nvm already checks if it's installed, and no-ops if it is
@curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.7/install.sh | bash
@. ${HOME}/.nvm/nvm.sh && nvm install 18.20.2 && nvm use 18.20.2;
@git clone --recursive https://github.com/WebAssembly/wabt.git
@cd wabt && mkdir build && cd build && cmake .. -GNinja && ninja && sudo ninja install
build: .pre-build
@cargo make build

README.md

@@ -18,23 +18,13 @@ in Rust.
- [semaphore-rs](https://github.com/worldcoin/semaphore-rs) written in Rust based on ark-circom.
## Users
Zerokit is used by:
- [nwaku](https://github.com/waku-org/nwaku)
- [js-rln](https://github.com/waku-org/js-rln)
## Build and Test
To install missing dependencies, run the following commands from the root folder
```bash
make installdeps
```
To build and test all crates, run the following commands from the root folder
```bash
make build
make test
@@ -42,4 +32,4 @@ make test
## Release assets
We use [`cross-rs`](https://github.com/cross-rs/cross) to cross-compile and generate release assets for rln.

multiplier/Cargo.toml (new file, +32)

@@ -0,0 +1,32 @@
[package]
name = "multiplier"
version = "0.3.0"
edition = "2018"
license = "MIT OR Apache-2.0"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
# WASM operations
# wasmer = { version = "2.0" }
# fnv = { version = "1.0.3", default-features = false }
# num = { version = "0.4.0" }
# num-traits = { version = "0.2.0", default-features = false }
# ZKP Generation
# ark-ff = { version = "0.3.0", default-features = false, features = ["parallel", "asm"] }
ark-std = { version = "0.3.0", default-features = false, features = ["parallel"] }
ark-bn254 = { version = "0.3.0" }
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", features = ["parallel"] }
# ark-poly = { version = "^0.3.0", default-features = false, features = ["parallel"] }
ark-serialize = { version = "0.3.0", default-features = false }
ark-circom = { git = "https://github.com/gakonst/ark-circom", features = ["circom-2"], rev = "35ce5a9" }
# error handling
color-eyre = "0.6.1"
# decoding of data
# hex = "0.4.3"
# byteorder = "1.4.3"

multiplier/Makefile.toml (new file, +7)

@@ -0,0 +1,7 @@
[tasks.build]
command = "cargo"
args = ["build", "--release"]
[tasks.test]
command = "cargo"
args = ["test", "--release"]

multiplier/README.md (new file, +21)

@@ -0,0 +1,21 @@
# Multiplier example
Example wrapper around a basic Circom circuit to test Circom 2 integration
through ark-circom and FFI.
## Build and Test
To build and test, run the following commands within the module folder
```bash
cargo make build
cargo make test
```
## FFI
To generate C or Nim bindings from Rust FFI, use `cbindgen` or `nbindgen`:
```
cbindgen . -o target/multiplier.h
nbindgen . -o target/multiplier.nim
```
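
Since the crate's public surface (see `multiplier/src/public.rs` below) is just `new`/`prove`/`verify`, a minimal usage sketch in Rust looks like the following. It assumes the bundled `circom2_multiplier2` resources are reachable from the working directory; everything else mirrors the test shown further down:

```rust
use multiplier::public::Multiplier;

fn main() -> color_eyre::Result<()> {
    // Compiles ./resources/circom2_multiplier2.{wasm,r1cs} and runs a
    // one-off trusted setup, as Multiplier::new() does below.
    let mul = Multiplier::new()?;

    // Serialize a Groth16 proof into a byte buffer...
    let mut proof_bytes: Vec<u8> = Vec::new();
    mul.prove(&mut proof_bytes)?;

    // ...then verify it against the circuit's public inputs.
    assert!(mul.verify(&proof_bytes[..])?);
    Ok(())
}
```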

2 binary files not shown.

multiplier/src/ffi.rs (new file, +77)

@@ -0,0 +1,77 @@
use crate::public::Multiplier;
use std::slice;
/// Buffer struct is taken from
/// https://github.com/celo-org/celo-threshold-bls-rs/blob/master/crates/threshold-bls-ffi/src/ffi.rs
///
/// Also heavily inspired by https://github.com/kilic/rln/blob/master/src/ffi.rs
#[repr(C)]
#[derive(Clone, Debug, PartialEq)]
pub struct Buffer {
pub ptr: *const u8,
pub len: usize,
}
impl From<&[u8]> for Buffer {
fn from(src: &[u8]) -> Self {
Self {
ptr: &src[0] as *const u8,
len: src.len(),
}
}
}
impl<'a> From<&Buffer> for &'a [u8] {
fn from(src: &Buffer) -> &'a [u8] {
unsafe { slice::from_raw_parts(src.ptr, src.len) }
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn new_circuit(ctx: *mut *mut Multiplier) -> bool {
if let Ok(mul) = Multiplier::new() {
unsafe { *ctx = Box::into_raw(Box::new(mul)) };
true
} else {
false
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn prove(ctx: *const Multiplier, output_buffer: *mut Buffer) -> bool {
println!("multiplier ffi: prove");
let mul = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
match mul.prove(&mut output_data) {
Ok(proof_data) => proof_data,
Err(_) => return false,
};
unsafe { *output_buffer = Buffer::from(&output_data[..]) };
std::mem::forget(output_data);
true
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn verify(
ctx: *const Multiplier,
proof_buffer: *const Buffer,
result_ptr: *mut u32,
) -> bool {
println!("multiplier ffi: verify");
let mul = unsafe { &*ctx };
let proof_data = <&[u8]>::from(unsafe { &*proof_buffer });
if match mul.verify(proof_data) {
Ok(verified) => verified,
Err(_) => return false,
} {
unsafe { *result_ptr = 0 };
} else {
unsafe { *result_ptr = 1 };
};
true
}
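
Taken together, these exports form a small C ABI. The sketch below drives it from Rust the way a C caller would; it is illustrative only (the context and proof bytes are deliberately leaked, and a real binding would expose matching free functions):

```rust
use multiplier::ffi::{new_circuit, prove, verify, Buffer};
use multiplier::public::Multiplier;
use std::ptr;

fn main() {
    // Obtain an opaque context pointer, as a C caller would.
    let mut ctx: *mut Multiplier = ptr::null_mut();
    assert!(new_circuit(&mut ctx));

    // prove() fills the Buffer and mem::forget()s the backing Vec,
    // so the bytes stay valid for the caller.
    let mut proof = Buffer { ptr: ptr::null(), len: 0 };
    assert!(prove(ctx, &mut proof));

    // verify() writes 0 into result on a valid proof, 1 on an invalid one.
    let mut result: u32 = u32::MAX;
    assert!(verify(ctx, &proof, &mut result));
    assert_eq!(result, 0);
}
```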

multiplier/src/lib.rs (new file, +2)

@@ -0,0 +1,2 @@
pub mod ffi;
pub mod public;

multiplier/src/main.rs (new file, +49)

@@ -0,0 +1,49 @@
use ark_circom::{CircomBuilder, CircomConfig};
use ark_std::rand::thread_rng;
use color_eyre::{Report, Result};
use ark_bn254::Bn254;
use ark_groth16::{
create_random_proof as prove, generate_random_parameters, prepare_verifying_key, verify_proof,
};
fn groth16_proof_example() -> Result<()> {
let cfg = CircomConfig::<Bn254>::new(
"./resources/circom2_multiplier2.wasm",
"./resources/circom2_multiplier2.r1cs",
)?;
let mut builder = CircomBuilder::new(cfg);
builder.push_input("a", 3);
builder.push_input("b", 11);
// create an empty instance for setting it up
let circom = builder.setup();
let mut rng = thread_rng();
let params = generate_random_parameters::<Bn254, _, _>(circom, &mut rng)?;
let circom = builder.build()?;
let inputs = circom
.get_public_inputs()
.ok_or(Report::msg("no public inputs"))?;
let proof = prove(circom, &params, &mut rng)?;
let pvk = prepare_verifying_key(&params.vk);
match verify_proof(&pvk, &proof, &inputs) {
Ok(_) => Ok(()),
Err(_) => Err(Report::msg("not verified")),
}
}
fn main() {
println!("Hello, world!");
match groth16_proof_example() {
Ok(_) => println!("Success"),
Err(_) => println!("Error"),
}
}

multiplier/src/public.rs (new file, +79)

@@ -0,0 +1,79 @@
use ark_circom::{CircomBuilder, CircomCircuit, CircomConfig};
use ark_std::rand::thread_rng;
use ark_bn254::Bn254;
use ark_groth16::{
create_random_proof as prove, generate_random_parameters, prepare_verifying_key, verify_proof,
Proof, ProvingKey,
};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use color_eyre::{Report, Result};
use std::io::{Read, Write};
pub struct Multiplier {
circom: CircomCircuit<Bn254>,
params: ProvingKey<Bn254>,
}
impl Multiplier {
// TODO Break this apart here
pub fn new() -> Result<Multiplier> {
let cfg = CircomConfig::<Bn254>::new(
"./resources/circom2_multiplier2.wasm",
"./resources/circom2_multiplier2.r1cs",
)?;
let mut builder = CircomBuilder::new(cfg);
builder.push_input("a", 3);
builder.push_input("b", 11);
// create an empty instance for setting it up
let circom = builder.setup();
let mut rng = thread_rng();
let params = generate_random_parameters::<Bn254, _, _>(circom, &mut rng)?;
let circom = builder.build()?;
Ok(Multiplier { circom, params })
}
// TODO Input Read
pub fn prove<W: Write>(&self, result_data: W) -> Result<()> {
let mut rng = thread_rng();
// XXX: There's probably a better way to do this
let circom = self.circom.clone();
let params = self.params.clone();
let proof = prove(circom, &params, &mut rng)?;
// XXX: Unclear if this is different from other serialization(s)
proof.serialize(result_data)?;
Ok(())
}
pub fn verify<R: Read>(&self, input_data: R) -> Result<bool> {
let proof = Proof::deserialize(input_data)?;
let pvk = prepare_verifying_key(&self.params.vk);
// XXX Part of input data?
let inputs = self
.circom
.get_public_inputs()
.ok_or(Report::msg("no public inputs"))?;
let verified = verify_proof(&pvk, &proof, &inputs)?;
Ok(verified)
}
}
impl Default for Multiplier {
fn default() -> Self {
Self::new().unwrap()
}
}


@@ -0,0 +1,21 @@
#[cfg(test)]
mod tests {
use multiplier::public::Multiplier;
#[test]
fn multiplier_proof() {
let mul = Multiplier::new().unwrap();
let mut output_data: Vec<u8> = Vec::new();
let _ = mul.prove(&mut output_data);
let proof_data = &output_data[..];
// XXX Pass as arg?
//let pvk = prepare_verifying_key(&mul.params.vk);
let verified = mul.verify(proof_data).unwrap();
assert!(verified);
}
}

private-settlement/Cargo.toml

@@ -0,0 +1,9 @@
[package]
name = "private-settlement"
version = "0.3.0"
edition = "2021"
license = "MIT OR Apache-2.0"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]

private-settlement/Makefile.toml

@@ -0,0 +1,7 @@
[tasks.build]
command = "cargo"
args = ["build", "--release"]
[tasks.test]
command = "cargo"
args = ["test", "--release"]

private-settlement/README.md

@@ -0,0 +1,11 @@
# Private Settlement Module
This module provides APIs to manage, compute and verify [Private Settlement](https://rfc.vac.dev/spec/44/) zkSNARK proofs and primitives.
## Build and Test
To build and test, run the following commands within the module folder
```bash
cargo make build
cargo make test
```


@@ -0,0 +1 @@


@@ -0,0 +1,11 @@
#[cfg(test)]
mod tests {
#[cfg(test)]
mod tests {
#[test]
fn it_works() {
let result = 2 + 2;
assert_eq!(result, 4);
}
}
}

rln-cli/Cargo.toml

@@ -4,7 +4,7 @@ version = "0.3.0"
edition = "2021"
[dependencies]
rln = { path = "../rln", default-features = true, features = ["arkzkey"] }
rln = { path = "../rln" }
clap = { version = "4.2.7", features = ["cargo", "derive", "env"]}
clap_derive = { version = "=4.2.0" }
color-eyre = "=0.6.2"

rln-cli/src/main.rs

@@ -37,11 +37,7 @@ fn main() -> Result<()> {
tree_config_input,
}) => {
let mut resources: Vec<Vec<u8>> = Vec::new();
#[cfg(feature = "arkzkey")]
let filenames = ["rln.wasm", "rln_final.arkzkey", "verification_key.arkvkey"];
#[cfg(not(feature = "arkzkey"))]
let filenames = ["rln.wasm", "rln_final.zkey", "verification_key.arkvkey"];
for filename in filenames {
for filename in ["rln.wasm", "rln_final.zkey", "verification_key.json"] {
let fullpath = config.join(Path::new(filename));
let mut file = File::open(&fullpath)?;
let metadata = std::fs::metadata(&fullpath)?;

rln-cli/src/state.rs

@@ -5,12 +5,12 @@ use std::fs::File;
use crate::config::{Config, InnerConfig};
#[derive(Default)]
pub(crate) struct State {
pub rln: Option<RLN>,
pub(crate) struct State<'a> {
pub rln: Option<RLN<'a>>,
}
impl State {
pub(crate) fn load_state() -> Result<State> {
impl<'a> State<'a> {
pub(crate) fn load_state() -> Result<State<'a>> {
let config = Config::load_config()?;
let rln = if let Some(InnerConfig { file, tree_height }) = config.inner {
let resources = File::open(&file)?;

rln-wasm/Cargo.toml

@@ -1,6 +1,6 @@
[package]
name = "rln-wasm"
version = "0.1.0"
version = "0.0.9"
edition = "2021"
license = "MIT or Apache2"
@@ -11,16 +11,10 @@ crate-type = ["cdylib", "rlib"]
default = ["console_error_panic_hook"]
[dependencies]
rln = { path = "../rln", default-features = false, features = [
"wasm",
"stateless",
], version = "=0.6.1"}
num-bigint = { version = "0.4", default-features = false, features = [
"rand",
"serde",
] }
rln = { path = "../rln", default-features = false, features = ["wasm"] }
num-bigint = { version = "0.4", default-features = false, features = ["rand", "serde"] }
wasmer = { version = "2.3", default-features = false, features = ["js", "std"] }
web-sys = { version = "0.3", features = ["console"] }
web-sys = {version = "0.3", features=["console"]}
getrandom = { version = "0.2.7", default-features = false, features = ["js"] }
wasm-bindgen = "0.2.63"
serde-wasm-bindgen = "0.4"
@@ -32,8 +26,8 @@ serde_json = "1.0.85"
# all the `std::fmt` and `std::panicking` infrastructure, so isn't great for
# code size when deploying.
console_error_panic_hook = { version = "0.1.7", optional = true }
zerokit_utils = { version = "0.5.1", path = "../utils" }
[dev-dependencies]
wasm-bindgen-test = "0.3.13"
wasm-bindgen-futures = "0.4.33"

rln-wasm/Makefile.toml

@@ -7,7 +7,11 @@ script = "sed -i.bak 's/rln-wasm/zerokit-rln-wasm/g' pkg/package.json && rm pkg/
[tasks.build]
clear = true
dependencies = ["pack-build", "pack-rename", "post-build"]
dependencies = [
"pack-build",
"pack-rename",
"post-build"
]
[tasks.post-build]
command = "wasm-strip"
@@ -25,7 +29,3 @@ args = ["login"]
[tasks.publish]
command = "wasm-pack"
args = ["publish", "--access", "public", "--target", "web"]
[tasks.bench]
command = "echo"
args = ["'No benchmarks available for this project'"]

rln-wasm/src/lib.rs

@@ -19,7 +19,7 @@ pub fn init_panic_hook() {
pub struct RLNWrapper {
// The purpose of this wrapper is to hold a RLN instance with the 'static lifetime
// because wasm_bindgen does not allow returning elements with lifetimes
instance: RLN,
instance: RLN<'static>,
}
// Macro to call methods with arbitrary amount of arguments,
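
The `call!`-style macros referenced throughout this file are defined outside the hunks shown here. Purely as a hypothetical reconstruction (the real definitions may differ), a forwarding macro over `RLNWrapper` could look like:

```rust
// Hypothetical sketch, not part of this diff: forwards a method call to the
// wrapped RLN instance, running each argument through ProcessArg first.
macro_rules! call {
    ($ctx:expr, $method:ident $(, $arg:expr)*) => {{
        let wrapper = unsafe { &mut *$ctx };
        wrapper.instance.$method($($arg.process()),*)
    }};
}
```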
@@ -150,8 +150,8 @@ impl<T> ProcessArg for Vec<T> {
}
}
impl ProcessArg for *const RLN {
type ReturnType = &'static RLN;
impl<'a> ProcessArg for *const RLN<'a> {
type ReturnType = &'a RLN<'a>;
fn process(self) -> Self::ReturnType {
unsafe { &*self }
}
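
The `ProcessArg` trait these impls target also sits outside the shown hunks; a minimal definition consistent with them would be:

```rust
// Inferred from the impls above; the real trait may carry more items.
pub trait ProcessArg {
    type ReturnType;
    fn process(self) -> Self::ReturnType;
}
```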
@@ -182,27 +182,96 @@ impl<'a> ProcessArg for &'a [u8] {
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = newRLN)]
pub fn wasm_new(
tree_height: usize,
zkey: Uint8Array,
vk: Uint8Array,
) -> Result<*mut RLNWrapper, String> {
let instance = RLN::new_with_params(zkey.to_vec(), vk.to_vec())
let instance = RLN::new_with_params(tree_height, zkey.to_vec(), vk.to_vec())
.map_err(|err| format!("{:#?}", err))?;
let wrapper = RLNWrapper { instance };
Ok(Box::into_raw(Box::new(wrapper)))
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = getSerializedRLNWitness)]
pub fn wasm_get_serialized_rln_witness(
ctx: *mut RLNWrapper,
input: Uint8Array,
) -> Result<Uint8Array, String> {
let rln_witness = call!(ctx, get_serialized_rln_witness, &input.to_vec()[..])
.map_err(|err| format!("{:#?}", err))?;
Ok(Uint8Array::from(&rln_witness[..]))
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = insertMember)]
pub fn wasm_set_next_leaf(ctx: *mut RLNWrapper, input: Uint8Array) -> Result<(), String> {
call_with_error_msg!(
ctx,
set_next_leaf,
"could not insert member into merkle tree".to_string(),
&input.to_vec()[..]
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = setLeavesFrom)]
pub fn wasm_set_leaves_from(
ctx: *mut RLNWrapper,
index: usize,
input: Uint8Array,
) -> Result<(), String> {
call_with_error_msg!(
ctx,
set_leaves_from,
"could not set multiple leaves".to_string(),
index,
&*input.to_vec()
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = deleteLeaf)]
pub fn wasm_delete_leaf(ctx: *mut RLNWrapper, index: usize) -> Result<(), String> {
call_with_error_msg!(ctx, delete_leaf, "could not delete leaf".to_string(), index)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = setMetadata)]
pub fn wasm_set_metadata(ctx: *mut RLNWrapper, input: Uint8Array) -> Result<(), String> {
call_with_error_msg!(
ctx,
set_metadata,
"could not set metadata".to_string(),
&*input.to_vec()
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = getMetadata)]
pub fn wasm_get_metadata(ctx: *mut RLNWrapper) -> Result<Uint8Array, String> {
call_with_output_and_error_msg!(ctx, get_metadata, "could not get metadata".to_string())
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = initTreeWithLeaves)]
pub fn wasm_init_tree_with_leaves(ctx: *mut RLNWrapper, input: Uint8Array) -> Result<(), String> {
call_with_error_msg!(
ctx,
init_tree_with_leaves,
"could not init merkle tree".to_string(),
&*input.to_vec()
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = RLNWitnessToJson)]
pub fn rln_witness_to_json(
ctx: *mut RLNWrapper,
serialized_witness: Uint8Array,
) -> Result<Object, String> {
let inputs = call!(
ctx,
get_rln_witness_bigint_json,
&serialized_witness.to_vec()[..]
)
.map_err(|err| err.to_string())?;
let inputs = call!(ctx, get_rln_witness_json, &serialized_witness.to_vec()[..])
.map_err(|err| err.to_string())?;
let js_value = serde_wasm_bindgen::to_value(&inputs).map_err(|err| err.to_string())?;
Object::from_entries(&js_value).map_err(|err| format!("{:#?}", err))
}
@@ -289,6 +358,17 @@ pub fn wasm_recover_id_secret(
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = verifyRLNProof)]
pub fn wasm_verify_rln_proof(ctx: *const RLNWrapper, proof: Uint8Array) -> Result<bool, String> {
call_bool_method_with_error_msg!(
ctx,
verify_rln_proof,
"error while verifying rln proof".to_string(),
&proof.to_vec()[..]
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = verifyWithRoots)]
pub fn wasm_verify_with_roots(
@@ -305,6 +385,12 @@ pub fn wasm_verify_with_roots(
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = getRoot)]
pub fn wasm_get_root(ctx: *const RLNWrapper) -> Result<Uint8Array, String> {
call_with_output_and_error_msg!(ctx, get_root, "could not obtain root")
}
#[wasm_bindgen(js_name = hash)]
pub fn wasm_hash(input: Uint8Array) -> Result<Uint8Array, String> {
fn_call_with_output_and_error_msg!(hash, "could not generate hash", &input.to_vec()[..])


@@ -3,17 +3,11 @@
#[cfg(test)]
mod tests {
use js_sys::{BigInt as JsBigInt, Object, Uint8Array};
use rln::circuit::{Fr, TEST_TREE_HEIGHT};
use rln::hashers::{hash_to_field, poseidon_hash};
use rln::utils::{bytes_le_to_fr, fr_to_bytes_le, normalize_usize};
use rln::circuit::TEST_TREE_HEIGHT;
use rln::utils::normalize_usize;
use rln_wasm::*;
use rln::poseidon_tree::PoseidonTree;
use rln::utils::vec_fr_to_bytes_le;
use wasm_bindgen::{prelude::*, JsValue};
use wasm_bindgen_test::wasm_bindgen_test;
use zerokit_utils::merkle_tree::merkle_tree::ZerokitMerkleTree;
use zerokit_utils::ZerokitMerkleProof;
use rln::utils::vec_u8_to_bytes_le;
#[wasm_bindgen(module = "src/utils.js")]
extern "C" {
@@ -30,58 +24,45 @@ mod tests {
let circom_path = format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/rln.wasm");
let zkey_path = format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/rln_final.zkey");
let vk_path =
format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/verification_key.arkvkey");
format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/verification_key.json");
let zkey = read_file(&zkey_path).unwrap();
let vk = read_file(&vk_path).unwrap();
// Creating an instance of RLN
let rln_instance = wasm_new(zkey, vk).unwrap();
let mut tree = PoseidonTree::default(TEST_TREE_HEIGHT).unwrap();
let rln_instance = wasm_new(tree_height, zkey, vk).unwrap();
// Creating membership key
let mem_keys = wasm_key_gen(rln_instance).unwrap();
let id_key = mem_keys.subarray(0, 32);
let id_commitment = mem_keys.subarray(32, 64);
let idkey = mem_keys.subarray(0, 32);
let idcommitment = mem_keys.subarray(32, 64);
// Insert PK
wasm_set_next_leaf(rln_instance, idcommitment).unwrap();
// Prepare the message
let signal = b"Hello World";
let signal = "Hello World".as_bytes();
let identity_index = tree.leaves_set();
// Setting up the epoch and rln_identifier
let epoch = hash_to_field(b"test-epoch");
let rln_identifier = hash_to_field(b"test-rln-identifier");
let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);
let external_nullifier = fr_to_bytes_le(&external_nullifier);
let user_message_limit = Fr::from(100);
let message_id = fr_to_bytes_le(&Fr::from(0));
let (id_commitment_fr, _) = bytes_le_to_fr(&id_commitment.to_vec()[..]);
let rate_commitment = poseidon_hash(&[id_commitment_fr, user_message_limit]);
tree.update_next(rate_commitment).unwrap();
let x = hash_to_field(signal);
let merkle_proof = tree.proof(identity_index).expect("proof should exist");
let path_elements = merkle_proof.get_path_elements();
let identity_path_index = merkle_proof.get_path_index();
// Setting up the epoch (With 0s for the test)
let epoch = Uint8Array::new_with_length(32);
epoch.fill(0, 0, 32);
let identity_index: usize = 0;
// Serializing the message
let mut serialized_vec: Vec<u8> = Vec::new();
serialized_vec.append(&mut id_key.to_vec());
serialized_vec.append(&mut fr_to_bytes_le(&user_message_limit).to_vec());
serialized_vec.append(&mut message_id.to_vec());
serialized_vec.append(&mut vec_fr_to_bytes_le(&path_elements).unwrap());
serialized_vec.append(&mut vec_u8_to_bytes_le(&identity_path_index).unwrap());
serialized_vec.append(&mut fr_to_bytes_le(&x));
serialized_vec.append(&mut external_nullifier.to_vec());
serialized_vec.append(&mut idkey.to_vec());
serialized_vec.append(&mut normalize_usize(identity_index));
serialized_vec.append(&mut epoch.to_vec());
serialized_vec.append(&mut normalize_usize(signal.len()));
serialized_vec.append(&mut signal.to_vec());
let serialized_message = Uint8Array::from(&serialized_vec[..]);
let serialized_rln_witness =
wasm_get_serialized_rln_witness(rln_instance, serialized_message).unwrap();
// Obtaining inputs that should be sent to circom witness calculator
let json_inputs =
rln_witness_to_json(rln_instance, serialized_message.clone()).unwrap();
rln_witness_to_json(rln_instance, serialized_rln_witness.clone()).unwrap();
// Calculating witness with JS
// (Using a JSON since wasm_bindgen does not like Result<Vec<JsBigInt>,JsValue>)
@@ -101,7 +82,7 @@ mod tests {
let proof = generate_rln_proof_with_witness(
rln_instance,
calculated_witness.into(),
serialized_message,
serialized_rln_witness,
)
.unwrap();
@@ -111,13 +92,44 @@ mod tests {
proof_bytes.append(&mut signal.to_vec());
let proof_with_signal = Uint8Array::from(&proof_bytes[..]);
// Validate Proof
let is_proof_valid = wasm_verify_rln_proof(rln_instance, proof_with_signal);
assert!(
is_proof_valid.unwrap(),
"validating proof generated with wasm failed"
);
// Validating Proof with Roots
let root = tree.root();
let root_le = fr_to_bytes_le(&root);
let roots = Uint8Array::from(&root_le[..]);
let root = wasm_get_root(rln_instance).unwrap();
let roots = Uint8Array::from(&root.to_vec()[..]);
let proof_with_signal = Uint8Array::from(&proof_bytes[..]);
let is_proof_valid = wasm_verify_with_roots(rln_instance, proof_with_signal, roots);
assert!(is_proof_valid.unwrap(), "verifying proof with roots failed");
}
#[wasm_bindgen_test]
fn test_metadata() {
let tree_height = TEST_TREE_HEIGHT;
let circom_path = format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/rln.wasm");
let zkey_path = format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/rln_final.zkey");
let vk_path =
format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/verification_key.json");
let zkey = read_file(&zkey_path).unwrap();
let vk = read_file(&vk_path).unwrap();
// Creating an instance of RLN
let rln_instance = wasm_new(tree_height, zkey, vk).unwrap();
let test_metadata = Uint8Array::new(&JsValue::from_str("test"));
// Inserting random metadata
wasm_set_metadata(rln_instance, test_metadata.clone()).unwrap();
// Getting metadata
let metadata = wasm_get_metadata(rln_instance).unwrap();
assert_eq!(metadata.to_vec(), test_metadata.to_vec());
}
}

rln/Cargo.toml

@@ -1,6 +1,6 @@
[package]
name = "rln"
version = "0.6.1"
version = "0.3.6"
edition = "2021"
license = "MIT OR Apache-2.0"
description = "APIs to manage, compute and verify zkSNARK proofs and RLN primitives"
@@ -19,20 +19,13 @@ doctest = false
[dependencies]
# ZKP Generation
ark-ec = { version = "=0.4.1", default-features = false }
ark-ff = { version = "=0.4.1", default-features = false, features = ["asm"] }
ark-ff = { version = "=0.4.1", default-features = false, features = [ "asm"] }
ark-std = { version = "=0.4.0", default-features = false }
ark-bn254 = { version = "=0.4.0" }
ark-groth16 = { version = "=0.4.0", features = [
"parallel",
], default-features = false }
ark-relations = { version = "=0.4.0", default-features = false, features = [
"std",
] }
ark-groth16 = { version = "=0.4.0", features = ["parallel"], default-features = false }
ark-relations = { version = "=0.4.0", default-features = false, features = [ "std" ] }
ark-serialize = { version = "=0.4.1", default-features = false }
ark-circom = { version = "=0.1.0", default-features = false, features = [
"circom-2",
] }
ark-zkey = { version = "0.1.0", optional = true, default-features = false }
ark-circom = { version = "=0.1.0", default-features = false, features = ["circom-2"] }
# WASM
wasmer = { version = "=2.3.0", default-features = false }
@@ -43,23 +36,19 @@ thiserror = "=1.0.39"
# utilities
cfg-if = "=1.0"
num-bigint = { version = "=0.4.3", default-features = false, features = [
"rand",
] }
num-bigint = { version = "=0.4.3", default-features = false, features = ["rand"] }
num-traits = "=0.2.15"
once_cell = "=1.17.1"
lazy_static = "=1.4.0"
rand = "=0.8.5"
rand_chacha = "=0.3.1"
tiny-keccak = { version = "=2.0.2", features = ["keccak"] }
utils = { package = "zerokit_utils", version = "=0.5.1", path = "../utils/", default-features = false }
utils = { package = "zerokit_utils", version = "=0.3.2", path = "../utils/", default-features = false }
# serialization
serde_json = "=1.0.96"
serde = { version = "=1.0.163", features = ["derive"] }
document-features = { version = "=0.2.10", optional = true }
include_dir = "=0.7.3"
[dev-dependencies]
sled = "=0.34.7"
@@ -67,17 +56,9 @@ criterion = { version = "=0.4.0", features = ["html_reports"] }
[features]
default = ["parallel", "wasmer/sys-default", "pmtree-ft"]
parallel = [
"ark-ec/parallel",
"ark-ff/parallel",
"ark-std/parallel",
"ark-groth16/parallel",
"utils/parallel",
]
parallel = ["ark-ec/parallel", "ark-ff/parallel", "ark-std/parallel", "ark-groth16/parallel", "utils/parallel"]
wasm = ["wasmer/js", "wasmer/std"]
fullmerkletree = ["default"]
arkzkey = ["ark-zkey"]
stateless = []
# Note: pmtree feature is still experimental
pmtree-ft = ["utils/pmtree-ft"]
@@ -85,23 +66,3 @@ pmtree-ft = ["utils/pmtree-ft"]
[[bench]]
name = "pmtree_benchmark"
harness = false
[[bench]]
name = "circuit_loading_benchmark"
harness = false
[[bench]]
name = "circuit_loading_arkzkey_benchmark"
harness = false
required-features = ["arkzkey"]
[[bench]]
name = "circuit_deser_benchmark"
harness = false
[[bench]]
name = "poseidon_tree_benchmark"
harness = false
[package.metadata.docs.rs]
all-features = true

rln/Makefile.toml

@@ -2,18 +2,10 @@
command = "cargo"
args = ["build", "--release"]
[tasks.test_default]
[tasks.test]
command = "cargo"
args = ["test", "--release"]
[tasks.test_stateless]
command = "cargo"
args = ["test", "--release", "--features", "stateless"]
[tasks.test_arkzkey]
command = "cargo"
args = ["test", "--release", "--features", "arkzkey"]
[tasks.bench]
command = "cargo"
args = ["bench"]

rln/README.md

@@ -3,7 +3,6 @@
This module provides APIs to manage, compute and verify [RLN](https://rfc.vac.dev/spec/32/) zkSNARK proofs and RLN primitives.
## Pre-requisites
### Install dependencies and clone repo
```sh
@@ -15,7 +14,6 @@ cd zerokit/rln
### Build and Test
To build and test, run the following commands within the module folder
```bash
cargo make build
cargo make test
@@ -23,11 +21,11 @@ cargo make test
### Compile ZK circuits
The `rln` (https://github.com/rate-limiting-nullifier/circom-rln) repository, which contains the RLN circuit implementation is a submodule of zerokit RLN.
The `rln` (https://github.com/privacy-scaling-explorations/rln) repository, which contains the RLN circuit implementation is a submodule of zerokit RLN.
To compile the RLN circuit
```sh
# Update submodules
git submodule update --init --recursive
@@ -54,10 +52,11 @@ include "./rln-base.circom";
component main {public [x, epoch, rln_identifier ]} = RLN(N);
```
However, if `N` is too big, this might require a bigger Powers of Tau ceremony than the one hardcoded in `./scripts/build-circuits.sh`, which is `2^14`.
In such case we refer to the official [Circom documentation](https://docs.circom.io/getting-started/proving-circuits/#powers-of-tau) for instructions on how to run an appropriate Powers of Tau ceremony and Phase 2 in order to compile the desired circuit.
Currently, the `rln` module comes with 2 [pre-compiled](https://github.com/vacp2p/zerokit/tree/master/rln/resources) RLN circuits having Merkle tree of height `20` and `32`, respectively.
Currently, the `rln` module comes with three [pre-compiled](https://github.com/vacp2p/zerokit/tree/master/rln/resources) RLN circuits having Merkle tree of height `15`, `19` and `20`, respectively.
## Getting started
@@ -74,7 +73,7 @@ rln = { git = "https://github.com/vacp2p/zerokit" }
First, we need to create a RLN object for a chosen input Merkle tree size.
Note that we need to pass to RLN object constructor the path where the circuit (`rln.wasm`, built for the input tree size), the corresponding proving key (`rln_final.zkey`) or (`rln_final.arkzkey`) and verification key (`verification_key.arkvkey`, optional) are found.
Note that we need to pass to RLN object constructor the path where the circuit (`rln.wasm`, built for the input tree size), the corresponding proving key (`rln_final.zkey`) and verification key (`verification_key.json`, optional) are found.
In the following we will use [cursors](https://doc.rust-lang.org/std/io/struct.Cursor.html) as readers/writers for interfacing with RLN public APIs.
@@ -83,14 +82,14 @@ use rln::protocol::*;
use rln::public::*;
use std::io::Cursor;
// We set the RLN parameters:
// - the tree height;
// - the tree config, if it is not defined, the default value will be set
// - the circuit resource folder (requires a trailing "/").
let tree_height = 20;
let input = Cursor::new(json!({}).to_string());
let resources = Cursor::new("../zerokit/rln/resources/tree_height_20/");
// We create a new RLN instance
let mut rln = RLN::new(tree_height, input);
let mut rln = RLN::new(tree_height, resources);
```
### Generate an identity keypair
@@ -103,62 +102,50 @@ let mut buffer = Cursor::new(Vec::<u8>::new());
rln.key_gen(&mut buffer).unwrap();
// We deserialize the keygen output to obtain
// the identity_secret and id_commitment
// the identiy_secret and id_commitment
let (identity_secret_hash, id_commitment) = deserialize_identity_pair(buffer.into_inner());
```
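
For reference, the keygen buffer is two little-endian field elements back to back, so `deserialize_identity_pair` can be approximated with `bytes_le_to_fr` from `rln::utils` (a sketch, not the crate's actual implementation):

```rust
use rln::utils::bytes_le_to_fr;

// identity_secret_hash is serialized first, id_commitment second.
let bytes = buffer.into_inner();
let (identity_secret_hash, read) = bytes_le_to_fr(&bytes);
let (id_commitment, _) = bytes_le_to_fr(&bytes[read..]);
```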
### Add Rate commitment to the RLN Merkle tree
### Add ID commitment to the RLN Merkle tree
```rust
// We define the tree index where id_commitment will be added
let id_index = 10;
let user_message_limit = 10;
// We serialize id_commitment and pass it to set_leaf
let rate_commitment = poseidon_hash(&[id_commitment, user_message_limit]);
let mut buffer = Cursor::new(serialize_field_element(rate_commitment));
let mut buffer = Cursor::new(serialize_field_element(id_commitment));
rln.set_leaf(id_index, &mut buffer).unwrap();
```
Note that when tree leaves are not explicitly set by the user (in this example, all those with index other than `10`), their value is set to a hardcoded default (all-`0` bytes in the current implementation).
### Set external nullifier
### Set epoch
The `external nullifier` includes two parameters.
The first one is `epoch` and it's used to identify messages received in a certain time frame. It usually corresponds to the current UNIX time but can also be set to a random value or generated by a seed, provided that it corresponds to a field element.
The second one is `rln_identifier` and it's used to prevent a RLN ZK proof generated for one application to be re-used in another one.
The epoch, sometimes referred to as _external nullifier_, is used to identify messages received in a certain time frame. It usually corresponds to the current UNIX time but can also be set to a random value or generated by a seed, provided that it corresponds to a field element.
```rust
// We generate epoch from a date seed and we ensure it is
// mapped to a field element by hashing-to-field its content
let epoch = hash_to_field(b"Today at noon, this year");
// We generate rln_identifier from a seed and we ensure it is
// mapped to a field element by hashing-to-field its content
let rln_identifier = hash_to_field(b"test-rln-identifier");
let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);
```
### Set signal
The signal is the message for which we are computing a RLN proof.
```rust
// We set our signal
let signal = b"RLN is awesome";
```
### Generate a RLN proof
We prepare the input to the proof generation routine.
Input buffer is serialized as `[ identity_key | id_index | external_nullifier | user_message_limit | message_id | signal_len | signal ]`.
Input buffer is serialized as `[ identity_key | id_index | epoch | signal_len | signal ]`.
```rust
// We prepare input to the proof generation routine
let proof_input = prepare_prove_input(identity_secret_hash, id_index, external_nullifier, signal);
let proof_input = prepare_prove_input(identity_secret_hash, id_index, epoch, signal);
```
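
To make the layout concrete, here is a hedged hand-rolled equivalent for the external-nullifier variant. It assumes `user_message_limit` and `message_id` are `Fr` values and reuses the `fr_to_bytes_le`/`normalize_usize` helpers from `rln::utils` that appear elsewhere in this diff:

```rust
use rln::utils::{fr_to_bytes_le, normalize_usize};

// [ identity_secret | id_index | external_nullifier | user_message_limit | message_id | signal_len | signal ]
let mut proof_input: Vec<u8> = Vec::new();
proof_input.extend_from_slice(&fr_to_bytes_le(&identity_secret_hash)); // 32-byte LE field element
proof_input.extend_from_slice(&normalize_usize(id_index));             // fixed-width LE integer
proof_input.extend_from_slice(&fr_to_bytes_le(&external_nullifier));
proof_input.extend_from_slice(&fr_to_bytes_le(&user_message_limit));
proof_input.extend_from_slice(&fr_to_bytes_le(&message_id));
proof_input.extend_from_slice(&normalize_usize(signal.len()));
proof_input.extend_from_slice(signal);
```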
We are now ready to generate a RLN ZK proof along with the _public outputs_ of the ZK circuit evaluation.
@@ -175,11 +162,12 @@ rln.generate_rln_proof(&mut in_buffer, &mut out_buffer)
let proof_data = out_buffer.into_inner();
```
The byte vector `proof_data` is serialized as `[ zk-proof | tree_root | external_nullifier | share_x | share_y | nullifier ]`.
The byte vector `proof_data` is serialized as `[ zk-proof | tree_root | epoch | share_x | share_y | nullifier | rln_identifier ]`.
### Verify a RLN proof
We prepare the input to the proof verification routine.
Input buffer is serialized as `[proof_data | signal_len | signal ]`, where `proof_data` is (computed as) the output obtained by `generate_rln_proof`.
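
A similarly hedged sketch of assembling that buffer by hand, reusing `normalize_usize` as above:

```rust
use rln::utils::normalize_usize;

// [ proof_data | signal_len | signal ]
let mut verify_data = proof_data.clone();
verify_data.extend_from_slice(&normalize_usize(signal.len()));
verify_data.extend_from_slice(signal);
```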
@@ -192,21 +180,17 @@ let mut in_buffer = Cursor::new(verify_data);
let verified = rln.verify(&mut in_buffer).unwrap();
```
We check if the proof verification was successful:
```rust
// We ensure the proof is valid
assert!(verified);
```
## Get involved!
Zerokit RLN public and FFI APIs allow interaction with many more features than what is briefly showcased above.
We invite you to check our API documentation by running
```sh
cargo doc --no-deps
```
and look at the unit tests for hints on how to interface with and use them.

rln/benches/circuit_deser_benchmark.rs

@@ -1,22 +0,0 @@
use criterion::{criterion_group, criterion_main, Criterion};
use rln::circuit::{vk_from_ark_serialized, VK_BYTES};
// Here we benchmark how long the deserialization of the
// verifying_key takes, only testing the json => verifying_key conversion,
// and skipping conversion from bytes => string => serde_json::Value
pub fn vk_deserialize_benchmark(c: &mut Criterion) {
let vk = VK_BYTES;
c.bench_function("vk::vk_from_ark_serialized", |b| {
b.iter(|| {
let _ = vk_from_ark_serialized(vk);
})
});
}
criterion_group! {
name = benches;
config = Criterion::default().measurement_time(std::time::Duration::from_secs(10));
targets = vk_deserialize_benchmark
}
criterion_main!(benches);

rln/benches/circuit_loading_arkzkey_benchmark.rs

@@ -1,43 +0,0 @@
use criterion::{criterion_group, criterion_main, Criterion};
use rln::circuit::{
read_arkzkey_from_bytes_compressed, read_arkzkey_from_bytes_uncompressed, ARKZKEY_BYTES,
ARKZKEY_BYTES_UNCOMPR,
};
pub fn uncompressed_bench(c: &mut Criterion) {
let arkzkey = ARKZKEY_BYTES_UNCOMPR.to_vec();
let size = arkzkey.len() as f32;
println!(
"Size of uncompressed arkzkey: {:.2?} MB",
size / 1024.0 / 1024.0
);
c.bench_function("arkzkey::arkzkey_from_raw_uncompressed", |b| {
b.iter(|| {
let r = read_arkzkey_from_bytes_uncompressed(&arkzkey);
assert_eq!(r.is_ok(), true);
})
});
}
pub fn compressed_bench(c: &mut Criterion) {
let arkzkey = ARKZKEY_BYTES.to_vec();
let size = arkzkey.len() as f32;
println!(
"Size of compressed arkzkey: {:.2?} MB",
size / 1024.0 / 1024.0
);
c.bench_function("arkzkey::arkzkey_from_raw_compressed", |b| {
b.iter(|| {
let r = read_arkzkey_from_bytes_compressed(&arkzkey);
assert_eq!(r.is_ok(), true);
})
});
}
criterion_group! {
name = benches;
config = Criterion::default().measurement_time(std::time::Duration::from_secs(250));
targets = uncompressed_bench, compressed_bench
}
criterion_main!(benches);

rln/benches/circuit_loading_benchmark.rs

@@ -1,24 +0,0 @@
use ark_circom::read_zkey;
use criterion::{criterion_group, criterion_main, Criterion};
use std::io::Cursor;
pub fn zkey_load_benchmark(c: &mut Criterion) {
let zkey = rln::circuit::ZKEY_BYTES.to_vec();
let size = zkey.len() as f32;
println!("Size of zkey: {:.2?} MB", size / 1024.0 / 1024.0);
c.bench_function("zkey::zkey_from_raw", |b| {
b.iter(|| {
let mut reader = Cursor::new(zkey.clone());
let r = read_zkey(&mut reader);
assert_eq!(r.is_ok(), true);
})
});
}
criterion_group! {
name = benches;
config = Criterion::default().measurement_time(std::time::Duration::from_secs(250));
targets = zkey_load_benchmark
}
criterion_main!(benches);

rln/benches/pmtree_benchmark.rs

@@ -1,7 +1,8 @@
use criterion::{criterion_group, criterion_main, Criterion};
use rln::{circuit::Fr, pm_tree_adapter::PmTree};
use utils::ZerokitMerkleTree;
use rln::{circuit::Fr, pm_tree_adapter::PmTree};
pub fn pmtree_benchmark(c: &mut Criterion) {
let mut tree = PmTree::default(2).unwrap();
@@ -37,19 +38,6 @@ pub fn pmtree_benchmark(c: &mut Criterion) {
tree.get(0).unwrap();
})
});
// check intermediate node getter which required additional computation of sub root index
c.bench_function("Pmtree::get_subtree_root", |b| {
b.iter(|| {
tree.get_subtree_root(1, 0).unwrap();
})
});
c.bench_function("Pmtree::get_empty_leaves_indices", |b| {
b.iter(|| {
tree.get_empty_leaves_indices();
})
});
}
criterion_group!(benches, pmtree_benchmark);

rln/benches/poseidon_tree_benchmark.rs

@@ -1,79 +0,0 @@
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
use rln::{
circuit::{Fr, TEST_TREE_HEIGHT},
hashers::PoseidonHash,
};
use utils::{FullMerkleTree, OptimalMerkleTree, ZerokitMerkleTree};
pub fn get_leaves(n: u32) -> Vec<Fr> {
(0..n).map(|s| Fr::from(s)).collect()
}
pub fn optimal_merkle_tree_poseidon_benchmark(c: &mut Criterion) {
c.bench_function("OptimalMerkleTree::<Poseidon>::full_height_gen", |b| {
b.iter(|| {
OptimalMerkleTree::<PoseidonHash>::default(TEST_TREE_HEIGHT).unwrap();
})
});
let mut group = c.benchmark_group("Set");
for &n in [1u32, 10, 100].iter() {
let leaves = get_leaves(n);
let mut tree = OptimalMerkleTree::<PoseidonHash>::default(TEST_TREE_HEIGHT).unwrap();
group.bench_function(
BenchmarkId::new("OptimalMerkleTree::<Poseidon>::set", n),
|b| {
b.iter(|| {
for (i, l) in leaves.iter().enumerate() {
let _ = tree.set(i, *l);
}
})
},
);
group.bench_function(
BenchmarkId::new("OptimalMerkleTree::<Poseidon>::set_range", n),
|b| b.iter(|| tree.set_range(0, leaves.iter().cloned())),
);
}
group.finish();
}
pub fn full_merkle_tree_poseidon_benchmark(c: &mut Criterion) {
c.bench_function("FullMerkleTree::<Poseidon>::full_height_gen", |b| {
b.iter(|| {
FullMerkleTree::<PoseidonHash>::default(TEST_TREE_HEIGHT).unwrap();
})
});
let mut group = c.benchmark_group("Set");
for &n in [1u32, 10, 100].iter() {
let leaves = get_leaves(n);
let mut tree = FullMerkleTree::<PoseidonHash>::default(TEST_TREE_HEIGHT).unwrap();
group.bench_function(
BenchmarkId::new("FullMerkleTree::<Poseidon>::set", n),
|b| {
b.iter(|| {
for (i, l) in leaves.iter().enumerate() {
let _ = tree.set(i, *l);
}
})
},
);
group.bench_function(
BenchmarkId::new("FullMerkleTree::<Poseidon>::set_range", n),
|b| b.iter(|| tree.set_range(0, leaves.iter().cloned())),
);
}
group.finish();
}
criterion_group!(
benches,
optimal_merkle_tree_poseidon_benchmark,
full_merkle_tree_poseidon_benchmark
);
criterion_main!(benches);

2 binary files not shown.


@@ -0,0 +1,119 @@
{
"protocol": "groth16",
"curve": "bn128",
"nPublic": 6,
"vk_alpha_1": [
"20124996762962216725442980738609010303800849578410091356605067053491763969391",
"9118593021526896828671519912099489027245924097793322973632351264852174143923",
"1"
],
"vk_beta_2": [
[
"4693952934005375501364248788849686435240706020501681709396105298107971354382",
"14346958885444710485362620645446987998958218205939139994511461437152241966681"
],
[
"16851772916911573982706166384196538392731905827088356034885868448550849804972",
"823612331030938060799959717749043047845343400798220427319188951998582076532"
],
[
"1",
"0"
]
],
"vk_gamma_2": [
[
"10857046999023057135944570762232829481370756359578518086990519993285655852781",
"11559732032986387107991004021392285783925812861821192530917403151452391805634"
],
[
"8495653923123431417604973247489272438418190587263600148770280649306958101930",
"4082367875863433681332203403145435568316851327593401208105741076214120093531"
],
[
"1",
"0"
]
],
"vk_delta_2": [
[
"1361919643088555407518565462732544232965454074504004321739078395285189557133",
"20823246840633598579879223919854294301857184404415306521912631074982696570306"
],
[
"7088590198103342249937795923142619828109070290720888704402714617857746884833",
"8191367139632195506244169264298620546181137131063303219908889318280111188437"
],
[
"1",
"0"
]
],
"vk_alphabeta_12": [
[
[
"12608968655665301215455851857466367636344427685631271961542642719683786103711",
"9849575605876329747382930567422916152871921500826003490242628251047652318086"
],
[
"6322029441245076030714726551623552073612922718416871603535535085523083939021",
"8700115492541474338049149013125102281865518624059015445617546140629435818912"
],
[
"10674973475340072635573101639867487770811074181475255667220644196793546640210",
"2926286967251299230490668407790788696102889214647256022788211245826267484824"
]
],
[
[
"9660441540778523475944706619139394922744328902833875392144658911530830074820",
"19548113127774514328631808547691096362144426239827206966690021428110281506546"
],
[
"1870837942477655969123169532603615788122896469891695773961478956740992497097",
"12536105729661705698805725105036536744930776470051238187456307227425796690780"
],
[
"21811903352654147452884857281720047789720483752548991551595462057142824037334",
"19021616763967199151052893283384285352200445499680068407023236283004353578353"
]
]
],
"IC": [
[
"17643142412395322664866141827318671249236739056291610144830020671604112279111",
"13273439661778801509295280274403992505521239023074387826870538372514206268318",
"1"
],
[
"12325966053136615826793633393742326952102053533176311103856731330114882211366",
"6439956820140153832120005353467272867287237423425778281905068783317736451260",
"1"
],
[
"20405310272367450124741832665322768131899487413829191383721623069139009993137",
"21336772016824870564600007750206596010566056069977718959140462128560786193566",
"1"
],
[
"4007669092231576644992949839487535590075070172447826102934640178940614212519",
"7597503385395289202372182678960254605827199004598882158153019657732525465207",
"1"
],
[
"4545695279389338758267531646940033299700127241196839077811942492841603458462",
"6635771967009274882904456432128877995932122611166121203658485990305433499873",
"1"
],
[
"7876954805169515500747828488548350352651069599547377092970620945851311591012",
"7571431725691513008054581132582771105743462534789373657638701712901679323321",
"1"
],
[
"5563973122249220346301217166900152021860462617567141574881706390202619333219",
"5147729144109676590873823097632042430451708874867871369293332620382492068692",
"1"
]
]
}

2 binary files not shown.


@@ -0,0 +1,119 @@
{
"protocol": "groth16",
"curve": "bn128",
"nPublic": 6,
"vk_alpha_1": [
"20124996762962216725442980738609010303800849578410091356605067053491763969391",
"9118593021526896828671519912099489027245924097793322973632351264852174143923",
"1"
],
"vk_beta_2": [
[
"4693952934005375501364248788849686435240706020501681709396105298107971354382",
"14346958885444710485362620645446987998958218205939139994511461437152241966681"
],
[
"16851772916911573982706166384196538392731905827088356034885868448550849804972",
"823612331030938060799959717749043047845343400798220427319188951998582076532"
],
[
"1",
"0"
]
],
"vk_gamma_2": [
[
"10857046999023057135944570762232829481370756359578518086990519993285655852781",
"11559732032986387107991004021392285783925812861821192530917403151452391805634"
],
[
"8495653923123431417604973247489272438418190587263600148770280649306958101930",
"4082367875863433681332203403145435568316851327593401208105741076214120093531"
],
[
"1",
"0"
]
],
"vk_delta_2": [
[
"16125279975606773676640811113051624654121459921695914044301154938920321009721",
"14844345250267029614093295465313288254479124604567709177260777529651293576873"
],
[
"20349277326920398483890518242229158117668855310237215044647746783223259766294",
"19338776107510040969200058390413661029003750817172740054990168933780935479540"
],
[
"1",
"0"
]
],
"vk_alphabeta_12": [
[
[
"12608968655665301215455851857466367636344427685631271961542642719683786103711",
"9849575605876329747382930567422916152871921500826003490242628251047652318086"
],
[
"6322029441245076030714726551623552073612922718416871603535535085523083939021",
"8700115492541474338049149013125102281865518624059015445617546140629435818912"
],
[
"10674973475340072635573101639867487770811074181475255667220644196793546640210",
"2926286967251299230490668407790788696102889214647256022788211245826267484824"
]
],
[
[
"9660441540778523475944706619139394922744328902833875392144658911530830074820",
"19548113127774514328631808547691096362144426239827206966690021428110281506546"
],
[
"1870837942477655969123169532603615788122896469891695773961478956740992497097",
"12536105729661705698805725105036536744930776470051238187456307227425796690780"
],
[
"21811903352654147452884857281720047789720483752548991551595462057142824037334",
"19021616763967199151052893283384285352200445499680068407023236283004353578353"
]
]
],
"IC": [
[
"5645604624116784480262312750033349186912223090668673154853165165224747369512",
"5656337658385597582701340925622307146226708710361427687425735166776477641124",
"1"
],
[
"8216930132302312821663833393171053651364962198587857550991047765311607638330",
"19934865864074163318938688021560358348660709566570123384268356491416384822148",
"1"
],
[
"11046959016591768534564223076484566731774575511709349452804727872479525392631",
"9401797690410912638766111919371607085248054251975419812613989999345815833269",
"1"
],
[
"13216594148914395028254776738842380005944817065680915990743659996725367876414",
"11541283802841111343960351782994043892623551381569479006737253908665900144087",
"1"
],
[
"6957074593219251760608960101283708711892008557897337713430173510328411964571",
"21673833055087220750009279957462375662312260098732685145862504142183400549467",
"1"
],
[
"20795071270535109448604057031148356571036039566776607847840379441839742201050",
"21654952744643117202636583766828639581880877547772465264383291983528268115687",
"1"
],
[
"19143058772755719660075704757531991493801758701561469885274062297246796623789",
"3996020163280925980543600106196205910576345230982361007978823537163123181007",
"1"
]
]
}


@@ -0,0 +1,119 @@
{
"protocol": "groth16",
"curve": "bn128",
"nPublic": 6,
"vk_alpha_1": [
"20124996762962216725442980738609010303800849578410091356605067053491763969391",
"9118593021526896828671519912099489027245924097793322973632351264852174143923",
"1"
],
"vk_beta_2": [
[
"4693952934005375501364248788849686435240706020501681709396105298107971354382",
"14346958885444710485362620645446987998958218205939139994511461437152241966681"
],
[
"16851772916911573982706166384196538392731905827088356034885868448550849804972",
"823612331030938060799959717749043047845343400798220427319188951998582076532"
],
[
"1",
"0"
]
],
"vk_gamma_2": [
[
"10857046999023057135944570762232829481370756359578518086990519993285655852781",
"11559732032986387107991004021392285783925812861821192530917403151452391805634"
],
[
"8495653923123431417604973247489272438418190587263600148770280649306958101930",
"4082367875863433681332203403145435568316851327593401208105741076214120093531"
],
[
"1",
"0"
]
],
"vk_delta_2": [
[
"8353516066399360694538747105302262515182301251524941126222712285088022964076",
"9329524012539638256356482961742014315122377605267454801030953882967973561832"
],
[
"16805391589556134376869247619848130874761233086443465978238468412168162326401",
"10111259694977636294287802909665108497237922060047080343914303287629927847739"
],
[
"1",
"0"
]
],
"vk_alphabeta_12": [
[
[
"12608968655665301215455851857466367636344427685631271961542642719683786103711",
"9849575605876329747382930567422916152871921500826003490242628251047652318086"
],
[
"6322029441245076030714726551623552073612922718416871603535535085523083939021",
"8700115492541474338049149013125102281865518624059015445617546140629435818912"
],
[
"10674973475340072635573101639867487770811074181475255667220644196793546640210",
"2926286967251299230490668407790788696102889214647256022788211245826267484824"
]
],
[
[
"9660441540778523475944706619139394922744328902833875392144658911530830074820",
"19548113127774514328631808547691096362144426239827206966690021428110281506546"
],
[
"1870837942477655969123169532603615788122896469891695773961478956740992497097",
"12536105729661705698805725105036536744930776470051238187456307227425796690780"
],
[
"21811903352654147452884857281720047789720483752548991551595462057142824037334",
"19021616763967199151052893283384285352200445499680068407023236283004353578353"
]
]
],
"IC": [
[
"11992897507809711711025355300535923222599547639134311050809253678876341466909",
"17181525095924075896332561978747020491074338784673526378866503154966799128110",
"1"
],
[
"17018665030246167677911144513385572506766200776123272044534328594850561667818",
"18601114175490465275436712413925513066546725461375425769709566180981674884464",
"1"
],
[
"18799470100699658367834559797874857804183288553462108031963980039244731716542",
"13064227487174191981628537974951887429496059857753101852163607049188825592007",
"1"
],
[
"17432501889058124609368103715904104425610382063762621017593209214189134571156",
"13406815149699834788256141097399354592751313348962590382887503595131085938635",
"1"
],
[
"10320964835612716439094703312987075811498239445882526576970512041988148264481",
"9024164961646353611176283204118089412001502110138072989569118393359029324867",
"1"
],
[
"718355081067365548229685160476620267257521491773976402837645005858953849298",
"14635482993933988261008156660773180150752190597753512086153001683711587601974",
"1"
],
[
"11777720285956632126519898515392071627539405001940313098390150593689568177535",
"8483603647274280691250972408211651407952870456587066148445913156086740744515",
"1"
]
]
}

View File

@@ -4,63 +4,41 @@ use ark_bn254::{
Bn254, Fq as ArkFq, Fq2 as ArkFq2, Fr as ArkFr, G1Affine as ArkG1Affine,
G1Projective as ArkG1Projective, G2Affine as ArkG2Affine, G2Projective as ArkG2Projective,
};
use ark_circom::read_zkey;
use ark_groth16::{ProvingKey, VerifyingKey};
use ark_relations::r1cs::ConstraintMatrices;
use ark_serialize::CanonicalDeserialize;
use cfg_if::cfg_if;
use color_eyre::{Report, Result};
use num_bigint::BigUint;
use serde_json::Value;
use std::io::Cursor;
use std::str::FromStr;
#[cfg(not(target_arch = "wasm32"))]
use {
ark_circom::WitnessCalculator,
lazy_static::lazy_static,
std::sync::{Arc, Mutex},
wasmer::{Module, Store},
};
#[cfg(feature = "arkzkey")]
use {
ark_zkey::{read_arkzkey_from_bytes, SerializableConstraintMatrices, SerializableProvingKey},
color_eyre::eyre::WrapErr,
};
#[cfg(not(feature = "arkzkey"))]
use {ark_circom::read_zkey, std::io::Cursor};
#[cfg(feature = "arkzkey")]
pub const ARKZKEY_BYTES: &[u8] = include_bytes!("../resources/tree_height_20/rln_final.arkzkey");
#[cfg(feature = "arkzkey")]
pub const ARKZKEY_BYTES_UNCOMPR: &[u8] =
include_bytes!("../resources/tree_height_20/rln_final_uncompr.arkzkey");
pub const ZKEY_BYTES: &[u8] = include_bytes!("../resources/tree_height_20/rln_final.zkey");
pub const VK_BYTES: &[u8] = include_bytes!("../resources/tree_height_20/verification_key.arkvkey");
const WASM_BYTES: &[u8] = include_bytes!("../resources/tree_height_20/rln.wasm");
#[cfg(not(target_arch = "wasm32"))]
lazy_static! {
#[cfg(not(target_arch = "wasm32"))]
static ref ZKEY: (ProvingKey<Curve>, ConstraintMatrices<Fr>) = {
cfg_if! {
if #[cfg(feature = "arkzkey")] {
read_arkzkey_from_bytes_uncompressed(ARKZKEY_BYTES_UNCOMPR).expect("Failed to read arkzkey")
} else {
let mut reader = Cursor::new(ZKEY_BYTES);
read_zkey(&mut reader).expect("Failed to read zkey")
}
}
};
#[cfg(not(target_arch = "wasm32"))]
static ref VK: VerifyingKey<Curve> = vk_from_ark_serialized(VK_BYTES).expect("Failed to read vk");
#[cfg(not(target_arch = "wasm32"))]
static ref WITNESS_CALCULATOR: Arc<Mutex<WitnessCalculator>> = {
circom_from_raw(WASM_BYTES).expect("Failed to create witness calculator")
};
cfg_if! {
if #[cfg(not(target_arch = "wasm32"))] {
use ark_circom::{WitnessCalculator};
use once_cell::sync::OnceCell;
use std::sync::Mutex;
use wasmer::{Module, Store};
use include_dir::{include_dir, Dir};
use std::path::Path;
}
}
pub const TEST_TREE_HEIGHT: usize = 20;
const ZKEY_FILENAME: &str = "rln_final.zkey";
const VK_FILENAME: &str = "verification_key.json";
const WASM_FILENAME: &str = "rln.wasm";
// These parameters are used for tests
// Note that the circuit and keys in TEST_RESOURCES_FOLDER are compiled for Merkle trees of height 15, 19 and 20
// Changing these parameters to values other than these defaults will cause zkSNARK proof verification to fail
pub const TEST_PARAMETERS_INDEX: usize = 2;
pub const TEST_TREE_HEIGHT: usize = [15, 19, 20][TEST_PARAMETERS_INDEX];
pub const TEST_RESOURCES_FOLDER: &str =
["tree_height_15", "tree_height_19", "tree_height_20"][TEST_PARAMETERS_INDEX];
#[cfg(not(target_arch = "wasm32"))]
static RESOURCES_DIR: Dir<'_> = include_dir!("$CARGO_MANIFEST_DIR/resources");
// The following types define the pairing-friendly elliptic curve and the underlying finite fields and groups used by default in this module
// Note that proofs are serialized assuming Fr to be 4x8 = 32 bytes in size. Hence, changing to a curve with a different encoding will make proof verification fail
@@ -74,152 +52,217 @@ pub type G2Affine = ArkG2Affine;
pub type G2Projective = ArkG2Projective;
// Loads the proving key using a bytes vector
pub fn zkey_from_raw(zkey_data: &[u8]) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
if zkey_data.is_empty() {
return Err(Report::msg("No proving key found!"));
pub fn zkey_from_raw(zkey_data: &Vec<u8>) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
if !zkey_data.is_empty() {
let mut c = Cursor::new(zkey_data);
let proving_key_and_matrices = read_zkey(&mut c)?;
Ok(proving_key_and_matrices)
} else {
Err(Report::msg("No proving key found!"))
}
let proving_key_and_matrices = match () {
#[cfg(feature = "arkzkey")]
() => read_arkzkey_from_bytes(zkey_data)?,
#[cfg(not(feature = "arkzkey"))]
() => {
let mut reader = Cursor::new(zkey_data);
read_zkey(&mut reader)?
}
};
Ok(proving_key_and_matrices)
}
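
A minimal usage sketch for the loader above (the file path is illustrative only, and the import assumes the rln crate layout shown in this diff):

use rln::circuit::zkey_from_raw;

fn main() -> color_eyre::Result<()> {
    let zkey_bytes = std::fs::read("resources/tree_height_20/rln_final.zkey")?;
    let (proving_key, matrices) = zkey_from_raw(&zkey_bytes)?;
    println!("constraints: {}", matrices.num_constraints);
    let _vk = proving_key.vk; // the verifying key travels with the proving key
    Ok(())
}
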
// Loads the proving key
#[cfg(not(target_arch = "wasm32"))]
pub fn zkey_from_folder() -> &'static (ProvingKey<Curve>, ConstraintMatrices<Fr>) {
&ZKEY
pub fn zkey_from_folder(
resources_folder: &str,
) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
let zkey = RESOURCES_DIR.get_file(Path::new(resources_folder).join(ZKEY_FILENAME));
if let Some(zkey) = zkey {
let mut c = Cursor::new(zkey.contents());
let proving_key_and_matrices = read_zkey(&mut c)?;
Ok(proving_key_and_matrices)
} else {
Err(Report::msg("No proving key found!"))
}
}
// Loads the verification key from a bytes vector
pub fn vk_from_raw(vk_data: &[u8], zkey_data: &[u8]) -> Result<VerifyingKey<Curve>> {
pub fn vk_from_raw(vk_data: &Vec<u8>, zkey_data: &Vec<u8>) -> Result<VerifyingKey<Curve>> {
let verifying_key: VerifyingKey<Curve>;
if !vk_data.is_empty() {
return vk_from_ark_serialized(vk_data);
}
if !zkey_data.is_empty() {
verifying_key = vk_from_vector(vk_data)?;
Ok(verifying_key)
} else if !zkey_data.is_empty() {
let (proving_key, _matrices) = zkey_from_raw(zkey_data)?;
return Ok(proving_key.vk);
verifying_key = proving_key.vk;
Ok(verifying_key)
} else {
Err(Report::msg("No proving/verification key found!"))
}
Err(Report::msg("No proving/verification key found!"))
}
// Loads the verification key
#[cfg(not(target_arch = "wasm32"))]
pub fn vk_from_folder() -> &'static VerifyingKey<Curve> {
&VK
pub fn vk_from_folder(resources_folder: &str) -> Result<VerifyingKey<Curve>> {
let vk = RESOURCES_DIR.get_file(Path::new(resources_folder).join(VK_FILENAME));
let zkey = RESOURCES_DIR.get_file(Path::new(resources_folder).join(ZKEY_FILENAME));
let verifying_key: VerifyingKey<Curve>;
if let Some(vk) = vk {
verifying_key = vk_from_json(vk.contents_utf8().ok_or(Report::msg(
"Could not read verification key from JSON file!",
))?)?;
Ok(verifying_key)
} else if let Some(_zkey) = zkey {
let (proving_key, _matrices) = zkey_from_folder(resources_folder)?;
verifying_key = proving_key.vk;
Ok(verifying_key)
} else {
Err(Report::msg("No proving/verification key found!"))
}
}
#[cfg(not(target_arch = "wasm32"))]
static WITNESS_CALCULATOR: OnceCell<Mutex<WitnessCalculator>> = OnceCell::new();
// Initializes the witness calculator using a bytes vector
#[cfg(not(target_arch = "wasm32"))]
pub fn circom_from_raw(wasm_buffer: &[u8]) -> Result<Arc<Mutex<WitnessCalculator>>> {
let module = Module::new(&Store::default(), wasm_buffer)?;
let result = WitnessCalculator::from_module(module)?;
Ok(Arc::new(Mutex::new(result)))
pub fn circom_from_raw(wasm_buffer: Vec<u8>) -> Result<&'static Mutex<WitnessCalculator>> {
WITNESS_CALCULATOR.get_or_try_init(|| {
let store = Store::default();
let module = Module::new(&store, wasm_buffer)?;
let result = WitnessCalculator::from_module(module)?;
Ok::<Mutex<WitnessCalculator>, Report>(Mutex::new(result))
})
}
// Initializes the witness calculator
#[cfg(not(target_arch = "wasm32"))]
pub fn circom_from_folder() -> &'static Arc<Mutex<WitnessCalculator>> {
&WITNESS_CALCULATOR
pub fn circom_from_folder(resources_folder: &str) -> Result<&'static Mutex<WitnessCalculator>> {
// We read the wasm file
let wasm = RESOURCES_DIR.get_file(Path::new(resources_folder).join(WASM_FILENAME));
if let Some(wasm) = wasm {
let wasm_buffer = wasm.contents();
circom_from_raw(wasm_buffer.to_vec())
} else {
Err(Report::msg("No wasm file found!"))
}
}
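
For orientation, a non-wasm setup sketch using the zero-argument accessors from this hunk (the variant backed by lazily-initialized statics over the embedded tree_height_20 resources); a hedged sketch, not canonical usage:

#[cfg(not(target_arch = "wasm32"))]
fn setup() {
    use rln::circuit::{circom_from_folder, vk_from_folder, zkey_from_folder};

    let (proving_key, matrices) = zkey_from_folder(); // &'static borrows
    let verifying_key = vk_from_folder();
    assert!(proving_key.vk == *verifying_key); // same consistency check as check_vk_from_zkey
    println!("circuit has {} constraints", matrices.num_constraints);
    let _calculator = circom_from_folder()
        .lock()
        .expect("witness calculator mutex poisoned");
}
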
// Computes the verification key from a bytes vector containing a pre-processed, ark-serialized verification key
// (uncompressed, unchecked)
pub fn vk_from_ark_serialized(data: &[u8]) -> Result<VerifyingKey<Curve>> {
let vk = VerifyingKey::<Curve>::deserialize_uncompressed_unchecked(data)?;
Ok(vk)
// The following function implementations are taken/adapted from https://github.com/gakonst/ark-circom/blob/1732e15d6313fe176b0b1abb858ac9e095d0dbd7/src/zkey.rs
// Utilities to convert a json verification key in a groth16::VerificationKey
fn fq_from_str(s: &str) -> Result<Fq> {
Ok(Fq::try_from(BigUint::from_str(s)?)?)
}
// Extracts the element in G1 corresponding to its JSON serialization
fn json_to_g1(json: &Value, key: &str) -> Result<G1Affine> {
let els: Vec<String> = json
.get(key)
.ok_or(Report::msg("no json value"))?
.as_array()
.ok_or(Report::msg("value not an array"))?
.iter()
.map(|i| i.as_str().ok_or(Report::msg("element is not a string")))
.map(|x| x.map(|v| v.to_owned()))
.collect::<Result<Vec<String>>>()?;
Ok(G1Affine::from(G1Projective::new(
fq_from_str(&els[0])?,
fq_from_str(&els[1])?,
fq_from_str(&els[2])?,
)))
}
// Extracts the vector of G1 elements corresponding to its JSON serialization
fn json_to_g1_vec(json: &Value, key: &str) -> Result<Vec<G1Affine>> {
let els: Vec<Vec<String>> = json
.get(key)
.ok_or(Report::msg("no json value"))?
.as_array()
.ok_or(Report::msg("value not an array"))?
.iter()
.map(|i| {
i.as_array()
.ok_or(Report::msg("element is not an array"))
.and_then(|array| {
array
.iter()
.map(|x| x.as_str().ok_or(Report::msg("element is not a string")))
.map(|x| x.map(|v| v.to_owned()))
.collect::<Result<Vec<String>>>()
})
})
.collect::<Result<Vec<Vec<String>>>>()?;
let mut res = vec![];
for coords in els {
res.push(G1Affine::from(G1Projective::new(
fq_from_str(&coords[0])?,
fq_from_str(&coords[1])?,
fq_from_str(&coords[2])?,
)))
}
Ok(res)
}
// Extracts the element in G2 corresponding to its JSON serialization
fn json_to_g2(json: &Value, key: &str) -> Result<G2Affine> {
let els: Vec<Vec<String>> = json
.get(key)
.ok_or(Report::msg("no json value"))?
.as_array()
.ok_or(Report::msg("value not an array"))?
.iter()
.map(|i| {
i.as_array()
.ok_or(Report::msg("element is not an array"))
.and_then(|array| {
array
.iter()
.map(|x| x.as_str().ok_or(Report::msg("element is not a string")))
.map(|x| x.map(|v| v.to_owned()))
.collect::<Result<Vec<String>>>()
})
})
.collect::<Result<Vec<Vec<String>>>>()?;
let x = Fq2::new(fq_from_str(&els[0][0])?, fq_from_str(&els[0][1])?);
let y = Fq2::new(fq_from_str(&els[1][0])?, fq_from_str(&els[1][1])?);
let z = Fq2::new(fq_from_str(&els[2][0])?, fq_from_str(&els[2][1])?);
Ok(G2Affine::from(G2Projective::new(x, y, z)))
}
// Converts JSON to a VerifyingKey
fn to_verifying_key(json: serde_json::Value) -> Result<VerifyingKey<Curve>> {
Ok(VerifyingKey {
alpha_g1: json_to_g1(&json, "vk_alpha_1")?,
beta_g2: json_to_g2(&json, "vk_beta_2")?,
gamma_g2: json_to_g2(&json, "vk_gamma_2")?,
delta_g2: json_to_g2(&json, "vk_delta_2")?,
gamma_abc_g1: json_to_g1_vec(&json, "IC")?,
})
}
// Computes the verification key from its JSON serialization
fn vk_from_json(vk: &str) -> Result<VerifyingKey<Curve>> {
let json: Value = serde_json::from_str(vk)?;
to_verifying_key(json)
}
// Computes the verification key from a bytes vector containing its JSON serialization
fn vk_from_vector(vk: &[u8]) -> Result<VerifyingKey<Curve>> {
let json = String::from_utf8(vk.to_vec())?;
let json: Value = serde_json::from_str(&json)?;
to_verifying_key(json)
}
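
A toy walk-through of the conversion above, mirroring `json_to_g1` on the point ["1", "2", "1"] (the bn254 G1 generator, written the way snarkjs serializes projective points in the verification_key.json files shown earlier):

use ark_bn254::{Fq, G1Affine, G1Projective};
use num_bigint::BigUint;
use std::str::FromStr;

fn main() -> color_eyre::Result<()> {
    let coords = ["1", "2", "1"];
    // Same decimal-string-to-Fq conversion as fq_from_str above
    let fq = |s: &str| -> color_eyre::Result<Fq> { Ok(Fq::try_from(BigUint::from_str(s)?)?) };
    let point = G1Affine::from(G1Projective::new(
        fq(coords[0])?,
        fq(coords[1])?,
        fq(coords[2])?,
    ));
    // (1, 2) is the bn254 G1 generator in affine coordinates
    assert_eq!((point.x, point.y), (Fq::from(1u64), Fq::from(2u64)));
    Ok(())
}
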
// Checks verification key to be correct with respect to proving key
#[cfg(not(target_arch = "wasm32"))]
pub fn check_vk_from_zkey(verifying_key: VerifyingKey<Curve>) -> Result<()> {
let (proving_key, _matrices) = zkey_from_folder();
pub fn check_vk_from_zkey(
resources_folder: &str,
verifying_key: VerifyingKey<Curve>,
) -> Result<()> {
let (proving_key, _matrices) = zkey_from_folder(resources_folder)?;
if proving_key.vk == verifying_key {
Ok(())
} else {
Err(Report::msg("verifying_keys are not equal"))
}
}
////////////////////////////////////////////////////////
// Functions from [arkz-key](https://github.com/zkmopro/ark-zkey/blob/main/src/lib.rs#L106)
// without print and allow to choose between compressed and uncompressed arkzkey
////////////////////////////////////////////////////////
#[cfg(feature = "arkzkey")]
pub fn read_arkzkey_from_bytes_uncompressed(
arkzkey_data: &[u8],
) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
if arkzkey_data.is_empty() {
return Err(Report::msg("No proving key found!"));
}
let mut cursor = std::io::Cursor::new(arkzkey_data);
let serialized_proving_key =
SerializableProvingKey::deserialize_uncompressed_unchecked(&mut cursor)
.wrap_err("Failed to deserialize proving key")?;
let serialized_constraint_matrices =
SerializableConstraintMatrices::deserialize_uncompressed_unchecked(&mut cursor)
.wrap_err("Failed to deserialize constraint matrices")?;
// Convert to the right form for the API
let proving_key: ProvingKey<Bn254> = serialized_proving_key.0;
let constraint_matrices: ConstraintMatrices<ark_bn254::Fr> = ConstraintMatrices {
num_instance_variables: serialized_constraint_matrices.num_instance_variables,
num_witness_variables: serialized_constraint_matrices.num_witness_variables,
num_constraints: serialized_constraint_matrices.num_constraints,
a_num_non_zero: serialized_constraint_matrices.a_num_non_zero,
b_num_non_zero: serialized_constraint_matrices.b_num_non_zero,
c_num_non_zero: serialized_constraint_matrices.c_num_non_zero,
a: serialized_constraint_matrices.a.data,
b: serialized_constraint_matrices.b.data,
c: serialized_constraint_matrices.c.data,
};
Ok((proving_key, constraint_matrices))
}
#[cfg(feature = "arkzkey")]
pub fn read_arkzkey_from_bytes_compressed(
arkzkey_data: &[u8],
) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
if arkzkey_data.is_empty() {
return Err(Report::msg("No proving key found!"));
}
let mut cursor = std::io::Cursor::new(arkzkey_data);
let serialized_proving_key =
SerializableProvingKey::deserialize_compressed_unchecked(&mut cursor)
.wrap_err("Failed to deserialize proving key")?;
let serialized_constraint_matrices =
SerializableConstraintMatrices::deserialize_compressed_unchecked(&mut cursor)
.wrap_err("Failed to deserialize constraint matrices")?;
// Convert to the right form for the API
let proving_key: ProvingKey<Bn254> = serialized_proving_key.0;
let constraint_matrices: ConstraintMatrices<ark_bn254::Fr> = ConstraintMatrices {
num_instance_variables: serialized_constraint_matrices.num_instance_variables,
num_witness_variables: serialized_constraint_matrices.num_witness_variables,
num_constraints: serialized_constraint_matrices.num_constraints,
a_num_non_zero: serialized_constraint_matrices.a_num_non_zero,
b_num_non_zero: serialized_constraint_matrices.b_num_non_zero,
c_num_non_zero: serialized_constraint_matrices.c_num_non_zero,
a: serialized_constraint_matrices.a.data,
b: serialized_constraint_matrices.b.data,
c: serialized_constraint_matrices.c.data,
};
Ok((proving_key, constraint_matrices))
}
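
A small in-module selector sketch over the two readers above (same `arkzkey` feature gate; the trade-off note is a general expectation, not a measurement — the uncompressed form typically loads faster at the cost of a larger file):

#[cfg(feature = "arkzkey")]
fn load_arkzkey(
    bytes: &[u8],
    compressed: bool,
) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
    if compressed {
        read_arkzkey_from_bytes_compressed(bytes)
    } else {
        read_arkzkey_from_bytes_uncompressed(bytes)
    }
}
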

View File

@@ -143,24 +143,24 @@ impl ProcessArg for *const Buffer {
}
}
impl ProcessArg for *const RLN {
type ReturnType = &'static RLN;
impl<'a> ProcessArg for *const RLN<'a> {
type ReturnType = &'a RLN<'a>;
fn process(self) -> Self::ReturnType {
unsafe { &*self }
}
}
impl ProcessArg for *mut RLN {
type ReturnType = &'static mut RLN;
impl<'a> ProcessArg for *mut RLN<'a> {
type ReturnType = &'a mut RLN<'a>;
fn process(self) -> Self::ReturnType {
unsafe { &mut *self }
}
}
///// Buffer struct is taken from
///// <https://github.com/celo-org/celo-threshold-bls-rs/blob/master/crates/threshold-bls-ffi/src/ffi.rs>
/////
///// Also heavily inspired by <https://github.com/kilic/rln/blob/master/src/ffi.rs>
/// Buffer struct is taken from
/// <https://github.com/celo-org/celo-threshold-bls-rs/blob/master/crates/threshold-bls-ffi/src/ffi.rs>
///
/// Also heavily inspired by <https://github.com/kilic/rln/blob/master/src/ffi.rs>
#[repr(C)]
#[derive(Clone, Debug, PartialEq)]
@@ -192,7 +192,6 @@ impl<'a> From<&Buffer> for &'a [u8] {
////////////////////////////////////////////////////////
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[cfg(not(feature = "stateless"))]
#[no_mangle]
pub extern "C" fn new(tree_height: usize, input_buffer: *const Buffer, ctx: *mut *mut RLN) -> bool {
match RLN::new(tree_height, input_buffer.process()) {
@@ -208,23 +207,6 @@ pub extern "C" fn new(tree_height: usize, input_buffer: *const Buffer, ctx: *mut
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[cfg(feature = "stateless")]
#[no_mangle]
pub extern "C" fn new(ctx: *mut *mut RLN) -> bool {
match RLN::new() {
Ok(rln) => {
unsafe { *ctx = Box::into_raw(Box::new(rln)) };
true
}
Err(err) => {
eprintln!("could not instantiate rln: {err}");
false
}
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[cfg(not(feature = "stateless"))]
#[no_mangle]
pub extern "C" fn new_with_params(
tree_height: usize,
@@ -252,79 +234,47 @@ pub extern "C" fn new_with_params(
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[cfg(feature = "stateless")]
#[no_mangle]
pub extern "C" fn new_with_params(
circom_buffer: *const Buffer,
zkey_buffer: *const Buffer,
vk_buffer: *const Buffer,
ctx: *mut *mut RLN,
) -> bool {
match RLN::new_with_params(
circom_buffer.process().to_vec(),
zkey_buffer.process().to_vec(),
vk_buffer.process().to_vec(),
) {
Ok(rln) => {
unsafe { *ctx = Box::into_raw(Box::new(rln)) };
true
}
Err(err) => {
eprintln!("could not instantiate rln: {err}");
false
}
}
}
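
To make the calling convention concrete, a hedged consumer-side sketch in Rust standing in for a foreign caller. The `Buffer` field names here are an assumption (a pointer/length pair, per the celo-threshold-bls design referenced above); `RLN` is treated as an opaque handle, and linking against the built library is implied:

use std::ptr;

#[repr(C)]
pub struct Buffer {
    pub ptr: *const u8, // assumed layout: pointer + length
    pub len: usize,
}

#[repr(C)]
pub struct RLN {
    _private: [u8; 0], // opaque handle; only ever used behind a pointer
}

extern "C" {
    fn new(tree_height: usize, input_buffer: *const Buffer, ctx: *mut *mut RLN) -> bool;
    fn get_root(ctx: *const RLN, output_buffer: *mut Buffer) -> bool;
}

fn main() {
    let config = b"{}"; // empty JSON config, as generate_input_buffer() builds for tests
    let input = Buffer { ptr: config.as_ptr(), len: config.len() };
    let mut ctx: *mut RLN = ptr::null_mut();
    if unsafe { new(20, &input, &mut ctx) } {
        let mut out = Buffer { ptr: ptr::null(), len: 0 };
        let ok = unsafe { get_root(ctx, &mut out) };
        assert!(ok); // on success, `out` points at the serialized Merkle root
    }
}
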
////////////////////////////////////////////////////////
// Merkle tree APIs
////////////////////////////////////////////////////////
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn set_tree(ctx: *mut RLN, tree_height: usize) -> bool {
call!(ctx, set_tree, tree_height)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn delete_leaf(ctx: *mut RLN, index: usize) -> bool {
call!(ctx, delete_leaf, index)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn set_leaf(ctx: *mut RLN, index: usize, input_buffer: *const Buffer) -> bool {
call!(ctx, set_leaf, index, input_buffer)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn get_leaf(ctx: *mut RLN, index: usize, output_buffer: *mut Buffer) -> bool {
call_with_output_arg!(ctx, get_leaf, output_buffer, index)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn leaves_set(ctx: *mut RLN) -> usize {
ctx.process().leaves_set()
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn set_next_leaf(ctx: *mut RLN, input_buffer: *const Buffer) -> bool {
call!(ctx, set_next_leaf, input_buffer)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn set_leaves_from(
ctx: *mut RLN,
index: usize,
@@ -335,14 +285,12 @@ pub extern "C" fn set_leaves_from(
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn init_tree_with_leaves(ctx: *mut RLN, input_buffer: *const Buffer) -> bool {
call!(ctx, init_tree_with_leaves, input_buffer)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn atomic_operation(
ctx: *mut RLN,
index: usize,
@@ -354,7 +302,6 @@ pub extern "C" fn atomic_operation(
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn seq_atomic_operation(
ctx: *mut RLN,
leaves_buffer: *const Buffer,
@@ -371,14 +318,12 @@ pub extern "C" fn seq_atomic_operation(
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn get_root(ctx: *const RLN, output_buffer: *mut Buffer) -> bool {
call_with_output_arg!(ctx, get_root, output_buffer)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn get_proof(ctx: *const RLN, index: usize, output_buffer: *mut Buffer) -> bool {
call_with_output_arg!(ctx, get_proof, output_buffer, index)
}
@@ -408,7 +353,6 @@ pub extern "C" fn verify(
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn generate_rln_proof(
ctx: *mut RLN,
input_buffer: *const Buffer,
@@ -434,7 +378,6 @@ pub extern "C" fn generate_rln_proof_with_witness(
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn verify_rln_proof(
ctx: *const RLN,
proof_buffer: *const Buffer,
@@ -518,21 +461,18 @@ pub extern "C" fn recover_id_secret(
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn set_metadata(ctx: *mut RLN, input_buffer: *const Buffer) -> bool {
call!(ctx, set_metadata, input_buffer)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn get_metadata(ctx: *const RLN, output_buffer: *mut Buffer) -> bool {
call_with_output_arg!(ctx, get_metadata, output_buffer)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn flush(ctx: *mut RLN) -> bool {
call!(ctx, flush)
}

View File

@@ -7,8 +7,6 @@ pub mod pm_tree_adapter;
pub mod poseidon_tree;
pub mod protocol;
pub mod public;
#[cfg(test)]
pub mod public_api_tests;
pub mod utils;
#[cfg(not(target_arch = "wasm32"))]

View File

@@ -5,7 +5,6 @@ use std::str::FromStr;
use color_eyre::{Report, Result};
use serde_json::Value;
use utils::pmtree::tree::Key;
use utils::pmtree::{Database, Hasher};
use utils::*;
@@ -17,9 +16,6 @@ const METADATA_KEY: [u8; 8] = *b"metadata";
pub struct PmTree {
tree: pmtree::MerkleTree<SledDB, PoseidonHash>,
/// The indices of leaves which are set to zero, up to next_index.
/// An entry is 0 if the leaf is empty and 1 otherwise.
cached_leaves_indices: Vec<u8>,
// metadata that an application may use to store additional information
metadata: Vec<u8>,
}
@@ -147,7 +143,6 @@ impl ZerokitMerkleTree for PmTree {
Ok(PmTree {
tree,
cached_leaves_indices: vec![0; 1 << depth],
metadata: Vec::new(),
})
}
@@ -160,7 +155,7 @@ impl ZerokitMerkleTree for PmTree {
self.tree.capacity()
}
fn leaves_set(&self) -> usize {
fn leaves_set(&mut self) -> usize {
self.tree.leaves_set()
}
@@ -175,9 +170,7 @@ impl ZerokitMerkleTree for PmTree {
fn set(&mut self, index: usize, leaf: FrOf<Self::Hasher>) -> Result<()> {
self.tree
.set(index, leaf)
.map_err(|e| Report::msg(e.to_string()))?;
self.cached_leaves_indices[index] = 1;
Ok(())
.map_err(|e| Report::msg(e.to_string()))
}
fn set_range<I: IntoIterator<Item = FrOf<Self::Hasher>>>(
@@ -185,51 +178,15 @@ impl ZerokitMerkleTree for PmTree {
start: usize,
values: I,
) -> Result<()> {
let v = values.into_iter().collect::<Vec<_>>();
self.tree
.set_range(start, v.clone().into_iter())
.map_err(|e| Report::msg(e.to_string()))?;
for i in start..v.len() {
self.cached_leaves_indices[i] = 1
}
Ok(())
.set_range(start, values)
.map_err(|e| Report::msg(e.to_string()))
}
fn get(&self, index: usize) -> Result<FrOf<Self::Hasher>> {
self.tree.get(index).map_err(|e| Report::msg(e.to_string()))
}
fn get_subtree_root(&self, n: usize, index: usize) -> Result<FrOf<Self::Hasher>> {
if n > self.depth() {
return Err(Report::msg("level exceeds depth size"));
}
if index >= self.capacity() {
return Err(Report::msg("index exceeds set size"));
}
if n == 0 {
Ok(self.root())
} else if n == self.depth() {
self.get(index)
} else {
let node = self
.tree
.get_elem(Key::new(n, index >> (self.depth() - n)))
.unwrap();
Ok(node)
}
}
fn get_empty_leaves_indices(&self) -> Vec<usize> {
let next_idx = self.leaves_set();
self.cached_leaves_indices
.iter()
.take(next_idx)
.enumerate()
.filter(|&(_, &v)| v == 0u8)
.map(|(idx, _)| idx)
.collect()
}
fn override_range<I: IntoIterator<Item = FrOf<Self::Hasher>>, J: IntoIterator<Item = usize>>(
&mut self,
start: usize,
@@ -244,7 +201,7 @@ impl ZerokitMerkleTree for PmTree {
(0, 0) => Err(Report::msg("no leaves or indices to be removed")),
(1, 0) => self.set(start, leaves[0]),
(0, 1) => self.delete(indices[0]),
(_, 0) => self.set_range(start, leaves),
(_, 0) => self.set_range_with_leaves(start, leaves),
(0, _) => self.remove_indices(&indices),
(_, _) => self.remove_indices_and_set_leaves(start, leaves, &indices),
}
@@ -259,9 +216,7 @@ impl ZerokitMerkleTree for PmTree {
fn delete(&mut self, index: usize) -> Result<()> {
self.tree
.delete(index)
.map_err(|e| Report::msg(e.to_string()))?;
self.cached_leaves_indices[index] = 0;
Ok(())
.map_err(|e| Report::msg(e.to_string()))
}
fn proof(&self, index: usize) -> Result<Self::Proof> {
@@ -306,6 +261,12 @@ type PmTreeHasher = <PmTree as ZerokitMerkleTree>::Hasher;
type FrOfPmTreeHasher = FrOf<PmTreeHasher>;
impl PmTree {
fn set_range_with_leaves(&mut self, start: usize, leaves: Vec<FrOfPmTreeHasher>) -> Result<()> {
self.tree
.set_range(start, leaves)
.map_err(|e| Report::msg(e.to_string()))
}
fn remove_indices(&mut self, indices: &[usize]) -> Result<()> {
let start = indices[0];
let end = indices.last().unwrap() + 1;
@@ -314,12 +275,7 @@ impl PmTree {
self.tree
.set_range(start, new_leaves)
.map_err(|e| Report::msg(e.to_string()))?;
for i in start..end {
self.cached_leaves_indices[i] = 0
}
Ok(())
.map_err(|e| Report::msg(e.to_string()))
}
fn remove_indices_and_set_leaves(
@@ -345,17 +301,8 @@ impl PmTree {
}
self.tree
.set_range(start, set_values)
.map_err(|e| Report::msg(e.to_string()))?;
for i in indices {
self.cached_leaves_indices[*i] = 0;
}
for i in start..(max_index - min_index) {
self.cached_leaves_indices[i] = 1
}
Ok(())
.set_range(min_index, set_values)
.map_err(|e| Report::msg(e.to_string()))
}
}
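
A worked example of the ancestor indexing `get_subtree_root` relies on (plain integer arithmetic, no tree required): at level n, where 0 is the root and depth is the leaf level, a leaf's ancestor sits at index >> (depth - n).

fn main() {
    let depth = 3usize;
    let leaf_index = 5usize; // 0b101
    assert_eq!(leaf_index >> (depth - 2), 2); // level 2: two levels below the root
    assert_eq!(leaf_index >> (depth - 1), 1); // level 1: right child of the root
    // n == 0 (the root) and n == depth (the leaf itself) are special-cased above.
}
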

View File

@@ -4,13 +4,11 @@ use ark_circom::{CircomReduction, WitnessCalculator};
use ark_groth16::{prepare_verifying_key, Groth16, Proof as ArkProof, ProvingKey, VerifyingKey};
use ark_relations::r1cs::ConstraintMatrices;
use ark_relations::r1cs::SynthesisError;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_std::{rand::thread_rng, UniformRand};
use color_eyre::{Report, Result};
use num_bigint::BigInt;
use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha20Rng;
use serde::{Deserialize, Serialize};
#[cfg(not(target_arch = "wasm32"))]
use std::sync::Mutex;
#[cfg(debug_assertions)]
@@ -31,21 +29,14 @@ use utils::{ZerokitMerkleProof, ZerokitMerkleTree};
// RLN Witness data structure and utility functions
///////////////////////////////////////////////////////
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[derive(Debug, PartialEq)]
pub struct RLNWitnessInput {
#[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
identity_secret: Fr,
#[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
user_message_limit: Fr,
#[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
message_id: Fr,
#[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
path_elements: Vec<Fr>,
identity_path_index: Vec<u8>,
#[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
x: Fr,
#[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
external_nullifier: Fr,
epoch: Fr,
rln_identifier: Fr,
}
#[derive(Debug, PartialEq)]
@@ -56,7 +47,8 @@ pub struct RLNProofValues {
pub root: Fr,
// Public Inputs:
pub x: Fr,
pub external_nullifier: Fr,
pub epoch: Fr,
pub rln_identifier: Fr,
}
pub fn serialize_field_element(element: Fr) -> Vec<u8> {
@@ -98,46 +90,25 @@ pub fn deserialize_identity_tuple(serialized: Vec<u8>) -> (Fr, Fr, Fr, Fr) {
)
}
/// Serializes witness
///
/// # Errors
///
/// Returns an error if `rln_witness.message_id` is not within `rln_witness.user_message_limit`.
pub fn serialize_witness(rln_witness: &RLNWitnessInput) -> Result<Vec<u8>> {
message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&rln_witness.identity_secret));
serialized.append(&mut fr_to_bytes_le(&rln_witness.user_message_limit));
serialized.append(&mut fr_to_bytes_le(&rln_witness.message_id));
serialized.append(&mut vec_fr_to_bytes_le(&rln_witness.path_elements)?);
serialized.append(&mut vec_u8_to_bytes_le(&rln_witness.identity_path_index)?);
serialized.append(&mut fr_to_bytes_le(&rln_witness.x));
serialized.append(&mut fr_to_bytes_le(&rln_witness.external_nullifier));
serialized.append(&mut fr_to_bytes_le(&rln_witness.epoch));
serialized.append(&mut fr_to_bytes_le(&rln_witness.rln_identifier));
Ok(serialized)
}
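
A round-trip sketch for the little-endian field-element helpers used above, assuming `fr_to_bytes_le` / `bytes_le_to_fr` are exported from `rln::utils` as the call sites suggest:

use rln::circuit::Fr;
use rln::utils::{bytes_le_to_fr, fr_to_bytes_le};

fn main() {
    let x = Fr::from(42u64);
    let bytes = fr_to_bytes_le(&x);
    assert_eq!(bytes.len(), 32); // Fr serializes to 4x8 = 32 bytes, per the note in circuit.rs
    let (y, read) = bytes_le_to_fr(&bytes);
    assert_eq!((y, read), (x, 32));
}
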
/// Deserializes witness
///
/// # Errors
///
/// Returns an error if `message_id` is not within `user_message_limit`.
pub fn deserialize_witness(serialized: &[u8]) -> Result<(RLNWitnessInput, usize)> {
let mut all_read: usize = 0;
let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (user_message_limit, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (message_id, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
message_id_range_check(&message_id, &user_message_limit)?;
let (path_elements, read) = bytes_le_to_vec_fr(&serialized[all_read..])?;
all_read += read;
@@ -147,9 +118,13 @@ pub fn deserialize_witness(serialized: &[u8]) -> Result<(RLNWitnessInput, usize)
let (x, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (external_nullifier, read) = bytes_le_to_fr(&serialized[all_read..]);
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (rln_identifier, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
// TODO: check rln_identifier against public::RLN_IDENTIFIER
if serialized.len() != all_read {
return Err(Report::msg("serialized length is not equal to all_read"));
}
@@ -160,9 +135,8 @@ pub fn deserialize_witness(serialized: &[u8]) -> Result<(RLNWitnessInput, usize)
path_elements,
identity_path_index,
x,
external_nullifier,
user_message_limit,
message_id,
epoch,
rln_identifier,
},
all_read,
))
@@ -170,7 +144,7 @@ pub fn deserialize_witness(serialized: &[u8]) -> Result<(RLNWitnessInput, usize)
// This function deserializes input for kilic's rln generate_proof public API
// https://github.com/kilic/rln/blob/7ac74183f8b69b399e3bc96c1ae8ab61c026dc43/src/public.rs#L148
// input_data is [ identity_secret<32> | id_index<8> | user_message_limit<32> | message_id<32> | external_nullifier<32> | signal_len<8> | signal<var> ]
// input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
// return value is a rln witness populated according to this information
pub fn proof_inputs_to_rln_witness(
tree: &mut PoseidonTree,
@@ -186,13 +160,7 @@ pub fn proof_inputs_to_rln_witness(
))?;
all_read += 8;
let (user_message_limit, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (message_id, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (external_nullifier, read) = bytes_le_to_fr(&serialized[all_read..]);
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let signal_len = usize::try_from(u64::from_le_bytes(
@@ -208,49 +176,83 @@ pub fn proof_inputs_to_rln_witness(
let x = hash_to_field(&signal);
let rln_identifier = hash_to_field(RLN_IDENTIFIER);
Ok((
RLNWitnessInput {
identity_secret,
path_elements,
identity_path_index,
user_message_limit,
message_id,
x,
external_nullifier,
epoch,
rln_identifier,
},
all_read,
))
}
/// Creates `RLNWitnessInput` from its fields.
///
/// # Errors
///
/// Returns an error if `message_id` is not within `user_message_limit`.
pub fn rln_witness_from_values(
identity_secret: Fr,
merkle_proof: &MerkleProof,
x: Fr,
external_nullifier: Fr,
user_message_limit: Fr,
message_id: Fr,
) -> Result<RLNWitnessInput> {
message_id_range_check(&message_id, &user_message_limit)?;
pub fn rln_witness_from_json(input_json_str: &str) -> Result<RLNWitnessInput> {
let input_json: serde_json::Value =
serde_json::from_str(input_json_str).expect("JSON was not well-formatted");
let path_elements = merkle_proof.get_path_elements();
let identity_path_index = merkle_proof.get_path_index();
let identity_secret = str_to_fr(&input_json["identity_secret"].to_string(), 10)?;
let path_elements = input_json["path_elements"]
.as_array()
.ok_or(Report::msg("not an array"))?
.iter()
.map(|v| str_to_fr(&v.to_string(), 10))
.collect::<Result<_>>()?;
let identity_path_index_array = input_json["identity_path_index"]
.as_array()
.ok_or(Report::msg("not an arrray"))?;
let mut identity_path_index: Vec<u8> = vec![];
for v in identity_path_index_array {
identity_path_index.push(v.as_u64().ok_or(Report::msg("not a u64 value"))? as u8);
}
let x = str_to_fr(&input_json["x"].to_string(), 10)?;
let epoch = str_to_fr(&input_json["epoch"].to_string(), 16)?;
let rln_identifier = str_to_fr(&input_json["rln_identifier"].to_string(), 10)?;
// TODO: check rln_identifier against public::RLN_IDENTIFIER
Ok(RLNWitnessInput {
identity_secret,
path_elements,
identity_path_index,
x,
external_nullifier,
user_message_limit,
message_id,
epoch,
rln_identifier,
})
}
pub fn rln_witness_from_values(
identity_secret: Fr,
merkle_proof: &MerkleProof,
x: Fr,
epoch: Fr,
//rln_identifier: Fr,
) -> RLNWitnessInput {
let path_elements = merkle_proof.get_path_elements();
let identity_path_index = merkle_proof.get_path_index();
let rln_identifier = hash_to_field(RLN_IDENTIFIER);
RLNWitnessInput {
identity_secret,
path_elements,
identity_path_index,
x,
epoch,
rln_identifier,
}
}
pub fn random_rln_witness(tree_height: usize) -> RLNWitnessInput {
let mut rng = thread_rng();
@@ -267,26 +269,21 @@ pub fn random_rln_witness(tree_height: usize) -> RLNWitnessInput {
identity_path_index.push(rng.gen_range(0..2) as u8);
}
let user_message_limit = Fr::from(100);
let message_id = Fr::from(1);
RLNWitnessInput {
identity_secret,
path_elements,
identity_path_index,
x,
external_nullifier: poseidon_hash(&[epoch, rln_identifier]),
user_message_limit,
message_id,
epoch,
rln_identifier,
}
}
pub fn proof_values_from_witness(rln_witness: &RLNWitnessInput) -> Result<RLNProofValues> {
message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;
pub fn proof_values_from_witness(rln_witness: &RLNWitnessInput) -> RLNProofValues {
// y share
let external_nullifier = poseidon_hash(&[rln_witness.epoch, rln_witness.rln_identifier]);
let a_0 = rln_witness.identity_secret;
let a_1 = poseidon_hash(&[a_0, rln_witness.external_nullifier, rln_witness.message_id]);
let a_1 = poseidon_hash(&[a_0, external_nullifier]);
let y = a_0 + rln_witness.x * a_1;
// Nullifier
@@ -295,28 +292,30 @@ pub fn proof_values_from_witness(rln_witness: &RLNWitnessInput) -> Result<RLNPro
// Merkle tree root computations
let root = compute_tree_root(
&rln_witness.identity_secret,
&rln_witness.user_message_limit,
&rln_witness.path_elements,
&rln_witness.identity_path_index,
true,
);
Ok(RLNProofValues {
RLNProofValues {
y,
nullifier,
root,
x: rln_witness.x,
external_nullifier: rln_witness.external_nullifier,
})
epoch: rln_witness.epoch,
rln_identifier: rln_witness.rln_identifier,
}
}
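
A small sketch of the v1-style derivation in this hunk, with toy values; it assumes `poseidon_hash` and `Fr` are importable as in the test files further down:

use rln::circuit::Fr;
use rln::hashers::poseidon_hash;

fn main() {
    let (epoch, rln_identifier) = (Fr::from(1_700_000_000u64), Fr::from(1u64));
    // The external nullifier binds the epoch to the RLN identifier
    let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);
    let a_0 = Fr::from(99u64); // identity secret (toy value)
    let a_1 = poseidon_hash(&[a_0, external_nullifier]);
    let x = Fr::from(7u64); // field element derived from the signal
    let _y = a_0 + x * a_1; // the y-share exposed in RLNProofValues
}
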
pub fn serialize_proof_values(rln_proof_values: &RLNProofValues) -> Vec<u8> {
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&rln_proof_values.root));
serialized.append(&mut fr_to_bytes_le(&rln_proof_values.external_nullifier));
serialized.append(&mut fr_to_bytes_le(&rln_proof_values.epoch));
serialized.append(&mut fr_to_bytes_le(&rln_proof_values.x));
serialized.append(&mut fr_to_bytes_le(&rln_proof_values.y));
serialized.append(&mut fr_to_bytes_le(&rln_proof_values.nullifier));
serialized.append(&mut fr_to_bytes_le(&rln_proof_values.rln_identifier));
serialized
}
@@ -329,7 +328,7 @@ pub fn deserialize_proof_values(serialized: &[u8]) -> (RLNProofValues, usize) {
let (root, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (external_nullifier, read) = bytes_le_to_fr(&serialized[all_read..]);
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (x, read) = bytes_le_to_fr(&serialized[all_read..]);
@@ -341,13 +340,17 @@ pub fn deserialize_proof_values(serialized: &[u8]) -> (RLNProofValues, usize) {
let (nullifier, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (rln_identifier, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
(
RLNProofValues {
y,
nullifier,
root,
x,
external_nullifier,
epoch,
rln_identifier,
},
all_read,
)
@@ -356,14 +359,14 @@ pub fn deserialize_proof_values(serialized: &[u8]) -> (RLNProofValues, usize) {
pub fn prepare_prove_input(
identity_secret: Fr,
id_index: usize,
external_nullifier: Fr,
epoch: Fr,
signal: &[u8],
) -> Vec<u8> {
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret));
serialized.append(&mut normalize_usize(id_index));
serialized.append(&mut fr_to_bytes_le(&external_nullifier));
serialized.append(&mut fr_to_bytes_le(&epoch));
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut signal.to_vec());
@@ -386,13 +389,15 @@ pub fn prepare_verify_input(proof_data: Vec<u8>, signal: &[u8]) -> Vec<u8> {
///////////////////////////////////////////////////////
pub fn compute_tree_root(
identity_secret: &Fr,
user_message_limit: &Fr,
leaf: &Fr,
path_elements: &[Fr],
identity_path_index: &[u8],
hash_leaf: bool,
) -> Fr {
let id_commitment = poseidon_hash(&[*identity_secret]);
let mut root = poseidon_hash(&[id_commitment, *user_message_limit]);
let mut root = *leaf;
if hash_leaf {
root = poseidon_hash(&[root]);
}
for i in 0..identity_path_index.len() {
if identity_path_index[i] == 0 {
@@ -483,7 +488,11 @@ pub fn extended_seeded_keygen(signal: &[u8]) -> (Fr, Fr, Fr, Fr) {
)
}
pub fn compute_id_secret(share1: (Fr, Fr), share2: (Fr, Fr)) -> Result<Fr, String> {
pub fn compute_id_secret(
share1: (Fr, Fr),
share2: (Fr, Fr),
external_nullifier: Fr,
) -> Result<Fr, String> {
// Assuming a0 is the identity secret and a1 = poseidonHash([a0, external_nullifier]),
// a (x,y) share satisfies the following relation
// y = a_0 + x * a_1
@@ -497,7 +506,14 @@ pub fn compute_id_secret(share1: (Fr, Fr), share2: (Fr, Fr)) -> Result<Fr, Strin
let a_0 = y1 - x1 * a_1;
// If shares come from the same polynomial, a0 is correctly recovered and a1 = poseidonHash([a0, external_nullifier])
Ok(a_0)
let computed_a_1 = poseidon_hash(&[a_0, external_nullifier]);
if a_1 == computed_a_1 {
// We successfully recovered the identity secret
Ok(a_0)
} else {
Err("Cannot recover identity_secret_hash from provided shares".into())
}
}
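
A worked toy example of the recovery above: two (x, y) shares on the line y = a_0 + x * a_1 determine a_0; in RLN the slope a_1 additionally has to match poseidon_hash([a_0, external_nullifier]), which is what the final check enforces.

use rln::circuit::Fr;

fn main() {
    let a_0 = Fr::from(7u64);
    let a_1 = Fr::from(11u64); // toy slope; a real one comes from poseidon_hash
    let share = |x: u64| (Fr::from(x), a_0 + Fr::from(x) * a_1);
    let ((x1, y1), (x2, y2)) = (share(2), share(5));
    // Two points pin down the line: slope first, then the intercept a_0
    let recovered_a_1 = (y1 - y2) / (x1 - x2);
    let recovered_a_0 = y1 - x1 * recovered_a_1;
    assert_eq!((recovered_a_0, recovered_a_1), (a_0, a_1));
}
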
///////////////////////////////////////////////////////
@@ -578,17 +594,10 @@ pub fn generate_proof_with_witness(
Ok(proof)
}
/// Formats inputs for witness calculation
///
/// # Errors
///
/// Returns an error if `rln_witness.message_id` is not within `rln_witness.user_message_limit`.
pub fn inputs_for_witness_calculation(
rln_witness: &RLNWitnessInput,
) -> Result<[(&str, Vec<BigInt>); 7]> {
message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;
// We convert the path indexes to field elements
) -> Result<[(&str, Vec<BigInt>); 6]> {
// We convert the path indexes to field elements
// TODO: check if necessary
let mut path_elements = Vec::new();
@@ -604,20 +613,16 @@ pub fn inputs_for_witness_calculation(
Ok([
(
"identitySecret",
"identity_secret",
vec![to_bigint(&rln_witness.identity_secret)?],
),
(
"userMessageLimit",
vec![to_bigint(&rln_witness.user_message_limit)?],
),
("messageId", vec![to_bigint(&rln_witness.message_id)?]),
("pathElements", path_elements),
("identityPathIndex", identity_path_index),
("path_elements", path_elements),
("identity_path_index", identity_path_index),
("x", vec![to_bigint(&rln_witness.x)?]),
("epoch", vec![to_bigint(&rln_witness.epoch)?]),
(
"externalNullifier",
vec![to_bigint(&rln_witness.external_nullifier)?],
"rln_identifier",
vec![to_bigint(&rln_witness.rln_identifier)?],
),
])
}
@@ -666,6 +671,7 @@ pub fn generate_proof(
// If in debug mode, we measure and later print time take to compute proof
#[cfg(debug_assertions)]
let now = Instant::now();
let proof = Groth16::<_, CircomReduction>::create_proof_with_reduction_and_matrices(
&proving_key.0,
r,
@@ -699,7 +705,8 @@ pub fn verify_proof(
proof_values.root,
proof_values.nullifier,
proof_values.x,
proof_values.external_nullifier,
proof_values.epoch,
proof_values.rln_identifier,
];
// Check that the proof is valid
@@ -718,60 +725,11 @@ pub fn verify_proof(
Ok(verified)
}
// auxiliary function for serializing Fr to JSON using ark-serialize
fn ark_se<S, A: CanonicalSerialize>(a: &A, s: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
let mut bytes = vec![];
a.serialize_compressed(&mut bytes)
.map_err(serde::ser::Error::custom)?;
s.serialize_bytes(&bytes)
}
// auxiliary function for deserializing Fr from JSON using ark-serialize
fn ark_de<'de, D, A: CanonicalDeserialize>(data: D) -> Result<A, D::Error>
where
D: serde::de::Deserializer<'de>,
{
let s: Vec<u8> = serde::de::Deserialize::deserialize(data)?;
let a = A::deserialize_compressed_unchecked(s.as_slice());
a.map_err(serde::de::Error::custom)
}
/// Converts a JSON value into an [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object.
/// Get CIRCOM JSON inputs
///
/// # Errors
///
/// Returns an error if `rln_witness.message_id` is not within `rln_witness.user_message_limit`.
pub fn rln_witness_from_json(input_json: serde_json::Value) -> Result<RLNWitnessInput> {
let rln_witness: RLNWitnessInput = serde_json::from_value(input_json).unwrap();
message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;
Ok(rln_witness)
}
/// Converts a [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object to the corresponding JSON serialization.
///
/// # Errors
///
/// Returns an error if `message_id` is not within `user_message_limit`.
pub fn rln_witness_to_json(rln_witness: &RLNWitnessInput) -> Result<serde_json::Value> {
message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;
let rln_witness_json = serde_json::to_value(rln_witness)?;
Ok(rln_witness_json)
}
/// Converts a [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object to the corresponding JSON serialization.
/// Before serialization, the data is translated into big integers for further calculation in the witness calculator.
///
/// # Errors
///
/// Returns an error if `message_id` is not within `user_message_limit`.
pub fn rln_witness_to_bigint_json(rln_witness: &RLNWitnessInput) -> Result<serde_json::Value> {
message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;
/// Returns a JSON object containing the inputs necessary to calculate
/// the witness with CIRCOM in JavaScript
pub fn get_json_inputs(rln_witness: &RLNWitnessInput) -> Result<serde_json::Value> {
let mut path_elements = Vec::new();
for v in rln_witness.path_elements.iter() {
@@ -785,23 +743,13 @@ pub fn rln_witness_to_bigint_json(rln_witness: &RLNWitnessInput) -> Result<serde
.for_each(|v| identity_path_index.push(BigInt::from(*v).to_str_radix(10)));
let inputs = serde_json::json!({
"identitySecret": to_bigint(&rln_witness.identity_secret)?.to_str_radix(10),
"userMessageLimit": to_bigint(&rln_witness.user_message_limit)?.to_str_radix(10),
"messageId": to_bigint(&rln_witness.message_id)?.to_str_radix(10),
"pathElements": path_elements,
"identityPathIndex": identity_path_index,
"identity_secret": to_bigint(&rln_witness.identity_secret)?.to_str_radix(10),
"path_elements": path_elements,
"identity_path_index": identity_path_index,
"x": to_bigint(&rln_witness.x)?.to_str_radix(10),
"externalNullifier": to_bigint(&rln_witness.external_nullifier)?.to_str_radix(10),
"epoch": format!("0x{:064x}", to_bigint(&rln_witness.epoch)?),
"rln_identifier": to_bigint(&rln_witness.rln_identifier)?.to_str_radix(10),
});
Ok(inputs)
}
pub fn message_id_range_check(message_id: &Fr, user_message_limit: &Fr) -> Result<()> {
if message_id > user_message_limit {
return Err(color_eyre::Report::msg(
"message_id is not within user_message_limit",
));
}
Ok(())
}
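
Usage sketch for the check above: field elements compare by their canonical integer representatives, so this behaves like an integer bound check (message_id equal to the limit passes).

use rln::circuit::Fr;
use rln::protocol::message_id_range_check;

fn main() {
    assert!(message_id_range_check(&Fr::from(3u64), &Fr::from(100u64)).is_ok());
    assert!(message_id_range_check(&Fr::from(101u64), &Fr::from(100u64)).is_err());
}
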

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -5,12 +5,10 @@ use ark_ff::PrimeField;
use color_eyre::{Report, Result};
use num_bigint::{BigInt, BigUint};
use num_traits::Num;
use serde_json::json;
use std::io::Cursor;
use std::iter::Extend;
pub fn to_bigint(el: &Fr) -> Result<BigInt> {
let res: BigUint = (*el).into();
let res: BigUint = (*el).try_into()?;
Ok(res.into())
}
@@ -30,10 +28,10 @@ pub fn str_to_fr(input: &str, radix: u32) -> Result<Fr> {
input_clean = input_clean.trim().to_string();
if radix == 10 {
Ok(BigUint::from_str_radix(&input_clean, radix)?.into())
Ok(BigUint::from_str_radix(&input_clean, radix)?.try_into()?)
} else {
input_clean = input_clean.replace("0x", "");
Ok(BigUint::from_str_radix(&input_clean, radix)?.into())
Ok(BigUint::from_str_radix(&input_clean, radix)?.try_into()?)
}
}
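
Quick sketch of the two branches above (assuming `str_to_fr` is exported from `rln::utils`): decimal and 0x-prefixed hex parse to the same field element, since the prefix is stripped before parsing.

use rln::utils::str_to_fr;

fn main() -> color_eyre::Result<()> {
    let a = str_to_fr("42", 10)?;
    let b = str_to_fr("0x2a", 16)?;
    assert_eq!(a, b);
    Ok(())
}
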
@@ -181,24 +179,6 @@ pub fn normalize_usize(input: usize) -> Vec<u8> {
normalized_usize
}
pub fn bytes_le_to_vec_usize(input: &[u8]) -> Result<Vec<usize>> {
let nof_elem = usize::try_from(u64::from_le_bytes(input[0..8].try_into()?))?;
if nof_elem == 0 {
Ok(vec![])
} else {
let elements: Vec<usize> = input[8..]
.chunks(8)
.map(|ch| usize::from_le_bytes(ch[0..8].try_into().unwrap()))
.collect();
Ok(elements)
}
}
// using for test
pub fn generate_input_buffer() -> Cursor<String> {
Cursor::new(json!({}).to_string())
}
/* Old conversion utilities between different libraries' data types
// Conversion Utilities between poseidon-rs Field and arkworks Fr (in order to call directly poseidon-rs' poseidon_hash)

File diff suppressed because it is too large

View File

@@ -4,25 +4,48 @@
#[cfg(test)]
mod test {
use rln::hashers::{poseidon_hash, PoseidonHash};
use rln::{circuit::*, poseidon_tree::PoseidonTree};
use rln::circuit::*;
use rln::hashers::PoseidonHash;
use utils::{FullMerkleTree, OptimalMerkleTree, ZerokitMerkleProof, ZerokitMerkleTree};
#[test]
// This test checks correctness of `FullMerkleTree` and `OptimalMerkleTree` with the Poseidon hash
fn test_zerokit_merkle_implementations() {
/// A basic performance comparison between the two supported Merkle Tree implementations
fn test_zerokit_merkle_implementations_performances() {
use std::time::{Duration, Instant};
let tree_height = 20;
let sample_size = 100;
let leaves: Vec<Fr> = (0..sample_size).map(|s| Fr::from(s)).collect();
let mut tree_full = FullMerkleTree::<PoseidonHash>::default(TEST_TREE_HEIGHT).unwrap();
let mut tree_opt = OptimalMerkleTree::<PoseidonHash>::default(TEST_TREE_HEIGHT).unwrap();
let mut gen_time_full: u128 = 0;
let mut upd_time_full: u128 = 0;
let mut gen_time_opt: u128 = 0;
let mut upd_time_opt: u128 = 0;
for _ in 0..sample_size.try_into().unwrap() {
let now = Instant::now();
FullMerkleTree::<PoseidonHash>::default(tree_height).unwrap();
gen_time_full += now.elapsed().as_nanos();
let now = Instant::now();
OptimalMerkleTree::<PoseidonHash>::default(tree_height).unwrap();
gen_time_opt += now.elapsed().as_nanos();
}
let mut tree_full = FullMerkleTree::<PoseidonHash>::default(tree_height).unwrap();
let mut tree_opt = OptimalMerkleTree::<PoseidonHash>::default(tree_height).unwrap();
for i in 0..sample_size.try_into().unwrap() {
let now = Instant::now();
tree_full.set(i, leaves[i]).unwrap();
upd_time_full += now.elapsed().as_nanos();
let proof = tree_full.proof(i).expect("index should be set");
assert_eq!(proof.leaf_index(), i);
let now = Instant::now();
tree_opt.set(i, leaves[i]).unwrap();
upd_time_opt += now.elapsed().as_nanos();
let proof = tree_opt.proof(i).expect("index should be set");
assert_eq!(proof.leaf_index(), i);
}
@@ -32,108 +55,26 @@ mod test {
let tree_opt_root = tree_opt.root();
assert_eq!(tree_full_root, tree_opt_root);
}
#[test]
fn test_subtree_root() {
const DEPTH: usize = 3;
const LEAVES_LEN: usize = 6;
let mut tree = PoseidonTree::default(DEPTH).unwrap();
let leaves: Vec<Fr> = (0..LEAVES_LEN).map(|s| Fr::from(s as i32)).collect();
let _ = tree.set_range(0, leaves);
for i in 0..LEAVES_LEN {
// check leaves
assert_eq!(
tree.get(i).unwrap(),
tree.get_subtree_root(DEPTH, i).unwrap()
);
// check root
assert_eq!(tree.root(), tree.get_subtree_root(0, i).unwrap());
}
// check intermediate nodes
for n in (1..=DEPTH).rev() {
for i in (0..(1 << n)).step_by(2) {
let idx_l = i * (1 << (DEPTH - n));
let idx_r = (i + 1) * (1 << (DEPTH - n));
let idx_sr = idx_l;
let prev_l = tree.get_subtree_root(n, idx_l).unwrap();
let prev_r = tree.get_subtree_root(n, idx_r).unwrap();
let subroot = tree.get_subtree_root(n - 1, idx_sr).unwrap();
assert_eq!(poseidon_hash(&[prev_l, prev_r]), subroot);
}
}
}
#[test]
fn test_get_empty_leaves_indices() {
let depth = 4;
let nof_leaves: usize = 1 << (depth - 1);
let mut tree = PoseidonTree::default(depth).unwrap();
let leaves: Vec<Fr> = (0..nof_leaves).map(|s| Fr::from(s as i32)).collect();
// check set_range
let _ = tree.set_range(0, leaves.clone());
assert!(tree.get_empty_leaves_indices().is_empty());
let mut vec_idxs = Vec::new();
// check delete function
for i in 0..nof_leaves {
vec_idxs.push(i);
let _ = tree.delete(i);
assert_eq!(tree.get_empty_leaves_indices(), vec_idxs);
}
// check set function
for i in (0..nof_leaves).rev() {
vec_idxs.pop();
let _ = tree.set(i, leaves[i]);
assert_eq!(tree.get_empty_leaves_indices(), vec_idxs);
}
// check remove_indices_and_set_leaves inside override_range function
assert!(tree.get_empty_leaves_indices().is_empty());
let leaves_2: Vec<Fr> = (0..2).map(|s| Fr::from(s as i32)).collect();
tree.override_range(0, leaves_2.clone(), [0, 1, 2, 3])
.unwrap();
assert_eq!(tree.get_empty_leaves_indices(), vec![2, 3]);
// check remove_indices inside override_range function
tree.override_range(0, [], [0, 1]).unwrap();
assert_eq!(tree.get_empty_leaves_indices(), vec![0, 1, 2, 3]);
// check set_range inside override_range function
tree.override_range(0, leaves_2.clone(), []).unwrap();
assert_eq!(tree.get_empty_leaves_indices(), vec![2, 3]);
let leaves_4: Vec<Fr> = (0..4).map(|s| Fr::from(s as i32)).collect();
// check if the indexes for write and delete are the same
tree.override_range(0, leaves_4.clone(), [0, 1, 2, 3])
.unwrap();
assert!(tree.get_empty_leaves_indices().is_empty());
// check if indexes for deletion are before indexes for overwriting
tree.override_range(4, leaves_4.clone(), [0, 1, 2, 3])
.unwrap();
// The result looks like this because pmtree's set_range increases next_index
// not by the number of elements to insert, but by the size of the union of
// the indices for deleting and inserting.
assert_eq!(
tree.get_empty_leaves_indices(),
vec![0, 1, 2, 3, 8, 9, 10, 11]
println!(" Average tree generation time:");
println!(
" - Full Merkle Tree: {:?}",
Duration::from_nanos((gen_time_full / sample_size).try_into().unwrap())
);
println!(
" - Optimal Merkle Tree: {:?}",
Duration::from_nanos((gen_time_opt / sample_size).try_into().unwrap())
);
// check if the indices for write and delete do not overlap completely
tree.override_range(2, leaves_4.clone(), [0, 1, 2, 3])
.unwrap();
// The result looks like this because pmtree's set_range increases next_index
// not by the number of elements to insert, but by the size of the union of
// the indices for deleting and inserting.
// Additionally, leaves 6 and 7 were already set in the previous step.
assert_eq!(tree.get_empty_leaves_indices(), vec![0, 1, 8, 9, 10, 11]);
println!(" Average update_next execution time:");
println!(
" - Full Merkle Tree: {:?}",
Duration::from_nanos((upd_time_full / sample_size).try_into().unwrap())
);
println!(
" - Optimal Merkle Tree: {:?}",
Duration::from_nanos((upd_time_opt / sample_size).try_into().unwrap())
);
}
}

View File

@@ -1,8 +1,9 @@
#[cfg(test)]
mod test {
use ark_ff::BigInt;
use rln::circuit::zkey_from_folder;
use rln::circuit::{circom_from_folder, vk_from_folder, Fr, TEST_TREE_HEIGHT};
use rln::circuit::{
circom_from_folder, vk_from_folder, zkey_from_folder, Fr, TEST_RESOURCES_FOLDER,
TEST_TREE_HEIGHT,
};
use rln::hashers::{hash_to_field, poseidon_hash};
use rln::poseidon_tree::PoseidonTree;
use rln::protocol::*;
@@ -11,135 +12,365 @@ mod test {
type ConfigOf<T> = <T as ZerokitMerkleTree>::Config;
// Input generated with https://github.com/oskarth/zk-kit/commit/b6a872f7160c7c14e10a0ea40acab99cbb23c9a8
const WITNESS_JSON_15: &str = r#"
{
"identity_secret": "12825549237505733615964533204745049909430608936689388901883576945030025938736",
"path_elements": [
"18622655742232062119094611065896226799484910997537830749762961454045300666333",
"20590447254980891299813706518821659736846425329007960381537122689749540452732",
"7423237065226347324353380772367382631490014989348495481811164164159255474657",
"11286972368698509976183087595462810875513684078608517520839298933882497716792",
"3607627140608796879659380071776844901612302623152076817094415224584923813162",
"19712377064642672829441595136074946683621277828620209496774504837737984048981",
"20775607673010627194014556968476266066927294572720319469184847051418138353016",
"3396914609616007258851405644437304192397291162432396347162513310381425243293",
"21551820661461729022865262380882070649935529853313286572328683688269863701601",
"6573136701248752079028194407151022595060682063033565181951145966236778420039",
"12413880268183407374852357075976609371175688755676981206018884971008854919922",
"14271763308400718165336499097156975241954733520325982997864342600795471836726",
"20066985985293572387227381049700832219069292839614107140851619262827735677018",
"9394776414966240069580838672673694685292165040808226440647796406499139370960",
"11331146992410411304059858900317123658895005918277453009197229807340014528524"
],
"identity_path_index": [
1,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
],
"x": "8143228284048792769012135629627737459844825626241842423967352803501040982",
"epoch": "0x0000005b612540fc986b42322f8cb91c2273afad58ed006fdba0c97b4b16b12f",
"rln_identifier": "11412926387081627876309792396682864042420635853496105400039841573530884328439"
}
"#;
// Input generated with protocol::random_rln_witness
const WITNESS_JSON_19: &str = r#"
{
"identity_secret": "922538810348594125658702672067738675294669207539999802857585668079702330450",
"path_elements": [
"16059714054680148404543504061485737353203416489071538960876865983954285286166",
"3041470753871943901334053763207316028823782848445723460227667780327106380356",
"2557297527793326315072058421057853700096944625924483912548759909801348042183",
"6677578602456189582427063963562590713054668181987223110955234085327917303436",
"2250827150965576973906150764756422151438812678308727218463995574869267980301",
"1895457427602709606993445561553433669787657053834360973759981803464906070980",
"11033689991077061346803816826729204895841441316315304395980565540264104346466",
"18588752216879570844240300406954267039026327526134910835334500497981810174976",
"19346480964028499661277403659363466542857230928032088490855656809181891953123",
"21460193770370072688835316363068413651465631481105148051902686770759127189327",
"20906347653364838502964722817589315918082261023317339146393355650507243340078",
"13466599592974387800162739317046838825289754472645703919149409009404541432954",
"9617165663598957201253074168824246164494443748556931540348223968573884172285",
"6936463137584425684797785981770877165377386163416057257854261010817156666898",
"369902028235468424790098825415813437044876310542601948037281422841675126849",
"13510969869821080499683463562609720931680005714401083864659516045615497273644",
"2567921390740781421487331055530491683313154421589525170472201828596388395736",
"14360870889466292805403568662660511177232987619663547772298178013674025998478",
"4735344599616284973799984501493858013178071155960162022656706545116168334293"
],
"identity_path_index": [
1,
0,
1,
0,
1,
1,
0,
0,
1,
1,
1,
0,
0,
0,
1,
0,
1,
1,
0
],
"x": "6427050788896290028100534859169645070970780055911091444144195464808120686416",
"epoch": "0x2bd155d9f85c741044da6909d144f9cc5ce8e0d545a9ed4921b156e8b8569bab",
"rln_identifier": "2193983000213424579594329476781986065965849144986973472766961413131458022566"
}
"#;
const WITNESS_JSON_20: &str = r#"
{
"identity_secret": "13732353453861280511150022598793312186188599006979552959297495195757997428306",
"path_elements": [
"20463525608687844300981085488128968694844212760055234622292326942405619575964",
"8040856403709217901175408904825741112286158901303127670929462145501210871313",
"3776499751255585163563840252112871568402966629435152937692711318702338789837",
"19415813252626942110541463414404411443562242499365750694284604341271149125679",
"19414720788761208006634240390286942738242262010168559813148115573784354129237",
"17680594732844291740094158892269696200077963275550625226493856898849422516043",
"16009199741350632715210088346611798597033333293348807000623441780059543674510",
"18743496911007535170857676824393811326863602477260615792503039058813338644738",
"1029572792321380246989475723806770724699749375691788486434716005338938722216",
"21713138150151063186050010182615713685603650963220209951496401043119768920892",
"6713732504049401389983008178456811894856018247924860823028704114266363984580",
"2746686888799473963221285145390361693256731812094259845879519459924507786594",
"18620748467731297359505500266677881218553438497271819903304075323783392031715",
"2446201221122671119406471414204229600430018713181038717206670749886932158104",
"12037171942017611311954851302868199608036334625783560875426350283156617524597",
"21798743392351780927808323348278035105395367759688979232116905142049921734349",
"17450230289417496971557215666910229260621413088991137405744457922069827319039",
"20936854099128086256353520300046664152516566958630447858438908748907198510485",
"13513344965831154386658059617477268600255664386844920822248038939666265737046",
"15546319496880899251450021422131511560001766832580480193115646510655765306630"
],
"identity_path_index": [
0,
1,
0,
0,
1,
1,
0,
0,
1,
1,
0,
0,
0,
1,
0,
1,
1,
0,
0,
0
],
"x": "18073935665561339809445069958310044423750771681863480888589546877024349720547",
"epoch": "0x147e4c23a43a1ddca78d94bcd28147f62ca74b3dc7e56bb0a314a954b9f0e567",
"rln_identifier": "2193983000213424579594329476781986065965849144986973472766961413131458022566"
}
"#;
#[test]
// We test Merkle tree generation, proofs and verification
fn test_merkle_proof() {
let tree_height = TEST_TREE_HEIGHT;
let leaf_index = 3;
// generate identity
let identity_secret_hash = hash_to_field(b"test-merkle-proof");
let id_commitment = poseidon_hash(&[identity_secret_hash]);
let rate_commitment = poseidon_hash(&[id_commitment, 100.into()]);
let id_commitment = poseidon_hash(&vec![identity_secret_hash]);
// generate merkle tree
let default_leaf = Fr::from(0);
let mut tree = PoseidonTree::new(
TEST_TREE_HEIGHT,
tree_height,
default_leaf,
ConfigOf::<PoseidonTree>::default(),
)
.unwrap();
tree.set(leaf_index, rate_commitment.into()).unwrap();
tree.set(leaf_index, id_commitment.into()).unwrap();
// We check correct computation of the root
let root = tree.root();
assert_eq!(
root,
BigInt([
4939322235247991215,
5110804094006647505,
4427606543677101242,
910933464535675827
])
.into()
);
if TEST_TREE_HEIGHT == 15 {
assert_eq!(
root,
str_to_fr(
"0x1984f2e01184aef5cb974640898a5f5c25556554e2b06d99d4841badb8b198cd",
16
)
.unwrap()
);
} else if TEST_TREE_HEIGHT == 19 {
assert_eq!(
root,
str_to_fr(
"0x219ceb53f2b1b7a6cf74e80d50d44d68ecb4a53c6cc65b25593c8d56343fb1fe",
16
)
.unwrap()
);
} else if TEST_TREE_HEIGHT == 20 {
assert_eq!(
root,
str_to_fr(
"0x21947ffd0bce0c385f876e7c97d6a42eec5b1fe935aab2f01c1f8a8cbcc356d2",
16
)
.unwrap()
);
}
let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
let path_elements = merkle_proof.get_path_elements();
let identity_path_index = merkle_proof.get_path_index();
// We check correct computation of the path and indexes
let expected_path_elements: Vec<Fr> = [
"0x0000000000000000000000000000000000000000000000000000000000000000",
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
]
.map(|e| str_to_fr(e, 16).unwrap())
.to_vec();
// These values refer to TEST_TREE_HEIGHT == 15
let mut expected_path_elements = vec![
str_to_fr(
"0x0000000000000000000000000000000000000000000000000000000000000000",
16,
)
.unwrap(),
str_to_fr(
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
16,
)
.unwrap(),
str_to_fr(
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
16,
)
.unwrap(),
str_to_fr(
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
16,
)
.unwrap(),
str_to_fr(
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
16,
)
.unwrap(),
str_to_fr(
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
16,
)
.unwrap(),
str_to_fr(
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
16,
)
.unwrap(),
str_to_fr(
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
16,
)
.unwrap(),
str_to_fr(
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
16,
)
.unwrap(),
str_to_fr(
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
16,
)
.unwrap(),
str_to_fr(
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
16,
)
.unwrap(),
str_to_fr(
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
16,
)
.unwrap(),
str_to_fr(
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
16,
)
.unwrap(),
str_to_fr(
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
16,
)
.unwrap(),
str_to_fr(
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
16,
)
.unwrap(),
];
let expected_identity_path_index: Vec<u8> =
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
let mut expected_identity_path_index: Vec<u8> =
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
// We add the remaining elements for the cases TEST_TREE_HEIGHT = 19 and 20
if TEST_TREE_HEIGHT == 19 || TEST_TREE_HEIGHT == 20 {
expected_path_elements.append(&mut vec![
str_to_fr(
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
16,
)
.unwrap(),
str_to_fr(
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
16,
)
.unwrap(),
str_to_fr(
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
16,
)
.unwrap(),
str_to_fr(
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
16,
)
.unwrap(),
]);
expected_identity_path_index.append(&mut vec![0, 0, 0, 0]);
}
if TEST_TREE_HEIGHT == 20 {
expected_path_elements.append(&mut vec![str_to_fr(
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
16,
)
.unwrap()]);
expected_identity_path_index.append(&mut vec![0]);
}
assert_eq!(path_elements, expected_path_elements);
assert_eq!(identity_path_index, expected_identity_path_index);
// We check correct verification of the proof
assert!(tree.verify(&rate_commitment, &merkle_proof).unwrap());
}
fn get_test_witness() -> RLNWitnessInput {
let leaf_index = 3;
// Generate identity pair
let (identity_secret_hash, id_commitment) = keygen();
let user_message_limit = Fr::from(100);
let rate_commitment = poseidon_hash(&[id_commitment, user_message_limit]);
//// generate merkle tree
let default_leaf = Fr::from(0);
let mut tree = PoseidonTree::new(
TEST_TREE_HEIGHT,
default_leaf,
ConfigOf::<PoseidonTree>::default(),
)
.unwrap();
tree.set(leaf_index, rate_commitment.into()).unwrap();
let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
let signal = b"hey hey";
let x = hash_to_field(signal);
// We set the remaining values to random ones
let epoch = hash_to_field(b"test-epoch");
let rln_identifier = hash_to_field(b"test-rln-identifier");
let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);
rln_witness_from_values(
identity_secret_hash,
&merkle_proof,
x,
external_nullifier,
user_message_limit,
Fr::from(1),
)
.unwrap()
assert!(tree.verify(&id_commitment, &merkle_proof).unwrap());
}
#[test]
// We test RLN proof generation and verification
fn test_witness_from_json() {
// We generate all relevant keys
let proving_key = zkey_from_folder();
let verification_key = vk_from_folder();
let builder = circom_from_folder();
let proving_key = zkey_from_folder(TEST_RESOURCES_FOLDER).unwrap();
let verification_key = vk_from_folder(TEST_RESOURCES_FOLDER).unwrap();
let builder = circom_from_folder(TEST_RESOURCES_FOLDER).unwrap();
// We compute witness from the json input
let rln_witness = get_test_witness();
let rln_witness_json = rln_witness_to_json(&rln_witness).unwrap();
let rln_witness_deser = rln_witness_from_json(rln_witness_json).unwrap();
assert_eq!(rln_witness_deser, rln_witness);
// We compute witness from the json input example
let mut witness_json: &str = "";
if TEST_TREE_HEIGHT == 15 {
witness_json = WITNESS_JSON_15;
} else if TEST_TREE_HEIGHT == 19 {
witness_json = WITNESS_JSON_19;
} else if TEST_TREE_HEIGHT == 20 {
witness_json = WITNESS_JSON_20;
}
let rln_witness = rln_witness_from_json(witness_json);
let rln_witness_unwrapped = rln_witness.unwrap();
// Let's generate a zkSNARK proof
let proof = generate_proof(builder, &proving_key, &rln_witness_deser).unwrap();
let proof_values = proof_values_from_witness(&rln_witness_deser).unwrap();
let proof = generate_proof(builder, &proving_key, &rln_witness_unwrapped).unwrap();
let proof_values = proof_values_from_witness(&rln_witness_unwrapped);
// Let's verify the proof
let verified = verify_proof(&verification_key, &proof, &proof_values);
@@ -150,20 +381,47 @@ mod test {
#[test]
// We test RLN proof generation and verification
fn test_end_to_end() {
let rln_witness = get_test_witness();
let rln_witness_json = rln_witness_to_json(&rln_witness).unwrap();
let rln_witness_deser = rln_witness_from_json(rln_witness_json).unwrap();
assert_eq!(rln_witness_deser, rln_witness);
let tree_height = TEST_TREE_HEIGHT;
let leaf_index = 3;
// Generate identity pair
let (identity_secret_hash, id_commitment) = keygen();
//// generate merkle tree
let default_leaf = Fr::from(0);
let mut tree = PoseidonTree::new(
tree_height,
default_leaf,
ConfigOf::<PoseidonTree>::default(),
)
.unwrap();
tree.set(leaf_index, id_commitment.into()).unwrap();
let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
let signal = b"hey hey";
let x = hash_to_field(signal);
// We set the remaining values to random ones
let epoch = hash_to_field(b"test-epoch");
//let rln_identifier = hash_to_field(b"test-rln-identifier");
let rln_witness: RLNWitnessInput = rln_witness_from_values(
identity_secret_hash,
&merkle_proof,
x,
epoch, /*, rln_identifier*/
);
// We generate all relevant keys
let proving_key = zkey_from_folder();
let verification_key = vk_from_folder();
let builder = circom_from_folder();
let proving_key = zkey_from_folder(TEST_RESOURCES_FOLDER).unwrap();
let verification_key = vk_from_folder(TEST_RESOURCES_FOLDER).unwrap();
let builder = circom_from_folder(TEST_RESOURCES_FOLDER).unwrap();
// Let's generate a zkSNARK proof
let proof = generate_proof(builder, &proving_key, &rln_witness_deser).unwrap();
let proof = generate_proof(builder, &proving_key, &rln_witness).unwrap();
let proof_values = proof_values_from_witness(&rln_witness_deser).unwrap();
let proof_values = proof_values_from_witness(&rln_witness);
// Let's verify the proof
let success = verify_proof(&verification_key, &proof, &proof_values).unwrap();
@@ -173,19 +431,25 @@ mod test {
#[test]
fn test_witness_serialization() {
// We test witness JSON serialization
let rln_witness = get_test_witness();
let rln_witness_json = rln_witness_to_json(&rln_witness).unwrap();
let rln_witness_deser = rln_witness_from_json(rln_witness_json).unwrap();
assert_eq!(rln_witness_deser, rln_witness);
// We test witness serialization
let mut witness_json: &str = "";
if TEST_TREE_HEIGHT == 15 {
witness_json = WITNESS_JSON_15;
} else if TEST_TREE_HEIGHT == 19 {
witness_json = WITNESS_JSON_19;
} else if TEST_TREE_HEIGHT == 20 {
witness_json = WITNESS_JSON_20;
}
let rln_witness = rln_witness_from_json(witness_json).unwrap();
let ser = serialize_witness(&rln_witness).unwrap();
let (deser, _) = deserialize_witness(&ser).unwrap();
assert_eq!(rln_witness, deser);
// We test Proof values serialization
let proof_values = proof_values_from_witness(&rln_witness).unwrap();
let proof_values = proof_values_from_witness(&rln_witness);
let ser = serialize_proof_values(&proof_values);
let (deser, _) = deserialize_proof_values(&ser);
assert_eq!(proof_values, deser);

View File

@@ -1,59 +1,66 @@
#[cfg(test)]
mod test {
use ark_ff::BigInt;
use ark_std::{rand::thread_rng, UniformRand};
use rand::Rng;
use rln::circuit::{Fr, TEST_TREE_HEIGHT};
use rln::circuit::{Fr, TEST_RESOURCES_FOLDER, TEST_TREE_HEIGHT};
use rln::hashers::{hash_to_field, poseidon_hash as utils_poseidon_hash, ROUND_PARAMS};
use rln::protocol::{compute_tree_root, deserialize_identity_tuple};
use rln::public::{hash as public_hash, poseidon_hash as public_poseidon_hash, RLN};
use rln::utils::*;
use serde_json::json;
use std::io::Cursor;
#[test]
// This test is similar to the one in lib, but uses only the public API
#[cfg(not(feature = "stateless"))]
fn test_merkle_proof() {
let tree_height = TEST_TREE_HEIGHT;
let leaf_index = 3;
let user_message_limit = 1;
let mut rln = RLN::new(TEST_TREE_HEIGHT, generate_input_buffer()).unwrap();
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
// generate identity
let identity_secret_hash = hash_to_field(b"test-merkle-proof");
let id_commitment = utils_poseidon_hash(&vec![identity_secret_hash]);
let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit.into()]);
// check that the list of empty leaf indices is empty
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_empty_leaves_indices(&mut buffer).unwrap();
let idxs = bytes_le_to_vec_usize(&buffer.into_inner()).unwrap();
assert!(idxs.is_empty());
// We pass rate_commitment as Read buffer to RLN's set_leaf
let mut buffer = Cursor::new(fr_to_bytes_le(&rate_commitment));
// We pass id_commitment as Read buffer to RLN's set_leaf
let mut buffer = Cursor::new(fr_to_bytes_le(&id_commitment));
rln.set_leaf(leaf_index, &mut buffer).unwrap();
// check that leaves before leaf_index are set to zero
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_empty_leaves_indices(&mut buffer).unwrap();
let idxs = bytes_le_to_vec_usize(&buffer.into_inner()).unwrap();
assert_eq!(idxs, [0, 1, 2]);
// We check correct computation of the root
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_root(&mut buffer).unwrap();
let (root, _) = bytes_le_to_fr(&buffer.into_inner());
assert_eq!(
root,
Fr::from(BigInt([
17110646155607829651,
5040045984242729823,
6965416728592533086,
2328960363755461975
]))
);
if TEST_TREE_HEIGHT == 15 {
assert_eq!(
root,
str_to_fr(
"0x1984f2e01184aef5cb974640898a5f5c25556554e2b06d99d4841badb8b198cd",
16
)
.unwrap()
);
} else if TEST_TREE_HEIGHT == 19 {
assert_eq!(
root,
str_to_fr(
"0x219ceb53f2b1b7a6cf74e80d50d44d68ecb4a53c6cc65b25593c8d56343fb1fe",
16
)
.unwrap()
);
} else if TEST_TREE_HEIGHT == 20 {
assert_eq!(
root,
str_to_fr(
"0x21947ffd0bce0c385f876e7c97d6a42eec5b1fe935aab2f01c1f8a8cbcc356d2",
16
)
.unwrap()
);
}
// We check correct computation of merkle proof
let mut buffer = Cursor::new(Vec::<u8>::new());
@@ -64,67 +71,129 @@ mod test {
let (identity_path_index, _) = bytes_le_to_vec_u8(&buffer_inner[read..].to_vec()).unwrap();
// We check correct computation of the path and indexes
let expected_path_elements: Vec<Fr> = [
"0x0000000000000000000000000000000000000000000000000000000000000000",
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
]
.map(|e| str_to_fr(e, 16).unwrap())
.to_vec();
let mut expected_path_elements = vec![
str_to_fr(
"0x0000000000000000000000000000000000000000000000000000000000000000",
16,
)
.unwrap(),
str_to_fr(
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
16,
)
.unwrap(),
str_to_fr(
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
16,
)
.unwrap(),
str_to_fr(
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
16,
)
.unwrap(),
str_to_fr(
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
16,
)
.unwrap(),
str_to_fr(
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
16,
)
.unwrap(),
str_to_fr(
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
16,
)
.unwrap(),
str_to_fr(
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
16,
)
.unwrap(),
str_to_fr(
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
16,
)
.unwrap(),
str_to_fr(
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
16,
)
.unwrap(),
str_to_fr(
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
16,
)
.unwrap(),
str_to_fr(
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
16,
)
.unwrap(),
str_to_fr(
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
16,
)
.unwrap(),
str_to_fr(
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
16,
)
.unwrap(),
str_to_fr(
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
16,
)
.unwrap(),
];
let expected_identity_path_index: Vec<u8> =
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
let mut expected_identity_path_index: Vec<u8> =
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
// We add the remaining elements for the cases TEST_TREE_HEIGHT = 19 and 20
if TEST_TREE_HEIGHT == 19 || TEST_TREE_HEIGHT == 20 {
expected_path_elements.append(&mut vec![
str_to_fr(
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
16,
)
.unwrap(),
str_to_fr(
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
16,
)
.unwrap(),
str_to_fr(
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
16,
)
.unwrap(),
str_to_fr(
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
16,
)
.unwrap(),
]);
expected_identity_path_index.append(&mut vec![0, 0, 0, 0]);
}
if TEST_TREE_HEIGHT == 20 {
expected_path_elements.append(&mut vec![str_to_fr(
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
16,
)
.unwrap()]);
expected_identity_path_index.append(&mut vec![0]);
}
assert_eq!(path_elements, expected_path_elements);
assert_eq!(identity_path_index, expected_identity_path_index);
// check subtree root computation for leaf 0 and all corresponding nodes up to the root
let l_idx = 0;
for n in (1..=TEST_TREE_HEIGHT).rev() {
let idx_l = l_idx * (1 << (TEST_TREE_HEIGHT - n));
let idx_r = (l_idx + 1) * (1 << (TEST_TREE_HEIGHT - n));
let idx_sr = idx_l;
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_subtree_root(n, idx_l, &mut buffer).unwrap();
let (prev_l, _) = bytes_le_to_fr(&buffer.into_inner());
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_subtree_root(n, idx_r, &mut buffer).unwrap();
let (prev_r, _) = bytes_le_to_fr(&buffer.into_inner());
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_subtree_root(n - 1, idx_sr, &mut buffer).unwrap();
let (subroot, _) = bytes_le_to_fr(&buffer.into_inner());
let res = utils_poseidon_hash(&[prev_l, prev_r]);
assert_eq!(res, subroot);
}
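// Worked example of the index arithmetic above, for TEST_TREE_HEIGHT = 20
// and n = 19: idx_l = 0 * (1 << 1) = 0 and idx_r = 1 * (1 << 1) = 2 select
// the two adjacent subtree roots one level above the leaves, and
// idx_sr = 0 selects their parent at level 18, so the assertion reduces to
// poseidon(node(19, 0), node(19, 1)) == node(18, 0).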
// We double check that the proof computed from public API is correct
let root_from_proof = compute_tree_root(
&identity_secret_hash,
&user_message_limit.into(),
&path_elements,
&identity_path_index,
);
let root_from_proof =
compute_tree_root(&id_commitment, &path_elements, &identity_path_index, false);
assert_eq!(root, root_from_proof);
}
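The compute_tree_root calls above fold a leaf up the tree using the proof path. A minimal sketch of that folding, assuming a 0 in identity_path_index means the current node is the left child (hypothetical helper, reusing the Fr and Poseidon imports of this test module):

```rust
fn fold_root(leaf: Fr, path_elements: &[Fr], path_index: &[u8]) -> Fr {
    let mut node = leaf;
    for (sibling, dir) in path_elements.iter().zip(path_index) {
        // dir == 0: node is the left child; otherwise it is the right child
        node = if *dir == 0 {
            utils_poseidon_hash(&[node, *sibling])
        } else {
            utils_poseidon_hash(&[*sibling, node])
        };
    }
    node
}
```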

1
rln/vendor/rln vendored Submodule

Submodule rln/vendor/rln added at fc86ad156a

50
semaphore/Cargo.toml Normal file
View File

@@ -0,0 +1,50 @@
[package]
name = "semaphore-wrapper"
version = "0.3.0"
edition = "2021"
license = "MIT OR Apache-2.0"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[features]
default = []
dylib = [ "wasmer/dylib", "wasmer-engine-dylib", "wasmer-compiler-cranelift" ]
[dependencies]
ark-bn254 = { version = "0.3.0" }
ark-circom = { git = "https://github.com/gakonst/ark-circom", features=["circom-2"], rev = "35ce5a9" }
ark-ec = { version = "0.3.0", default-features = false, features = ["parallel"] }
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", features = ["parallel"] }
ark-relations = { version = "0.3.0", default-features = false }
ark-std = { version = "0.3.0", default-features = false, features = ["parallel"] }
color-eyre = "0.6.1"
once_cell = "1.8"
rand = "0.8.4"
semaphore = { git = "https://github.com/worldcoin/semaphore-rs", rev = "ee658c2"}
ethers-core = { version = "2.0.8", default-features = false }
ruint = { version = "1.2.0", features = [ "serde", "num-bigint", "ark-ff" ] }
serde = "1.0"
thiserror = "1.0.0"
wasmer = { version = "2.0" }
[dev-dependencies]
rand_chacha = "0.3.1"
serde_json = "1.0.79"
[build-dependencies]
color-eyre = "0.6.1"
wasmer = { version = "2.0" }
wasmer-engine-dylib = { version = "2.2.1", optional = true }
wasmer-compiler-cranelift = { version = "3.1.1", optional = true }
[profile.release]
codegen-units = 1
lto = true
panic = "abort"
opt-level = 3
# Compilation profile for any non-workspace member.
# Dependencies are optimized, even in a dev build. This improves dev performance
# while having negligible impact on incremental build times.
[profile.dev.package."*"]
opt-level = 3

7
semaphore/Makefile.toml Normal file
View File

@@ -0,0 +1,7 @@
[tasks.build]
command = "cargo"
args = ["build", "--release"]
[tasks.test]
command = "cargo"
args = ["test", "--release"]

18
semaphore/README.md Normal file
View File

@@ -0,0 +1,18 @@
# Semaphore example package
This is basically a wrapper around (and partial copy of)
https://github.com/worldcoin/semaphore-rs, illustrating how e.g. the RLN
package can be structured.
The goal is also to provide a basic FFI around protocol.rs, which is currently
out of scope for that project.
See that project for more information.
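A hypothetical sketch of what such an FFI entry point could look like (names and layout are illustrative, not part of this crate; it assumes serde_json plus the serde support on `Field` that ruint provides):

```rust
use semaphore::Field;
use semaphore_wrapper::protocol::{verify_proof, Proof};

/// Verify a JSON-encoded (root, nullifier, signal, external, proof) tuple.
/// Returns false on any parse or verification error.
#[no_mangle]
pub extern "C" fn semaphore_verify_json(ptr: *const u8, len: usize) -> bool {
    let bytes = unsafe { std::slice::from_raw_parts(ptr, len) };
    let Ok((root, nullifier, signal, external, proof)) =
        serde_json::from_slice::<(Field, Field, Field, Field, Proof)>(bytes)
    else {
        return false;
    };
    verify_proof(root, nullifier, signal, external, &proof).unwrap_or(false)
}
```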
## Build and Test
To build and test, run the following commands within the module folder
```bash
cargo make build
cargo make test
```

111
semaphore/build.rs Normal file
View File

@@ -0,0 +1,111 @@
// Adapted from semaphore-rs/build.rs
use color_eyre::eyre::{eyre, Result};
use std::{
path::{Component, Path, PathBuf},
process::Command,
};
const ZKEY_FILE: &str = "./vendor/semaphore/build/snark/semaphore_final.zkey";
const WASM_FILE: &str = "./vendor/semaphore/build/snark/semaphore.wasm";
// See <https://internals.rust-lang.org/t/path-to-lexical-absolute/14940>
fn absolute(path: &str) -> Result<PathBuf> {
let path = Path::new(path);
let mut absolute = if path.is_absolute() {
PathBuf::new()
} else {
std::env::current_dir()?
};
for component in path.components() {
match component {
Component::CurDir => {}
Component::ParentDir => {
absolute.pop();
}
component => absolute.push(component.as_os_str()),
}
}
Ok(absolute)
}
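// For example, absolute("./vendor/semaphore/build/snark/semaphore.wasm")
// resolves against the current working directory and collapses "." and ".."
// purely lexically, without touching the filesystem - so it also works
// before the build artifacts exist.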
fn build_circuit() -> Result<()> {
println!("cargo:rerun-if-changed=./vendor/semaphore");
let run = |cmd: &[&str]| -> Result<()> {
// TODO: Use ExitCode::exit_ok() when stable.
Command::new(cmd[0])
.args(cmd[1..].iter())
.current_dir("./vendor/semaphore")
.status()?
.success()
.then_some(())
.ok_or(eyre!("procees returned failure"))?;
Ok(())
};
// Compute absolute paths
let zkey_file = absolute(ZKEY_FILE)?;
let wasm_file = absolute(WASM_FILE)?;
// Build circuits if not exists
// TODO: This does not rebuild if the semaphore submodule is changed.
// NOTE: This requires npm / nodejs to be installed.
if !(zkey_file.exists() && wasm_file.exists()) {
run(&["npm", "install"])?;
run(&["npm", "exec", "ts-node", "./scripts/compile-circuits.ts"])?;
}
assert!(zkey_file.exists());
assert!(wasm_file.exists());
// Export generated paths
println!("cargo:rustc-env=BUILD_RS_ZKEY_FILE={}", zkey_file.display());
println!("cargo:rustc-env=BUILD_RS_WASM_FILE={}", wasm_file.display());
Ok(())
}
#[cfg(feature = "dylib")]
fn build_dylib() -> Result<()> {
use enumset::enum_set;
use std::{env, str::FromStr};
use wasmer::{Module, Store, Target, Triple};
use wasmer_compiler_cranelift::Cranelift;
use wasmer_engine_dylib::Dylib;
let wasm_file = absolute(WASM_FILE)?;
assert!(wasm_file.exists());
let out_dir = env::var("OUT_DIR")?;
let out_dir = Path::new(&out_dir).to_path_buf();
let dylib_file = out_dir.join("semaphore.dylib");
println!(
"cargo:rustc-env=CIRCUIT_WASM_DYLIB={}",
dylib_file.display()
);
if dylib_file.exists() {
return Ok(());
}
// Create a WASM engine for the target that can compile
let triple = Triple::from_str(&env::var("TARGET")?).map_err(|e| eyre!(e))?;
let cpu_features = enum_set!();
let target = Target::new(triple, cpu_features);
let compiler_config = Cranelift::default();
let engine = Dylib::new(compiler_config).target(target).engine();
// Compile the WASM module
let store = Store::new(&engine);
let module = Module::from_file(&store, &wasm_file)?;
module.serialize_to_file(&dylib_file)?;
assert!(dylib_file.exists());
println!("cargo:warning=Circuit dylib is in {}", dylib_file.display());
Ok(())
}
fn main() -> Result<()> {
build_circuit()?;
#[cfg(feature = "dylib")]
build_dylib()?;
Ok(())
}

79
semaphore/src/circuit.rs Normal file
View File

@@ -0,0 +1,79 @@
// Adapted from semaphore-rs/src/circuit.rs
use ark_bn254::{Bn254, Fr};
use ark_circom::{read_zkey, WitnessCalculator};
use ark_groth16::ProvingKey;
use ark_relations::r1cs::ConstraintMatrices;
use core::include_bytes;
use once_cell::sync::{Lazy, OnceCell};
use std::{io::Cursor, sync::Mutex};
use wasmer::{Module, Store};
#[cfg(feature = "dylib")]
use std::{env, path::Path};
#[cfg(feature = "dylib")]
use wasmer::Dylib;
const ZKEY_BYTES: &[u8] = include_bytes!(env!("BUILD_RS_ZKEY_FILE"));
#[cfg(not(feature = "dylib"))]
const WASM: &[u8] = include_bytes!(env!("BUILD_RS_WASM_FILE"));
static ZKEY: Lazy<(ProvingKey<Bn254>, ConstraintMatrices<Fr>)> = Lazy::new(|| {
let mut reader = Cursor::new(ZKEY_BYTES);
read_zkey(&mut reader).expect("zkey should be valid")
});
static WITNESS_CALCULATOR: OnceCell<Mutex<WitnessCalculator>> = OnceCell::new();
/// Initialize the library.
#[cfg(feature = "dylib")]
pub fn initialize(dylib_path: &Path) {
WITNESS_CALCULATOR
.set(from_dylib(dylib_path))
.expect("Failed to initialize witness calculator");
// Force init of ZKEY
Lazy::force(&ZKEY);
}
#[cfg(feature = "dylib")]
fn from_dylib(path: &Path) -> Mutex<WitnessCalculator> {
let store = Store::new(&Dylib::headless().engine());
// The module must be exported using [`Module::serialize`].
let module = unsafe {
Module::deserialize_from_file(&store, path).expect("Failed to load wasm dylib module")
};
let result =
WitnessCalculator::from_module(module).expect("Failed to create witness calculator");
Mutex::new(result)
}
#[must_use]
pub fn zkey() -> &'static (ProvingKey<Bn254>, ConstraintMatrices<Fr>) {
&ZKEY
}
#[cfg(feature = "dylib")]
#[must_use]
pub fn witness_calculator() -> &'static Mutex<WitnessCalculator> {
WITNESS_CALCULATOR.get_or_init(|| {
let path = env::var("CIRCUIT_WASM_DYLIB").expect(
"Semaphore-rs is not initialized. The library needs to be initialized before use when \
built with the `dylib` feature. You can initialize by calling `initialize` or \
setting the `CIRCUIT_WASM_DYLIB` environment variable.",
);
from_dylib(Path::new(&path))
})
}
#[cfg(not(feature = "dylib"))]
#[must_use]
pub fn witness_calculator() -> &'static Mutex<WitnessCalculator> {
WITNESS_CALCULATOR.get_or_init(|| {
let store = Store::default();
let module = Module::from_binary(&store, WASM).expect("wasm should be valid");
let result =
WitnessCalculator::from_module(module).expect("Failed to create witness calculator");
Mutex::new(result)
})
}
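A usage sketch for the two build modes (the dylib path below is illustrative):

```rust
fn setup_prover() {
    // dylib builds must point the calculator at a precompiled module first
    #[cfg(feature = "dylib")]
    semaphore_wrapper::circuit::initialize(std::path::Path::new(
        "target/semaphore.dylib",
    ));
    // embedded-wasm builds (the default) lazily compile on first access
    let _calculator = semaphore_wrapper::circuit::witness_calculator();
}
```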

7
semaphore/src/lib.rs Normal file
View File

@@ -0,0 +1,7 @@
#![allow(clippy::multiple_crate_versions)]
pub mod circuit;
pub mod protocol;
#[cfg(feature = "dylib")]
pub use circuit::initialize;

215
semaphore/src/protocol.rs Normal file
View File

@@ -0,0 +1,215 @@
// Adapted from semaphore-rs/src/protocol.rs
// For illustration purposes only as an example protocol
// Private module
use crate::circuit::{witness_calculator, zkey};
use ark_bn254::{Bn254, Parameters};
use ark_circom::CircomReduction;
use ark_ec::bn::Bn;
use ark_groth16::{
create_proof_with_reduction_and_matrices, prepare_verifying_key, Proof as ArkProof,
};
use ark_relations::r1cs::SynthesisError;
use ark_std::UniformRand;
use color_eyre::{Report, Result};
use ethers_core::types::U256;
use rand::{thread_rng, Rng};
use semaphore::{
identity::Identity,
merkle_tree::{self, Branch},
poseidon,
poseidon_tree::PoseidonHash,
Field,
};
use serde::{Deserialize, Serialize};
use std::time::Instant;
use thiserror::Error;
// Matches the private G1Tup type in ark-circom.
pub type G1 = (U256, U256);
// Matches the private G2Tup type in ark-circom.
pub type G2 = ([U256; 2], [U256; 2]);
/// Wrap a proof object so we have serde support
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Proof(G1, G2, G1);
impl From<ArkProof<Bn<Parameters>>> for Proof {
fn from(proof: ArkProof<Bn<Parameters>>) -> Self {
let proof = ark_circom::ethereum::Proof::from(proof);
let (a, b, c) = proof.as_tuple();
Self(a, b, c)
}
}
impl From<Proof> for ArkProof<Bn<Parameters>> {
fn from(proof: Proof) -> Self {
let eth_proof = ark_circom::ethereum::Proof {
a: ark_circom::ethereum::G1 {
x: proof.0 .0,
y: proof.0 .1,
},
#[rustfmt::skip] // Rustfmt inserts some confusing spaces
b: ark_circom::ethereum::G2 {
// The order of coefficients is flipped.
x: [proof.1.0[1], proof.1.0[0]],
y: [proof.1.1[1], proof.1.1[0]],
},
c: ark_circom::ethereum::G1 {
x: proof.2 .0,
y: proof.2 .1,
},
};
eth_proof.into()
}
}
/// Helper to convert a merkle proof into a bigint vector
/// TODO: we should create a From trait for this
fn merkle_proof_to_vec(proof: &merkle_tree::Proof<PoseidonHash>) -> Vec<Field> {
proof
.0
.iter()
.map(|x| match x {
Branch::Left(value) | Branch::Right(value) => *value,
})
.collect()
}
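// Sketch (not part of this file): the TODO above could be discharged with a
// From impl on a local newtype, since Proof and Vec<Field> are both foreign:
//
// struct PathElements(Vec<Field>);
//
// impl From<&merkle_tree::Proof<PoseidonHash>> for PathElements {
//     fn from(proof: &merkle_tree::Proof<PoseidonHash>) -> Self {
//         PathElements(merkle_proof_to_vec(proof))
//     }
// }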
/// Generates the nullifier hash
#[must_use]
pub fn generate_nullifier_hash(identity: &Identity, external_nullifier: Field) -> Field {
poseidon::hash2(external_nullifier, identity.nullifier)
}
#[derive(Error, Debug)]
pub enum ProofError {
#[error("Error reading circuit key: {0}")]
CircuitKeyError(#[from] std::io::Error),
#[error("Error producing witness: {0}")]
WitnessError(Report),
#[error("Error producing proof: {0}")]
SynthesisError(#[from] SynthesisError),
#[error("Error converting public input: {0}")]
ToFieldError(#[from] ruint::ToFieldError),
}
/// Generates a semaphore proof
///
/// # Errors
///
/// Returns a [`ProofError`] if proving fails.
pub fn generate_proof(
identity: &Identity,
merkle_proof: &merkle_tree::Proof<PoseidonHash>,
external_nullifier_hash: Field,
signal_hash: Field,
) -> Result<Proof, ProofError> {
generate_proof_rng(
identity,
merkle_proof,
external_nullifier_hash,
signal_hash,
&mut thread_rng(),
)
}
/// Generates a semaphore proof from entropy
///
/// # Errors
///
/// Returns a [`ProofError`] if proving fails.
pub fn generate_proof_rng(
identity: &Identity,
merkle_proof: &merkle_tree::Proof<PoseidonHash>,
external_nullifier_hash: Field,
signal_hash: Field,
rng: &mut impl Rng,
) -> Result<Proof, ProofError> {
generate_proof_rs(
identity,
merkle_proof,
external_nullifier_hash,
signal_hash,
ark_bn254::Fr::rand(rng),
ark_bn254::Fr::rand(rng),
)
}
fn generate_proof_rs(
identity: &Identity,
merkle_proof: &merkle_tree::Proof<PoseidonHash>,
external_nullifier_hash: Field,
signal_hash: Field,
r: ark_bn254::Fr,
s: ark_bn254::Fr,
) -> Result<Proof, ProofError> {
let inputs = [
("identityNullifier", vec![identity.nullifier]),
("identityTrapdoor", vec![identity.trapdoor]),
("treePathIndices", merkle_proof.path_index()),
("treeSiblings", merkle_proof_to_vec(merkle_proof)),
("externalNullifier", vec![external_nullifier_hash]),
("signalHash", vec![signal_hash]),
];
let inputs = inputs.into_iter().map(|(name, values)| {
(
name.to_string(),
values.iter().copied().map(Into::into).collect::<Vec<_>>(),
)
});
let now = Instant::now();
let full_assignment = witness_calculator()
.lock()
.expect("witness_calculator mutex should not get poisoned")
.calculate_witness_element::<Bn254, _>(inputs, false)
.map_err(ProofError::WitnessError)?;
println!("witness generation took: {:.2?}", now.elapsed());
let now = Instant::now();
let zkey = zkey();
let ark_proof = create_proof_with_reduction_and_matrices::<_, CircomReduction>(
&zkey.0,
r,
s,
&zkey.1,
zkey.1.num_instance_variables,
zkey.1.num_constraints,
full_assignment.as_slice(),
)?;
let proof = ark_proof.into();
println!("proof generation took: {:.2?}", now.elapsed());
Ok(proof)
}
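// Note (illustrative): r and s are the Groth16 blinding scalars. Fixing them
// would make proofs deterministic and linkable across identical witnesses,
// which is why generate_proof_rng samples them from the caller's RNG.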
/// Verifies a given semaphore proof
///
/// # Errors
///
/// Returns a [`ProofError`] if verifying fails. Verification failure does not
/// necessarily mean the proof is incorrect.
pub fn verify_proof(
root: Field,
nullifier_hash: Field,
signal_hash: Field,
external_nullifier_hash: Field,
proof: &Proof,
) -> Result<bool, ProofError> {
let zkey = zkey();
let pvk = prepare_verifying_key(&zkey.0.vk);
let public_inputs = [root, nullifier_hash, signal_hash, external_nullifier_hash]
.iter()
.map(ark_bn254::Fr::try_from)
.collect::<Result<Vec<_>, _>>()?;
let ark_proof = (*proof).into();
let result = ark_groth16::verify_proof(&pvk, &ark_proof, &public_inputs[..])?;
Ok(result)
}

115
semaphore/tests/protocol.rs Normal file
View File

@@ -0,0 +1,115 @@
#[cfg(test)]
mod tests {
use ark_bn254::Parameters;
use ark_ec::bn::Bn;
use ark_groth16::Proof as ArkProof;
use rand::{Rng, SeedableRng as _};
use rand_chacha::ChaChaRng;
use semaphore::{hash_to_field, identity::Identity, poseidon_tree::PoseidonTree, Field};
use semaphore_wrapper::protocol::{
generate_nullifier_hash, generate_proof, generate_proof_rng, verify_proof, Proof,
};
use serde_json::json;
#[test]
fn test_semaphore() {
// generate identity
let id = Identity::from_seed(b"secret");
// generate merkle tree
let leaf = Field::from(0);
let mut tree = PoseidonTree::new(21, leaf);
tree.set(0, id.commitment());
let merkle_proof = tree.proof(0).expect("proof should exist");
let root = tree.root().into();
// change signal and external_nullifier here
let signal_hash = hash_to_field(b"xxx");
let external_nullifier_hash = hash_to_field(b"appId");
let nullifier_hash = generate_nullifier_hash(&id, external_nullifier_hash);
let proof =
generate_proof(&id, &merkle_proof, external_nullifier_hash, signal_hash).unwrap();
let success = verify_proof(
root,
nullifier_hash,
signal_hash,
external_nullifier_hash,
&proof,
)
.unwrap();
assert!(success);
}
fn arb_proof(seed: u64) -> Proof {
// Deterministic randomness for testing
let mut rng = ChaChaRng::seed_from_u64(seed);
// generate identity
let seed: [u8; 16] = rng.gen();
let id = Identity::from_seed(&seed);
// generate merkle tree
let leaf = Field::from(0);
let mut tree = PoseidonTree::new(21, leaf);
tree.set(0, id.commitment());
let merkle_proof = tree.proof(0).expect("proof should exist");
let external_nullifier: [u8; 16] = rng.gen();
let external_nullifier_hash = hash_to_field(&external_nullifier);
let signal: [u8; 16] = rng.gen();
let signal_hash = hash_to_field(&signal);
generate_proof_rng(
&id,
&merkle_proof,
external_nullifier_hash,
signal_hash,
&mut rng,
)
.unwrap()
}
#[test]
fn test_proof_cast_roundtrip() {
let proof = arb_proof(123);
let ark_proof: ArkProof<Bn<Parameters>> = proof.into();
let result: Proof = ark_proof.into();
assert_eq!(proof, result);
}
#[test]
fn test_proof_serialize() {
let proof = arb_proof(456);
let json = serde_json::to_value(&proof).unwrap();
assert_eq!(
json,
json!([
[
"0x249ae469686987ee9368da60dd177a8c42891c02f5760e955e590c79d55cfab2",
"0xf22e25870f49388459d388afb24dcf6ec11bb2d4def1e2ec26d6e42f373aad8"
],
[
[
"0x17bd25dbd7436c30ea5b8a3a47aadf11ed646c4b25cc14a84ff8cbe0252ff1f8",
"0x1c140668c56688367416534d57b4a14e5a825efdd5e121a6a2099f6dc4cd277b"
],
[
"0x26a8524759d969ea0682a092cf7a551697d81962d6c998f543f81e52d83e05e1",
"0x273eb3f796fd1807b9df9c6d769d983e3dabdc61677b75d48bb7691303b2c8dd"
]
],
[
"0x62715c53a0eb4c46dbb5f73f1fd7449b9c63d37c1ece65debc39b472065a90f",
"0x114f7becc66f1cd7a8b01c89db8233622372fc0b6fc037c4313bca41e2377fd9"
]
])
);
}
}

1
semaphore/vendor/semaphore vendored Submodule

View File

@@ -1,6 +1,6 @@
[package]
name = "zerokit_utils"
version = "0.5.1"
version = "0.3.2"
edition = "2021"
license = "MIT OR Apache-2.0"
description = "Various utilities for Zerokit"
@@ -13,15 +13,11 @@ bench = false
[dependencies]
ark-ff = { version = "=0.4.1", default-features = false, features = ["asm"] }
num-bigint = { version = "=0.4.3", default-features = false, features = [
"rand",
] }
num-bigint = { version = "=0.4.3", default-features = false, features = ["rand"] }
color-eyre = "=0.6.2"
pmtree = { package = "vacp2p_pmtree", version = "=2.0.2", optional = true }
pmtree = { package = "pmtree", version = "=2.0.0", optional = true}
sled = "=0.34.7"
serde = "=1.0.163"
lazy_static = "1.4.0"
hex = "0.4"
[dev-dependencies]
ark-bn254 = "=0.4.0"

View File

@@ -1,7 +1,5 @@
use criterion::{criterion_group, criterion_main, Criterion};
use hex_literal::hex;
use lazy_static::lazy_static;
use std::{fmt::Display, str::FromStr};
use tiny_keccak::{Hasher as _, Keccak};
use zerokit_utils::{
FullMerkleConfig, FullMerkleTree, Hasher, OptimalMerkleConfig, OptimalMerkleTree,
@@ -11,59 +9,38 @@ use zerokit_utils::{
#[derive(Clone, Copy, Eq, PartialEq)]
struct Keccak256;
#[derive(Clone, Copy, Eq, PartialEq, Debug, Default)]
struct TestFr([u8; 32]);
impl Hasher for Keccak256 {
type Fr = TestFr;
type Fr = [u8; 32];
fn default_leaf() -> Self::Fr {
TestFr([0; 32])
[0; 32]
}
fn hash(inputs: &[Self::Fr]) -> Self::Fr {
let mut output = [0; 32];
let mut hasher = Keccak::v256();
for element in inputs {
hasher.update(element.0.as_slice());
hasher.update(element);
}
hasher.finalize(&mut output);
TestFr(output)
output
}
}
impl Display for TestFr {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", String::from_utf8_lossy(self.0.as_slice()))
}
}
impl FromStr for TestFr {
type Err = std::string::FromUtf8Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(TestFr(s.as_bytes().try_into().unwrap()))
}
}
lazy_static! {
static ref LEAVES: [TestFr; 4] = [
hex!("0000000000000000000000000000000000000000000000000000000000000001"),
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
]
.map(TestFr);
}
pub fn optimal_merkle_tree_benchmark(c: &mut Criterion) {
let mut tree =
OptimalMerkleTree::<Keccak256>::new(2, TestFr([0; 32]), OptimalMerkleConfig::default())
.unwrap();
OptimalMerkleTree::<Keccak256>::new(2, [0; 32], OptimalMerkleConfig::default()).unwrap();
let leaves = [
hex!("0000000000000000000000000000000000000000000000000000000000000001"),
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
];
c.bench_function("OptimalMerkleTree::set", |b| {
b.iter(|| {
tree.set(0, LEAVES[0]).unwrap();
tree.set(0, leaves[0]).unwrap();
})
});
@@ -75,7 +52,7 @@ pub fn optimal_merkle_tree_benchmark(c: &mut Criterion) {
c.bench_function("OptimalMerkleTree::override_range", |b| {
b.iter(|| {
tree.override_range(0, *LEAVES, [0, 1, 2, 3]).unwrap();
tree.override_range(0, leaves, [0, 1, 2, 3]).unwrap();
})
});
@@ -90,28 +67,22 @@ pub fn optimal_merkle_tree_benchmark(c: &mut Criterion) {
tree.get(0).unwrap();
})
});
// check the intermediate node getter, which requires additional computation of the subtree root index
c.bench_function("OptimalMerkleTree::get_subtree_root", |b| {
b.iter(|| {
tree.get_subtree_root(1, 0).unwrap();
})
});
c.bench_function("OptimalMerkleTree::get_empty_leaves_indices", |b| {
b.iter(|| {
tree.get_empty_leaves_indices();
})
});
}
pub fn full_merkle_tree_benchmark(c: &mut Criterion) {
let mut tree =
FullMerkleTree::<Keccak256>::new(2, TestFr([0; 32]), FullMerkleConfig::default()).unwrap();
FullMerkleTree::<Keccak256>::new(2, [0; 32], FullMerkleConfig::default()).unwrap();
let leaves = [
hex!("0000000000000000000000000000000000000000000000000000000000000001"),
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
];
c.bench_function("FullMerkleTree::set", |b| {
b.iter(|| {
tree.set(0, LEAVES[0]).unwrap();
tree.set(0, leaves[0]).unwrap();
})
});
@@ -123,7 +94,7 @@ pub fn full_merkle_tree_benchmark(c: &mut Criterion) {
c.bench_function("FullMerkleTree::override_range", |b| {
b.iter(|| {
tree.override_range(0, *LEAVES, [0, 1, 2, 3]).unwrap();
tree.override_range(0, leaves, [0, 1, 2, 3]).unwrap();
})
});
@@ -138,19 +109,6 @@ pub fn full_merkle_tree_benchmark(c: &mut Criterion) {
tree.get(0).unwrap();
})
});
// check the intermediate node getter, which requires additional computation of the subtree root index
c.bench_function("FullMerkleTree::get_subtree_root", |b| {
b.iter(|| {
tree.get_subtree_root(1, 0).unwrap();
})
});
c.bench_function("FullMerkleTree::get_empty_leaves_indices", |b| {
b.iter(|| {
tree.get_empty_leaves_indices();
})
});
}
criterion_group!(

View File

@@ -8,7 +8,7 @@ use std::{
};
////////////////////////////////////////////////////////////
///// Full Merkle Tree Implementation
/// Full Merkle Tree Implementation
////////////////////////////////////////////////////////////
/// Merkle tree with all leaf and intermediate hashes stored
@@ -26,10 +26,6 @@ pub struct FullMerkleTree<H: Hasher> {
/// The tree nodes
nodes: Vec<H::Fr>,
/// Tracks, up to next_index, which leaves are empty.
/// An entry is 0 if the leaf is empty and 1 otherwise.
cached_leaves_indices: Vec<u8>,
// The next available (i.e., never used) tree index. Equivalently, the number of leaves added to the tree
// (deletions leave next_index unchanged)
next_index: usize,
@@ -100,7 +96,6 @@ where
depth,
cached_nodes,
nodes,
cached_leaves_indices: vec![0; 1 << depth],
next_index,
metadata: Vec::new(),
})
@@ -121,7 +116,7 @@ where
}
// Returns the total number of leaves set
fn leaves_set(&self) -> usize {
fn leaves_set(&mut self) -> usize {
self.next_index
}
@@ -146,42 +141,6 @@ where
Ok(self.nodes[self.capacity() + leaf - 1])
}
fn get_subtree_root(&self, n: usize, index: usize) -> Result<H::Fr> {
if n > self.depth() {
return Err(Report::msg("level exceeds depth size"));
}
if index >= self.capacity() {
return Err(Report::msg("index exceeds set size"));
}
if n == 0 {
Ok(self.root())
} else if n == self.depth {
self.get(index)
} else {
let mut idx = self.capacity() + index - 1;
let mut nd = self.depth;
loop {
let parent = self.parent(idx).unwrap();
nd -= 1;
if nd == n {
return Ok(self.nodes[parent]);
} else {
idx = parent;
continue;
}
}
}
}
fn get_empty_leaves_indices(&self) -> Vec<usize> {
self.cached_leaves_indices
.iter()
.take(self.next_index)
.enumerate()
.filter(|&(_, &v)| v == 0u8)
.map(|(idx, _)| idx)
.collect()
}
// Sets tree nodes, starting from start index
// Function proper of FullMerkleTree implementation
fn set_range<I: IntoIterator<Item = FrOf<Self::Hasher>>>(
@@ -199,7 +158,6 @@ where
}
hashes.into_iter().for_each(|hash| {
self.nodes[index + count] = hash;
self.cached_leaves_indices[start + count] = 1;
count += 1;
});
if count != 0 {
@@ -209,36 +167,37 @@ where
Ok(())
}
fn override_range<I, J>(&mut self, start: usize, leaves: I, indices: J) -> Result<()>
fn override_range<I, J>(&mut self, start: usize, leaves: I, to_remove_indices: J) -> Result<()>
where
I: IntoIterator<Item = FrOf<Self::Hasher>>,
J: IntoIterator<Item = usize>,
{
let indices = indices.into_iter().collect::<Vec<_>>();
let min_index = *indices.first().unwrap();
let leaves_vec = leaves.into_iter().collect::<Vec<_>>();
let max_index = start + leaves_vec.len();
let mut set_values = vec![Self::Hasher::default_leaf(); max_index - min_index];
for i in min_index..start {
if !indices.contains(&i) {
let value = self.get(i)?;
set_values[i - min_index] = value;
}
let index = self.capacity() + start - 1;
let mut count = 0;
let leaves = leaves.into_iter().collect::<Vec<_>>();
let to_remove_indices = to_remove_indices.into_iter().collect::<Vec<_>>();
// first count number of hashes, and check that they fit in the tree
// then insert into the tree
if leaves.len() + start - to_remove_indices.len() > self.capacity() {
return Err(Report::msg("provided hashes do not fit in the tree"));
}
for i in 0..leaves_vec.len() {
set_values[start - min_index + i] = leaves_vec[i];
// remove leaves
for i in &to_remove_indices {
self.delete(*i)?;
}
for i in indices {
self.cached_leaves_indices[i] = 0;
// insert new leaves
for hash in leaves {
self.nodes[index + count] = hash;
count += 1;
}
self.set_range(start, set_values)
.map_err(|e| Report::msg(e.to_string()))
if count != 0 {
self.update_nodes(index, index + (count - 1))?;
self.next_index = max(self.next_index, start + count - to_remove_indices.len());
}
Ok(())
}
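// In short: this variant deletes every index in to_remove_indices, writes the
// new leaves contiguously from start, and then sets next_index to
// max(next_index, start + insertions - removals) - the net change, rather
// than the union of touched indices used by the pmtree-backed set_range
// described earlier.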
// Sets a leaf at the next available index
@@ -252,7 +211,6 @@ where
// We reset the leaf only if we previously set a leaf at that index
if index < self.next_index {
self.set(index, H::default_leaf())?;
self.cached_leaves_indices[index] = 0;
}
Ok(())
}

View File

@@ -21,7 +21,7 @@ use color_eyre::Result;
/// and the hash function used to initialize a Merkle Tree implementation
pub trait Hasher {
/// Type of the leaf and tree node
type Fr: Clone + Copy + Eq + Default + std::fmt::Debug + std::fmt::Display + FromStr;
type Fr: Clone + Copy + Eq;
/// Returns the default tree leaf
fn default_leaf() -> Self::Fr;
@@ -47,16 +47,14 @@ pub trait ZerokitMerkleTree {
Self: Sized;
fn depth(&self) -> usize;
fn capacity(&self) -> usize;
fn leaves_set(&self) -> usize;
fn leaves_set(&mut self) -> usize;
fn root(&self) -> FrOf<Self::Hasher>;
fn compute_root(&mut self) -> Result<FrOf<Self::Hasher>>;
fn get_subtree_root(&self, n: usize, index: usize) -> Result<FrOf<Self::Hasher>>;
fn set(&mut self, index: usize, leaf: FrOf<Self::Hasher>) -> Result<()>;
fn set_range<I>(&mut self, start: usize, leaves: I) -> Result<()>
where
I: IntoIterator<Item = FrOf<Self::Hasher>>;
fn get(&self, index: usize) -> Result<FrOf<Self::Hasher>>;
fn get_empty_leaves_indices(&self) -> Vec<usize>;
fn override_range<I, J>(&mut self, start: usize, leaves: I, to_remove_indices: J) -> Result<()>
where
I: IntoIterator<Item = FrOf<Self::Hasher>>,

View File

@@ -6,7 +6,7 @@ use std::str::FromStr;
use std::{cmp::max, fmt::Debug};
////////////////////////////////////////////////////////////
///// Optimal Merkle Tree Implementation
/// Optimal Merkle Tree Implementation
////////////////////////////////////////////////////////////
/// The Merkle tree structure
@@ -27,10 +27,6 @@ where
/// The tree nodes
nodes: HashMap<(usize, usize), H::Fr>,
/// Tracks, up to next_index, which leaves are empty.
/// An entry is 0 if the leaf is empty and 1 otherwise.
cached_leaves_indices: Vec<u8>,
// The next available (i.e., never used) tree index. Equivalently, the number of leaves added to the tree
// (deletions leave next_index unchanged)
next_index: usize,
@@ -55,9 +51,7 @@ impl FromStr for OptimalMerkleConfig {
}
}
////////////////////////////////////////////////////////////
///// Implementations
////////////////////////////////////////////////////////////
/// Implementations
impl<H: Hasher> ZerokitMerkleTree for OptimalMerkleTree<H>
where
@@ -84,7 +78,6 @@ where
cached_nodes: cached_nodes.clone(),
depth,
nodes: HashMap::new(),
cached_leaves_indices: vec![0; 1 << depth],
next_index: 0,
metadata: Vec::new(),
})
@@ -105,7 +98,7 @@ where
}
// Returns the total number of leaves set
fn leaves_set(&self) -> usize {
fn leaves_set(&mut self) -> usize {
self.next_index
}
@@ -115,22 +108,6 @@ where
self.get_node(0, 0)
}
fn get_subtree_root(&self, n: usize, index: usize) -> Result<H::Fr> {
if n > self.depth() {
return Err(Report::msg("level exceeds depth size"));
}
if index >= self.capacity() {
return Err(Report::msg("index exceeds set size"));
}
if n == 0 {
Ok(self.root())
} else if n == self.depth {
self.get(index)
} else {
Ok(self.get_node(n, index >> (self.depth - n)))
}
}
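// e.g. with depth = 20 and n = 19, the level-19 ancestor of leaf index 2 is
// node 2 >> (20 - 19) == 1: each level up from the leaves halves the index.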
// Sets a leaf at the specified tree index
fn set(&mut self, index: usize, leaf: H::Fr) -> Result<()> {
if index >= self.capacity() {
@@ -139,7 +116,6 @@ where
self.nodes.insert((self.depth, index), leaf);
self.recalculate_from(index)?;
self.next_index = max(self.next_index, index + 1);
self.cached_leaves_indices[index] = 1;
Ok(())
}
@@ -151,16 +127,6 @@ where
Ok(self.get_node(self.depth, index))
}
fn get_empty_leaves_indices(&self) -> Vec<usize> {
self.cached_leaves_indices
.iter()
.take(self.next_index)
.enumerate()
.filter(|&(_, &v)| v == 0u8)
.map(|(idx, _)| idx)
.collect()
}
// Sets multiple leaves from the specified tree index
fn set_range<I: IntoIterator<Item = H::Fr>>(&mut self, start: usize, leaves: I) -> Result<()> {
let leaves = leaves.into_iter().collect::<Vec<_>>();
@@ -170,43 +136,40 @@ where
}
for (i, leaf) in leaves.iter().enumerate() {
self.nodes.insert((self.depth, start + i), *leaf);
self.cached_leaves_indices[start + i] = 1;
self.recalculate_from(start + i)?;
}
self.next_index = max(self.next_index, start + leaves.len());
Ok(())
}
fn override_range<I, J>(&mut self, start: usize, leaves: I, indices: J) -> Result<()>
fn override_range<I, J>(&mut self, start: usize, leaves: I, to_remove_indices: J) -> Result<()>
where
I: IntoIterator<Item = FrOf<Self::Hasher>>,
J: IntoIterator<Item = usize>,
{
let indices = indices.into_iter().collect::<Vec<_>>();
let min_index = *indices.first().unwrap();
let leaves_vec = leaves.into_iter().collect::<Vec<_>>();
let max_index = start + leaves_vec.len();
let mut set_values = vec![Self::Hasher::default_leaf(); max_index - min_index];
for i in min_index..start {
if !indices.contains(&i) {
let value = self.get_leaf(i);
set_values[i - min_index] = value;
}
let leaves = leaves.into_iter().collect::<Vec<_>>();
let to_remove_indices = to_remove_indices.into_iter().collect::<Vec<_>>();
// check if the range is valid
if leaves.len() + start - to_remove_indices.len() > self.capacity() {
return Err(Report::msg("provided range exceeds set size"));
}
for i in 0..leaves_vec.len() {
set_values[start - min_index + i] = leaves_vec[i];
// remove leaves
for i in &to_remove_indices {
self.delete(*i)?;
}
for i in indices {
self.cached_leaves_indices[i] = 0;
// add leaves
for (i, leaf) in leaves.iter().enumerate() {
self.nodes.insert((self.depth, start + i), *leaf);
self.recalculate_from(start + i)?;
}
self.set_range(start, set_values)
.map_err(|e| Report::msg(e.to_string()))
self.next_index = max(
self.next_index,
start + leaves.len() - to_remove_indices.len(),
);
Ok(())
}
// Sets a leaf at the next available index
@@ -220,7 +183,6 @@ where
// We reset the leaf only if we previously set a leaf at that index
if index < self.next_index {
self.set(index, H::default_leaf())?;
self.cached_leaves_indices[index] = 0;
}
Ok(())
}
@@ -304,7 +266,6 @@ where
i >>= 1;
depth -= 1;
self.nodes.insert((depth, i), h);
self.cached_leaves_indices[index] = 1;
if depth == 0 {
break;
}

View File

@@ -1,8 +1,7 @@
// Tests adapted from https://github.com/worldcoin/semaphore-rs/blob/d462a4372f1fd9c27610f2acfe4841fab1d396aa/src/merkle_tree.rs
#[cfg(test)]
pub mod test {
mod test {
use hex_literal::hex;
use std::{fmt::Display, str::FromStr};
use tiny_keccak::{Hasher as _, Keccak};
use zerokit_utils::{
FullMerkleConfig, FullMerkleTree, Hasher, OptimalMerkleConfig, OptimalMerkleTree,
@@ -11,257 +10,74 @@ pub mod test {
#[derive(Clone, Copy, Eq, PartialEq)]
struct Keccak256;
#[derive(Clone, Copy, Eq, PartialEq, Debug, Default)]
struct TestFr([u8; 32]);
impl Hasher for Keccak256 {
type Fr = TestFr;
type Fr = [u8; 32];
fn default_leaf() -> Self::Fr {
TestFr([0; 32])
[0; 32]
}
fn hash(inputs: &[Self::Fr]) -> Self::Fr {
let mut output = [0; 32];
let mut hasher = Keccak::v256();
for element in inputs {
hasher.update(element.0.as_slice());
hasher.update(element);
}
hasher.finalize(&mut output);
TestFr(output)
output
}
}
impl Display for TestFr {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", hex::encode(self.0.as_slice()))
}
}
impl FromStr for TestFr {
type Err = std::string::FromUtf8Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(TestFr(s.as_bytes().try_into().unwrap()))
}
}
impl From<u32> for TestFr {
fn from(value: u32) -> Self {
let mut bytes: Vec<u8> = vec![0; 28];
bytes.extend_from_slice(&value.to_be_bytes());
TestFr(bytes.as_slice().try_into().unwrap())
}
}
const DEFAULT_DEPTH: usize = 2;
fn default_full_merkle_tree(depth: usize) -> FullMerkleTree<Keccak256> {
FullMerkleTree::<Keccak256>::new(depth, TestFr([0; 32]), FullMerkleConfig::default())
.unwrap()
}
fn default_optimal_merkle_tree(depth: usize) -> OptimalMerkleTree<Keccak256> {
OptimalMerkleTree::<Keccak256>::new(depth, TestFr([0; 32]), OptimalMerkleConfig::default())
.unwrap()
}
#[test]
fn test_root() {
let default_tree_root = TestFr(hex!(
"b4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30"
));
let leaves = [
hex!("0000000000000000000000000000000000000000000000000000000000000001"),
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
];
let default_tree_root =
hex!("b4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30");
let roots = [
hex!("c1ba1812ff680ce84c1d5b4f1087eeb08147a4d510f3496b2849df3a73f5af95"),
hex!("893760ec5b5bee236f29e85aef64f17139c3c1b7ff24ce64eb6315fca0f2485b"),
hex!("222ff5e0b5877792c2bc1670e2ccd0c2c97cd7bb1672a57d598db05092d3d72c"),
hex!("a9bb8c3f1f12e9aa903a50c47f314b57610a3ab32f2d463293f58836def38d36"),
]
.map(TestFr);
];
let nof_leaves = 4;
let leaves: Vec<TestFr> = (1..=nof_leaves as u32).map(TestFr::from).collect();
let mut tree = default_full_merkle_tree(DEFAULT_DEPTH);
let mut tree =
FullMerkleTree::<Keccak256>::new(2, [0; 32], FullMerkleConfig::default()).unwrap();
assert_eq!(tree.root(), default_tree_root);
for i in 0..nof_leaves {
for i in 0..leaves.len() {
tree.set(i, leaves[i]).unwrap();
assert_eq!(tree.root(), roots[i]);
}
let mut tree = default_optimal_merkle_tree(DEFAULT_DEPTH);
let mut tree =
OptimalMerkleTree::<Keccak256>::new(2, [0; 32], OptimalMerkleConfig::default())
.unwrap();
assert_eq!(tree.root(), default_tree_root);
for i in 0..nof_leaves {
for i in 0..leaves.len() {
tree.set(i, leaves[i]).unwrap();
assert_eq!(tree.root(), roots[i]);
}
}
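
The default_tree_root constant can be reproduced by hand: with all four leaves zero, both level-1 nodes equal keccak256 over 64 zero bytes, and the root hashes that node with itself. A standalone check reusing the tests' tiny_keccak and hex_literal dependencies:

use hex_literal::hex;
use tiny_keccak::{Hasher as _, Keccak};

// keccak256 over the concatenation of two 32-byte nodes.
fn keccak_pair(a: &[u8; 32], b: &[u8; 32]) -> [u8; 32] {
    let mut out = [0u8; 32];
    let mut k = Keccak::v256();
    k.update(a);
    k.update(b);
    k.finalize(&mut out);
    out
}

fn main() {
    let zero = [0u8; 32];
    let level1 = keccak_pair(&zero, &zero); // node over two zero leaves
    let root = keccak_pair(&level1, &level1); // depth-2 root over four zero leaves
    assert_eq!(
        root,
        hex!("b4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30")
    );
}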
#[test]
fn test_get_empty_leaves_indices() {
let depth = 4;
let nof_leaves: usize = 1 << (depth - 1);
let leaves: Vec<TestFr> = (0..nof_leaves as u32).map(TestFr::from).collect();
let leaves_2: Vec<TestFr> = (0u32..2).map(TestFr::from).collect();
let leaves_4: Vec<TestFr> = (0u32..4).map(TestFr::from).collect();
let mut tree_full = default_full_merkle_tree(depth);
let _ = tree_full.set_range(0, leaves.clone());
assert!(tree_full.get_empty_leaves_indices().is_empty());
let mut vec_idxs = Vec::new();
for i in 0..nof_leaves {
vec_idxs.push(i);
let _ = tree_full.delete(i);
assert_eq!(tree_full.get_empty_leaves_indices(), vec_idxs);
}
for i in (0..nof_leaves).rev() {
vec_idxs.pop();
let _ = tree_full.set(i, leaves[i]);
assert_eq!(tree_full.get_empty_leaves_indices(), vec_idxs);
}
// Check the case where the number of items to insert is less than the number of items to delete
tree_full
.override_range(0, leaves_2.clone(), [0, 1, 2, 3])
.unwrap();
// check the case where the write and delete indices coincide
tree_full
.override_range(0, leaves_4.clone(), [0, 1, 2, 3])
.unwrap();
assert_eq!(tree_full.get_empty_leaves_indices(), vec![]);
// check the case where the deletion indices come before the overwrite indices
tree_full
.override_range(4, leaves_4.clone(), [0, 1, 2, 3])
.unwrap();
assert_eq!(tree_full.get_empty_leaves_indices(), vec![0, 1, 2, 3]);
// check the case where the write and delete indices overlap only partially
tree_full
.override_range(2, leaves_4.clone(), [0, 1, 2, 3])
.unwrap();
assert_eq!(tree_full.get_empty_leaves_indices(), vec![0, 1]);
// Optimal Merkle Tree test
let mut tree_opt = default_optimal_merkle_tree(depth);
let _ = tree_opt.set_range(0, leaves.clone());
assert!(tree_opt.get_empty_leaves_indices().is_empty());
let mut vec_idxs = Vec::new();
for i in 0..nof_leaves {
vec_idxs.push(i);
let _ = tree_opt.delete(i);
assert_eq!(tree_opt.get_empty_leaves_indices(), vec_idxs);
}
for i in (0..nof_leaves).rev() {
vec_idxs.pop();
let _ = tree_opt.set(i, leaves[i]);
assert_eq!(tree_opt.get_empty_leaves_indices(), vec_idxs);
}
// Check the case where the number of items to insert is less than the number of items to delete
tree_opt
.override_range(0, leaves_2.clone(), [0, 1, 2, 3])
.unwrap();
// check the case where the write and delete indices coincide
tree_opt
.override_range(0, leaves_4.clone(), [0, 1, 2, 3])
.unwrap();
assert_eq!(tree_opt.get_empty_leaves_indices(), vec![]);
// check the case where the deletion indices come before the overwrite indices
tree_opt
.override_range(4, leaves_4.clone(), [0, 1, 2, 3])
.unwrap();
assert_eq!(tree_opt.get_empty_leaves_indices(), vec![0, 1, 2, 3]);
// check the case where the write and delete indices overlap only partially
tree_opt
.override_range(2, leaves_4.clone(), [0, 1, 2, 3])
.unwrap();
assert_eq!(tree_opt.get_empty_leaves_indices(), vec![0, 1]);
}
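
In the final case of each half, the delete set {0, 1, 2, 3} and the write window [2, 6) overlap on {2, 3}: those slots are deleted and immediately rewritten, so only 0 and 1 stay empty. A small sketch of that set arithmetic:

fn main() {
    // Final case above: delete set {0, 1, 2, 3}, write window [2, 6).
    let deleted = [0usize, 1, 2, 3];
    let (start, len) = (2usize, 4usize);
    // Slots that are deleted but not rewritten are the ones left empty.
    let empty: Vec<usize> = deleted
        .iter()
        .copied()
        .filter(|&i| i < start || i >= start + len)
        .collect();
    assert_eq!(empty, vec![0, 1]); // matches get_empty_leaves_indices() above
}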
#[test]
fn test_subtree_root() {
let depth = 3;
let nof_leaves: usize = 6;
let leaves: Vec<TestFr> = (0..nof_leaves as u32).map(TestFr::from).collect();
let mut tree_full = default_full_merkle_tree(depth);
let _ = tree_full.set_range(0, leaves.iter().cloned());
for i in 0..nof_leaves {
// check leaves
assert_eq!(
tree_full.get(i).unwrap(),
tree_full.get_subtree_root(depth, i).unwrap()
);
// check root
assert_eq!(tree_full.root(), tree_full.get_subtree_root(0, i).unwrap());
}
// check intermediate nodes
for n in (1..=depth).rev() {
for i in (0..(1 << n)).step_by(2) {
let idx_l = i * (1 << (depth - n));
let idx_r = (i + 1) * (1 << (depth - n));
let idx_sr = idx_l;
let prev_l = tree_full.get_subtree_root(n, idx_l).unwrap();
let prev_r = tree_full.get_subtree_root(n, idx_r).unwrap();
let subroot = tree_full.get_subtree_root(n - 1, idx_sr).unwrap();
// check intermediate nodes
assert_eq!(Keccak256::hash(&[prev_l, prev_r]), subroot);
}
}
let mut tree_opt = default_optimal_merkle_tree(depth);
let _ = tree_opt.set_range(0, leaves.iter().cloned());
for i in 0..nof_leaves {
// check leaves
assert_eq!(
tree_opt.get(i).unwrap(),
tree_opt.get_subtree_root(depth, i).unwrap()
);
// check root
assert_eq!(tree_opt.root(), tree_opt.get_subtree_root(0, i).unwrap());
}
// check intermediate nodes
for n in (1..=depth).rev() {
for i in (0..(1 << n)).step_by(2) {
let idx_l = i * (1 << (depth - n));
let idx_r = (i + 1) * (1 << (depth - n));
let idx_sr = idx_l;
let prev_l = tree_opt.get_subtree_root(n, idx_l).unwrap();
let prev_r = tree_opt.get_subtree_root(n, idx_r).unwrap();
let subroot = tree_opt.get_subtree_root(n - 1, idx_sr).unwrap();
// check intermediate nodes
assert_eq!(Keccak256::hash(&[prev_l, prev_r]), subroot);
}
}
}
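
The index arithmetic in these loops maps a level-n node back to the leftmost leaf it covers: node i at level n spans leaves [i * 2^(depth - n), (i + 1) * 2^(depth - n)). A standalone check of one instance:

fn main() {
    // A level-n node with horizontal index i covers 2^(depth - n) leaves,
    // starting at leaf i * 2^(depth - n).
    let (depth, n, i) = (3usize, 2usize, 2usize);
    let span = 1usize << (depth - n);
    let idx_l = i * span; // leftmost leaf under the left sibling
    let idx_r = (i + 1) * span; // leftmost leaf under the right sibling
    assert_eq!((idx_l, idx_r), (4, 6));
}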
#[test]
fn test_proof() {
let nof_leaves = 4;
let leaves: Vec<TestFr> = (0..nof_leaves as u32).map(TestFr::from).collect();
let leaves = [
hex!("0000000000000000000000000000000000000000000000000000000000000001"),
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
];
// We test the FullMerkleTree implementation
let mut tree = default_full_merkle_tree(DEFAULT_DEPTH);
for i in 0..nof_leaves {
let mut tree =
FullMerkleTree::<Keccak256>::new(2, [0; 32], FullMerkleConfig::default()).unwrap();
for i in 0..leaves.len() {
// We set the leaves
tree.set(i, leaves[i]).unwrap();
@@ -278,12 +94,16 @@ pub mod test {
assert_eq!(proof.compute_root_from(&leaves[i]), tree.root());
// We check that the proof is not valid for another leaf
assert!(!tree.verify(&leaves[(i + 1) % nof_leaves], &proof).unwrap());
assert!(!tree
.verify(&leaves[(i + 1) % leaves.len()], &proof)
.unwrap());
}
// We test the OptimalMerkleTree implementation
let mut tree = default_optimal_merkle_tree(DEFAULT_DEPTH);
for i in 0..nof_leaves {
let mut tree =
OptimalMerkleTree::<Keccak256>::new(2, [0; 32], OptimalMerkleConfig::default())
.unwrap();
for i in 0..leaves.len() {
// We set the leaves
tree.set(i, leaves[i]).unwrap();
@@ -300,25 +120,32 @@ pub mod test {
assert_eq!(proof.compute_root_from(&leaves[i]), tree.root());
// We check that the proof is not valid for another leaf
assert!(!tree.verify(&leaves[(i + 1) % nof_leaves], &proof).unwrap());
assert!(!tree
.verify(&leaves[(i + 1) % leaves.len()], &proof)
.unwrap());
}
}
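
In outline, compute_root_from folds the leaf with its sibling path and verify compares the result against the stored root. A self-contained sketch of that fold over keccak, with a hypothetical (sibling, sibling_is_left) path encoding rather than the crate's actual proof type:

use tiny_keccak::{Hasher as _, Keccak};

fn keccak_pair(a: &[u8; 32], b: &[u8; 32]) -> [u8; 32] {
    let mut out = [0u8; 32];
    let mut k = Keccak::v256();
    k.update(a);
    k.update(b);
    k.finalize(&mut out);
    out
}

// Fold a leaf up the path; each step hashes with the sibling on the stated side.
fn compute_root_from(leaf: [u8; 32], path: &[([u8; 32], bool)]) -> [u8; 32] {
    path.iter().fold(leaf, |acc, (sibling, sibling_is_left)| {
        if *sibling_is_left {
            keccak_pair(sibling, &acc)
        } else {
            keccak_pair(&acc, sibling)
        }
    })
}

fn main() {
    // Depth-1 example: root over (leaf, sibling).
    let (leaf, sibling) = ([1u8; 32], [2u8; 32]);
    let root = compute_root_from(leaf, &[(sibling, false)]);
    assert_eq!(root, keccak_pair(&leaf, &sibling));
}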
#[test]
fn test_override_range() {
let nof_leaves = 4;
let leaves: Vec<TestFr> = (0..nof_leaves as u32).map(TestFr::from).collect();
let initial_leaves = [
hex!("0000000000000000000000000000000000000000000000000000000000000001"),
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
];
let mut tree = default_optimal_merkle_tree(DEFAULT_DEPTH);
let mut tree =
OptimalMerkleTree::<Keccak256>::new(2, [0; 32], OptimalMerkleConfig::default())
.unwrap();
// We set the leaves
tree.set_range(0, leaves.iter().cloned()).unwrap();
tree.set_range(0, initial_leaves.iter().cloned()).unwrap();
let new_leaves = [
hex!("0000000000000000000000000000000000000000000000000000000000000005"),
hex!("0000000000000000000000000000000000000000000000000000000000000006"),
]
.map(TestFr);
];
let to_delete_indices: [usize; 2] = [0, 1];
@@ -331,8 +158,8 @@ pub mod test {
.unwrap();
// ensure that the leaves are set correctly
for (i, &new_leaf) in new_leaves.iter().enumerate() {
assert_eq!(tree.get_leaf(i), new_leaf);
for i in 0..new_leaves.len() {
assert_eq!(tree.get_leaf(i), new_leaves[i]);
}
}
}

View File

@@ -25,10 +25,16 @@ mod test {
input_clean = input_clean.trim().to_string();
if radix == 10 {
BigUint::from_str_radix(&input_clean, radix).unwrap().into()
BigUint::from_str_radix(&input_clean, radix)
.unwrap()
.try_into()
.unwrap()
} else {
input_clean = input_clean.replace("0x", "");
BigUint::from_str_radix(&input_clean, radix).unwrap().into()
BigUint::from_str_radix(&input_clean, radix)
.unwrap()
.try_into()
.unwrap()
}
}
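
The switch to try_into makes explicit that a BigUint is not automatically a canonical field element: the conversion can only succeed for values below the field modulus. A small illustration of the underlying bound, using the BN254 scalar-field modulus that Fr ranges over:

use num_bigint::BigUint;
use num_traits::Num;

fn main() {
    // BN254 scalar-field modulus r; canonical Fr elements are integers in [0, r).
    let r = BigUint::from_str_radix(
        "21888242871839275222246405745257275088548364400416034343698204186575808495617",
        10,
    )
    .unwrap();
    let value = BigUint::from(42u64);
    // A TryFrom-style conversion succeeds exactly when this bound holds.
    assert!(value < r, "not a canonical Fr element");
}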
// The following constants were taken from https://github.com/arnaucube/poseidon-rs/blob/233027d6075a637c29ad84a8a44f5653b81f0410/src/constants.rs
@@ -3494,21 +3500,21 @@ mod test {
fn load_constants() -> (Vec<Vec<Fr>>, Vec<Vec<Vec<Fr>>>) {
let (c_str, m_str) = constants();
let mut c: Vec<Vec<Fr>> = Vec::new();
for c_i in c_str {
let mut ci: Vec<Fr> = Vec::new();
for c_i_j in c_i {
let b: Fr = str_to_fr(c_i_j, 10);
ci.push(b);
for i in 0..c_str.len() {
let mut cci: Vec<Fr> = Vec::new();
for j in 0..c_str[i].len() {
let b: Fr = str_to_fr(c_str[i][j], 10);
cci.push(b);
}
c.push(ci);
c.push(cci);
}
let mut m: Vec<Vec<Vec<Fr>>> = Vec::new();
for m_i in m_str {
for i in 0..m_str.len() {
let mut mi: Vec<Vec<Fr>> = Vec::new();
for m_i_j in m_i {
for j in 0..m_str[i].len() {
let mut mij: Vec<Fr> = Vec::new();
for m_i_j_k in m_i_j {
let b: Fr = str_to_fr(m_i_j_k, 10);
for k in 0..m_str[i][j].len() {
let b: Fr = str_to_fr(m_str[i][j][k], 10);
mij.push(b);
}
mi.push(mij);
@@ -3536,7 +3542,7 @@ mod test {
assert_eq!(loaded_m[i], poseidon_parameters[i].m);
}
} else {
unreachable!();
assert!(false);
}
}
}
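
A plausible motivation for the unreachable!() form in the branch above: CI runs clippy with -D warnings, and clippy's assertions_on_constants lint rejects assert!(false), while unreachable!() states the invariant and panics with a clearer message. A minimal contrast (surrounding logic illustrative):

fn main() {
    let constants_parsed = true; // stand-in for the branch condition above
    if constants_parsed {
        println!("constants loaded");
    } else {
        // `assert!(false)` here would trip clippy::assertions_on_constants;
        // `unreachable!()` states that the branch is impossible by construction.
        unreachable!("constants() must always parse");
    }
}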