feat(wasm): rework rln-wasm and rln-wasm-utils modules, remove buffer-based serialization, and update public.rs and protocol.rs accordingly (#352)

This commit is contained in:
Vinh Trịnh
2025-12-01 17:33:46 +07:00
committed by GitHub
parent c74ab11c82
commit 5c73af1130
50 changed files with 5287 additions and 1393 deletions

View File

@@ -9,7 +9,6 @@ on:
- "!rln/src/**"
- "!rln/resources/**"
- "!utils/src/**"
- "!rln-wasm-utils/**"
pull_request:
types: [opened, synchronize, reopened, ready_for_review]
paths-ignore:
@@ -19,7 +18,6 @@ on:
- "!rln/src/**"
- "!rln/resources/**"
- "!utils/src/**"
- "!rln-wasm-utils/**"
name: CI
@@ -136,36 +134,14 @@ jobs:
run: cargo make test_parallel --release
working-directory: ${{ matrix.crate }}
rln-wasm-utils-test:
# skip tests on draft PRs
if: github.event_name == 'push' || (github.event_name == 'pull_request' && !github.event.pull_request.draft)
strategy:
matrix:
platform: [ubuntu-latest, macos-latest]
crate: [rln-wasm-utils]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: Test - ${{ matrix.crate }} - ${{ matrix.platform }}
steps:
- uses: actions/checkout@v4
- name: Install stable toolchain
uses: dtolnay/rust-toolchain@stable
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
run: make installdeps
- name: Test rln-wasm-utils
run: cargo make test --release
working-directory: ${{ matrix.crate }}
lint:
# run on both ready and draft PRs
if: github.event_name == 'push' || (github.event_name == 'pull_request' && !github.event.pull_request.draft)
strategy:
matrix:
# we run lint tests only on ubuntu
# run lint tests only on ubuntu
platform: [ubuntu-latest]
crate: [rln, rln-wasm, rln-wasm-utils, utils]
crate: [rln, rln-wasm, utils]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
@@ -177,6 +153,9 @@ jobs:
uses: dtolnay/rust-toolchain@stable
with:
components: rustfmt, clippy
- name: Install wasm32 target
if: matrix.crate == 'rln-wasm'
run: rustup target add wasm32-unknown-unknown
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
run: make installdeps
@@ -184,18 +163,23 @@ jobs:
if: success() || failure()
run: cargo fmt -- --check
working-directory: ${{ matrix.crate }}
- name: Check clippy
if: success() || failure()
- name: Check clippy (wasm)
if: (success() || failure()) && (matrix.crate == 'rln-wasm')
run: |
cargo clippy --all-targets --release -- -D warnings
cargo clippy --target wasm32-unknown-unknown --tests --release -- -D warnings
working-directory: ${{ matrix.crate }}
- name: Check clippy (native)
if: (success() || failure()) && (matrix.crate != 'rln-wasm')
run: |
cargo clippy --all-targets --tests --release -- -D warnings
working-directory: ${{ matrix.crate }}
benchmark-utils:
# run only on ready pull requests
# run only on ready PRs
if: github.event_name == 'pull_request' && !github.event.pull_request.draft
strategy:
matrix:
# we run benchmark tests only on ubuntu
# run benchmark tests only on ubuntu
platform: [ubuntu-latest]
crate: [utils]
runs-on: ${{ matrix.platform }}
@@ -212,11 +196,11 @@ jobs:
cwd: ${{ matrix.crate }}
benchmark-rln:
# run only on ready pull requests
# run only on ready PRs
if: github.event_name == 'pull_request' && !github.event.pull_request.draft
strategy:
matrix:
# we run benchmark tests only on ubuntu
# run benchmark tests only on ubuntu
platform: [ubuntu-latest]
crate: [rln]
feature: ["default"]

View File

@@ -97,6 +97,7 @@ jobs:
feature:
- "default"
- "parallel"
- "utils"
steps:
- name: Checkout sources
uses: actions/checkout@v4
@@ -123,10 +124,11 @@ jobs:
wasm-pack build --release --target web --scope waku
fi
sed -i.bak 's/rln-wasm/zerokit-rln-wasm/g' pkg/package.json && rm pkg/package.json.bak
wasm-opt pkg/rln_wasm_bg.wasm -Oz --strip-debug --strip-dwarf \
--remove-unused-module-elements --vacuum -o pkg/rln_wasm_bg.wasm
if [[ ${{ matrix.feature }} == "utils" ]]; then
sed -i.bak 's/rln-wasm/zerokit-rln-wasm-utils/g' pkg/package.json && rm pkg/package.json.bak
else
sed -i.bak 's/rln-wasm/zerokit-rln-wasm/g' pkg/package.json && rm pkg/package.json.bak
fi
mkdir release
cp -r pkg/* release/
@@ -139,42 +141,9 @@ jobs:
path: rln-wasm/rln-wasm-${{ matrix.feature }}.tar.gz
retention-days: 2
rln-wasm-utils:
name: Build rln-wasm-utils
runs-on: ubuntu-latest
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Install stable toolchain
uses: dtolnay/rust-toolchain@stable
with:
targets: wasm32-unknown-unknown
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
run: make installdeps
- name: Build rln-wasm-utils package
run: |
wasm-pack build --release --target web --scope waku
sed -i.bak 's/rln-wasm-utils/zerokit-rln-wasm-utils/g' pkg/package.json && rm pkg/package.json.bak
wasm-opt pkg/rln_wasm_utils_bg.wasm -Oz --strip-debug --strip-dwarf \
--remove-unused-module-elements --vacuum -o pkg/rln_wasm_utils_bg.wasm
mkdir release
cp -r pkg/* release/
tar -czvf rln-wasm-utils.tar.gz release/
working-directory: rln-wasm-utils
- name: Upload archive artifact
uses: actions/upload-artifact@v4
with:
name: rln-wasm-utils-archive
path: rln-wasm-utils/rln-wasm-utils.tar.gz
retention-days: 2
prepare-prerelease:
name: Prepare pre-release
needs: [linux, macos, rln-wasm, rln-wasm-utils]
needs: [linux, macos, rln-wasm]
runs-on: ubuntu-latest
steps:
- name: Checkout code

6
.gitignore vendored
View File

@@ -7,9 +7,6 @@ tmp/
# Generated by Cargo will have compiled files and executables
/target
# Generated by rln-cli
rln-cli/database
# Generated by Nix
result
@@ -27,3 +24,6 @@ rln/ffi_c_examples/database
# FFI Nim examples
rln/ffi_nim_examples/main
rln/ffi_nim_examples/database
# Vscode
.vscode

View File

@@ -47,13 +47,6 @@ cd rln-wasm && cargo make test_browser # Test in browser headless mode
cd rln-wasm && cargo make test_parallel # Test parallel features
```
Choose the appropriate test commands based on your changes:
- Core RLN changes: `make test`
- Stateless features: `cargo make test_stateless`
- WASM/browser features: `cargo make test_browser`
- Parallel computation: `cargo make test_parallel`
### Tools
We recommend using the [markdownlint extension](https://marketplace.visualstudio.com/items?itemName=DavidAnson.vscode-markdownlint)
@@ -107,7 +100,6 @@ Use scopes to improve the Changelog:
- `rln` - Core RLN implementation
- `rln-cli` - Command-line interface
- `rln-wasm` - WebAssembly bindings
- `rln-wasm-utils` - WebAssembly utilities
- `utils` - Cryptographic utilities (Merkle trees, Poseidon hash)
- `ci` - Continuous integration

8
Cargo.lock generated
View File

@@ -777,12 +777,6 @@ dependencies = [
"cpufeatures",
]
[[package]]
name = "lazy_static"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
[[package]]
name = "libc"
version = "0.2.177"
@@ -1189,7 +1183,6 @@ dependencies = [
"cfg-if",
"criterion",
"document-features",
"lazy_static",
"num-bigint",
"num-traits",
"once_cell",
@@ -1846,7 +1839,6 @@ dependencies = [
"criterion",
"hex",
"hex-literal",
"lazy_static",
"num-bigint",
"num-traits",
"rayon",

View File

@@ -1,6 +1,6 @@
[workspace]
members = ["rln", "utils"]
exclude = ["rln-cli", "rln-wasm", "rln-wasm-utils"]
exclude = ["rln-cli", "rln-wasm"]
resolver = "2"
# Compilation profile for any non-workspace member.

View File

@@ -13,12 +13,13 @@ endif
installdeps: .pre-build
ifeq ($(shell uname),Darwin)
@brew install ninja binaryen
@brew install ninja
else ifeq ($(shell uname),Linux)
@if [ -f /etc/os-release ] && grep -q "ID=nixos" /etc/os-release; then \
echo "Detected NixOS, skipping apt-get installation."; \
echo "Detected NixOS, skipping apt installation."; \
else \
sudo apt-get install -y cmake ninja-build binaryen; \
sudo apt update; \
sudo apt install -y cmake ninja-build; \
fi
endif
@which wasm-pack > /dev/null && wasm-pack --version | grep -q "0.13.1" || cargo install wasm-pack --version=0.13.1

View File

@@ -6,10 +6,9 @@ tmp/
# Generated by Cargo will have compiled files and executables
/target
Cargo.lock
# Generated by rln-wasm
pkg/
# Generated by rln-cli
/database
# Generated by Nix
result

1883
rln-cli/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -23,3 +23,6 @@ serde = { version = "1.0", features = ["derive"] }
[features]
default = ["rln/pmtree-ft", "rln/parallel"]
stateless = ["rln/stateless", "rln/parallel"]
[package.metadata.docs.rs]
all-features = true

View File

@@ -10,7 +10,7 @@ use color_eyre::{eyre::eyre, Result};
use rln::{
circuit::{Fr, TEST_TREE_DEPTH},
hashers::{hash_to_field_le, poseidon_hash, PoseidonHash},
protocol::{keygen, prepare_verify_input, rln_witness_from_values, serialize_witness},
protocol::{keygen, prepare_verify_input, serialize_witness},
public::RLN,
utils::{fr_to_bytes_le, IdSecret},
};
@@ -130,15 +130,16 @@ impl RLNSystem {
let merkle_proof = self.tree.proof(user_index)?;
let x = hash_to_field_le(signal.as_bytes());
let rln_witness = rln_witness_from_values(
let rln_witness = RLNWitnessInput::new(
identity.identity_secret_hash.clone(),
Fr::from(MESSAGE_LIMIT),
Fr::from(message_id),
merkle_proof.get_path_elements(),
merkle_proof.get_path_index(),
x,
external_nullifier,
Fr::from(MESSAGE_LIMIT),
Fr::from(message_id),
)?;
)
.unwrap();
let serialized = serialize_witness(&rln_witness)?;
let mut input_buffer = Cursor::new(serialized);

View File

@@ -1,35 +0,0 @@
[package]
name = "rln-wasm-utils"
version = "0.1.0"
edition = "2024"
[lib]
crate-type = ["cdylib", "rlib"]
[dependencies]
# TODO: remove this once we have a proper release
rln = { path = "../rln", version = "0.9.0", default-features = false, features = ["stateless"] }
js-sys = "0.3.77"
wasm-bindgen = "0.2.100"
rand = "0.8.5"
# The `console_error_panic_xhook` crate provides better debugging of panics by
# logging them with `console.error`. This is great for development, but requires
# all the `std::fmt` and `std::panicking` infrastructure, so isn't great for
# code size when deploying.
console_error_panic_hook = { version = "0.1.7", optional = true }
[target.'cfg(target_arch = "wasm32")'.dependencies]
getrandom = { version = "0.2.16", features = ["js"] }
[dev-dependencies]
wasm-bindgen-test = "0.3.50"
web-sys = { version = "0.3.77", features = ["console"] }
ark-std = { version = "0.5.0", default-features = false }
[features]
default = ["console_error_panic_hook"]
[package.metadata.docs.rs]
all-features = true

View File

@@ -1,36 +0,0 @@
[tasks.build]
clear = true
dependencies = ["pack_build", "pack_rename", "pack_resize"]
[tasks.pack_build]
command = "wasm-pack"
args = ["build", "--release", "--target", "web", "--scope", "waku"]
[tasks.pack_rename]
script = "sed -i.bak 's/rln-wasm-utils/zerokit-rln-wasm-utils/g' pkg/package.json && rm pkg/package.json.bak"
[tasks.pack_resize]
command = "wasm-opt"
args = [
"pkg/rln_wasm_utils_bg.wasm",
"-Oz",
"--strip-debug",
"--strip-dwarf",
"--remove-unused-module-elements",
"--vacuum",
"-o",
"pkg/rln_wasm_utils_bg.wasm",
]
[tasks.test]
command = "wasm-pack"
args = [
"test",
"--release",
"--node",
"--target",
"wasm32-unknown-unknown",
"--",
"--nocapture",
]
dependencies = ["build"]

View File

@@ -1,206 +0,0 @@
# RLN WASM Utils
[![npm version](https://badge.fury.io/js/@waku%2Fzerokit-rln-wasm.svg)](https://badge.fury.io/js/@waku%2Fzerokit-rln-wasm-utils)
[![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT)
[![License: Apache 2.0](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0)
The Zerokit RLN WASM Utils Module provides WebAssembly bindings for Rate-Limiting Nullifier [RLN](https://rfc.vac.dev/spec/32/) cryptographic primitives.
This module offers comprehensive functionality for identity generation and hashing needed for RLN applications.
## Features
### Identity Generation
- **Random Identity Generation**: Generate cryptographically secure random identities
- **Seeded Identity Generation**: Generate deterministic identities from seeds
- **Extended Identity Generation**: Generate extended identities with additional parameters
- **Seeded Extended Identity Generation**: Generate deterministic extended identities from seeds
- **Endianness Support**: Both little-endian and big-endian serialization support
### Hashing
- **Standard Hashing**: Hash arbitrary data to field elements
- **Poseidon Hashing**: Advanced cryptographic hashing using Poseidon hash function
- **Endianness Support**: Both little-endian and big-endian serialization support
## API Reference
### Identity Generation Functions
#### `generateMembershipKey(isLittleEndian: boolean): Uint8Array`
Generates a random membership key pair (identity secret and commitment).
**Inputs:**
- `isLittleEndian`: Boolean indicating endianness for serialization
**Outputs:** Serialized identity pair as `Uint8Array` in corresponding endianness
#### `generateExtendedMembershipKey(isLittleEndian: boolean): Uint8Array`
Generates an extended membership key with additional parameters.
**Inputs:**
- `isLittleEndian`: Boolean indicating endianness for serialization
**Outputs:** Serialized extended identity tuple as `Uint8Array` in corresponding endianness
#### `generateSeededMembershipKey(seed: Uint8Array, isLittleEndian: boolean): Uint8Array`
Generates a deterministic membership key from a seed.
**Inputs:**
- `seed`: Seed data as `Uint8Array`
- `isLittleEndian`: Boolean indicating endianness for serialization
**Outputs:** Serialized identity pair as `Uint8Array` in corresponding endianness
#### `generateSeededExtendedMembershipKey(seed: Uint8Array, isLittleEndian: boolean): Uint8Array`
Generates a deterministic extended membership key from a seed.
**Inputs:**
- `seed`: Seed data as `Uint8Array`
- `isLittleEndian`: Boolean indicating endianness for serialization
**Outputs:** Serialized extended identity tuple as `Uint8Array` in corresponding endianness
### Hashing Functions
#### `hash(input: Uint8Array, isLittleEndian: boolean): Uint8Array`
Hashes input data to a field element.
**Inputs:**
- `input`: Input data as `Uint8Array`
- `isLittleEndian`: Boolean indicating endianness for serialization
**Outputs:** Serialized hash result as `Uint8Array` in corresponding endianness
#### `poseidonHash(input: Uint8Array, isLittleEndian: boolean): Uint8Array`
Computes Poseidon hash of input field elements.
**Inputs:**
- `input`: Serialized field elements as `Uint8Array` (format: length + field elements)
- `isLittleEndian`: Boolean indicating endianness for serialization
**Outputs:** Serialized hash result as `Uint8Array` in corresponding endianness
## Usage Examples
### JavaScript/TypeScript
```javascript
import init, {
generateMembershipKey,
generateSeededMembershipKey,
hash,
poseidonHash
} from '@waku/zerokit-rln-wasm-utils';
// Initialize the WASM module
await init();
// Generate a random membership key
const membershipKey = generateMembershipKey(true); // little-endian
console.log('Membership key:', membershipKey);
// Generate a deterministic membership key from seed
const seed = new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
const seededKey = generateSeededMembershipKey(seed, true);
console.log('Seeded key:', seededKey);
// Hash some data
const input = new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
const hashResult = hash(input, true);
console.log('Hash result:', hashResult);
// Poseidon hash with field elements
const fieldElements = new Uint8Array([
// Length (8 bytes) + field elements (32 bytes each)
1, 0, 0, 0, 0, 0, 0, 0, // length = 1
// field element data...
]);
const poseidonResult = poseidonHash(fieldElements, true);
console.log('Poseidon hash:', poseidonResult);
```
## Install Dependencies
> [!NOTE]
> This project requires the following tools:
>
> - `wasm-pack` - for compiling Rust to WebAssembly
> - `cargo-make` - for running build commands
> - `nvm` - to install and manage Node.js
>
> Ensure all dependencies are installed before proceeding.
### Manually
#### Install `wasm-pack`
```bash
cargo install wasm-pack --version=0.13.1
```
#### Install `cargo-make`
```bash
cargo install cargo-make
```
#### Install `Node.js`
If you don't have `nvm` (Node Version Manager), install it by following
the [installation instructions](https://github.com/nvm-sh/nvm?tab=readme-ov-file#install--update-script).
After installing `nvm`, install and use Node.js `v22.14.0`:
```bash
nvm install 22.14.0
nvm use 22.14.0
nvm alias default 22.14.0
```
If you already have Node.js installed,
check your version with `node -v` command — the version must be strictly greater than 22.
### Or install everything
You can run the following command from the root of the repository to install all required dependencies for `zerokit`
```bash
make installdeps
```
## Building the library
First, navigate to the rln-wasm-utils directory:
```bash
cd rln-wasm-utils
```
Compile rln-wasm-utils for `wasm32-unknown-unknown`:
```bash
cargo make build
```
## Running tests
```bash
cargo make test
```
## License
This project is licensed under both MIT and Apache 2.0 licenses. See the LICENSE files for details.

View File

@@ -1,112 +0,0 @@
#![cfg(target_arch = "wasm32")]
use js_sys::Uint8Array;
use rln::public::{
extended_key_gen, hash, key_gen, poseidon_hash, seeded_extended_key_gen, seeded_key_gen,
};
use std::vec::Vec;
use wasm_bindgen::prelude::*;
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateMembershipKey)]
pub fn wasm_key_gen(is_little_endian: bool) -> Result<Uint8Array, String> {
let mut output_data: Vec<u8> = Vec::new();
if let Err(err) = key_gen(&mut output_data, is_little_endian) {
std::mem::forget(output_data);
Err(format!(
"Msg: could not generate membership keys, Error: {:#?}",
err
))
} else {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateExtendedMembershipKey)]
pub fn wasm_extended_key_gen(is_little_endian: bool) -> Result<Uint8Array, String> {
let mut output_data: Vec<u8> = Vec::new();
if let Err(err) = extended_key_gen(&mut output_data, is_little_endian) {
std::mem::forget(output_data);
Err(format!(
"Msg: could not generate membership keys, Error: {:#?}",
err
))
} else {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateSeededMembershipKey)]
pub fn wasm_seeded_key_gen(seed: Uint8Array, is_little_endian: bool) -> Result<Uint8Array, String> {
let mut output_data: Vec<u8> = Vec::new();
let input_data = &seed.to_vec()[..];
if let Err(err) = seeded_key_gen(input_data, &mut output_data, is_little_endian) {
std::mem::forget(output_data);
Err(format!(
"Msg: could not generate membership key, Error: {:#?}",
err
))
} else {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateSeededExtendedMembershipKey)]
pub fn wasm_seeded_extended_key_gen(
seed: Uint8Array,
is_little_endian: bool,
) -> Result<Uint8Array, String> {
let mut output_data: Vec<u8> = Vec::new();
let input_data = &seed.to_vec()[..];
if let Err(err) = seeded_extended_key_gen(input_data, &mut output_data, is_little_endian) {
std::mem::forget(output_data);
Err(format!(
"Msg: could not generate membership key, Error: {:#?}",
err
))
} else {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
}
}
#[wasm_bindgen(js_name = hash)]
pub fn wasm_hash(input: Uint8Array, is_little_endian: bool) -> Result<Uint8Array, String> {
let mut output_data: Vec<u8> = Vec::new();
let input_data = &input.to_vec()[..];
if let Err(err) = hash(input_data, &mut output_data, is_little_endian) {
std::mem::forget(output_data);
Err(format!("Msg: could not generate hash, Error: {:#?}", err))
} else {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
}
}
#[wasm_bindgen(js_name = poseidonHash)]
pub fn wasm_poseidon_hash(input: Uint8Array, is_little_endian: bool) -> Result<Uint8Array, String> {
let mut output_data: Vec<u8> = Vec::new();
let input_data = &input.to_vec()[..];
if let Err(err) = poseidon_hash(input_data, &mut output_data, is_little_endian) {
std::mem::forget(output_data);
Err(format!(
"Msg: could not generate poseidon hash, Error: {:#?}",
err
))
} else {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
}
}

View File

@@ -1,114 +0,0 @@
#![cfg(target_arch = "wasm32")]
#[cfg(test)]
mod test {
use ark_std::{UniformRand, rand::thread_rng};
use rand::Rng;
use rln::circuit::Fr;
use rln::hashers::{ROUND_PARAMS, hash_to_field_le, poseidon_hash};
use rln::protocol::{
deserialize_identity_pair_be, deserialize_identity_pair_le, deserialize_identity_tuple_be,
deserialize_identity_tuple_le,
};
use rln::utils::{bytes_le_to_fr, vec_fr_to_bytes_le};
use rln_wasm_utils::{
wasm_extended_key_gen, wasm_hash, wasm_key_gen, wasm_poseidon_hash,
wasm_seeded_extended_key_gen, wasm_seeded_key_gen,
};
use wasm_bindgen_test::*;
#[wasm_bindgen_test]
fn test_wasm_key_gen() {
let result_le = wasm_key_gen(true);
assert!(result_le.is_ok());
deserialize_identity_pair_le(result_le.unwrap().to_vec());
let result_be = wasm_key_gen(false);
assert!(result_be.is_ok());
deserialize_identity_pair_be(result_be.unwrap().to_vec());
}
#[wasm_bindgen_test]
fn test_wasm_extended_key_gen() {
let result_le = wasm_extended_key_gen(true);
assert!(result_le.is_ok());
deserialize_identity_tuple_le(result_le.unwrap().to_vec());
let result_be = wasm_extended_key_gen(false);
assert!(result_be.is_ok());
deserialize_identity_tuple_be(result_be.unwrap().to_vec());
}
#[wasm_bindgen_test]
fn test_wasm_seeded_key_gen() {
// Create a test seed
let seed_data = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
let seed = js_sys::Uint8Array::from(&seed_data[..]);
let result_le = wasm_seeded_key_gen(seed.clone(), true);
assert!(result_le.is_ok());
let fr_le = deserialize_identity_pair_le(result_le.unwrap().to_vec());
let result_be = wasm_seeded_key_gen(seed, false);
assert!(result_be.is_ok());
let fr_be = deserialize_identity_pair_be(result_be.unwrap().to_vec());
assert_eq!(fr_le, fr_be);
}
#[wasm_bindgen_test]
fn test_wasm_seeded_extended_key_gen() {
// Create a test seed
let seed_data = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
let seed = js_sys::Uint8Array::from(&seed_data[..]);
let result_le = wasm_seeded_extended_key_gen(seed.clone(), true);
assert!(result_le.is_ok());
let fr_le = deserialize_identity_tuple_le(result_le.unwrap().to_vec());
let result_be = wasm_seeded_extended_key_gen(seed, false);
assert!(result_be.is_ok());
let fr_be = deserialize_identity_tuple_be(result_be.unwrap().to_vec());
assert_eq!(fr_le, fr_be);
}
#[wasm_bindgen_test]
fn test_wasm_hash() {
// Create test input data
let signal: [u8; 32] = [0; 32];
let input = js_sys::Uint8Array::from(&signal[..]);
let result_le = wasm_hash(input.clone(), true);
assert!(result_le.is_ok());
let serialized_hash = result_le.unwrap().to_vec();
let (hash1, _) = bytes_le_to_fr(&serialized_hash);
let hash2 = hash_to_field_le(&signal);
assert_eq!(hash1, hash2);
}
#[wasm_bindgen_test]
fn test_wasm_poseidon_hash() {
let mut rng = thread_rng();
let number_of_inputs = rng.gen_range(1..ROUND_PARAMS.len());
let mut inputs = Vec::with_capacity(number_of_inputs);
for _ in 0..number_of_inputs {
inputs.push(Fr::rand(&mut rng));
}
let inputs_ser = vec_fr_to_bytes_le(&inputs);
let input = js_sys::Uint8Array::from(&inputs_ser[..]);
let expected_hash = poseidon_hash(inputs.as_ref());
let result_le = wasm_poseidon_hash(input.clone(), true);
assert!(result_le.is_ok());
let serialized_hash = result_le.unwrap().to_vec();
let (received_hash, _) = bytes_le_to_fr(&serialized_hash);
assert_eq!(received_hash, expected_hash);
}
}

5
rln-wasm/.gitignore vendored
View File

@@ -6,10 +6,11 @@ tmp/
# Generated by Cargo will have compiled files and executables
/target
Cargo.lock
# Generated by rln-wasm
pkg/
/pkg
/examples/node_modules
/examples/package-lock.json
# Generated by Nix
result

1759
rln-wasm/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -2,7 +2,7 @@
name = "rln-wasm"
version = "0.3.0"
edition = "2021"
license = "MIT or Apache2"
license = "MIT OR Apache-2.0"
[lib]
crate-type = ["cdylib", "rlib"]
@@ -11,17 +11,20 @@ crate-type = ["cdylib", "rlib"]
rln = { path = "../rln", version = "0.9.0", default-features = false, features = [
"stateless",
] }
rln-wasm-utils = { path = "../rln-wasm-utils", version = "0.1.0", default-features = false }
zerokit_utils = { path = "../utils", version = "0.7.0", default-features = false }
num-bigint = { version = "0.4.6", default-features = false }
js-sys = "0.3.77"
wasm-bindgen = "0.2.100"
serde-wasm-bindgen = "0.6.5"
serde = "1.0"
wasm-bindgen-rayon = { version = "1.3.0", features = [
"no-bundler",
], optional = true }
ark-relations = { version = "0.5.1", features = ["std"] }
ark-groth16 = { version = "0.5.0", default-features = false }
rand = "0.8.5"
# The `console_error_panic_xhook` crate provides better debugging of panics by
# The `console_error_panic_hook` crate provides better debugging of panics by
# logging them with `console.error`. This is great for development, but requires
# all the `std::fmt` and `std::panicking` infrastructure, so isn't great for
# code size when deploying.
@@ -34,14 +37,17 @@ getrandom = { version = "0.2.16", features = ["js"] }
serde_json = "1.0.141"
wasm-bindgen-test = "0.3.50"
wasm-bindgen-futures = "0.4.50"
ark-std = { version = "0.5.0", default-features = false }
[dev-dependencies.web-sys]
version = "0.3.77"
features = ["Window", "Navigator"]
[features]
default = ["console_error_panic_hook"]
parallel = ["rln/parallel", "wasm-bindgen-rayon"]
default = []
utils = []
panic_hook = ["console_error_panic_hook"]
parallel = ["rln/parallel", "wasm-bindgen-rayon", "ark-groth16/parallel"]
[package.metadata.docs.rs]
all-features = true

View File

@@ -1,10 +1,14 @@
[tasks.build]
clear = true
dependencies = ["pack_build", "pack_rename", "pack_resize"]
dependencies = ["pack_build", "pack_rename"]
[tasks.build_parallel]
clear = true
dependencies = ["pack_build_parallel", "pack_rename", "pack_resize"]
dependencies = ["pack_build_parallel", "pack_rename"]
[tasks.build_utils]
clear = true
dependencies = ["pack_build_utils", "pack_rename_utils"]
[tasks.pack_build]
command = "wasm-pack"
@@ -29,22 +33,28 @@ args = [
"-Z",
"build-std=panic_abort,std",
]
[tasks.pack_rename]
script = "sed -i.bak 's/rln-wasm/zerokit-rln-wasm/g' pkg/package.json && rm pkg/package.json.bak"
[tasks.pack_resize]
command = "wasm-opt"
[tasks.pack_build_utils]
command = "wasm-pack"
args = [
"pkg/rln_wasm_bg.wasm",
"-Oz",
"--strip-debug",
"--strip-dwarf",
"--remove-unused-module-elements",
"--vacuum",
"-o",
"pkg/rln_wasm_bg.wasm",
"build",
"--release",
"--target",
"web",
"--scope",
"waku",
"--no-default-features",
"--features",
"utils",
]
[tasks.pack_rename_utils]
script = "sed -i.bak 's/rln-wasm/zerokit-rln-wasm-utils/g' pkg/package.json && rm pkg/package.json.bak"
[tasks.test]
command = "wasm-pack"
args = [
@@ -95,5 +105,21 @@ args = [
]
dependencies = ["build_parallel"]
[tasks.test_utils]
command = "wasm-pack"
args = [
"test",
"--release",
"--node",
"--target",
"wasm32-unknown-unknown",
"--no-default-features",
"--features",
"utils",
"--",
"--nocapture",
]
dependencies = ["build_utils"]
[tasks.bench]
disabled = true

View File

@@ -72,6 +72,12 @@ Compile zerokit for `wasm32-unknown-unknown`:
cargo make build
```
Or you can build the utility functions only without RLN proof generation and verification:
```bash
cargo make build_utils
```
## Running tests and benchmarks
```bash
@@ -84,6 +90,18 @@ If you want to run the tests in browser headless mode, you can use the following
cargo make test_browser
```
If you want to test only the utility functions after running `cargo make build_utils`, you can use the following command:
```bash
cargo make test_utils
```
## Examples
Working examples demonstrating proof generation, proof verification and slashing:
- [Node example](./examples/index.js) and [README](./examples/Readme.md)
## Parallel computation
The library supports parallel computation using the `wasm-bindgen-rayon` crate,
@@ -109,7 +127,15 @@ To enable parallel computation for WebAssembly threads, you can use the followin
cargo make build_parallel
```
### WebAssembly Threading Support
### Running parallel tests and benchmarks
If you want to run the parallel tests in browser headless mode, you can use the following command:
```bash
cargo make test_parallel
```
### WebAssembly Threads Support
Most modern browsers support WebAssembly threads,
but they require the following headers to enable `SharedArrayBuffer`, which is necessary for multithreading:
@@ -122,7 +148,7 @@ Without these, the application will fall back to single-threaded mode.
## Feature detection
If you're targeting [older browser versions that didn't support WebAssembly threads yet](https://webassembly.org/roadmap/),
you'll likely want to create two builds - one with thread support and one without -
you'll likely want to create two builds - one with threads support and one without -
and use feature detection to choose the right one on the JavaScript side.
You can use [wasm-feature-detect](https://github.com/GoogleChromeLabs/wasm-feature-detect) library for this purpose.

View File

@@ -0,0 +1,20 @@
# Compile and Run
## Build the rln-wasm package at the root of rln-wasm
```bash
cargo make build
```
## Move into this directory and install dependencies
```bash
cd examples
npm install
```
## Run
```bash
npm start
```

245
rln-wasm/examples/index.js Normal file
View File

@@ -0,0 +1,245 @@
import { readFileSync } from "fs";
import { fileURLToPath } from "url";
import { dirname, join } from "path";
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
async function calculateWitness(circomPath, inputs, witnessCalculatorFile) {
const wasmFile = readFileSync(circomPath);
const wasmFileBuffer = wasmFile.slice(
wasmFile.byteOffset,
wasmFile.byteOffset + wasmFile.byteLength
);
const witnessCalculator = await witnessCalculatorFile(wasmFileBuffer);
const calculatedWitness = await witnessCalculator.calculateWitness(
inputs,
false
);
return calculatedWitness;
}
// End-to-end Node.js walkthrough of the RLN WASM API:
//  1. instantiate the wasm module and an RLN prover from a zkey,
//  2. build an identity and a stateless Merkle path for it,
//  3. generate and verify a proof,
//  4. generate a second proof for the same epoch (double-signaling) and
//     recover the identity secret from the two proofs (slashing).
// NOTE(review): `__dirname` is not defined in ESM by default; this assumes
// an ESM shim (e.g. via fileURLToPath(import.meta.url)) earlier in the
// file — confirm.
async function main() {
  // Load the wasm-bindgen glue and instantiate the .wasm binary manually
  // (plain Node, no bundler).
  const rlnWasm = await import("../pkg/rln_wasm.js");
  const wasmPath = join(__dirname, "../pkg/rln_wasm_bg.wasm");
  const wasmBytes = readFileSync(wasmPath);
  rlnWasm.initSync({ module: wasmBytes });
  // Proving key and compiled circuit for a depth-20 tree, shipped with rln.
  const zkeyPath = join(
    __dirname,
    "../../rln/resources/tree_depth_20/rln_final.arkzkey"
  );
  const circomPath = join(
    __dirname,
    "../../rln/resources/tree_depth_20/rln.wasm"
  );
  const witnessCalculatorPath = join(
    __dirname,
    "../resources/witness_calculator.js"
  );
  const { builder: witnessCalculatorFile } = await import(
    witnessCalculatorPath
  );
  console.log("Creating RLN instance");
  const zkeyData = readFileSync(zkeyPath);
  const rlnInstance = new rlnWasm.WasmRLN(new Uint8Array(zkeyData));
  console.log("RLN instance created successfully");
  console.log("\nGenerating identity keys");
  const identity = rlnWasm.Identity.generate();
  const identitySecret = identity.getSecretHash();
  const idCommitment = identity.getCommitment();
  console.log("Identity generated");
  console.log(" - identity_secret = " + identitySecret.debug());
  console.log(" - id_commitment = " + idCommitment.debug());
  console.log("\nCreating message limit");
  // At most 1 message per epoch for this membership.
  const userMessageLimit = rlnWasm.WasmFr.fromUint(1);
  console.log(" - user_message_limit = " + userMessageLimit.debug());
  console.log("\nComputing rate commitment");
  // rate_commitment = poseidon(id_commitment, user_message_limit) — this is
  // the leaf value stored in the membership tree.
  const rateCommitment = rlnWasm.Hasher.poseidonHashPair(
    idCommitment,
    userMessageLimit
  );
  console.log(" - rate_commitment = " + rateCommitment.debug());
  console.log("\nWasmFr serialization: WasmFr <-> bytes");
  const serRateCommitment = rateCommitment.toBytesLE();
  console.log(" - serialized rate_commitment =", serRateCommitment);
  const deserRateCommitment = rlnWasm.WasmFr.fromBytesLE(serRateCommitment);
  console.log(
    " - deserialized rate_commitment = " + deserRateCommitment.debug()
  );
  console.log("\nBuilding Merkle path for stateless mode");
  const treeDepth = 20;
  const defaultLeaf = rlnWasm.WasmFr.zero();
  // defaultHashes[k] = root of an empty subtree of height k + 1.
  const defaultHashes = [];
  defaultHashes[0] = rlnWasm.Hasher.poseidonHashPair(defaultLeaf, defaultLeaf);
  for (let i = 1; i < treeDepth - 1; i++) {
    defaultHashes[i] = rlnWasm.Hasher.poseidonHashPair(
      defaultHashes[i - 1],
      defaultHashes[i - 1]
    );
  }
  // Authentication path for leaf index 0 in an otherwise-empty tree: the
  // sibling at level 0 is the empty leaf; at level k (k >= 1) it is the
  // root of an empty subtree of height k.
  const pathElements = new rlnWasm.VecWasmFr();
  pathElements.push(defaultLeaf);
  for (let i = 1; i < treeDepth; i++) {
    pathElements.push(defaultHashes[i - 1]);
  }
  // All zeroes: every step goes left, i.e. leaf index 0.
  const identityPathIndex = new Uint8Array(treeDepth);
  console.log("\nVecWasmFr serialization: VecWasmFr <-> bytes");
  const serPathElements = pathElements.toBytesLE();
  console.log(" - serialized path_elements = ", serPathElements);
  const deserPathElements = rlnWasm.VecWasmFr.fromBytesLE(serPathElements);
  console.log(" - deserialized path_elements = ", deserPathElements.debug());
  console.log("\nUint8Array serialization: Uint8Array <-> bytes");
  const serPathIndex = rlnWasm.Uint8ArrayUtils.toBytesLE(identityPathIndex);
  console.log(" - serialized path_index =", serPathIndex);
  const deserPathIndex = rlnWasm.Uint8ArrayUtils.fromBytesLE(serPathIndex);
  console.log(" - deserialized path_index =", deserPathIndex);
  console.log("\nComputing Merkle root for stateless mode");
  console.log(" - computing root for index 0 with rate_commitment");
  // Fold the leaf up the same path used in the witness; the result is the
  // root the verifier must accept.
  let computedRoot = rlnWasm.Hasher.poseidonHashPair(
    rateCommitment,
    defaultLeaf
  );
  for (let i = 1; i < treeDepth; i++) {
    computedRoot = rlnWasm.Hasher.poseidonHashPair(
      computedRoot,
      defaultHashes[i - 1]
    );
  }
  console.log(" - computed_root = " + computedRoot.debug());
  console.log("\nHashing signal");
  const signal = new Uint8Array([
    1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0,
  ]);
  // x is the public signal hash the proof binds to.
  const x = rlnWasm.Hasher.hashToFieldLE(signal);
  console.log(" - x = " + x.debug());
  console.log("\nHashing epoch");
  const epochStr = "test-epoch";
  const epoch = rlnWasm.Hasher.hashToFieldLE(
    new TextEncoder().encode(epochStr)
  );
  console.log(" - epoch = " + epoch.debug());
  console.log("\nHashing RLN identifier");
  const rlnIdStr = "test-rln-identifier";
  const rlnIdentifier = rlnWasm.Hasher.hashToFieldLE(
    new TextEncoder().encode(rlnIdStr)
  );
  console.log(" - rln_identifier = " + rlnIdentifier.debug());
  console.log("\nComputing Poseidon hash for external nullifier");
  // external_nullifier = poseidon(epoch, rln_identifier); proofs sharing it
  // are rate-limited together.
  const externalNullifier = rlnWasm.Hasher.poseidonHashPair(
    epoch,
    rlnIdentifier
  );
  console.log(" - external_nullifier = " + externalNullifier.debug());
  console.log("\nCreating message_id");
  const messageId = rlnWasm.WasmFr.fromUint(0);
  console.log(" - message_id = " + messageId.debug());
  console.log("\nGenerating RLN Proof");
  const witness = new rlnWasm.WasmRLNWitnessInput(
    identitySecret,
    userMessageLimit,
    messageId,
    pathElements,
    identityPathIndex,
    x,
    externalNullifier
  );
  // The circom witness calculator consumes the witness as BigInt JSON.
  const witnessJson = witness.toBigIntJson();
  const calculatedWitness = await calculateWitness(
    circomPath,
    witnessJson,
    witnessCalculatorFile
  );
  const proof = rlnInstance.generateProofWithWitness(
    calculatedWitness,
    witness
  );
  console.log("Proof generated successfully");
  console.log("\nVerifying Proof");
  // Stateless verification: the proof's root must be in this accepted set.
  const roots = new rlnWasm.VecWasmFr();
  roots.push(computedRoot);
  const isValid = rlnInstance.verifyWithRoots(proof, roots, x);
  if (isValid) {
    console.log("Proof verified successfully");
  } else {
    console.log("Proof verification failed");
    return;
  }
  console.log(
    "\nSimulating double-signaling attack (same epoch, different message)"
  );
  console.log("\nHashing second signal");
  const signal2 = new Uint8Array([
    11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  ]);
  const x2 = rlnWasm.Hasher.hashToFieldLE(signal2);
  console.log(" - x2 = " + x2.debug());
  console.log("\nCreating second message with the same id");
  // Reusing message_id 0 under the same external nullifier exceeds the
  // rate limit and leaks a second Shamir share of the secret.
  const messageId2 = rlnWasm.WasmFr.fromUint(0);
  console.log(" - message_id2 = " + messageId2.debug());
  console.log("\nGenerating second RLN Proof");
  const witness2 = new rlnWasm.WasmRLNWitnessInput(
    identitySecret,
    userMessageLimit,
    messageId2,
    pathElements,
    identityPathIndex,
    x2,
    externalNullifier
  );
  const witnessJson2 = witness2.toBigIntJson();
  const calculatedWitness2 = await calculateWitness(
    circomPath,
    witnessJson2,
    witnessCalculatorFile
  );
  const proof2 = rlnInstance.generateProofWithWitness(
    calculatedWitness2,
    witness2
  );
  console.log("Second proof generated successfully");
  console.log("\nVerifying second proof");
  const isValid2 = rlnInstance.verifyWithRoots(proof2, roots, x2);
  if (isValid2) {
    console.log("Second proof verified successfully");
    console.log("\nRecovering identity secret");
    // Two (x, y) shares under the same external nullifier interpolate back
    // to the identity secret.
    const recoveredSecret = rlnWasm.WasmRLNProof.recoverIdSecret(proof, proof2);
    console.log(" - recovered_secret = " + recoveredSecret.debug());
    console.log(" - original_secret = " + identitySecret.debug());
    console.log("Slashing successful: Identity is recovered!");
  } else {
    console.log("Second proof verification failed");
  }
}
main().catch(console.error);

View File

@@ -0,0 +1,13 @@
{
"name": "rln-wasm-node-example",
"version": "1.0.0",
"description": "Node.js example for RLN WASM",
"type": "module",
"main": "index.js",
"scripts": {
"start": "node index.js"
},
"dependencies": {
"@waku/zerokit-rln-wasm": "file:../../pkg"
}
}

View File

@@ -1,212 +1,18 @@
#![cfg(target_arch = "wasm32")]
use js_sys::{BigInt as JsBigInt, Object, Uint8Array};
use num_bigint::BigInt;
use rln::public::RLN;
use std::vec::Vec;
use wasm_bindgen::prelude::*;
mod wasm_rln;
mod wasm_utils;
#[cfg(feature = "parallel")]
#[cfg(not(feature = "utils"))]
pub use wasm_rln::{WasmRLN, WasmRLNProof, WasmRLNWitnessInput};
pub use wasm_utils::{ExtendedIdentity, Hasher, Identity, VecWasmFr, WasmFr};
#[cfg(all(feature = "parallel", not(feature = "utils")))]
pub use wasm_bindgen_rayon::init_thread_pool;
/// Installs `console_error_panic_hook` so Rust panics are reported to the
/// JS console instead of aborting with an opaque message.
/// `set_once` makes repeated calls from JS harmless.
#[cfg(feature = "panic_hook")]
#[wasm_bindgen(js_name = initPanicHook)]
pub fn init_panic_hook() {
    console_error_panic_hook::set_once();
}
/// Opaque handle exported to JS as `RLN`; owns the underlying [`RLN`] instance.
#[wasm_bindgen(js_name = RLN)]
pub struct RLNWrapper {
    // The purpose of this wrapper is to hold a RLN instance with the 'static lifetime
    // because wasm_bindgen does not allow returning elements with lifetimes
    instance: RLN,
}
// Macro to call methods with arbitrary amount of arguments,
// which have the last argument is output buffer pointer
// First argument to the macro is context,
// second is the actual method on `RLN`
// third is the aforementioned output buffer argument
// rest are all other arguments to the method
//
// On success the output buffer is copied into a fresh `Uint8Array`
// (`Uint8Array::from` copies the bytes); on failure the error is wrapped
// with the caller-supplied context message.
//
// NOTE(review): `std::mem::forget(output_data)` leaks the Vec's heap
// allocation on every call, since `Uint8Array::from` has already copied
// the bytes — presumably a leftover from an FFI hand-off pattern; confirm
// whether the forget is intentional.
macro_rules! call_with_output_and_error_msg {
    // this variant is needed for the case when
    // there are zero other arguments
    ($instance:expr, $method:ident, $error_msg:expr) => {
        {
            let mut output_data: Vec<u8> = Vec::new();
            let new_instance = $instance.process();
            if let Err(err) = new_instance.instance.$method(&mut output_data) {
                std::mem::forget(output_data);
                Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
            } else {
                let result = Uint8Array::from(&output_data[..]);
                std::mem::forget(output_data);
                Ok(result)
            }
        }
    };
    ($instance:expr, $method:ident, $error_msg:expr, $( $arg:expr ),* ) => {
        {
            let mut output_data: Vec<u8> = Vec::new();
            let new_instance = $instance.process();
            if let Err(err) = new_instance.instance.$method($($arg.process()),*, &mut output_data) {
                std::mem::forget(output_data);
                Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
            } else {
                let result = Uint8Array::from(&output_data[..]);
                std::mem::forget(output_data);
                Ok(result)
            }
        }
    };
}
// Forwards a method call to the wrapped `RLN` instance, `process()`ing each
// argument first (raw pointers become 'static references; plain values pass
// through). Evaluates to whatever the underlying method returns.
macro_rules! call {
    ($instance:expr, $method:ident $(, $arg:expr)*) => {
        {
            let new_instance: &mut RLNWrapper = $instance.process();
            new_instance.instance.$method($($arg.process()),*)
        }
    }
}
// Like `call!`, but for methods returning `Result<bool, _>`: maps the error
// into a String prefixed with the caller-supplied context message.
macro_rules! call_bool_method_with_error_msg {
    ($instance:expr, $method:ident, $error_msg:expr $(, $arg:expr)*) => {
        {
            let new_instance: &RLNWrapper = $instance.process();
            new_instance.instance.$method($($arg.process()),*).map_err(|err| format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
        }
    }
}
/// Argument-normalization helper used by the `call*` macros: every argument
/// is `process()`ed before being forwarded to the underlying `RLN` method.
/// Plain values pass through unchanged; raw pointers are converted into
/// `'static` references.
trait ProcessArg {
    type ReturnType;
    fn process(self) -> Self::ReturnType;
}

// Pass-through for plain values.
impl ProcessArg for usize {
    type ReturnType = usize;
    fn process(self) -> Self::ReturnType {
        self
    }
}

impl<T> ProcessArg for Vec<T> {
    type ReturnType = Vec<T>;
    fn process(self) -> Self::ReturnType {
        self
    }
}

impl ProcessArg for *const RLN {
    type ReturnType = &'static RLN;
    fn process(self) -> Self::ReturnType {
        // SAFETY: pointers reaching here are assumed to come from
        // `Box::into_raw` (see `wasm_new`) and to stay valid for the whole
        // program; wasm is single-threaded, so no concurrent mutable alias
        // should exist — TODO confirm at the JS call sites.
        unsafe { &*self }
    }
}

impl ProcessArg for *const RLNWrapper {
    type ReturnType = &'static RLNWrapper;
    fn process(self) -> Self::ReturnType {
        // SAFETY: same contract as `*const RLN` above.
        unsafe { &*self }
    }
}

impl ProcessArg for *mut RLNWrapper {
    type ReturnType = &'static mut RLNWrapper;
    fn process(self) -> Self::ReturnType {
        // SAFETY: same contract as above, plus exclusivity — the caller
        // must not hold any other reference to this wrapper during the call.
        unsafe { &mut *self }
    }
}

impl<'a> ProcessArg for &'a [u8] {
    type ReturnType = &'a [u8];
    fn process(self) -> Self::ReturnType {
        self
    }
}
/// Creates an RLN instance from raw zkey bytes and leaks it behind a raw
/// pointer so JS can hold it across calls ('static lifetime). The pointer
/// is owned by the JS side and is never freed by this module.
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = newRLN)]
pub fn wasm_new(zkey: Uint8Array) -> Result<*mut RLNWrapper, String> {
    let instance = RLN::new_with_params(zkey.to_vec()).map_err(|err| format!("{:#?}", err))?;
    let wrapper = RLNWrapper { instance };
    Ok(Box::into_raw(Box::new(wrapper)))
}
/// Converts a serialized RLN witness into a plain JS object of BigInt-style
/// values, in the shape expected by circom's witness calculator.
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = rlnWitnessToJson)]
pub fn wasm_rln_witness_to_json(
    ctx: *mut RLNWrapper,
    serialized_witness: Uint8Array,
) -> Result<Object, String> {
    let inputs = call!(
        ctx,
        get_rln_witness_bigint_json,
        &serialized_witness.to_vec()[..]
    )
    .map_err(|err| err.to_string())?;
    // Round-trip through a JsValue map, then flatten into an Object.
    let js_value = serde_wasm_bindgen::to_value(&inputs).map_err(|err| err.to_string())?;
    Object::from_entries(&js_value).map_err(|err| format!("{:#?}", err))
}
/// Converts the JS BigInt witness values (via their decimal string form)
/// into `num_bigint::BigInt`s and generates a serialized RLN proof from
/// them together with the serialized witness.
///
/// Errors if any BigInt fails conversion/parsing or proof generation fails.
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateRLNProofWithWitness)]
pub fn wasm_generate_rln_proof_with_witness(
    ctx: *mut RLNWrapper,
    calculated_witness: Vec<JsBigInt>,
    serialized_witness: Uint8Array,
) -> Result<Uint8Array, String> {
    let mut witness_vec: Vec<BigInt> = vec![];
    for v in calculated_witness {
        witness_vec.push(
            v.to_string(10)
                .map_err(|err| format!("{:#?}", err))?
                .as_string()
                .ok_or("not a string error")?
                .parse::<BigInt>()
                .map_err(|err| format!("{:#?}", err))?,
        );
    }
    call_with_output_and_error_msg!(
        ctx,
        generate_rln_proof_with_witness,
        "could not generate proof",
        witness_vec,
        serialized_witness.to_vec()
    )
}
/// Recovers the identity secret from two serialized proofs generated with
/// the same external nullifier (a double-signaling/slashing event).
///
/// NOTE(review): the exported JS name `recovedIDSecret` is misspelled
/// ("recoved" vs "recovered"); renaming would break existing JS callers,
/// so it is left as-is here.
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = recovedIDSecret)]
pub fn wasm_recover_id_secret(
    ctx: *const RLNWrapper,
    input_proof_data_1: Uint8Array,
    input_proof_data_2: Uint8Array,
) -> Result<Uint8Array, String> {
    call_with_output_and_error_msg!(
        ctx,
        recover_id_secret,
        "could not recover id secret",
        &input_proof_data_1.to_vec()[..],
        &input_proof_data_2.to_vec()[..]
    )
}
/// Verifies a serialized proof against a serialized set of acceptable
/// Merkle roots, returning `Ok(true)` only when the proof checks out.
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = verifyWithRoots)]
pub fn wasm_verify_with_roots(
    ctx: *const RLNWrapper,
    proof: Uint8Array,
    roots: Uint8Array,
) -> Result<bool, String> {
    call_bool_method_with_error_msg!(
        ctx,
        verify_with_roots,
        "error while verifying proof with roots".to_string(),
        &proof.to_vec()[..],
        &roots.to_vec()[..]
    )
}

186
rln-wasm/src/wasm_rln.rs Normal file
View File

@@ -0,0 +1,186 @@
#![cfg(target_arch = "wasm32")]
#![cfg(not(feature = "utils"))]
use crate::wasm_utils::{VecWasmFr, WasmFr};
use js_sys::{BigInt as JsBigInt, Object, Uint8Array};
use num_bigint::BigInt;
use rln::{
circuit::{zkey_from_raw, Fr, Proof},
protocol::{
compute_id_secret, generate_proof_with_witness, proof_values_from_witness,
rln_witness_to_bigint_json, verify_proof, RLNProofValues, RLNWitnessInput, RLN,
},
utils::IdSecret,
};
use serde::Serialize;
use wasm_bindgen::prelude::*;
/// JS-facing RLN instance; owns the proving/verification key material.
#[wasm_bindgen]
pub struct WasmRLN(RLN);

#[wasm_bindgen]
impl WasmRLN {
    /// Builds an RLN instance from the raw bytes of an `.arkzkey` file.
    ///
    /// Errors if the buffer cannot be parsed as a zkey.
    #[wasm_bindgen(constructor)]
    pub fn new(zkey_buffer: &Uint8Array) -> Result<WasmRLN, String> {
        let zkey = zkey_from_raw(&zkey_buffer.to_vec()).map_err(|err| err.to_string())?;
        let rln = RLN { zkey };
        Ok(WasmRLN(rln))
    }

    /// Generates a zkSNARK proof from an externally calculated witness
    /// (e.g. produced by witness_calculator.js) plus the RLN witness input
    /// the public proof values are derived from.
    ///
    /// Errors if any witness element is not a valid decimal big integer or
    /// if proof generation fails. (Previously this `unwrap`ped on malformed
    /// BigInts, panicking across the wasm/JS boundary.)
    #[wasm_bindgen(js_name = generateProofWithWitness)]
    pub fn generate_proof_with_witness(
        &self,
        calculated_witness: Vec<JsBigInt>,
        rln_witness: &WasmRLNWitnessInput,
    ) -> Result<WasmRLNProof, String> {
        let proof_values =
            proof_values_from_witness(&rln_witness.0).map_err(|err| err.to_string())?;
        // Convert each JS BigInt via its decimal string form, propagating
        // conversion failures as errors instead of panicking.
        let calculated_witness_bigint = calculated_witness
            .iter()
            .map(|js_bigint| {
                js_bigint
                    .to_string(10)
                    .map_err(|err| format!("{:#?}", err))?
                    .as_string()
                    .ok_or_else(|| "BigInt could not be converted to a string".to_string())?
                    .parse::<BigInt>()
                    .map_err(|err| format!("{:#?}", err))
            })
            .collect::<Result<Vec<BigInt>, String>>()?;
        let proof = generate_proof_with_witness(calculated_witness_bigint, &self.0.zkey)
            .map_err(|err| err.to_string())?;
        Ok(WasmRLNProof {
            proof_values,
            proof,
        })
    }

    /// Verifies `proof` against the zkey's verifying key, then checks that
    /// the proof's Merkle root is one of `roots` (an empty `roots` list
    /// accepts any root) and that the public signal hash equals `x`.
    #[wasm_bindgen(js_name = verifyWithRoots)]
    pub fn verify_with_roots(
        &self,
        proof: &WasmRLNProof,
        roots: &VecWasmFr,
        x: &WasmFr,
    ) -> Result<bool, String> {
        let proof_verified = verify_proof(&self.0.zkey.0.vk, &proof.proof, &proof.proof_values)
            .map_err(|err| err.to_string())?;
        if !proof_verified {
            return Ok(false);
        }
        let roots_verified = if roots.length() == 0 {
            // No roots supplied: skip the root check.
            true
        } else {
            (0..roots.length())
                .filter_map(|i| roots.get(i))
                .any(|root| *root == proof.proof_values.root)
        };
        let signal_verified = **x == proof.proof_values.x;
        Ok(roots_verified && signal_verified)
    }
}
/// A generated RLN proof together with its public values, exposed to JS.
#[wasm_bindgen]
pub struct WasmRLNProof {
    proof: Proof,
    proof_values: RLNProofValues,
}

#[wasm_bindgen]
impl WasmRLNProof {
    /// The share `y` evaluated at the signal hash.
    #[wasm_bindgen(getter)]
    pub fn y(&self) -> WasmFr {
        self.proof_values.y.into()
    }

    /// The proof's nullifier.
    #[wasm_bindgen(getter)]
    pub fn nullifier(&self) -> WasmFr {
        self.proof_values.nullifier.into()
    }

    /// The Merkle root the proof was generated against.
    #[wasm_bindgen(getter)]
    pub fn root(&self) -> WasmFr {
        self.proof_values.root.into()
    }

    /// The public signal hash.
    #[wasm_bindgen(getter)]
    pub fn x(&self) -> WasmFr {
        self.proof_values.x.into()
    }

    /// The external nullifier (epoch/app binding).
    #[wasm_bindgen(getter, js_name = externalNullifier)]
    pub fn external_nullifier(&self) -> WasmFr {
        self.proof_values.external_nullifier.into()
    }

    /// Recovers the identity secret from two proofs that reused the same
    /// external nullifier (double-signaling slashing).
    ///
    /// Errors when the proofs belong to different external nullifiers or
    /// when the shares cannot be combined.
    #[wasm_bindgen(js_name = recoverIdSecret)]
    pub fn recover_id_secret(
        proof_1: &WasmRLNProof,
        proof_2: &WasmRLNProof,
    ) -> Result<WasmFr, String> {
        let values_1 = &proof_1.proof_values;
        let values_2 = &proof_2.proof_values;
        if values_1.external_nullifier != values_2.external_nullifier {
            return Err("External nullifiers do not match".to_string());
        }
        let recovered = compute_id_secret((values_1.x, values_1.y), (values_2.x, values_2.y))
            .map_err(|err| err.to_string())?;
        Ok(WasmFr::from(*recovered))
    }
}
/// JS-facing wrapper around the circuit witness input for RLN proofs.
#[wasm_bindgen]
pub struct WasmRLNWitnessInput(RLNWitnessInput);

#[wasm_bindgen]
impl WasmRLNWitnessInput {
    /// Assembles a witness from the user's secret, rate-limit parameters,
    /// Merkle authentication path, signal hash `x`, and external nullifier.
    ///
    /// Errors if the underlying `RLNWitnessInput::new` rejects the values.
    #[wasm_bindgen(constructor)]
    pub fn new(
        identity_secret: &WasmFr,
        user_message_limit: &WasmFr,
        message_id: &WasmFr,
        path_elements: &VecWasmFr,
        identity_path_index: &Uint8Array,
        x: &WasmFr,
        external_nullifier: &WasmFr,
    ) -> Result<WasmRLNWitnessInput, String> {
        // `IdSecret::from(&mut Fr)` takes the secret by mutable reference —
        // presumably so it can zeroize the source copy; confirm against
        // `rln::utils::IdSecret`.
        let mut identity_secret_fr = identity_secret.inner();
        let path_elements: Vec<Fr> = path_elements.inner();
        let identity_path_index: Vec<u8> = identity_path_index.to_vec();
        let rln_witness = RLNWitnessInput::new(
            IdSecret::from(&mut identity_secret_fr),
            user_message_limit.inner(),
            message_id.inner(),
            path_elements,
            identity_path_index,
            x.inner(),
            external_nullifier.inner(),
        )
        .map_err(|err| err.to_string())?;
        Ok(WasmRLNWitnessInput(rln_witness))
    }

    /// Serializes the witness as a JSON-style object of big-integer values,
    /// in the shape expected by circom's witness calculator.
    #[wasm_bindgen(js_name = toBigIntJson)]
    pub fn to_bigint_json(&self) -> Result<Object, String> {
        let inputs = rln_witness_to_bigint_json(&self.0).map_err(|err| err.to_string())?;
        // json_compatible(): serialize maps as plain JS objects, not Maps.
        let serializer = serde_wasm_bindgen::Serializer::json_compatible();
        let js_value = inputs
            .serialize(&serializer)
            .map_err(|err| err.to_string())?;
        js_value
            .dyn_into::<Object>()
            .map_err(|err| format!("{:#?}", err))
    }
}

327
rln-wasm/src/wasm_utils.rs Normal file
View File

@@ -0,0 +1,327 @@
#![cfg(target_arch = "wasm32")]
use js_sys::Uint8Array;
use rln::{
circuit::Fr,
hashers::{hash_to_field_be, hash_to_field_le, poseidon_hash},
protocol::{extended_keygen, extended_seeded_keygen, keygen, seeded_keygen},
utils::{
bytes_be_to_fr, bytes_be_to_vec_fr, bytes_be_to_vec_u8, bytes_le_to_fr, bytes_le_to_vec_fr,
bytes_le_to_vec_u8, fr_to_bytes_be, fr_to_bytes_le, vec_fr_to_bytes_be, vec_fr_to_bytes_le,
vec_u8_to_bytes_be, vec_u8_to_bytes_le,
},
};
use std::ops::Deref;
use wasm_bindgen::prelude::*;
// WasmFr
/// JS-facing wrapper around a single field element (`Fr`).
#[wasm_bindgen]
#[derive(Debug, Clone, Copy, PartialEq, Default)]
pub struct WasmFr(Fr);

impl From<Fr> for WasmFr {
    fn from(fr: Fr) -> Self {
        Self(fr)
    }
}

/// Lets Rust-side code read the wrapped element via `*wasm_fr`.
impl Deref for WasmFr {
    type Target = Fr;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[wasm_bindgen]
impl WasmFr {
    /// The field element 0.
    #[wasm_bindgen(js_name = zero)]
    pub fn zero() -> Self {
        Fr::from(0u32).into()
    }

    /// The field element 1.
    #[wasm_bindgen(js_name = one)]
    pub fn one() -> Self {
        Fr::from(1u32).into()
    }

    /// Builds a field element from an unsigned 32-bit integer.
    #[wasm_bindgen(js_name = fromUint)]
    pub fn from_uint(value: u32) -> Self {
        Fr::from(value).into()
    }

    /// Deserializes a field element from its little-endian byte encoding.
    #[wasm_bindgen(js_name = fromBytesLE)]
    pub fn from_bytes_le(bytes: &Uint8Array) -> Self {
        let (element, _) = bytes_le_to_fr(&bytes.to_vec());
        element.into()
    }

    /// Deserializes a field element from its big-endian byte encoding.
    #[wasm_bindgen(js_name = fromBytesBE)]
    pub fn from_bytes_be(bytes: &Uint8Array) -> Self {
        let (element, _) = bytes_be_to_fr(&bytes.to_vec());
        element.into()
    }

    /// Serializes this field element as little-endian bytes.
    #[wasm_bindgen(js_name = toBytesLE)]
    pub fn to_bytes_le(&self) -> Uint8Array {
        Uint8Array::from(fr_to_bytes_le(&self.0).as_slice())
    }

    /// Serializes this field element as big-endian bytes.
    #[wasm_bindgen(js_name = toBytesBE)]
    pub fn to_bytes_be(&self) -> Uint8Array {
        Uint8Array::from(fr_to_bytes_be(&self.0).as_slice())
    }

    /// Returns the Rust `Debug` rendering of the wrapped element.
    #[wasm_bindgen(js_name = debug)]
    pub fn debug(&self) -> String {
        format!("{:?}", self.0)
    }
}
impl WasmFr {
    /// Returns a copy of the wrapped `Fr` for Rust-side use
    /// (not exported to JS).
    pub fn inner(&self) -> Fr {
        self.0
    }
}
// VecWasmFr
#[wasm_bindgen]
#[derive(Debug, Clone, PartialEq, Default)]
pub struct VecWasmFr(Vec<Fr>);
#[wasm_bindgen]
impl VecWasmFr {
#[wasm_bindgen(constructor)]
pub fn new() -> Self {
Self(Vec::new())
}
#[wasm_bindgen(js_name = fromBytesLE)]
pub fn from_bytes_le(bytes: &Uint8Array) -> Result<VecWasmFr, String> {
let bytes_vec = bytes.to_vec();
bytes_le_to_vec_fr(&bytes_vec)
.map(|(vec_fr, _)| VecWasmFr(vec_fr))
.map_err(|err| err.to_string())
}
#[wasm_bindgen(js_name = fromBytesBE)]
pub fn from_bytes_be(bytes: &Uint8Array) -> Result<VecWasmFr, String> {
let bytes_vec = bytes.to_vec();
bytes_be_to_vec_fr(&bytes_vec)
.map(|(vec_fr, _)| VecWasmFr(vec_fr))
.map_err(|err| err.to_string())
}
#[wasm_bindgen(js_name = toBytesLE)]
pub fn to_bytes_le(&self) -> Uint8Array {
let bytes = vec_fr_to_bytes_le(&self.0);
Uint8Array::from(&bytes[..])
}
#[wasm_bindgen(js_name = toBytesBE)]
pub fn to_bytes_be(&self) -> Uint8Array {
let bytes = vec_fr_to_bytes_be(&self.0);
Uint8Array::from(&bytes[..])
}
#[wasm_bindgen(js_name = get)]
pub fn get(&self, index: usize) -> Option<WasmFr> {
self.0.get(index).map(|&fr| WasmFr(fr))
}
#[wasm_bindgen(js_name = length)]
pub fn length(&self) -> usize {
self.0.len()
}
#[wasm_bindgen(js_name = push)]
pub fn push(&mut self, element: &WasmFr) {
self.0.push(element.0);
}
#[wasm_bindgen(js_name = debug)]
pub fn debug(&self) -> String {
format!("{:?}", self.0)
}
}
impl VecWasmFr {
pub fn inner(&self) -> Vec<Fr> {
self.0.clone()
}
}
// Uint8Array
/// (De)serialization helpers for raw byte arrays, using rln's byte-vector
/// encodings.
#[wasm_bindgen]
pub struct Uint8ArrayUtils;

#[wasm_bindgen]
impl Uint8ArrayUtils {
    /// Serializes `input` using the little-endian byte-vector encoding.
    #[wasm_bindgen(js_name = toBytesLE)]
    pub fn to_bytes_le(input: &Uint8Array) -> Uint8Array {
        let encoded = vec_u8_to_bytes_le(&input.to_vec());
        Uint8Array::from(encoded.as_slice())
    }

    /// Serializes `input` using the big-endian byte-vector encoding.
    #[wasm_bindgen(js_name = toBytesBE)]
    pub fn to_bytes_be(input: &Uint8Array) -> Uint8Array {
        let encoded = vec_u8_to_bytes_be(&input.to_vec());
        Uint8Array::from(encoded.as_slice())
    }

    /// Decodes a byte array from the little-endian byte-vector encoding.
    #[wasm_bindgen(js_name = fromBytesLE)]
    pub fn from_bytes_le(bytes: &Uint8Array) -> Result<Uint8Array, String> {
        match bytes_le_to_vec_u8(&bytes.to_vec()) {
            Ok((decoded, _)) => Ok(Uint8Array::from(decoded.as_slice())),
            Err(err) => Err(err.to_string()),
        }
    }

    /// Decodes a byte array from the big-endian byte-vector encoding.
    #[wasm_bindgen(js_name = fromBytesBE)]
    pub fn from_bytes_be(bytes: &Uint8Array) -> Result<Uint8Array, String> {
        match bytes_be_to_vec_u8(&bytes.to_vec()) {
            Ok((decoded, _)) => Ok(Uint8Array::from(decoded.as_slice())),
            Err(err) => Err(err.to_string()),
        }
    }
}
// Utility APIs
/// Stateless hashing helpers exported to JS.
#[wasm_bindgen]
pub struct Hasher;

#[wasm_bindgen]
impl Hasher {
    /// Hashes arbitrary bytes to a field element (little-endian convention).
    #[wasm_bindgen(js_name = hashToFieldLE)]
    pub fn hash_to_field_le(input: &Uint8Array) -> WasmFr {
        hash_to_field_le(&input.to_vec()).into()
    }

    /// Hashes arbitrary bytes to a field element (big-endian convention).
    #[wasm_bindgen(js_name = hashToFieldBE)]
    pub fn hash_to_field_be(input: &Uint8Array) -> WasmFr {
        hash_to_field_be(&input.to_vec()).into()
    }

    /// Poseidon hash of the pair `(a, b)`.
    #[wasm_bindgen(js_name = poseidonHashPair)]
    pub fn poseidon_hash_pair(a: &WasmFr, b: &WasmFr) -> WasmFr {
        poseidon_hash(&[a.inner(), b.inner()]).into()
    }
}
/// A basic RLN identity: a secret hash and its public commitment.
#[wasm_bindgen]
pub struct Identity {
    identity_secret_hash: Fr,
    id_commitment: Fr,
}

#[wasm_bindgen]
impl Identity {
    /// Generates a fresh random identity.
    #[wasm_bindgen(js_name = generate)]
    pub fn generate() -> Identity {
        // keygen() wraps the secret (presumably in IdSecret); deref to
        // store the raw Fr.
        let (identity_secret_hash, id_commitment) = keygen();
        Identity {
            identity_secret_hash: *identity_secret_hash,
            id_commitment,
        }
    }

    /// Deterministically derives an identity from `seed` bytes.
    #[wasm_bindgen(js_name = generateSeeded)]
    pub fn generate_seeded(seed: &Uint8Array) -> Identity {
        let seed_vec = seed.to_vec();
        let (identity_secret_hash, id_commitment) = seeded_keygen(&seed_vec);
        Identity {
            identity_secret_hash,
            id_commitment,
        }
    }

    /// The identity secret hash (keep private).
    #[wasm_bindgen(js_name = getSecretHash)]
    pub fn get_secret_hash(&self) -> WasmFr {
        WasmFr(self.identity_secret_hash)
    }

    /// The public identity commitment.
    #[wasm_bindgen(js_name = getCommitment)]
    pub fn get_commitment(&self) -> WasmFr {
        WasmFr(self.id_commitment)
    }

    /// Returns `[secret_hash, commitment]` as a field-element list.
    #[wasm_bindgen(js_name = toArray)]
    pub fn to_array(&self) -> VecWasmFr {
        VecWasmFr(vec![self.identity_secret_hash, self.id_commitment])
    }
}
#[wasm_bindgen]
pub struct ExtendedIdentity {
identity_trapdoor: Fr,
identity_nullifier: Fr,
identity_secret_hash: Fr,
id_commitment: Fr,
}
#[wasm_bindgen]
impl ExtendedIdentity {
#[wasm_bindgen(js_name = generate)]
pub fn generate() -> ExtendedIdentity {
let (identity_trapdoor, identity_nullifier, identity_secret_hash, id_commitment) =
extended_keygen();
ExtendedIdentity {
identity_trapdoor,
identity_nullifier,
identity_secret_hash,
id_commitment,
}
}
#[wasm_bindgen(js_name = generateSeeded)]
pub fn generate_seeded(seed: &Uint8Array) -> ExtendedIdentity {
let seed_vec = seed.to_vec();
let (identity_trapdoor, identity_nullifier, identity_secret_hash, id_commitment) =
extended_seeded_keygen(&seed_vec);
ExtendedIdentity {
identity_trapdoor,
identity_nullifier,
identity_secret_hash,
id_commitment,
}
}
#[wasm_bindgen(js_name = getTrapdoor)]
pub fn get_trapdoor(&self) -> WasmFr {
WasmFr(self.identity_trapdoor)
}
#[wasm_bindgen(js_name = getNullifier)]
pub fn get_nullifier(&self) -> WasmFr {
WasmFr(self.identity_nullifier)
}
#[wasm_bindgen(js_name = getSecretHash)]
pub fn get_secret_hash(&self) -> WasmFr {
WasmFr(self.identity_secret_hash)
}
#[wasm_bindgen(js_name = getCommitment)]
pub fn get_commitment(&self) -> WasmFr {
WasmFr(self.id_commitment)
}
#[wasm_bindgen(js_name = toArray)]
pub fn to_array(&self) -> VecWasmFr {
VecWasmFr(vec![
self.identity_trapdoor,
self.identity_nullifier,
self.identity_secret_hash,
self.id_commitment,
])
}
}

View File

@@ -1,17 +1,14 @@
#![cfg(target_arch = "wasm32")]
#![cfg(not(feature = "utils"))]
#[cfg(test)]
mod test {
use js_sys::{BigInt as JsBigInt, Date, Object, Uint8Array};
use rln::circuit::{Fr, TEST_TREE_DEPTH};
use rln::hashers::{hash_to_field_le, poseidon_hash, PoseidonHash};
use rln::protocol::{prepare_verify_input, rln_witness_from_values, serialize_witness};
use rln::utils::{bytes_le_to_fr, fr_to_bytes_le, IdSecret};
use rln::circuit::TEST_TREE_DEPTH;
use rln::hashers::PoseidonHash;
use rln_wasm::{
wasm_generate_rln_proof_with_witness, wasm_new, wasm_rln_witness_to_json,
wasm_verify_with_roots,
Hasher, Identity, VecWasmFr, WasmFr, WasmRLN, WasmRLNProof, WasmRLNWitnessInput,
};
use rln_wasm_utils::wasm_key_gen;
use wasm_bindgen::{prelude::wasm_bindgen, JsValue};
use wasm_bindgen_test::{console_log, wasm_bindgen_test, wasm_bindgen_test_configure};
use zerokit_utils::{
@@ -105,134 +102,131 @@ mod test {
let mut results = String::from("\nbenchmarks:\n");
let iterations = 10;
let zkey = readFile(&ARKZKEY_BYTES).expect("Failed to read zkey file");
let zkey = readFile(ARKZKEY_BYTES).expect("Failed to read zkey file");
// Benchmark wasm_new
let start_wasm_new = Date::now();
// Benchmark RLN instance creation
let start_rln_new = Date::now();
for _ in 0..iterations {
let _ = wasm_new(zkey.clone()).expect("Failed to create RLN instance");
let _ = WasmRLN::new(&zkey).expect("Failed to create RLN instance");
}
let wasm_new_result = Date::now() - start_wasm_new;
let rln_new_result = Date::now() - start_rln_new;
// Create RLN instance for other benchmarks
let rln_instance = wasm_new(zkey).expect("Failed to create RLN instance");
let rln_instance = WasmRLN::new(&zkey).expect("Failed to create RLN instance");
let mut tree: OptimalMerkleTree<PoseidonHash> =
OptimalMerkleTree::default(TEST_TREE_DEPTH).expect("Failed to create tree");
// Benchmark wasm_key_gen
let start_wasm_key_gen = Date::now();
// Benchmark generate identity
let start_identity_gen = Date::now();
for _ in 0..iterations {
let _ = wasm_key_gen(true).expect("Failed to generate keys");
let _ = Identity::generate();
}
let wasm_key_gen_result = Date::now() - start_wasm_key_gen;
let identity_gen_result = Date::now() - start_identity_gen;
// Generate identity pair for other benchmarks
let mem_keys = wasm_key_gen(true).expect("Failed to generate keys");
let id_key = mem_keys.subarray(0, 32);
let (identity_secret_hash, _) = IdSecret::from_bytes_le(&id_key.to_vec());
let (id_commitment, _) = bytes_le_to_fr(&mem_keys.subarray(32, 64).to_vec());
// Generate identity for other benchmarks
let identity_pair = Identity::generate();
let identity_secret_hash = identity_pair.get_secret_hash();
let id_commitment = identity_pair.get_commitment();
let epoch = hash_to_field_le(b"test-epoch");
let rln_identifier = hash_to_field_le(b"test-rln-identifier");
let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);
let epoch = Hasher::hash_to_field_le(&Uint8Array::from(b"test-epoch" as &[u8]));
let rln_identifier =
Hasher::hash_to_field_le(&Uint8Array::from(b"test-rln-identifier" as &[u8]));
let external_nullifier = Hasher::poseidon_hash_pair(&epoch, &rln_identifier);
let identity_index = tree.leaves_set();
let user_message_limit = Fr::from(100);
let user_message_limit = WasmFr::from_uint(100);
let rate_commitment = poseidon_hash(&[id_commitment, user_message_limit]);
tree.update_next(rate_commitment)
let rate_commitment = Hasher::poseidon_hash_pair(&id_commitment, &user_message_limit);
tree.update_next(*rate_commitment)
.expect("Failed to update tree");
let message_id = Fr::from(0);
let message_id = WasmFr::from_uint(0);
let signal: [u8; 32] = [0; 32];
let x = hash_to_field_le(&signal);
let x = Hasher::hash_to_field_le(&Uint8Array::from(&signal[..]));
let merkle_proof: OptimalMerkleProof<PoseidonHash> = tree
.proof(identity_index)
.expect("Failed to generate merkle proof");
let rln_witness = rln_witness_from_values(
identity_secret_hash,
merkle_proof.get_path_elements(),
merkle_proof.get_path_index(),
x,
external_nullifier,
user_message_limit,
message_id,
let mut path_elements = VecWasmFr::new();
for path_element in merkle_proof.get_path_elements() {
path_elements.push(&WasmFr::from(path_element));
}
let path_index = Uint8Array::from(&merkle_proof.get_path_index()[..]);
let rln_witness_input = WasmRLNWitnessInput::new(
&identity_secret_hash,
&user_message_limit,
&message_id,
&path_elements,
&path_index,
&x,
&external_nullifier,
)
.expect("Failed to create RLN witness");
.expect("Failed to create WasmRLNWitnessInput");
let serialized_witness =
serialize_witness(&rln_witness).expect("Failed to serialize witness");
let witness_buffer = Uint8Array::from(&serialized_witness[..]);
let rln_witness_input_bigint_json = rln_witness_input
.to_bigint_json()
.expect("Failed to convert witness to BigInt JSON");
let json_inputs = wasm_rln_witness_to_json(rln_instance, witness_buffer.clone())
.expect("Failed to convert witness to JSON");
// Benchmark calculateWitness
// Benchmark witness calculation
let start_calculate_witness = Date::now();
for _ in 0..iterations {
let _ = calculateWitness(&CIRCOM_BYTES, json_inputs.clone())
let _ = calculateWitness(CIRCOM_BYTES, rln_witness_input_bigint_json.clone())
.await
.expect("Failed to calculate witness");
}
let calculate_witness_result = Date::now() - start_calculate_witness;
// Calculate witness for other benchmarks
let calculated_witness_json = calculateWitness(&CIRCOM_BYTES, json_inputs)
.await
.expect("Failed to calculate witness")
.as_string()
.expect("Failed to convert calculated witness to string");
let calculated_witness_str =
calculateWitness(CIRCOM_BYTES, rln_witness_input_bigint_json.clone())
.await
.expect("Failed to calculate witness")
.as_string()
.expect("Failed to convert calculated witness to string");
let calculated_witness_vec_str: Vec<String> =
serde_json::from_str(&calculated_witness_json).expect("Failed to parse JSON");
serde_json::from_str(&calculated_witness_str).expect("Failed to parse JSON");
let calculated_witness: Vec<JsBigInt> = calculated_witness_vec_str
.iter()
.map(|x| JsBigInt::new(&x.into()).expect("Failed to create JsBigInt"))
.collect();
// Benchmark wasm_generate_rln_proof_with_witness
let start_wasm_generate_rln_proof_with_witness = Date::now();
// Benchmark proof generation with witness
let start_generate_proof_with_witness = Date::now();
for _ in 0..iterations {
let _ = wasm_generate_rln_proof_with_witness(
rln_instance,
calculated_witness.clone(),
witness_buffer.clone(),
)
.expect("Failed to generate proof");
}
let wasm_generate_rln_proof_with_witness_result =
Date::now() - start_wasm_generate_rln_proof_with_witness;
// Generate a proof for other benchmarks
let proof =
wasm_generate_rln_proof_with_witness(rln_instance, calculated_witness, witness_buffer)
let _ = rln_instance
.generate_proof_with_witness(calculated_witness.clone(), &rln_witness_input)
.expect("Failed to generate proof");
let proof_data = proof.to_vec();
let verify_input = prepare_verify_input(proof_data, &signal);
let input_buffer = Uint8Array::from(&verify_input[..]);
let root = tree.root();
let roots_serialized = fr_to_bytes_le(&root);
let roots_buffer = Uint8Array::from(&roots_serialized[..]);
// Benchmark wasm_verify_with_roots
let start_wasm_verify_with_roots = Date::now();
for _ in 0..iterations {
let _ =
wasm_verify_with_roots(rln_instance, input_buffer.clone(), roots_buffer.clone())
.expect("Failed to verify proof");
}
let wasm_verify_with_roots_result = Date::now() - start_wasm_verify_with_roots;
let generate_proof_with_witness_result = Date::now() - start_generate_proof_with_witness;
// Verify the proof with the root
let is_proof_valid = wasm_verify_with_roots(rln_instance, input_buffer, roots_buffer)
// Generate proof with witness for other benchmarks
let proof: WasmRLNProof = rln_instance
.generate_proof_with_witness(calculated_witness, &rln_witness_input)
.expect("Failed to generate proof");
let root = WasmFr::from(tree.root());
let mut roots = VecWasmFr::new();
roots.push(&root);
// Benchmark proof verification with the root
let start_verify_with_roots = Date::now();
for _ in 0..iterations {
let _ = rln_instance
.verify_with_roots(&proof, &roots, &x)
.expect("Failed to verify proof");
}
let verify_with_roots_result = Date::now() - start_verify_with_roots;
// Verify proof with the root for other benchmarks
let is_proof_valid = rln_instance
.verify_with_roots(&proof, &roots, &x)
.expect("Failed to verify proof");
assert!(is_proof_valid, "verification failed");
// Format and display results
// Format and display the benchmark results
let format_duration = |duration_ms: f64| -> String {
let avg_ms = duration_ms / (iterations as f64);
if avg_ms >= 1000.0 {
@@ -242,22 +236,25 @@ mod test {
}
};
results.push_str(&format!("wasm_new: {}\n", format_duration(wasm_new_result)));
results.push_str(&format!(
"wasm_key_gen: {}\n",
format_duration(wasm_key_gen_result)
"RLN instance creation: {}\n",
format_duration(rln_new_result)
));
results.push_str(&format!(
"calculateWitness: {}\n",
"Identity generation: {}\n",
format_duration(identity_gen_result)
));
results.push_str(&format!(
"Witness calculation: {}\n",
format_duration(calculate_witness_result)
));
results.push_str(&format!(
"wasm_generate_rln_proof_with_witness: {}\n",
format_duration(wasm_generate_rln_proof_with_witness_result)
"Proof generation with witness: {}\n",
format_duration(generate_proof_with_witness_result)
));
results.push_str(&format!(
"wasm_verify_with_roots: {}\n",
format_duration(wasm_verify_with_roots_result)
"Proof verification with roots: {}\n",
format_duration(verify_with_roots_result)
));
// Log the results

View File

@@ -1,26 +1,20 @@
#![cfg(not(feature = "parallel"))]
#![cfg(target_arch = "wasm32")]
#![cfg(not(feature = "utils"))]
#[cfg(test)]
mod test {
use js_sys::{BigInt as JsBigInt, Date, Object, Uint8Array};
use rln::circuit::{Fr, TEST_TREE_DEPTH};
use rln::hashers::{hash_to_field_le, poseidon_hash, PoseidonHash};
use rln::protocol::{prepare_verify_input, rln_witness_from_values, serialize_witness};
use rln::utils::{bytes_le_to_fr, fr_to_bytes_le, IdSecret};
use rln::circuit::TEST_TREE_DEPTH;
use rln::hashers::PoseidonHash;
use rln_wasm::{
wasm_generate_rln_proof_with_witness, wasm_new, wasm_rln_witness_to_json,
wasm_verify_with_roots,
Hasher, Identity, VecWasmFr, WasmFr, WasmRLN, WasmRLNProof, WasmRLNWitnessInput,
};
use rln_wasm_utils::wasm_key_gen;
use wasm_bindgen::{prelude::wasm_bindgen, JsValue};
use wasm_bindgen_test::{console_log, wasm_bindgen_test};
use zerokit_utils::{
OptimalMerkleProof, OptimalMerkleTree, ZerokitMerkleProof, ZerokitMerkleTree,
};
const WITNESS_CALCULATOR_JS: &str = include_str!("../resources/witness_calculator.js");
#[wasm_bindgen(inline_js = r#"
const fs = require("fs");
@@ -73,6 +67,8 @@ mod test {
async fn calculateWitness(circom_path: &str, input: Object) -> Result<JsValue, JsValue>;
}
const WITNESS_CALCULATOR_JS: &str = include_str!("../resources/witness_calculator.js");
const ARKZKEY_PATH: &str = "../rln/resources/tree_depth_20/rln_final.arkzkey";
const CIRCOM_PATH: &str = "../rln/resources/tree_depth_20/rln.wasm";
@@ -86,134 +82,131 @@ mod test {
let mut results = String::from("\nbenchmarks:\n");
let iterations = 10;
let zkey = readFile(&ARKZKEY_PATH).expect("Failed to read zkey file");
let zkey = readFile(ARKZKEY_PATH).expect("Failed to read zkey file");
// Benchmark wasm_new
let start_wasm_new = Date::now();
// Benchmark RLN instance creation
let start_rln_new = Date::now();
for _ in 0..iterations {
let _ = wasm_new(zkey.clone()).expect("Failed to create RLN instance");
let _ = WasmRLN::new(&zkey).expect("Failed to create RLN instance");
}
let wasm_new_result = Date::now() - start_wasm_new;
let rln_new_result = Date::now() - start_rln_new;
// Create RLN instance for other benchmarks
let rln_instance = wasm_new(zkey).expect("Failed to create RLN instance");
let rln_instance = WasmRLN::new(&zkey).expect("Failed to create RLN instance");
let mut tree: OptimalMerkleTree<PoseidonHash> =
OptimalMerkleTree::default(TEST_TREE_DEPTH).expect("Failed to create tree");
// Benchmark wasm_key_gen
let start_wasm_key_gen = Date::now();
// Benchmark generate identity
let start_identity_gen = Date::now();
for _ in 0..iterations {
let _ = wasm_key_gen(true).expect("Failed to generate keys");
let _ = Identity::generate();
}
let wasm_key_gen_result = Date::now() - start_wasm_key_gen;
let identity_gen_result = Date::now() - start_identity_gen;
// Generate identity pair for other benchmarks
let mem_keys = wasm_key_gen(true).expect("Failed to generate keys");
let id_key = mem_keys.subarray(0, 32);
let (identity_secret_hash, _) = IdSecret::from_bytes_le(&id_key.to_vec());
let (id_commitment, _) = bytes_le_to_fr(&mem_keys.subarray(32, 64).to_vec());
// Generate identity for other benchmarks
let identity_pair = Identity::generate();
let identity_secret_hash = identity_pair.get_secret_hash();
let id_commitment = identity_pair.get_commitment();
let epoch = hash_to_field_le(b"test-epoch");
let rln_identifier = hash_to_field_le(b"test-rln-identifier");
let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);
let epoch = Hasher::hash_to_field_le(&Uint8Array::from(b"test-epoch" as &[u8]));
let rln_identifier =
Hasher::hash_to_field_le(&Uint8Array::from(b"test-rln-identifier" as &[u8]));
let external_nullifier = Hasher::poseidon_hash_pair(&epoch, &rln_identifier);
let identity_index = tree.leaves_set();
let user_message_limit = Fr::from(100);
let user_message_limit = WasmFr::from_uint(100);
let rate_commitment = poseidon_hash(&[id_commitment, user_message_limit]);
tree.update_next(rate_commitment)
let rate_commitment = Hasher::poseidon_hash_pair(&id_commitment, &user_message_limit);
tree.update_next(*rate_commitment)
.expect("Failed to update tree");
let message_id = Fr::from(0);
let message_id = WasmFr::from_uint(0);
let signal: [u8; 32] = [0; 32];
let x = hash_to_field_le(&signal);
let x = Hasher::hash_to_field_le(&Uint8Array::from(&signal[..]));
let merkle_proof: OptimalMerkleProof<PoseidonHash> = tree
.proof(identity_index)
.expect("Failed to generate merkle proof");
let rln_witness = rln_witness_from_values(
identity_secret_hash,
merkle_proof.get_path_elements(),
merkle_proof.get_path_index(),
x,
external_nullifier,
user_message_limit,
message_id,
let mut path_elements = VecWasmFr::new();
for path_element in merkle_proof.get_path_elements() {
path_elements.push(&WasmFr::from(path_element));
}
let path_index = Uint8Array::from(&merkle_proof.get_path_index()[..]);
let rln_witness_input = WasmRLNWitnessInput::new(
&identity_secret_hash,
&user_message_limit,
&message_id,
&path_elements,
&path_index,
&x,
&external_nullifier,
)
.expect("Failed to create RLN witness");
.expect("Failed to create WasmRLNWitnessInput");
let serialized_witness =
serialize_witness(&rln_witness).expect("Failed to serialize witness");
let witness_buffer = Uint8Array::from(&serialized_witness[..]);
let rln_witness_input_bigint_json = rln_witness_input
.to_bigint_json()
.expect("Failed to convert witness to BigInt JSON");
let json_inputs = wasm_rln_witness_to_json(rln_instance, witness_buffer.clone())
.expect("Failed to convert witness to JSON");
// Benchmark calculateWitness
// Benchmark witness calculation
let start_calculate_witness = Date::now();
for _ in 0..iterations {
let _ = calculateWitness(&CIRCOM_PATH, json_inputs.clone())
let _ = calculateWitness(CIRCOM_PATH, rln_witness_input_bigint_json.clone())
.await
.expect("Failed to calculate witness");
}
let calculate_witness_result = Date::now() - start_calculate_witness;
// Calculate witness for other benchmarks
let calculated_witness_json = calculateWitness(&CIRCOM_PATH, json_inputs)
.await
.expect("Failed to calculate witness")
.as_string()
.expect("Failed to convert calculated witness to string");
let calculated_witness_str =
calculateWitness(CIRCOM_PATH, rln_witness_input_bigint_json.clone())
.await
.expect("Failed to calculate witness")
.as_string()
.expect("Failed to convert calculated witness to string");
let calculated_witness_vec_str: Vec<String> =
serde_json::from_str(&calculated_witness_json).expect("Failed to parse JSON");
serde_json::from_str(&calculated_witness_str).expect("Failed to parse JSON");
let calculated_witness: Vec<JsBigInt> = calculated_witness_vec_str
.iter()
.map(|x| JsBigInt::new(&x.into()).expect("Failed to create JsBigInt"))
.collect();
// Benchmark wasm_generate_rln_proof_with_witness
let start_wasm_generate_rln_proof_with_witness = Date::now();
// Benchmark proof generation with witness
let start_generate_proof_with_witness = Date::now();
for _ in 0..iterations {
let _ = wasm_generate_rln_proof_with_witness(
rln_instance,
calculated_witness.clone(),
witness_buffer.clone(),
)
.expect("Failed to generate proof");
}
let wasm_generate_rln_proof_with_witness_result =
Date::now() - start_wasm_generate_rln_proof_with_witness;
// Generate a proof for other benchmarks
let proof =
wasm_generate_rln_proof_with_witness(rln_instance, calculated_witness, witness_buffer)
let _ = rln_instance
.generate_proof_with_witness(calculated_witness.clone(), &rln_witness_input)
.expect("Failed to generate proof");
let proof_data = proof.to_vec();
let verify_input = prepare_verify_input(proof_data, &signal);
let input_buffer = Uint8Array::from(&verify_input[..]);
let root = tree.root();
let roots_serialized = fr_to_bytes_le(&root);
let roots_buffer = Uint8Array::from(&roots_serialized[..]);
// Benchmark wasm_verify_with_roots
let start_wasm_verify_with_roots = Date::now();
for _ in 0..iterations {
let _ =
wasm_verify_with_roots(rln_instance, input_buffer.clone(), roots_buffer.clone())
.expect("Failed to verify proof");
}
let wasm_verify_with_roots_result = Date::now() - start_wasm_verify_with_roots;
let generate_proof_with_witness_result = Date::now() - start_generate_proof_with_witness;
// Verify the proof with the root
let is_proof_valid = wasm_verify_with_roots(rln_instance, input_buffer, roots_buffer)
// Generate proof with witness for other benchmarks
let proof: WasmRLNProof = rln_instance
.generate_proof_with_witness(calculated_witness, &rln_witness_input)
.expect("Failed to generate proof");
let root = WasmFr::from(tree.root());
let mut roots = VecWasmFr::new();
roots.push(&root);
// Benchmark proof verification with the root
let start_verify_with_roots = Date::now();
for _ in 0..iterations {
let _ = rln_instance
.verify_with_roots(&proof, &roots, &x)
.expect("Failed to verify proof");
}
let verify_with_roots_result = Date::now() - start_verify_with_roots;
// Verify proof with the root for other benchmarks
let is_proof_valid = rln_instance
.verify_with_roots(&proof, &roots, &x)
.expect("Failed to verify proof");
assert!(is_proof_valid, "verification failed");
// Format and display results
// Format and display the benchmark results
let format_duration = |duration_ms: f64| -> String {
let avg_ms = duration_ms / (iterations as f64);
if avg_ms >= 1000.0 {
@@ -223,22 +216,25 @@ mod test {
}
};
results.push_str(&format!("wasm_new: {}\n", format_duration(wasm_new_result)));
results.push_str(&format!(
"wasm_key_gen: {}\n",
format_duration(wasm_key_gen_result)
"RLN instance creation: {}\n",
format_duration(rln_new_result)
));
results.push_str(&format!(
"calculate_witness: {}\n",
"Identity generation: {}\n",
format_duration(identity_gen_result)
));
results.push_str(&format!(
"Witness calculation: {}\n",
format_duration(calculate_witness_result)
));
results.push_str(&format!(
"wasm_generate_rln_proof_with_witness: {}\n",
format_duration(wasm_generate_rln_proof_with_witness_result)
"Proof generation with witness: {}\n",
format_duration(generate_proof_with_witness_result)
));
results.push_str(&format!(
"wasm_verify_with_roots: {}\n",
format_duration(wasm_verify_with_roots_result)
"Proof verification with roots: {}\n",
format_duration(verify_with_roots_result)
));
// Log the results

229
rln-wasm/tests/utils.rs Normal file
View File

@@ -0,0 +1,229 @@
#![cfg(target_arch = "wasm32")]
#[cfg(test)]
mod test {
use ark_std::rand::thread_rng;
use js_sys::Uint8Array;
use rand::Rng;
use rln::circuit::Fr;
use rln::hashers::poseidon_hash;
use rln::utils::{fr_to_bytes_be, fr_to_bytes_le, str_to_fr, IdSecret};
use rln_wasm::{ExtendedIdentity, Hasher, Identity, VecWasmFr, WasmFr};
use std::assert_eq;
use wasm_bindgen_test::wasm_bindgen_test;
#[wasm_bindgen_test]
// A freshly generated Identity must yield a non-zero secret hash and a
// non-zero commitment, and to_array() must expose exactly those two
// elements in order [secret_hash, commitment].
fn test_keygen_wasm() {
    let identity = Identity::generate();
    let secret_hash = *identity.get_secret_hash();
    let commitment = *identity.get_commitment();
    // Neither component of a valid identity may be the zero field element.
    assert_ne!(secret_hash, Fr::from(0u8));
    assert_ne!(commitment, Fr::from(0u8));
    // The exported array carries both components in declaration order.
    let exported = identity.to_array();
    assert_eq!(exported.length(), 2);
    assert_eq!(*exported.get(0).unwrap(), secret_hash);
    assert_eq!(*exported.get(1).unwrap(), commitment);
}
#[wasm_bindgen_test]
// Extended identity generation: trapdoor, nullifier, secret hash and
// commitment must all be non-zero, and to_array() must expose the four
// components in that order.
fn test_extended_keygen_wasm() {
    let identity = ExtendedIdentity::generate();
    let trapdoor = *identity.get_trapdoor();
    let nullifier = *identity.get_nullifier();
    let secret_hash = *identity.get_secret_hash();
    let commitment = *identity.get_commitment();
    // No component of a valid extended identity may be zero.
    for component in [trapdoor, nullifier, secret_hash, commitment] {
        assert_ne!(component, Fr::from(0u8));
    }
    // The exported array carries all four components in declaration order.
    let exported = identity.to_array();
    assert_eq!(exported.length(), 4);
    assert_eq!(*exported.get(0).unwrap(), trapdoor);
    assert_eq!(*exported.get(1).unwrap(), nullifier);
    assert_eq!(*exported.get(2).unwrap(), secret_hash);
    assert_eq!(*exported.get(3).unwrap(), commitment);
}
#[wasm_bindgen_test]
// Seeded key generation must be deterministic: the fixed 10-byte seed below
// must always produce the same (secret hash, commitment) pair. The expected
// values are pinned as hex-encoded field elements (radix 16).
fn test_seeded_keygen_wasm() {
let seed_bytes: Vec<u8> = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
let seed = Uint8Array::from(&seed_bytes[..]);
let identity = Identity::generate_seeded(&seed);
let identity_secret_hash = *identity.get_secret_hash();
let id_commitment = *identity.get_commitment();
// Golden values: regenerating from the same seed must reproduce these.
let expected_identity_secret_hash_seed_bytes = str_to_fr(
"0x766ce6c7e7a01bdf5b3f257616f603918c30946fa23480f2859c597817e6716",
16,
)
.unwrap();
let expected_id_commitment_seed_bytes = str_to_fr(
"0xbf16d2b5c0d6f9d9d561e05bfca16a81b4b873bb063508fae360d8c74cef51f",
16,
)
.unwrap();
assert_eq!(
identity_secret_hash,
expected_identity_secret_hash_seed_bytes
);
assert_eq!(id_commitment, expected_id_commitment_seed_bytes);
}
#[wasm_bindgen_test]
// Seeded extended key generation must be deterministic: the fixed 10-byte
// seed below must always reproduce the same trapdoor, nullifier, secret
// hash and commitment. Expected values are pinned as hex field elements
// (radix 16).
fn test_seeded_extended_keygen_wasm() {
let seed_bytes: Vec<u8> = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
let seed = Uint8Array::from(&seed_bytes[..]);
let identity = ExtendedIdentity::generate_seeded(&seed);
let identity_trapdoor = *identity.get_trapdoor();
let identity_nullifier = *identity.get_nullifier();
let identity_secret_hash = *identity.get_secret_hash();
let id_commitment = *identity.get_commitment();
// Golden values: regenerating from the same seed must reproduce all four.
let expected_identity_trapdoor_seed_bytes = str_to_fr(
"0x766ce6c7e7a01bdf5b3f257616f603918c30946fa23480f2859c597817e6716",
16,
)
.unwrap();
let expected_identity_nullifier_seed_bytes = str_to_fr(
"0x1f18714c7bc83b5bca9e89d404cf6f2f585bc4c0f7ed8b53742b7e2b298f50b4",
16,
)
.unwrap();
let expected_identity_secret_hash_seed_bytes = str_to_fr(
"0x2aca62aaa7abaf3686fff2caf00f55ab9462dc12db5b5d4bcf3994e671f8e521",
16,
)
.unwrap();
let expected_id_commitment_seed_bytes = str_to_fr(
"0x68b66aa0a8320d2e56842581553285393188714c48f9b17acd198b4f1734c5c",
16,
)
.unwrap();
assert_eq!(identity_trapdoor, expected_identity_trapdoor_seed_bytes);
assert_eq!(identity_nullifier, expected_identity_nullifier_seed_bytes);
assert_eq!(
identity_secret_hash,
expected_identity_secret_hash_seed_bytes
);
assert_eq!(id_commitment, expected_id_commitment_seed_bytes);
}
#[wasm_bindgen_test]
// WasmFr constructors must mirror the corresponding native Fr values,
// debug() must render the decimal representation, and the identity getters
// must agree with an IdSecret built from the same field element.
fn test_wasmfr() {
    // Constructor parity with native Fr.
    assert_eq!(*WasmFr::zero(), Fr::from(0u8));
    assert_eq!(*WasmFr::one(), Fr::from(1u8));
    let forty_two = WasmFr::from_uint(42);
    assert_eq!(*forty_two, Fr::from(42u8));
    // debug() renders the decimal value of the element.
    assert_eq!(forty_two.debug().to_string(), "42");
    // Getter values survive the round-trip through IdSecret unchanged.
    let identity = Identity::generate();
    let mut secret_fr = *identity.get_secret_hash();
    let secret = IdSecret::from(&mut secret_fr);
    assert_eq!(*identity.get_secret_hash(), *secret);
    let commitment = *identity.get_commitment();
    assert_eq!(*identity.get_commitment(), commitment);
}
#[wasm_bindgen_test]
fn test_vec_wasmfr() {
let vec_fr = vec![Fr::from(1u8), Fr::from(2u8), Fr::from(3u8), Fr::from(4u8)];
let mut vec_wasmfr = VecWasmFr::new();
for fr in &vec_fr {
vec_wasmfr.push(&WasmFr::from(*fr));
}
let bytes_le = vec_wasmfr.to_bytes_le();
let expected_le = rln::utils::vec_fr_to_bytes_le(&vec_fr);
assert_eq!(bytes_le.to_vec(), expected_le);
let bytes_be = vec_wasmfr.to_bytes_be();
let expected_be = rln::utils::vec_fr_to_bytes_be(&vec_fr);
assert_eq!(bytes_be.to_vec(), expected_be);
let vec_wasmfr_from_le = match VecWasmFr::from_bytes_le(&bytes_le) {
Ok(v) => v,
Err(err) => panic!("VecWasmFr::from_bytes_le call failed: {}", err),
};
assert_eq!(vec_wasmfr_from_le.length(), vec_wasmfr.length());
for i in 0..vec_wasmfr.length() {
assert_eq!(
*vec_wasmfr_from_le.get(i).unwrap(),
*vec_wasmfr.get(i).unwrap()
);
}
let vec_wasmfr_from_be = match VecWasmFr::from_bytes_be(&bytes_be) {
Ok(v) => v,
Err(err) => panic!("VecWasmFr::from_bytes_be call failed: {}", err),
};
for i in 0..vec_wasmfr.length() {
assert_eq!(
*vec_wasmfr_from_be.get(i).unwrap(),
*vec_wasmfr.get(i).unwrap()
);
}
}
#[wasm_bindgen_test]
// Hasher::hash_to_field_{le,be} must agree with the native rln hashers for
// a random 32-byte signal, both as field elements and in serialized form;
// the two serialized byte orders themselves must differ.
fn test_hash_to_field_wasm() {
    let signal_bytes: [u8; 32] = thread_rng().gen();
    let signal = Uint8Array::from(&signal_bytes[..]);
    // Wasm wrappers must match native hashing for both endiannesses.
    let wasm_le = Hasher::hash_to_field_le(&signal);
    let native_le = rln::hashers::hash_to_field_le(&signal_bytes);
    assert_eq!(*wasm_le, native_le);
    let wasm_be = Hasher::hash_to_field_be(&signal);
    let native_be = rln::hashers::hash_to_field_be(&signal_bytes);
    assert_eq!(*wasm_be, native_be);
    // Both paths hash to the same field element.
    assert_eq!(*wasm_le, *wasm_be);
    assert_eq!(native_le, native_be);
    // Serializations match the native helpers for each byte order...
    let wasm_le_bytes = wasm_le.to_bytes_le();
    let native_le_bytes = fr_to_bytes_le(&native_le);
    assert_eq!(wasm_le_bytes.to_vec(), native_le_bytes);
    let wasm_be_bytes = wasm_be.to_bytes_be();
    let native_be_bytes = fr_to_bytes_be(&native_be);
    assert_eq!(wasm_be_bytes.to_vec(), native_be_bytes);
    // ...but the LE and BE encodings themselves are distinct.
    assert_ne!(wasm_le_bytes.to_vec(), wasm_be_bytes.to_vec());
    assert_ne!(native_le_bytes, native_be_bytes);
}
#[wasm_bindgen_test]
// Hasher::poseidon_hash_pair must compute the same digest as the native
// two-input poseidon_hash.
fn test_poseidon_hash_pair_wasm() {
    let left = Fr::from(42u8);
    let right = Fr::from(99u8);
    let expected = poseidon_hash(&[left, right]);
    let actual = Hasher::poseidon_hash_pair(&WasmFr::from_uint(42), &WasmFr::from_uint(99));
    assert_eq!(*actual, expected);
}
}

View File

@@ -36,7 +36,6 @@ cfg-if = "1.0"
num-bigint = { version = "0.4.6", default-features = false, features = ["std"] }
num-traits = "0.2.19"
once_cell = "1.21.3"
lazy_static = "1.5.0"
rand = "0.8.5"
rand_chacha = "0.3.1"
ruint = { version = "1.17.0", default-features = false, features = [

View File

@@ -17,7 +17,7 @@ use graph::Node;
pub type InputSignalsInfo = HashMap<String, (usize, usize)>;
pub fn calc_witness<I: IntoIterator<Item = (String, Vec<FrOrSecret>)>>(
pub(crate) fn calc_witness<I: IntoIterator<Item = (String, Vec<FrOrSecret>)>>(
inputs: I,
graph_data: &[u8],
) -> Vec<Fr> {

View File

@@ -1,16 +1,18 @@
// This file is based on the code by iden3. Its preimage can be found here:
// https://github.com/iden3/circom-witnesscalc/blob/5cb365b6e4d9052ecc69d4567fcf5bc061c20e94/src/storage.rs
use ark_bn254::Fr;
use ark_ff::PrimeField;
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use prost::Message;
use std::io::{Read, Write};
use crate::circuit::iden3calc::{
graph,
graph::{Operation, TresOperation, UnoOperation},
proto, InputSignalsInfo,
use crate::circuit::{
iden3calc::{
graph,
graph::{Operation, TresOperation, UnoOperation},
proto, InputSignalsInfo,
},
Fr,
};
// format of the wtns.graph file:

View File

@@ -4,30 +4,32 @@ pub mod error;
pub mod iden3calc;
pub mod qap;
use ::lazy_static::lazy_static;
use ark_bn254::{
Bn254, Fq as ArkFq, Fq2 as ArkFq2, Fr as ArkFr, G1Affine as ArkG1Affine,
G1Projective as ArkG1Projective, G2Affine as ArkG2Affine, G2Projective as ArkG2Projective,
};
use ark_groth16::ProvingKey;
use ark_groth16::{
Proof as ArkProof, ProvingKey as ArkProvingKey, VerifyingKey as ArkVerifyingKey,
};
use ark_relations::r1cs::ConstraintMatrices;
use crate::circuit::error::ZKeyReadError;
use crate::circuit::iden3calc::calc_witness;
use {ark_ff::Field, ark_serialize::CanonicalDeserialize, ark_serialize::CanonicalSerialize};
use crate::utils::FrOrSecret;
pub const ARKZKEY_BYTES: &[u8] = include_bytes!("../../resources/tree_depth_20/rln_final.arkzkey");
#[cfg(not(target_arch = "wasm32"))]
use std::sync::LazyLock;
#[cfg(not(target_arch = "wasm32"))]
const GRAPH_BYTES: &[u8] = include_bytes!("../../resources/tree_depth_20/graph.bin");
lazy_static! {
static ref ARKZKEY: (ProvingKey<Curve>, ConstraintMatrices<Fr>) =
read_arkzkey_from_bytes_uncompressed(ARKZKEY_BYTES).expect("Failed to read arkzkey");
}
#[cfg(not(target_arch = "wasm32"))]
const ARKZKEY_BYTES: &[u8] = include_bytes!("../../resources/tree_depth_20/rln_final.arkzkey");
#[cfg(not(target_arch = "wasm32"))]
static ARKZKEY: LazyLock<Zkey> = LazyLock::new(|| {
read_arkzkey_from_bytes_uncompressed(ARKZKEY_BYTES).expect("Failed to read arkzkey")
});
pub const TEST_TREE_DEPTH: usize = 20;
@@ -42,10 +44,13 @@ pub type G1Projective = ArkG1Projective;
pub type G2Affine = ArkG2Affine;
pub type G2Projective = ArkG2Projective;
// Loads the proving key using a bytes vector
pub fn zkey_from_raw(
zkey_data: &[u8],
) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>), ZKeyReadError> {
pub type Proof = ArkProof<Curve>;
pub type ProvingKey = ArkProvingKey<Curve>;
pub type Zkey = (ArkProvingKey<Curve>, ConstraintMatrices<Fr>);
pub type VerifyingKey = ArkVerifyingKey<Curve>;
// Loads the zkey using a bytes vector
pub fn zkey_from_raw(zkey_data: &[u8]) -> Result<Zkey, ZKeyReadError> {
if zkey_data.is_empty() {
return Err(ZKeyReadError::EmptyBytes);
}
@@ -57,17 +62,11 @@ pub fn zkey_from_raw(
// Loads the proving key
#[cfg(not(target_arch = "wasm32"))]
pub fn zkey_from_folder() -> &'static (ProvingKey<Curve>, ConstraintMatrices<Fr>) {
pub fn zkey_from_folder() -> &'static Zkey {
&ARKZKEY
}
pub fn calculate_rln_witness<I: IntoIterator<Item = (String, Vec<FrOrSecret>)>>(
inputs: I,
graph_data: &[u8],
) -> Vec<Fr> {
calc_witness(inputs, graph_data)
}
// Loads the graph data
#[cfg(not(target_arch = "wasm32"))]
pub fn graph_from_folder() -> &'static [u8] {
GRAPH_BYTES
@@ -75,33 +74,30 @@ pub fn graph_from_folder() -> &'static [u8] {
////////////////////////////////////////////////////////
// Functions and structs from [arkz-key](https://github.com/zkmopro/ark-zkey/blob/main/src/lib.rs#L106)
// without print and allow to choose between compressed and uncompressed arkzkey
////////////////////////////////////////////////////////
#[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug, PartialEq)]
pub struct SerializableProvingKey(pub ProvingKey<Bn254>);
struct SerializableProvingKey(ArkProvingKey<Curve>);
#[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug, PartialEq)]
pub struct SerializableConstraintMatrices<F: Field> {
pub num_instance_variables: usize,
pub num_witness_variables: usize,
pub num_constraints: usize,
pub a_num_non_zero: usize,
pub b_num_non_zero: usize,
pub c_num_non_zero: usize,
pub a: SerializableMatrix<F>,
pub b: SerializableMatrix<F>,
pub c: SerializableMatrix<F>,
struct SerializableConstraintMatrices<F: Field> {
num_instance_variables: usize,
num_witness_variables: usize,
num_constraints: usize,
a_num_non_zero: usize,
b_num_non_zero: usize,
c_num_non_zero: usize,
a: SerializableMatrix<F>,
b: SerializableMatrix<F>,
c: SerializableMatrix<F>,
}
#[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug, PartialEq)]
pub struct SerializableMatrix<F: Field> {
struct SerializableMatrix<F: Field> {
pub data: Vec<Vec<(F, usize)>>,
}
pub fn read_arkzkey_from_bytes_uncompressed(
arkzkey_data: &[u8],
) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>), ZKeyReadError> {
fn read_arkzkey_from_bytes_uncompressed(arkzkey_data: &[u8]) -> Result<Zkey, ZKeyReadError> {
if arkzkey_data.is_empty() {
return Err(ZKeyReadError::EmptyBytes);
}
@@ -115,8 +111,8 @@ pub fn read_arkzkey_from_bytes_uncompressed(
SerializableConstraintMatrices::deserialize_uncompressed_unchecked(&mut cursor)?;
// Get on right form for API
let proving_key: ProvingKey<Bn254> = serialized_proving_key.0;
let constraint_matrices: ConstraintMatrices<ark_bn254::Fr> = ConstraintMatrices {
let proving_key: ProvingKey = serialized_proving_key.0;
let constraint_matrices: ConstraintMatrices<Fr> = ConstraintMatrices {
num_instance_variables: serialized_constraint_matrices.num_instance_variables,
num_witness_variables: serialized_constraint_matrices.num_witness_variables,
num_constraints: serialized_constraint_matrices.num_constraints,
@@ -127,6 +123,7 @@ pub fn read_arkzkey_from_bytes_uncompressed(
b: serialized_constraint_matrices.b.data,
c: serialized_constraint_matrices.c.data,
};
let zkey = (proving_key, constraint_matrices);
Ok((proving_key, constraint_matrices))
Ok(zkey)
}

View File

@@ -18,7 +18,7 @@ use rayon::iter::{
/// coefficients domain. snarkjs instead precomputes the Lagrange form of the powers of tau bases
/// in a domain twice as large and the witness map is computed as the odd coefficients of (AB-C)
/// in that domain. This serves as HZ when computing the C proof element.
pub struct CircomReduction;
pub(crate) struct CircomReduction;
impl R1CSToQAP for CircomReduction {
#[allow(clippy::type_complexity)]

View File

@@ -1,5 +1,3 @@
use crate::circuit::error::ZKeyReadError;
use ark_bn254::Fr;
use ark_relations::r1cs::SynthesisError;
use ark_serialize::SerializationError;
use num_bigint::{BigInt, ParseBigIntError};
@@ -9,6 +7,8 @@ use std::string::FromUtf8Error;
use thiserror::Error;
use utils::error::{FromConfigError, ZerokitMerkleTreeError};
use crate::circuit::{error::ZKeyReadError, Fr};
#[derive(Debug, thiserror::Error)]
pub enum ConversionError {
#[error("Expected radix 10 or 16")]

View File

@@ -2,16 +2,13 @@
use super::ffi_utils::{CBoolResult, CFr, CResult};
use crate::{
circuit::{graph_from_folder, zkey_from_folder, zkey_from_raw, Curve},
circuit::{graph_from_folder, zkey_from_folder, zkey_from_raw, Fr, Proof},
protocol::{
compute_id_secret, generate_proof, proof_values_from_witness, verify_proof, RLNProofValues,
RLNWitnessInput,
RLNWitnessInput, RLN,
},
utils::IdSecret,
};
use ark_bn254::Fr;
use ark_groth16::{Proof as ArkProof, ProvingKey};
use ark_relations::r1cs::ConstraintMatrices;
use safer_ffi::{boxed::Box_, derive_ReprC, ffi_export, prelude::repr_c};
#[cfg(not(feature = "stateless"))]
@@ -26,13 +23,7 @@ use {
#[derive_ReprC]
#[repr(opaque)]
pub struct FFI_RLN {
pub(crate) proving_key: (ProvingKey<Curve>, ConstraintMatrices<Fr>),
#[cfg(not(target_arch = "wasm32"))]
pub(crate) graph_data: Vec<u8>,
#[cfg(not(feature = "stateless"))]
pub(crate) tree: PoseidonTree,
}
pub struct FFI_RLN(pub(crate) RLN);
// RLN initialization APIs
@@ -61,7 +52,7 @@ pub fn ffi_rln_new(
_ => <PoseidonTree as ZerokitMerkleTree>::Config::default(),
};
let proving_key = zkey_from_folder().to_owned();
let zkey = zkey_from_folder().to_owned();
let graph_data = graph_from_folder().to_owned();
// We compute a default empty tree
@@ -79,15 +70,15 @@ pub fn ffi_rln_new(
}
};
let rln = FFI_RLN {
proving_key: proving_key.to_owned(),
graph_data: graph_data.to_vec(),
let rln = RLN {
zkey,
graph_data,
#[cfg(not(feature = "stateless"))]
tree,
};
CResult {
ok: Some(Box_::new(rln)),
ok: Some(Box_::new(FFI_RLN(rln))),
err: None,
}
}
@@ -95,16 +86,13 @@ pub fn ffi_rln_new(
#[cfg(feature = "stateless")]
#[ffi_export]
pub fn ffi_rln_new() -> CResult<repr_c::Box<FFI_RLN>, repr_c::String> {
let proving_key = zkey_from_folder().to_owned();
let zkey = zkey_from_folder().to_owned();
let graph_data = graph_from_folder().to_owned();
let rln = FFI_RLN {
proving_key: proving_key.to_owned(),
graph_data: graph_data.to_vec(),
};
let rln = RLN { zkey, graph_data };
CResult {
ok: Some(Box_::new(rln)),
ok: Some(Box_::new(FFI_RLN(rln))),
err: None,
}
}
@@ -136,7 +124,7 @@ pub fn ffi_rln_new_with_params(
_ => <PoseidonTree as ZerokitMerkleTree>::Config::default(),
};
let proving_key = match zkey_from_raw(zkey_buffer) {
let zkey = match zkey_from_raw(zkey_buffer) {
Ok(pk) => pk,
Err(err) => {
return CResult {
@@ -145,6 +133,7 @@ pub fn ffi_rln_new_with_params(
};
}
};
let graph_data = graph_data.to_vec();
// We compute a default empty tree
let tree = match PoseidonTree::new(
@@ -161,15 +150,15 @@ pub fn ffi_rln_new_with_params(
}
};
let rln = FFI_RLN {
proving_key,
graph_data: graph_data.to_vec(),
let rln = RLN {
zkey,
graph_data,
#[cfg(not(feature = "stateless"))]
tree,
};
CResult {
ok: Some(Box_::new(rln)),
ok: Some(Box_::new(FFI_RLN(rln))),
err: None,
}
}
@@ -180,7 +169,7 @@ pub fn ffi_new_with_params(
zkey_buffer: &repr_c::Vec<u8>,
graph_data: &repr_c::Vec<u8>,
) -> CResult<repr_c::Box<FFI_RLN>, repr_c::String> {
let proving_key = match zkey_from_raw(zkey_buffer) {
let zkey = match zkey_from_raw(zkey_buffer) {
Ok(pk) => pk,
Err(err) => {
return CResult {
@@ -189,14 +178,12 @@ pub fn ffi_new_with_params(
};
}
};
let graph_data = graph_data.to_vec();
let rln = FFI_RLN {
proving_key,
graph_data: graph_data.to_vec(),
};
let rln = RLN { zkey, graph_data };
CResult {
ok: Some(Box_::new(rln)),
ok: Some(Box_::new(FFI_RLN(rln))),
err: None,
}
}
@@ -211,8 +198,8 @@ pub fn ffi_rln_free(rln: repr_c::Box<FFI_RLN>) {
#[derive_ReprC]
#[repr(opaque)]
pub struct FFI_RLNProof {
pub(crate) proof: ArkProof<Curve>,
pub(crate) proof_values: RLNProofValues,
pub proof: Proof,
pub proof_values: RLNProofValues,
}
#[ffi_export]
@@ -233,7 +220,7 @@ pub fn ffi_generate_rln_proof(
external_nullifier: &CFr,
leaf_index: usize,
) -> CResult<repr_c::Box<FFI_RLNProof>, repr_c::String> {
let proof = match rln.tree.proof(leaf_index) {
let proof = match rln.0.tree.proof(leaf_index) {
Ok(proof) => proof,
Err(err) => {
return CResult {
@@ -275,7 +262,7 @@ pub fn ffi_generate_rln_proof(
}
};
let proof = match generate_proof(&rln.proving_key, &rln_witness, &rln.graph_data) {
let proof = match generate_proof(&rln.0.zkey, &rln_witness, &rln.0.graph_data) {
Ok(proof) => proof,
Err(err) => {
return CResult {
@@ -337,7 +324,7 @@ pub fn ffi_generate_rln_proof_stateless(
}
};
let proof = match generate_proof(&rln.proving_key, &rln_witness, &rln.graph_data) {
let proof = match generate_proof(&rln.0.zkey, &rln_witness, &rln.0.graph_data) {
Ok(proof) => proof,
Err(err) => {
return CResult {
@@ -366,7 +353,7 @@ pub fn ffi_verify_rln_proof(
x: &CFr,
) -> CBoolResult {
// Verify the root
if rln.tree.root() != proof.proof_values.root {
if rln.0.tree.root() != proof.proof_values.root {
return CBoolResult {
ok: false,
err: Some("Invalid root".to_string().into()),
@@ -382,7 +369,7 @@ pub fn ffi_verify_rln_proof(
}
// Verify the proof
match verify_proof(&rln.proving_key.0.vk, &proof.proof, &proof.proof_values) {
match verify_proof(&rln.0.zkey.0.vk, &proof.proof, &proof.proof_values) {
Ok(proof_verified) => {
if !proof_verified {
return CBoolResult {
@@ -430,7 +417,7 @@ pub fn ffi_verify_with_roots(
}
// Verify the proof
match verify_proof(&rln.proving_key.0.vk, &proof.proof, &proof.proof_values) {
match verify_proof(&rln.0.zkey.0.vk, &proof.proof, &proof.proof_values) {
Ok(proof_verified) => {
if !proof_verified {
return CBoolResult {

View File

@@ -30,7 +30,7 @@ pub fn ffi_set_tree(rln: &mut repr_c::Box<FFI_RLN>, tree_depth: usize) -> CBoolR
// We compute a default empty tree of desired depth
match PoseidonTree::default(tree_depth) {
Ok(tree) => {
rln.tree = tree;
rln.0.tree = tree;
CBoolResult {
ok: true,
err: None,
@@ -47,7 +47,7 @@ pub fn ffi_set_tree(rln: &mut repr_c::Box<FFI_RLN>, tree_depth: usize) -> CBoolR
#[ffi_export]
pub fn ffi_delete_leaf(rln: &mut repr_c::Box<FFI_RLN>, index: usize) -> CBoolResult {
match rln.tree.delete(index) {
match rln.0.tree.delete(index) {
Ok(_) => CBoolResult {
ok: true,
err: None,
@@ -65,7 +65,7 @@ pub fn ffi_set_leaf(
index: usize,
leaf: &repr_c::Box<CFr>,
) -> CBoolResult {
match rln.tree.set(index, leaf.0) {
match rln.0.tree.set(index, leaf.0) {
Ok(_) => CBoolResult {
ok: true,
err: None,
@@ -82,7 +82,7 @@ pub fn ffi_get_leaf(
rln: &repr_c::Box<FFI_RLN>,
index: usize,
) -> CResult<repr_c::Box<CFr>, repr_c::String> {
match rln.tree.get(index) {
match rln.0.tree.get(index) {
Ok(leaf) => CResult {
ok: Some(CFr::from(leaf).into()),
err: None,
@@ -96,12 +96,12 @@ pub fn ffi_get_leaf(
#[ffi_export]
pub fn ffi_leaves_set(rln: &repr_c::Box<FFI_RLN>) -> usize {
rln.tree.leaves_set()
rln.0.tree.leaves_set()
}
#[ffi_export]
pub fn ffi_set_next_leaf(rln: &mut repr_c::Box<FFI_RLN>, leaf: &repr_c::Box<CFr>) -> CBoolResult {
match rln.tree.update_next(leaf.0) {
match rln.0.tree.update_next(leaf.0) {
Ok(_) => CBoolResult {
ok: true,
err: None,
@@ -120,6 +120,7 @@ pub fn ffi_set_leaves_from(
leaves: &repr_c::Vec<CFr>,
) -> CBoolResult {
match rln
.0
.tree
.override_range(index, leaves.iter().map(|cfr| cfr.0), [].into_iter())
{
@@ -140,7 +141,7 @@ pub fn ffi_init_tree_with_leaves(
leaves: &repr_c::Vec<CFr>,
) -> CBoolResult {
// Reset tree to default
let tree_depth = rln.tree.depth();
let tree_depth = rln.0.tree.depth();
if let Err(err) = PoseidonTree::default(tree_depth) {
return CBoolResult {
ok: false,
@@ -149,6 +150,7 @@ pub fn ffi_init_tree_with_leaves(
};
match rln
.0
.tree
.override_range(0, leaves.iter().map(|cfr| cfr.0), [].into_iter())
{
@@ -172,7 +174,7 @@ pub fn ffi_atomic_operation(
leaves: &repr_c::Vec<CFr>,
indices: &repr_c::Vec<usize>,
) -> CBoolResult {
match rln.tree.override_range(
match rln.0.tree.override_range(
index,
leaves.iter().map(|cfr| cfr.0),
indices.iter().copied(),
@@ -194,8 +196,8 @@ pub fn ffi_seq_atomic_operation(
leaves: &repr_c::Vec<CFr>,
indices: &repr_c::Vec<u8>,
) -> CBoolResult {
let index = rln.tree.leaves_set();
match rln.tree.override_range(
let index = rln.0.tree.leaves_set();
match rln.0.tree.override_range(
index,
leaves.iter().map(|cfr| cfr.0),
indices.iter().map(|x| *x as usize),
@@ -215,7 +217,7 @@ pub fn ffi_seq_atomic_operation(
#[ffi_export]
pub fn ffi_get_root(rln: &repr_c::Box<FFI_RLN>) -> repr_c::Box<CFr> {
CFr::from(rln.tree.root()).into()
CFr::from(rln.0.tree.root()).into()
}
#[ffi_export]
@@ -223,7 +225,7 @@ pub fn ffi_get_proof(
rln: &repr_c::Box<FFI_RLN>,
index: usize,
) -> CResult<repr_c::Box<FFI_MerkleProof>, repr_c::String> {
match rln.tree.proof(index) {
match rln.0.tree.proof(index) {
Ok(proof) => {
let path_elements: repr_c::Vec<CFr> = proof
.get_path_elements()
@@ -255,7 +257,7 @@ pub fn ffi_get_proof(
#[ffi_export]
pub fn ffi_set_metadata(rln: &mut repr_c::Box<FFI_RLN>, metadata: &repr_c::Vec<u8>) -> CBoolResult {
match rln.tree.set_metadata(metadata) {
match rln.0.tree.set_metadata(metadata) {
Ok(_) => CBoolResult {
ok: true,
err: None,
@@ -269,7 +271,7 @@ pub fn ffi_set_metadata(rln: &mut repr_c::Box<FFI_RLN>, metadata: &repr_c::Vec<u
#[ffi_export]
pub fn ffi_get_metadata(rln: &repr_c::Box<FFI_RLN>) -> CResult<repr_c::Vec<u8>, repr_c::String> {
match rln.tree.metadata() {
match rln.0.tree.metadata() {
Ok(metadata) => CResult {
ok: Some(metadata.into()),
err: None,
@@ -283,7 +285,7 @@ pub fn ffi_get_metadata(rln: &repr_c::Box<FFI_RLN>) -> CResult<repr_c::Vec<u8>,
#[ffi_export]
pub fn ffi_flush(rln: &mut repr_c::Box<FFI_RLN>) -> CBoolResult {
match rln.tree.close_db_connection() {
match rln.0.tree.close_db_connection() {
Ok(_) => CBoolResult {
ok: true,
err: None,

View File

@@ -6,8 +6,8 @@ use crate::{
protocol::{extended_keygen, extended_seeded_keygen, keygen, seeded_keygen},
utils::{bytes_be_to_fr, bytes_le_to_fr, fr_to_bytes_be, fr_to_bytes_le},
};
use safer_ffi::prelude::ReprC;
use safer_ffi::{boxed::Box_, derive_ReprC, ffi_export, prelude::repr_c};
use safer_ffi::{boxed::Box_, prelude::ReprC};
use safer_ffi::{derive_ReprC, ffi_export, prelude::repr_c};
use std::ops::Deref;
// CResult
@@ -56,7 +56,7 @@ impl From<CFr> for repr_c::Box<CFr> {
impl From<&CFr> for repr_c::Box<CFr> {
fn from(cfr: &CFr) -> Self {
Box_::new(CFr(cfr.0))
CFr(cfr.0).into()
}
}
@@ -68,12 +68,12 @@ impl PartialEq<Fr> for CFr {
#[ffi_export]
pub fn cfr_zero() -> repr_c::Box<CFr> {
Box_::new(CFr::from(Fr::from(0)))
CFr::from(Fr::from(0)).into()
}
#[ffi_export]
pub fn cfr_one() -> repr_c::Box<CFr> {
Box_::new(CFr::from(Fr::from(1)))
CFr::from(Fr::from(1)).into()
}
#[ffi_export]
@@ -89,18 +89,18 @@ pub fn cfr_to_bytes_be(cfr: &CFr) -> repr_c::Vec<u8> {
#[ffi_export]
pub fn bytes_le_to_cfr(bytes: &repr_c::Vec<u8>) -> repr_c::Box<CFr> {
let (cfr, _) = bytes_le_to_fr(bytes);
Box_::new(CFr(cfr))
CFr(cfr).into()
}
#[ffi_export]
pub fn bytes_be_to_cfr(bytes: &repr_c::Vec<u8>) -> repr_c::Box<CFr> {
let (cfr, _) = bytes_be_to_fr(bytes);
Box_::new(CFr(cfr))
CFr(cfr).into()
}
#[ffi_export]
pub fn uint_to_cfr(value: u32) -> repr_c::Box<CFr> {
Box_::new(CFr::from(Fr::from(value)))
CFr::from(Fr::from(value)).into()
}
#[ffi_export]

View File

@@ -1,4 +1,5 @@
/// This crate instantiates the Poseidon hash algorithm.
// This crate instantiates the Poseidon hash algorithm.
use crate::{
circuit::Fr,
utils::{bytes_be_to_fr, bytes_le_to_fr},

View File

@@ -2,9 +2,7 @@ pub mod circuit;
pub mod error;
pub mod ffi;
pub mod hashers;
#[cfg(feature = "pmtree-ft")]
pub mod pm_tree_adapter;
#[cfg(not(feature = "stateless"))]
pub mod poseidon_tree;
pub mod protocol;
pub mod public;

View File

@@ -1,3 +1,5 @@
#![cfg(feature = "pmtree-ft")]
use serde_json::Value;
use std::fmt::Debug;
use std::path::PathBuf;

View File

@@ -7,29 +7,37 @@ use {
utils::{ZerokitMerkleProof, ZerokitMerkleTree},
};
use crate::circuit::{calculate_rln_witness, qap::CircomReduction, Curve};
use crate::circuit::{
iden3calc::calc_witness, qap::CircomReduction, Curve, Fr, Proof, VerifyingKey, Zkey,
};
use crate::error::{ComputeIdSecretError, ProofError, ProtocolError};
use crate::hashers::{hash_to_field_le, poseidon_hash};
use crate::public::RLN_IDENTIFIER;
use crate::hashers::poseidon_hash;
use crate::utils::{
bytes_be_to_fr, bytes_le_to_fr, bytes_le_to_vec_fr, bytes_le_to_vec_u8, fr_byte_size,
fr_to_bytes_le, normalize_usize_le, to_bigint, vec_fr_to_bytes_le, vec_u8_to_bytes_le,
FrOrSecret, IdSecret,
};
use ark_bn254::{Fr, FrConfig};
use ark_ff::{AdditiveGroup, Fp, MontBackend};
use ark_groth16::{prepare_verifying_key, Groth16, Proof as ArkProof, ProvingKey, VerifyingKey};
use ark_relations::r1cs::ConstraintMatrices;
use ark_ff::AdditiveGroup;
use ark_groth16::{prepare_verifying_key, Groth16};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_std::{rand::thread_rng, UniformRand};
use num_bigint::BigInt;
use rand::{Rng, SeedableRng};
use rand::SeedableRng;
use rand_chacha::ChaCha20Rng;
use serde::{Deserialize, Serialize};
#[cfg(test)]
use std::time::Instant;
use tiny_keccak::{Hasher as _, Keccak};
use zeroize::Zeroize;
pub struct RLN {
pub zkey: Zkey,
#[cfg(not(target_arch = "wasm32"))]
pub graph_data: Vec<u8>,
#[cfg(not(feature = "stateless"))]
pub tree: PoseidonTree,
}
///////////////////////////////////////////////////////
// RLN Witness data structure and utility functions
///////////////////////////////////////////////////////
@@ -61,8 +69,23 @@ impl RLNWitnessInput {
x: Fr,
external_nullifier: Fr,
) -> Result<Self, ProtocolError> {
merkle_proof_len_check(&path_elements, &identity_path_index)?;
message_id_range_check(&message_id, &user_message_limit)?;
// Message ID range check
if message_id > user_message_limit {
return Err(ProtocolError::InvalidMessageId(
message_id,
user_message_limit,
));
}
// Merkle proof length check
let path_elements_len = path_elements.len();
let identity_path_index_len = identity_path_index.len();
if path_elements_len != identity_path_index_len {
return Err(ProtocolError::InvalidMerkleProofLength(
path_elements_len,
identity_path_index_len,
));
}
Ok(Self {
identity_secret,
@@ -162,9 +185,6 @@ pub fn deserialize_identity_tuple_be(serialized: Vec<u8>) -> (Fr, Fr, Fr, Fr) {
/// Returns an error if `rln_witness.message_id` is not within `rln_witness.user_message_limit`.
/// input data is [ identity_secret<32> | user_message_limit<32> | message_id<32> | path_elements<32> | identity_path_index<8> | x<32> | external_nullifier<32> ]
pub fn serialize_witness(rln_witness: &RLNWitnessInput) -> Result<Vec<u8>, ProtocolError> {
// Check if message_id is within user_message_limit
message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;
// Calculate capacity for Vec:
// - 5 fixed field elements: identity_secret, user_message_limit, message_id, x, external_nullifier
// - variable number of path elements
@@ -201,8 +221,6 @@ pub fn deserialize_witness(serialized: &[u8]) -> Result<(RLNWitnessInput, usize)
let (message_id, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
message_id_range_check(&message_id, &user_message_limit)?;
let (path_elements, read) = bytes_le_to_vec_fr(&serialized[all_read..])?;
all_read += read;
@@ -220,15 +238,15 @@ pub fn deserialize_witness(serialized: &[u8]) -> Result<(RLNWitnessInput, usize)
}
Ok((
RLNWitnessInput {
RLNWitnessInput::new(
identity_secret,
user_message_limit,
message_id,
path_elements,
identity_path_index,
x,
external_nullifier,
user_message_limit,
message_id,
},
)?,
all_read,
))
}
@@ -242,6 +260,8 @@ pub fn proof_inputs_to_rln_witness(
tree: &mut PoseidonTree,
serialized: &[u8],
) -> Result<(RLNWitnessInput, usize), ProtocolError> {
use crate::hashers::hash_to_field_le;
let mut all_read: usize = 0;
let (identity_secret, read) = IdSecret::from_bytes_le(&serialized[all_read..]);
@@ -281,81 +301,22 @@ pub fn proof_inputs_to_rln_witness(
let x = hash_to_field_le(&signal);
Ok((
RLNWitnessInput {
RLNWitnessInput::new(
identity_secret,
path_elements,
identity_path_index,
user_message_limit,
message_id,
path_elements,
identity_path_index,
x,
external_nullifier,
},
)?,
all_read,
))
}
/// Creates [`RLNWitnessInput`] from it's fields.
///
/// # Errors
///
/// Returns an error if `message_id` is not within `user_message_limit`.
pub fn rln_witness_from_values(
identity_secret: IdSecret,
path_elements: Vec<Fp<MontBackend<FrConfig, 4>, 4>>,
identity_path_index: Vec<u8>,
x: Fr,
external_nullifier: Fr,
user_message_limit: Fr,
message_id: Fr,
) -> Result<RLNWitnessInput, ProtocolError> {
message_id_range_check(&message_id, &user_message_limit)?;
Ok(RLNWitnessInput {
identity_secret,
path_elements,
identity_path_index,
x,
external_nullifier,
user_message_limit,
message_id,
})
}
pub fn random_rln_witness(tree_depth: usize) -> RLNWitnessInput {
let mut rng = thread_rng();
let identity_secret = IdSecret::rand(&mut rng);
let x = hash_to_field_le(&rng.gen::<[u8; 32]>());
let epoch = hash_to_field_le(&rng.gen::<[u8; 32]>());
let rln_identifier = hash_to_field_le(RLN_IDENTIFIER);
let mut path_elements: Vec<Fr> = Vec::new();
let mut identity_path_index: Vec<u8> = Vec::new();
for _ in 0..tree_depth {
path_elements.push(hash_to_field_le(&rng.gen::<[u8; 32]>()));
identity_path_index.push(rng.gen_range(0..2) as u8);
}
let user_message_limit = Fr::from(100);
let message_id = Fr::from(1);
RLNWitnessInput {
identity_secret,
path_elements,
identity_path_index,
x,
external_nullifier: poseidon_hash(&[epoch, rln_identifier]),
user_message_limit,
message_id,
}
}
pub fn proof_values_from_witness(
rln_witness: &RLNWitnessInput,
) -> Result<RLNProofValues, ProtocolError> {
message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;
// y share
let a_0 = &rln_witness.identity_secret;
let mut to_hash = [
@@ -638,14 +599,14 @@ fn calculate_witness_element<E: ark_ec::pairing::Pairing>(
}
pub fn generate_proof_with_witness(
witness: Vec<BigInt>,
proving_key: &(ProvingKey<Curve>, ConstraintMatrices<Fr>),
) -> Result<ArkProof<Curve>, ProofError> {
calculated_witness: Vec<BigInt>,
zkey: &Zkey,
) -> Result<Proof, ProofError> {
// If in debug mode, we measure and later print time take to compute witness
#[cfg(test)]
let now = Instant::now();
let full_assignment = calculate_witness_element::<Curve>(witness)?;
let full_assignment = calculate_witness_element::<Curve>(calculated_witness)?;
#[cfg(test)]
println!("witness generation took: {:.2?}", now.elapsed());
@@ -660,12 +621,12 @@ pub fn generate_proof_with_witness(
let now = Instant::now();
let proof = Groth16::<_, CircomReduction>::create_proof_with_reduction_and_matrices(
&proving_key.0,
&zkey.0,
r,
s,
&proving_key.1,
proving_key.1.num_instance_variables,
proving_key.1.num_constraints,
&zkey.1,
zkey.1.num_instance_variables,
zkey.1.num_constraints,
full_assignment.as_slice(),
)?;
@@ -683,8 +644,6 @@ pub fn generate_proof_with_witness(
pub fn inputs_for_witness_calculation(
rln_witness: &RLNWitnessInput,
) -> Result<[(&str, Vec<FrOrSecret>); 7], ProtocolError> {
message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;
let mut identity_path_index = Vec::with_capacity(rln_witness.identity_path_index.len());
rln_witness
.identity_path_index
@@ -728,10 +687,10 @@ pub fn inputs_for_witness_calculation(
///
/// Returns a [`ProofError`] if proving fails.
pub fn generate_proof(
proving_key: &(ProvingKey<Curve>, ConstraintMatrices<Fr>),
zkey: &Zkey,
rln_witness: &RLNWitnessInput,
graph_data: &[u8],
) -> Result<ArkProof<Curve>, ProofError> {
) -> Result<Proof, ProofError> {
let inputs = inputs_for_witness_calculation(rln_witness)?
.into_iter()
.map(|(name, values)| (name.to_string(), values));
@@ -739,7 +698,7 @@ pub fn generate_proof(
// If in debug mode, we measure and later print time take to compute witness
#[cfg(test)]
let now = Instant::now();
let full_assignment = calculate_rln_witness(inputs, graph_data);
let full_assignment = calc_witness(inputs, graph_data);
#[cfg(test)]
println!("witness generation took: {:.2?}", now.elapsed());
@@ -753,12 +712,12 @@ pub fn generate_proof(
#[cfg(test)]
let now = Instant::now();
let proof = Groth16::<_, CircomReduction>::create_proof_with_reduction_and_matrices(
&proving_key.0,
&zkey.0,
r,
s,
&proving_key.1,
proving_key.1.num_instance_variables,
proving_key.1.num_constraints,
&zkey.1,
zkey.1.num_instance_variables,
zkey.1.num_constraints,
full_assignment.as_slice(),
)?;
@@ -775,8 +734,8 @@ pub fn generate_proof(
/// Returns a [`ProofError`] if verifying fails. Verification failure does not
/// necessarily mean the proof is incorrect.
pub fn verify_proof(
verifying_key: &VerifyingKey<Curve>,
proof: &ArkProof<Curve>,
verifying_key: &VerifyingKey,
proof: &Proof,
proof_values: &RLNProofValues,
) -> Result<bool, ProofError> {
// We re-arrange proof-values according to the circuit specification
@@ -790,7 +749,6 @@ pub fn verify_proof(
// Check that the proof is valid
let pvk = prepare_verifying_key(verifying_key);
//let pr: ArkProof<Curve> = (*proof).into();
// If in debug mode, we measure and later print time take to verify proof
#[cfg(test)]
@@ -833,10 +791,7 @@ where
pub fn rln_witness_from_json(
input_json: serde_json::Value,
) -> Result<RLNWitnessInput, ProtocolError> {
let rln_witness: RLNWitnessInput = serde_json::from_value(input_json).unwrap();
message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;
Ok(rln_witness)
Ok(serde_json::from_value(input_json)?)
}
/// Converts a [`RLNWitnessInput`] object to the corresponding JSON serialization.
@@ -847,10 +802,7 @@ pub fn rln_witness_from_json(
pub fn rln_witness_to_json(
rln_witness: &RLNWitnessInput,
) -> Result<serde_json::Value, ProtocolError> {
message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;
let rln_witness_json = serde_json::to_value(rln_witness)?;
Ok(rln_witness_json)
Ok(serde_json::to_value(rln_witness)?)
}
/// Converts a [`RLNWitnessInput`] object to the corresponding JSON serialization.
@@ -862,8 +814,6 @@ pub fn rln_witness_to_json(
pub fn rln_witness_to_bigint_json(
rln_witness: &RLNWitnessInput,
) -> Result<serde_json::Value, ProtocolError> {
message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;
let mut path_elements = Vec::new();
for v in rln_witness.path_elements.iter() {
@@ -888,28 +838,3 @@ pub fn rln_witness_to_bigint_json(
Ok(inputs)
}
fn merkle_proof_len_check(
path_elements: &[Fr],
identity_path_index: &[u8],
) -> Result<(), ProtocolError> {
let path_elements_len = path_elements.len();
let identity_path_index_len = identity_path_index.len();
if path_elements_len != identity_path_index_len {
return Err(ProtocolError::InvalidMerkleProofLength(
path_elements_len,
identity_path_index_len,
));
}
Ok(())
}
fn message_id_range_check(message_id: &Fr, user_message_limit: &Fr) -> Result<(), ProtocolError> {
if message_id > user_message_limit {
return Err(ProtocolError::InvalidMessageId(
*message_id,
*user_message_limit,
));
}
Ok(())
}

View File

@@ -1,4 +1,4 @@
use crate::circuit::{zkey_from_raw, Curve, Fr};
use crate::circuit::{zkey_from_raw, Fr, Proof, VerifyingKey, Zkey};
use crate::hashers::{hash_to_field_be, hash_to_field_le, poseidon_hash as utils_poseidon_hash};
use crate::protocol::{
compute_id_secret, deserialize_proof_values, deserialize_witness, extended_keygen,
@@ -35,8 +35,6 @@ use {
};
use crate::error::{ConversionError, ProtocolError, RLNError};
use ark_groth16::{Proof as ArkProof, ProvingKey, VerifyingKey};
use ark_relations::r1cs::ConstraintMatrices;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Read, Write};
#[cfg(target_arch = "wasm32")]
use num_bigint::BigInt;
@@ -86,8 +84,8 @@ impl TreeConfigInput for <PoseidonTree as ZerokitMerkleTree>::Config {
///
/// I/O is mostly done using writers and readers implementing `std::io::Write` and `std::io::Read`, respectively.
pub struct RLN {
proving_key: (ProvingKey<Curve>, ConstraintMatrices<Fr>),
pub(crate) verification_key: VerifyingKey<Curve>,
pub(crate) zkey: Zkey,
pub(crate) verifying_key: VerifyingKey,
#[cfg(not(target_arch = "wasm32"))]
pub(crate) graph_data: Vec<u8>,
#[cfg(not(feature = "stateless"))]
@@ -113,8 +111,8 @@ impl RLN {
/// ```
#[cfg(all(not(target_arch = "wasm32"), not(feature = "stateless")))]
pub fn new<T: TreeConfigInput>(tree_depth: usize, input_buffer: T) -> Result<RLN, RLNError> {
let proving_key = zkey_from_folder().to_owned();
let verification_key = proving_key.0.vk.to_owned();
let zkey = zkey_from_folder().to_owned();
let verifying_key = zkey.0.vk.to_owned();
let graph_data = graph_from_folder().to_owned();
let tree_config = input_buffer.into_tree_config()?;
@@ -126,8 +124,8 @@ impl RLN {
)?;
Ok(RLN {
proving_key,
verification_key,
zkey,
verifying_key,
graph_data,
#[cfg(not(feature = "stateless"))]
tree,
@@ -143,13 +141,13 @@ impl RLN {
/// ```
#[cfg(all(not(target_arch = "wasm32"), feature = "stateless"))]
pub fn new() -> Result<RLN, RLNError> {
let proving_key = zkey_from_folder().to_owned();
let verification_key = proving_key.0.vk.to_owned();
let zkey = zkey_from_folder().to_owned();
let verifying_key = zkey.0.vk.to_owned();
let graph_data = graph_from_folder().to_owned();
Ok(RLN {
proving_key,
verification_key,
zkey,
verifying_key,
graph_data,
})
}
@@ -197,8 +195,8 @@ impl RLN {
graph_data: Vec<u8>,
input_buffer: T,
) -> Result<RLN, RLNError> {
let proving_key = zkey_from_raw(&zkey_vec)?;
let verification_key = proving_key.0.vk.to_owned();
let zkey = zkey_from_raw(&zkey_vec)?;
let verifying_key = zkey.0.vk.to_owned();
let tree_config = input_buffer.into_tree_config()?;
// We compute a default empty tree
@@ -209,8 +207,8 @@ impl RLN {
)?;
Ok(RLN {
proving_key,
verification_key,
zkey,
verifying_key,
graph_data,
#[cfg(not(feature = "stateless"))]
tree,
@@ -247,12 +245,12 @@ impl RLN {
/// ```
#[cfg(all(not(target_arch = "wasm32"), feature = "stateless"))]
pub fn new_with_params(zkey_vec: Vec<u8>, graph_data: Vec<u8>) -> Result<RLN, RLNError> {
let proving_key = zkey_from_raw(&zkey_vec)?;
let verification_key = proving_key.0.vk.to_owned();
let zkey = zkey_from_raw(&zkey_vec)?;
let verifying_key = zkey.0.vk.to_owned();
Ok(RLN {
proving_key,
verification_key,
zkey,
verifying_key,
graph_data,
})
}
@@ -278,12 +276,12 @@ impl RLN {
/// ```
#[cfg(all(target_arch = "wasm32", feature = "stateless"))]
pub fn new_with_params(zkey_vec: Vec<u8>) -> Result<RLN, RLNError> {
let proving_key = zkey_from_raw(&zkey_vec)?;
let verification_key = proving_key.0.vk.to_owned();
let zkey = zkey_from_raw(&zkey_vec)?;
let verifying_key = zkey.0.vk.to_owned();
Ok(RLN {
proving_key,
verification_key,
zkey,
verifying_key,
})
}
@@ -777,7 +775,7 @@ impl RLN {
input_data.read_to_end(&mut serialized_witness)?;
let (rln_witness, _) = deserialize_witness(&serialized_witness)?;
let proof = generate_proof(&self.proving_key, &rln_witness, &self.graph_data)?;
let proof = generate_proof(&self.zkey, &rln_witness, &self.graph_data)?;
// Note: we export a serialization of ark-groth16::Proof not semaphore::Proof
proof.serialize_compressed(&mut output_data)?;
@@ -827,11 +825,11 @@ impl RLN {
// [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]
let mut input_byte: Vec<u8> = Vec::new();
input_data.read_to_end(&mut input_byte)?;
let proof = ArkProof::deserialize_compressed(&mut Cursor::new(&input_byte[..128]))?;
let proof = Proof::deserialize_compressed(&mut Cursor::new(&input_byte[..128]))?;
let (proof_values, _) = deserialize_proof_values(&input_byte[128..]);
let verified = verify_proof(&self.verification_key, &proof, &proof_values)?;
let verified = verify_proof(&self.verifying_key, &proof, &proof_values)?;
Ok(verified)
}
@@ -899,7 +897,7 @@ impl RLN {
let (rln_witness, _) = proof_inputs_to_rln_witness(&mut self.tree, &witness_byte)?;
let proof_values = proof_values_from_witness(&rln_witness)?;
let proof = generate_proof(&self.proving_key, &rln_witness, &self.graph_data)?;
let proof = generate_proof(&self.zkey, &rln_witness, &self.graph_data)?;
// Note: we export a serialization of ark-groth16::Proof not semaphore::Proof
// This proof is compressed, i.e. 128 bytes long
@@ -923,7 +921,7 @@ impl RLN {
let (rln_witness, _) = deserialize_witness(&serialized_witness)?;
let proof_values = proof_values_from_witness(&rln_witness)?;
let proof = generate_proof(&self.proving_key, &rln_witness, &self.graph_data)?;
let proof = generate_proof(&self.zkey, &rln_witness, &self.graph_data)?;
// Note: we export a serialization of ark-groth16::Proof not semaphore::Proof
// This proof is compressed, i.e. 128 bytes long
@@ -945,7 +943,7 @@ impl RLN {
let (rln_witness, _) = deserialize_witness(&serialized_witness[..])?;
let proof_values = proof_values_from_witness(&rln_witness)?;
let proof = generate_proof_with_witness(calculated_witness, &self.proving_key).unwrap();
let proof = generate_proof_with_witness(calculated_witness, &self.zkey).unwrap();
// Note: we export a serialization of ark-groth16::Proof not semaphore::Proof
// This proof is compressed, i.e. 128 bytes long
@@ -986,8 +984,7 @@ impl RLN {
let mut serialized: Vec<u8> = Vec::new();
input_data.read_to_end(&mut serialized)?;
let mut all_read = 0;
let proof =
ArkProof::deserialize_compressed(&mut Cursor::new(&serialized[..128].to_vec()))?;
let proof = Proof::deserialize_compressed(&mut Cursor::new(&serialized[..128].to_vec()))?;
all_read += 128;
let (proof_values, read) = deserialize_proof_values(&serialized[all_read..]);
all_read += read;
@@ -1002,7 +999,7 @@ impl RLN {
let signal: Vec<u8> = serialized[all_read..all_read + signal_len].to_vec();
let verified = verify_proof(&self.verification_key, &proof, &proof_values)?;
let verified = verify_proof(&self.verifying_key, &proof, &proof_values)?;
let x = hash_to_field_le(&signal);
// Consistency checks to counter proof tampering
@@ -1068,8 +1065,7 @@ impl RLN {
let mut serialized: Vec<u8> = Vec::new();
input_data.read_to_end(&mut serialized)?;
let mut all_read = 0;
let proof =
ArkProof::deserialize_compressed(&mut Cursor::new(&serialized[..128].to_vec()))?;
let proof = Proof::deserialize_compressed(&mut Cursor::new(&serialized[..128].to_vec()))?;
all_read += 128;
let (proof_values, read) = deserialize_proof_values(&serialized[all_read..]);
all_read += read;
@@ -1084,7 +1080,7 @@ impl RLN {
let signal: Vec<u8> = serialized[all_read..all_read + signal_len].to_vec();
let verified = verify_proof(&self.verification_key, &proof, &proof_values)?;
let verified = verify_proof(&self.verifying_key, &proof, &proof_values)?;
// First consistency checks to counter proof tampering
let x = hash_to_field_le(&signal);

View File

@@ -1,12 +1,18 @@
use crate::circuit::TEST_TREE_DEPTH;
use crate::circuit::{
Fq, Fq2, Fr, G1Affine, G1Projective, G2Affine, G2Projective, Proof, TEST_TREE_DEPTH,
};
use crate::error::ProtocolError;
use crate::hashers::{hash_to_field_le, poseidon_hash};
use crate::protocol::{
proof_values_from_witness, random_rln_witness, serialize_proof_values, serialize_witness,
verify_proof, RLNProofValues,
proof_values_from_witness, serialize_proof_values, serialize_witness, verify_proof,
RLNProofValues, RLNWitnessInput,
};
use crate::public::RLN;
use crate::utils::str_to_fr;
use ark_groth16::Proof as ArkProof;
use crate::public::RLN_IDENTIFIER;
use crate::utils::{str_to_fr, IdSecret};
use ark_serialize::CanonicalDeserialize;
use rand::thread_rng;
use rand::Rng;
use serde_json::{json, Value};
use std::io::Cursor;
use std::str::FromStr;
@@ -14,31 +20,31 @@ use std::str::FromStr;
#[cfg(not(feature = "stateless"))]
use crate::utils::generate_input_buffer;
fn fq_from_str(s: &str) -> ark_bn254::Fq {
ark_bn254::Fq::from_str(s).unwrap()
fn fq_from_str(s: &str) -> Fq {
Fq::from_str(s).unwrap()
}
fn g1_from_str(g1: &[String]) -> ark_bn254::G1Affine {
fn g1_from_str(g1: &[String]) -> G1Affine {
let x = fq_from_str(&g1[0]);
let y = fq_from_str(&g1[1]);
let z = fq_from_str(&g1[2]);
ark_bn254::G1Affine::from(ark_bn254::G1Projective::new(x, y, z))
G1Affine::from(G1Projective::new(x, y, z))
}
fn g2_from_str(g2: &[Vec<String>]) -> ark_bn254::G2Affine {
fn g2_from_str(g2: &[Vec<String>]) -> G2Affine {
let c0 = fq_from_str(&g2[0][0]);
let c1 = fq_from_str(&g2[0][1]);
let x = ark_bn254::Fq2::new(c0, c1);
let x = Fq2::new(c0, c1);
let c0 = fq_from_str(&g2[1][0]);
let c1 = fq_from_str(&g2[1][1]);
let y = ark_bn254::Fq2::new(c0, c1);
let y = Fq2::new(c0, c1);
let c0 = fq_from_str(&g2[2][0]);
let c1 = fq_from_str(&g2[2][1]);
let z = ark_bn254::Fq2::new(c0, c1);
let z = Fq2::new(c0, c1);
ark_bn254::G2Affine::from(ark_bn254::G2Projective::new(x, y, z))
G2Affine::from(G2Projective::new(x, y, z))
}
fn value_to_string_vec(value: &Value) -> Vec<String> {
@@ -50,6 +56,37 @@ fn value_to_string_vec(value: &Value) -> Vec<String> {
.collect()
}
fn random_rln_witness(tree_depth: usize) -> Result<RLNWitnessInput, ProtocolError> {
let mut rng = thread_rng();
let identity_secret = IdSecret::rand(&mut rng);
let x = hash_to_field_le(&rng.gen::<[u8; 32]>());
let epoch = hash_to_field_le(&rng.gen::<[u8; 32]>());
let rln_identifier = hash_to_field_le(RLN_IDENTIFIER);
let mut path_elements: Vec<Fr> = Vec::new();
let mut identity_path_index: Vec<u8> = Vec::new();
for _ in 0..tree_depth {
path_elements.push(hash_to_field_le(&rng.gen::<[u8; 32]>()));
identity_path_index.push(rng.gen_range(0..2) as u8);
}
let user_message_limit = Fr::from(100);
let message_id = Fr::from(1);
let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);
RLNWitnessInput::new(
identity_secret,
user_message_limit,
message_id,
path_elements,
identity_path_index,
x,
external_nullifier,
)
}
#[test]
fn test_groth16_proof_hardcoded() {
#[cfg(not(feature = "stateless"))]
@@ -85,7 +122,7 @@ fn test_groth16_proof_hardcoded() {
"protocol": "groth16",
"curve": "bn128"
});
let valid_ark_proof = ArkProof {
let valid_ark_proof = Proof {
a: g1_from_str(&value_to_string_vec(&valid_snarkjs_proof["pi_a"])),
b: g2_from_str(
&valid_snarkjs_proof["pi_b"]
@@ -126,7 +163,7 @@ fn test_groth16_proof_hardcoded() {
.unwrap(),
};
let verified = verify_proof(&rln.verification_key, &valid_ark_proof, &valid_proof_values);
let verified = verify_proof(&rln.verifying_key, &valid_ark_proof, &valid_proof_values);
assert!(verified.unwrap());
}
@@ -141,7 +178,7 @@ fn test_groth16_proof() {
let mut rln = RLN::new().unwrap();
// Note: we only test Groth16 proof generation, so we ignore setting the tree in the RLN object
let rln_witness = random_rln_witness(tree_depth);
let rln_witness = random_rln_witness(tree_depth).unwrap();
let proof_values = proof_values_from_witness(&rln_witness).unwrap();
// We compute a Groth16 proof
@@ -151,8 +188,8 @@ fn test_groth16_proof() {
let serialized_proof = output_buffer.into_inner();
// Before checking public verify API, we check that the (deserialized) proof generated by prove is actually valid
let proof = ArkProof::deserialize_compressed(&mut Cursor::new(&serialized_proof)).unwrap();
let verified = verify_proof(&rln.verification_key, &proof, &proof_values);
let proof = Proof::deserialize_compressed(&mut Cursor::new(&serialized_proof)).unwrap();
let verified = verify_proof(&rln.verifying_key, &proof, &proof_values);
// dbg!(verified.unwrap());
assert!(verified.unwrap());
@@ -1082,15 +1119,16 @@ mod stateless_test {
let x = hash_to_field_le(&signal);
let merkle_proof = tree.proof(identity_index).expect("proof should exist");
let message_id = Fr::from(1);
let rln_witness = rln_witness_from_values(
let rln_witness = RLNWitnessInput::new(
identity_secret_hash,
user_message_limit,
message_id,
merkle_proof.get_path_elements(),
merkle_proof.get_path_index(),
x,
external_nullifier,
user_message_limit,
Fr::from(1),
)
.unwrap();
@@ -1178,26 +1216,27 @@ mod stateless_test {
let identity_index = tree.leaves_set();
let merkle_proof = tree.proof(identity_index).expect("proof should exist");
let message_id = Fr::from(1);
let rln_witness1 = rln_witness_from_values(
let rln_witness1 = RLNWitnessInput::new(
identity_secret_hash.clone(),
user_message_limit,
message_id,
merkle_proof.get_path_elements(),
merkle_proof.get_path_index(),
x1,
external_nullifier,
user_message_limit,
Fr::from(1),
)
.unwrap();
let rln_witness2 = rln_witness_from_values(
let rln_witness2 = RLNWitnessInput::new(
identity_secret_hash.clone(),
user_message_limit,
message_id,
merkle_proof.get_path_elements(),
merkle_proof.get_path_index(),
x2,
external_nullifier,
user_message_limit,
Fr::from(1),
)
.unwrap();
@@ -1247,14 +1286,14 @@ mod stateless_test {
let identity_index_new = tree.leaves_set();
let merkle_proof_new = tree.proof(identity_index_new).expect("proof should exist");
let rln_witness3 = rln_witness_from_values(
let rln_witness3 = RLNWitnessInput::new(
identity_secret_hash_new.clone(),
user_message_limit,
message_id,
merkle_proof_new.get_path_elements(),
merkle_proof_new.get_path_index(),
x3,
external_nullifier,
user_message_limit,
Fr::from(1),
)
.unwrap();

View File

@@ -322,7 +322,8 @@ pub fn normalize_usize_le(input: usize) -> [u8; 8] {
pub fn normalize_usize_be(input: usize) -> [u8; 8] {
let mut bytes = [0u8; 8];
let input_bytes = input.to_be_bytes();
bytes[..input_bytes.len()].copy_from_slice(&input_bytes);
let offset = 8 - input_bytes.len();
bytes[offset..].copy_from_slice(&input_bytes);
bytes
}
@@ -334,7 +335,7 @@ pub fn generate_input_buffer() -> Cursor<String> {
#[derive(
Debug, Zeroize, ZeroizeOnDrop, Clone, PartialEq, CanonicalSerialize, CanonicalDeserialize,
)]
pub struct IdSecret(ark_bn254::Fr);
pub struct IdSecret(Fr);
impl IdSecret {
pub fn rand<R: Rng + ?Sized>(rng: &mut R) -> Self {

View File

@@ -8,7 +8,6 @@ mod test {
use rln::hashers::{hash_to_field_le, poseidon_hash as utils_poseidon_hash};
use rln::protocol::*;
use rln::utils::*;
use safer_ffi::boxed::Box_;
use safer_ffi::prelude::repr_c;
use serde_json::json;
use std::fs::File;
@@ -101,7 +100,7 @@ mod test {
// We first add leaves one by one specifying the index
for (i, leaf) in leaves.iter().enumerate() {
// We prepare the rate_commitment and we set the leaf at provided index
let result = ffi_set_leaf(&mut ffi_rln_instance, i, &Box_::new(CFr::from(*leaf)));
let result = ffi_set_leaf(&mut ffi_rln_instance, i, &CFr::from(*leaf).into());
if !result.ok {
panic!("set leaf call failed: {:?}", result.err);
}
@@ -118,7 +117,7 @@ mod test {
// We add leaves one by one using the internal index (new leaves goes in next available position)
for leaf in &leaves {
let result = ffi_set_next_leaf(&mut ffi_rln_instance, &Box_::new(CFr::from(*leaf)));
let result = ffi_set_next_leaf(&mut ffi_rln_instance, &CFr::from(*leaf).into());
if !result.ok {
panic!("set next leaf call failed: {:?}", result.err);
}
@@ -224,7 +223,7 @@ mod test {
// We add leaves one by one using the internal index (new leaves goes in next available position)
for leaf in &leaves {
let result = ffi_set_next_leaf(&mut ffi_rln_instance, &Box_::new(CFr::from(*leaf)));
let result = ffi_set_next_leaf(&mut ffi_rln_instance, &CFr::from(*leaf).into());
if !result.ok {
panic!("set next leaf call failed: {:?}", result.err);
}
@@ -319,7 +318,7 @@ mod test {
let result = ffi_set_leaf(
&mut ffi_rln_instance,
leaf_index,
&Box_::new(CFr::from(rate_commitment)),
&CFr::from(rate_commitment).into(),
);
if !result.ok {
panic!("set leaf call failed: {:?}", result.err);
@@ -487,10 +486,7 @@ mod test {
let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit]);
// We set as leaf rate_commitment, its index would be equal to no_of_leaves
let result = ffi_set_next_leaf(
&mut ffi_rln_instance,
&Box_::new(CFr::from(rate_commitment)),
);
let result = ffi_set_next_leaf(&mut ffi_rln_instance, &CFr::from(rate_commitment).into());
if !result.ok {
panic!("set next leaf call failed: {:?}", result.err);
}
@@ -557,10 +553,7 @@ mod test {
let message_id = Fr::from(1);
// We set as leaf rate_commitment, its index would be equal to no_of_leaves
let result = ffi_set_next_leaf(
&mut ffi_rln_instance,
&Box_::new(CFr::from(rate_commitment)),
);
let result = ffi_set_next_leaf(&mut ffi_rln_instance, &CFr::from(rate_commitment).into());
if !result.ok {
panic!("set next leaf call failed: {:?}", result.err);
}
@@ -657,10 +650,7 @@ mod test {
let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit]);
// We set as leaf rate_commitment, its index would be equal to 0 since tree is empty
let result = ffi_set_next_leaf(
&mut ffi_rln_instance,
&Box_::new(CFr::from(rate_commitment)),
);
let result = ffi_set_next_leaf(&mut ffi_rln_instance, &CFr::from(rate_commitment).into());
if !result.ok {
panic!("set next leaf call failed: {:?}", result.err);
}
@@ -750,7 +740,7 @@ mod test {
// We set as leaf id_commitment, its index would be equal to 1 since at 0 there is id_commitment
let result = ffi_set_next_leaf(
&mut ffi_rln_instance,
&Box_::new(CFr::from(rate_commitment_new)),
&CFr::from(rate_commitment_new).into(),
);
if !result.ok {
panic!("set next leaf call failed: {:?}", result.err);
@@ -833,7 +823,7 @@ mod test {
let result = ffi_set_leaf(
&mut ffi_rln_instance,
index,
&Box_::new(CFr::from(id_commitment)),
&CFr::from(id_commitment).into(),
);
if !result.ok {
panic!("set leaf call failed: {:?}", result.err);

View File

@@ -9,9 +9,8 @@ mod test {
use rln::poseidon_tree::PoseidonTree;
use rln::protocol::{
deserialize_proof_values, deserialize_witness, generate_proof, keygen,
proof_values_from_witness, rln_witness_from_json, rln_witness_from_values,
rln_witness_to_json, seeded_keygen, serialize_proof_values, serialize_witness,
verify_proof, RLNWitnessInput,
proof_values_from_witness, rln_witness_from_json, rln_witness_to_json, seeded_keygen,
serialize_proof_values, serialize_witness, verify_proof, RLNWitnessInput,
};
use rln::utils::str_to_fr;
use utils::{ZerokitMerkleProof, ZerokitMerkleTree};
@@ -119,14 +118,16 @@ mod test {
let rln_identifier = hash_to_field_le(b"test-rln-identifier");
let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);
rln_witness_from_values(
let message_id = Fr::from(1);
RLNWitnessInput::new(
identity_secret_hash,
user_message_limit,
message_id,
merkle_proof.get_path_elements(),
merkle_proof.get_path_index(),
x,
external_nullifier,
user_message_limit,
Fr::from(1),
)
.unwrap()
}
@@ -136,7 +137,7 @@ mod test {
fn test_witness_from_json() {
// We generate all relevant keys
let proving_key = zkey_from_folder();
let verification_key = &proving_key.0.vk;
let verifying_key = &proving_key.0.vk;
let graph_data = graph_from_folder();
// We compute witness from the json input
let rln_witness = get_test_witness();
@@ -149,7 +150,7 @@ mod test {
let proof_values = proof_values_from_witness(&rln_witness_deser).unwrap();
// Let's verify the proof
let verified = verify_proof(verification_key, &proof, &proof_values);
let verified = verify_proof(verifying_key, &proof, &proof_values);
assert!(verified.unwrap());
}
@@ -164,7 +165,7 @@ mod test {
// We generate all relevant keys
let proving_key = zkey_from_folder();
let verification_key = &proving_key.0.vk;
let verifying_key = &proving_key.0.vk;
let graph_data = graph_from_folder();
// Let's generate a zkSNARK proof
@@ -173,7 +174,7 @@ mod test {
let proof_values = proof_values_from_witness(&rln_witness_deser).unwrap();
// Let's verify the proof
let success = verify_proof(verification_key, &proof, &proof_values).unwrap();
let success = verify_proof(verifying_key, &proof, &proof_values).unwrap();
assert!(success);
}

View File

@@ -17,7 +17,6 @@ num-bigint = { version = "0.4.6", default-features = false }
pmtree = { package = "vacp2p_pmtree", version = "2.0.3", optional = true }
sled = "0.34.7"
serde_json = "1.0.141"
lazy_static = "1.5.0"
hex = "0.4.3"
rayon = "1.10.0"
thiserror = "2.0"

View File

@@ -1,7 +1,5 @@
use std::{fmt::Display, str::FromStr};
use criterion::{criterion_group, criterion_main, Criterion};
use lazy_static::lazy_static;
use std::{fmt::Display, str::FromStr, sync::LazyLock};
use tiny_keccak::{Hasher as _, Keccak};
use zerokit_utils::{
FullMerkleConfig, FullMerkleTree, Hasher, OptimalMerkleConfig, OptimalMerkleTree,
@@ -46,18 +44,17 @@ impl FromStr for TestFr {
}
}
lazy_static! {
static ref LEAVES: Vec<TestFr> = {
let mut leaves = Vec::with_capacity(1 << 20);
for i in 0..(1 << 20) {
let mut bytes = [0u8; 32];
bytes[28..].copy_from_slice(&(i as u32).to_be_bytes());
leaves.push(TestFr(bytes));
}
leaves
};
static ref INDICES: Vec<usize> = (0..(1 << 20)).collect();
}
static LEAVES: LazyLock<Vec<TestFr>> = LazyLock::new(|| {
let mut leaves = Vec::with_capacity(1 << 20);
for i in 0..(1 << 20) {
let mut bytes = [0u8; 32];
bytes[28..].copy_from_slice(&(i as u32).to_be_bytes());
leaves.push(TestFr(bytes));
}
leaves
});
static INDICES: LazyLock<Vec<usize>> = LazyLock::new(|| (0..(1 << 20)).collect());
const NOF_LEAVES: usize = 8192;