Compare commits

...

13 Commits

Author SHA1 Message Date
Aaryamann Challani
cd60af5b52 chore(ci): add arkzkey feature to nightly assets (#244) 2024-05-07 16:08:59 +05:30
Ekaterina Broslavskaya
8581ac0b78 chore(rln): add ark-zkey support (#242)
* Add Submodule

* Add arkzkey

* make arkzkey support as feature

* update submodule url

* add abstract around feature

* new bench file

* update ci
2024-05-06 18:09:22 +07:00
Aaryamann Challani
5937a67ee6 fix(rln-wasm): dont run benchmarks (#241) 2024-04-30 16:06:49 +05:30
Aaryamann Challani
d96eb59e92 fix(makefile): include wasm-pack+node in installation (#240)
* fix(makefile): include wasm-pack in installation

* fix: include node in installdeps

* fix: install 18.20.2 node
2024-04-30 15:44:33 +05:30
tyshko-rostyslav
a372053047 Remove height 32 from RLN (#239)
* rm tree height from circuit

* rm corresponding logic from ffi

* fm from tests

* rm height 32 resources

* remove `TEST_PARAMETERS_INDEX` and related comments
2024-04-29 18:26:29 +05:30
rymnc
b450bfdb37 docs: add users 2024-04-21 15:53:46 +02:00
Aaryamann Challani
0521c7349e chore(rln): add QoL traits to the Hasher associated type in MerkleTree trait (#238) 2024-04-17 17:54:49 +05:30
Aaryamann Challani
d91a5b3568 chore: Release (#236) 2024-04-09 03:56:57 +05:30
Aaryamann Challani
cf9dbb419d chore: remove code surface area for maintainability (#235)
* chore: remove code surface area for maintainability

* fix: ci

* fix: remove gitmodules
2024-04-08 13:15:12 +05:30
Aaryamann Challani
aaa12db70d chore: Release (#232) 2024-03-07 11:47:23 +05:30
Aaryamann Challani
30d5f94181 chore(rln): return empty metadata if it doesnt exist (#230)
* chore(rln): return empty metadata if it doesnt exist

* fix: clippy
2024-03-07 02:22:37 +05:30
Aaryamann Challani
ccd2ead847 chore(rln): infallible conversions (#229) 2024-01-23 19:47:54 +05:30
Richard Ramos
7669d72f9b fix: add support to aarch64-linux-android 2024-01-23 09:29:32 -04:00
55 changed files with 607 additions and 2306 deletions

View File

@@ -3,27 +3,20 @@ on:
branches:
- master
paths-ignore:
- '**.md'
- '!.github/workflows/*.yml'
- '!multiplier/src/**'
- '!private-settlement/src/**'
- '!rln-wasm/**'
- '!rln/src/**'
- '!rln/resources/**'
- '!semaphore/src/**'
- '!utils/src/**'
- "**.md"
- "!.github/workflows/*.yml"
- "!rln-wasm/**"
- "!rln/src/**"
- "!rln/resources/**"
- "!utils/src/**"
pull_request:
paths-ignore:
- '**.md'
- '!.github/workflows/*.yml'
- '!multiplier/src/**'
- '!private-settlement/src/**'
- '!rln-wasm/**'
- '!rln/src/**'
- '!rln/resources/**'
- '!semaphore/src/**'
- '!utils/src/**'
- "**.md"
- "!.github/workflows/*.yml"
- "!rln-wasm/**"
- "!rln/src/**"
- "!rln/resources/**"
- "!utils/src/**"
name: Tests
@@ -32,10 +25,10 @@ jobs:
strategy:
matrix:
platform: [ubuntu-latest, macos-latest]
crate: [multiplier, semaphore, rln, utils]
crate: [rln, utils]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: test - ${{ matrix.crate }} - ${{ matrix.platform }}
steps:
- name: Checkout sources
@@ -51,16 +44,16 @@ jobs:
run: make installdeps
- name: cargo-make test
run: |
cargo make test --release
cargo make test --release
working-directory: ${{ matrix.crate }}
rln-wasm:
strategy:
matrix:
platform: [ubuntu-latest, macos-latest]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: test - rln-wasm - ${{ matrix.platform }}
steps:
- uses: actions/checkout@v3
@@ -85,11 +78,11 @@ jobs:
matrix:
# we run lint tests only on ubuntu
platform: [ubuntu-latest]
crate: [multiplier, semaphore, rln, utils]
crate: [rln, utils]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: lint - ${{ matrix.crate }} - ${{ matrix.platform }}
name: lint - ${{ matrix.crate }} - ${{ matrix.platform }}
steps:
- name: Checkout sources
uses: actions/checkout@v3
@@ -110,7 +103,7 @@ jobs:
- name: cargo clippy
if: success() || failure()
run: |
cargo clippy --release -- -D warnings
cargo clippy --release -- -D warnings
working-directory: ${{ matrix.crate }}
# We skip clippy on rln-wasm, since wasm target is managed by cargo make
# Currently not treating warnings as error, too noisy
@@ -130,8 +123,10 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v3
with:
submodules: true
- uses: Swatinem/rust-cache@v2
- uses: boa-dev/criterion-compare-action@v3
with:
branchName: ${{ github.base_ref }}
cwd: ${{ matrix.crate }}
cwd: ${{ matrix.crate }}

View File

@@ -8,7 +8,8 @@ jobs:
linux:
strategy:
matrix:
target:
feature: ["default", "arkzkey"]
target:
- x86_64-unknown-linux-gnu
- aarch64-unknown-linux-gnu
- i686-unknown-linux-gnu
@@ -29,16 +30,16 @@ jobs:
run: make installdeps
- name: cross build
run: |
cross build --release --target ${{ matrix.target }} --workspace --exclude rln-wasm
cross build --release --target ${{ matrix.target }} --features ${{ matrix.feature }} --workspace --exclude rln-wasm
mkdir release
cp target/${{ matrix.target }}/release/librln* release/
tar -czvf ${{ matrix.target }}-rln.tar.gz release/
tar -czvf ${{ matrix.target }}-${{ matrix.feature }}-rln.tar.gz release/
- name: Upload archive artifact
uses: actions/upload-artifact@v2
with:
name: ${{ matrix.target }}-archive
path: ${{ matrix.target }}-rln.tar.gz
name: ${{ matrix.target }}-${{ matrix.feature }}-archive
path: ${{ matrix.target }}-${{ matrix.feature }}-rln.tar.gz
retention-days: 2
macos:
@@ -46,7 +47,8 @@ jobs:
runs-on: macos-latest
strategy:
matrix:
target:
feature: ["default", "arkzkey"]
target:
- x86_64-apple-darwin
- aarch64-apple-darwin
steps:
@@ -64,18 +66,18 @@ jobs:
run: make installdeps
- name: cross build
run: |
cross build --release --target ${{ matrix.target }} --workspace --exclude rln-wasm
cross build --release --target ${{ matrix.target }} --features ${{ matrix.feature }} --workspace --exclude rln-wasm
mkdir release
cp target/${{ matrix.target }}/release/librln* release/
tar -czvf ${{ matrix.target }}-rln.tar.gz release/
tar -czvf ${{ matrix.target }}-${{ matrix.feature }}-rln.tar.gz release/
- name: Upload archive artifact
uses: actions/upload-artifact@v2
with:
name: ${{ matrix.target }}-archive
path: ${{ matrix.target }}-rln.tar.gz
name: ${{ matrix.target }}-${{ matrix.feature }}-archive
path: ${{ matrix.target }}-${{ matrix.feature }}-rln.tar.gz
retention-days: 2
browser-rln-wasm:
name: Browser build (RLN WASM)
runs-on: ubuntu-latest
@@ -108,7 +110,6 @@ jobs:
path: rln-wasm/browser-rln-wasm.tar.gz
retention-days: 2
prepare-prerelease:
name: Prepare pre-release
needs: [linux, macos, browser-rln-wasm]
@@ -120,7 +121,7 @@ jobs:
ref: master
- name: Download artifacts
uses: actions/download-artifact@v2
- name: Delete tag
uses: dev-drprasad/delete-tag-and-release@v0.2.1
with:

.gitmodules vendored (7 lines changed)
View File

@@ -1,4 +1,3 @@
[submodule "semaphore/vendor/semaphore"]
path = semaphore/vendor/semaphore
ignore = dirty
url = https://github.com/appliedzkp/semaphore.git
[submodule "mopro"]
path = mopro
url = https://github.com/zkmopro/mopro.git

Cargo.lock generated (1428 lines changed)

File diff suppressed because it is too large.

View File

@@ -1,13 +1,6 @@
[workspace]
members = [
"multiplier",
"private-settlement",
"semaphore",
"rln",
"rln-cli",
"rln-wasm",
"utils",
]
members = ["rln", "rln-cli", "rln-wasm", "utils"]
default-members = ["rln", "rln-cli", "utils"]
resolver = "2"
# Compilation profile for any non-workspace member.
@@ -19,6 +12,3 @@ opt-level = 3
[profile.release.package."rln-wasm"]
# Tell `rustc` to optimize for small code size.
opt-level = "s"
[profile.release.package."semaphore"]
codegen-units = 1

View File

@@ -29,4 +29,7 @@ image = "ghcr.io/cross-rs/mips64-unknown-linux-gnuabi64:latest"
image = "ghcr.io/cross-rs/mips64el-unknown-linux-gnuabi64:latest"
[target.mipsel-unknown-linux-gnu]
image = "ghcr.io/cross-rs/mipsel-unknown-linux-gnu:latest"
image = "ghcr.io/cross-rs/mipsel-unknown-linux-gnu:latest"
[target.aarch64-linux-android]
image = "ghcr.io/cross-rs/aarch64-linux-android:edge"

View File

@@ -13,15 +13,19 @@ endif
installdeps: .pre-build
ifeq ($(shell uname),Darwin)
# commented due to https://github.com/orgs/Homebrew/discussions/4612
# @brew update
# commented due to https://github.com/orgs/Homebrew/discussions/4612
# @brew update
@brew install cmake ninja
else ifeq ($(shell uname),Linux)
@sudo apt-get update
@sudo apt-get install -y cmake ninja-build
endif
@git clone --recursive https://github.com/WebAssembly/wabt.git
@cd wabt && mkdir build && cd build && cmake .. -GNinja && ninja && sudo ninja install
@git -C "wabt" pull || git clone --recursive https://github.com/WebAssembly/wabt.git "wabt"
@cd wabt && mkdir -p build && cd build && cmake .. -GNinja && ninja && sudo ninja install
@which wasm-pack || cargo install wasm-pack
# nvm already checks if it's installed, and no-ops if it is
@curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.7/install.sh | bash
@. ${HOME}/.nvm/nvm.sh && nvm install 18.20.2 && nvm use 18.20.2;
build: .pre-build
@cargo make build

View File

@@ -18,13 +18,23 @@ in Rust.
- [semaphore-rs](https://github.com/worldcoin/semaphore-rs) written in Rust based on ark-circom.
## Users
Zerokit is used by -
- [nwaku](https://github.com/waku-org/nwaku)
- [js-rln](https://github.com/waku-org/js-rln)
## Build and Test
To install missing dependencies, run the following commands from the root folder
```bash
make installdeps
```
To build and test all crates, run the following commands from the root folder
```bash
make build
make test
@@ -32,4 +42,4 @@ make test
## Release assets
We use [`cross-rs`](https://github.com/cross-rs/cross) to cross-compile and generate release assets for rln.
We use [`cross-rs`](https://github.com/cross-rs/cross) to cross-compile and generate release assets for rln.

mopro Submodule (1 line changed)

Submodule mopro added at 3c8d734336

View File

@@ -1,32 +0,0 @@
[package]
name = "multiplier"
version = "0.3.0"
edition = "2018"
license = "MIT OR Apache-2.0"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
# WASM operations
# wasmer = { version = "2.0" }
# fnv = { version = "1.0.3", default-features = false }
# num = { version = "0.4.0" }
# num-traits = { version = "0.2.0", default-features = false }
# ZKP Generation
# ark-ff = { version = "0.3.0", default-features = false, features = ["parallel", "asm"] }
ark-std = { version = "0.3.0", default-features = false, features = ["parallel"] }
ark-bn254 = { version = "0.3.0" }
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", features = ["parallel"] }
# ark-poly = { version = "^0.3.0", default-features = false, features = ["parallel"] }
ark-serialize = { version = "0.3.0", default-features = false }
ark-circom = { git = "https://github.com/gakonst/ark-circom", features = ["circom-2"], rev = "35ce5a9" }
# error handling
color-eyre = "0.6.1"
# decoding of data
# hex = "0.4.3"
# byteorder = "1.4.3"

View File

@@ -1,7 +0,0 @@
[tasks.build]
command = "cargo"
args = ["build", "--release"]
[tasks.test]
command = "cargo"
args = ["test", "--release"]

View File

@@ -1,21 +0,0 @@
# Multiplier example
Example wrapper around a basic Circom circuit to test Circom 2 integration
through ark-circom and FFI.
## Build and Test
To build and test, run the following commands within the module folder
```bash
cargo make build
cargo make test
```
## FFI
To generate C or Nim bindings from Rust FFI, use `cbindgen` or `nbindgen`:
```
cbindgen . -o target/multiplier.h
nbindgen . -o target/multiplier.nim
```

View File

@@ -1,77 +0,0 @@
use crate::public::Multiplier;
use std::slice;
/// Buffer struct is taken from
/// https://github.com/celo-org/celo-threshold-bls-rs/blob/master/crates/threshold-bls-ffi/src/ffi.rs
///
/// Also heavily inspired by https://github.com/kilic/rln/blob/master/src/ffi.rs
#[repr(C)]
#[derive(Clone, Debug, PartialEq)]
pub struct Buffer {
pub ptr: *const u8,
pub len: usize,
}
impl From<&[u8]> for Buffer {
fn from(src: &[u8]) -> Self {
Self {
ptr: &src[0] as *const u8,
len: src.len(),
}
}
}
impl<'a> From<&Buffer> for &'a [u8] {
fn from(src: &Buffer) -> &'a [u8] {
unsafe { slice::from_raw_parts(src.ptr, src.len) }
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn new_circuit(ctx: *mut *mut Multiplier) -> bool {
if let Ok(mul) = Multiplier::new() {
unsafe { *ctx = Box::into_raw(Box::new(mul)) };
true
} else {
false
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn prove(ctx: *const Multiplier, output_buffer: *mut Buffer) -> bool {
println!("multiplier ffi: prove");
let mul = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
match mul.prove(&mut output_data) {
Ok(proof_data) => proof_data,
Err(_) => return false,
};
unsafe { *output_buffer = Buffer::from(&output_data[..]) };
std::mem::forget(output_data);
true
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn verify(
ctx: *const Multiplier,
proof_buffer: *const Buffer,
result_ptr: *mut u32,
) -> bool {
println!("multiplier ffi: verify");
let mul = unsafe { &*ctx };
let proof_data = <&[u8]>::from(unsafe { &*proof_buffer });
if match mul.verify(proof_data) {
Ok(verified) => verified,
Err(_) => return false,
} {
unsafe { *result_ptr = 0 };
} else {
unsafe { *result_ptr = 1 };
};
true
}

View File

@@ -1,2 +0,0 @@
pub mod ffi;
pub mod public;

View File

@@ -1,49 +0,0 @@
use ark_circom::{CircomBuilder, CircomConfig};
use ark_std::rand::thread_rng;
use color_eyre::{Report, Result};
use ark_bn254::Bn254;
use ark_groth16::{
create_random_proof as prove, generate_random_parameters, prepare_verifying_key, verify_proof,
};
fn groth16_proof_example() -> Result<()> {
let cfg = CircomConfig::<Bn254>::new(
"./resources/circom2_multiplier2.wasm",
"./resources/circom2_multiplier2.r1cs",
)?;
let mut builder = CircomBuilder::new(cfg);
builder.push_input("a", 3);
builder.push_input("b", 11);
// create an empty instance for setting it up
let circom = builder.setup();
let mut rng = thread_rng();
let params = generate_random_parameters::<Bn254, _, _>(circom, &mut rng)?;
let circom = builder.build()?;
let inputs = circom
.get_public_inputs()
.ok_or(Report::msg("no public inputs"))?;
let proof = prove(circom, &params, &mut rng)?;
let pvk = prepare_verifying_key(&params.vk);
match verify_proof(&pvk, &proof, &inputs) {
Ok(_) => Ok(()),
Err(_) => Err(Report::msg("not verified")),
}
}
fn main() {
println!("Hello, world!");
match groth16_proof_example() {
Ok(_) => println!("Success"),
Err(_) => println!("Error"),
}
}

View File

@@ -1,79 +0,0 @@
use ark_circom::{CircomBuilder, CircomCircuit, CircomConfig};
use ark_std::rand::thread_rng;
use ark_bn254::Bn254;
use ark_groth16::{
create_random_proof as prove, generate_random_parameters, prepare_verifying_key, verify_proof,
Proof, ProvingKey,
};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use color_eyre::{Report, Result};
use std::io::{Read, Write};
pub struct Multiplier {
circom: CircomCircuit<Bn254>,
params: ProvingKey<Bn254>,
}
impl Multiplier {
// TODO Break this apart here
pub fn new() -> Result<Multiplier> {
let cfg = CircomConfig::<Bn254>::new(
"./resources/circom2_multiplier2.wasm",
"./resources/circom2_multiplier2.r1cs",
)?;
let mut builder = CircomBuilder::new(cfg);
builder.push_input("a", 3);
builder.push_input("b", 11);
// create an empty instance for setting it up
let circom = builder.setup();
let mut rng = thread_rng();
let params = generate_random_parameters::<Bn254, _, _>(circom, &mut rng)?;
let circom = builder.build()?;
Ok(Multiplier { circom, params })
}
// TODO Input Read
pub fn prove<W: Write>(&self, result_data: W) -> Result<()> {
let mut rng = thread_rng();
// XXX: There's probably a better way to do this
let circom = self.circom.clone();
let params = self.params.clone();
let proof = prove(circom, &params, &mut rng)?;
// XXX: Unclear if this is different from other serialization(s)
proof.serialize(result_data)?;
Ok(())
}
pub fn verify<R: Read>(&self, input_data: R) -> Result<bool> {
let proof = Proof::deserialize(input_data)?;
let pvk = prepare_verifying_key(&self.params.vk);
// XXX Part of input data?
let inputs = self
.circom
.get_public_inputs()
.ok_or(Report::msg("no public inputs"))?;
let verified = verify_proof(&pvk, &proof, &inputs)?;
Ok(verified)
}
}
impl Default for Multiplier {
fn default() -> Self {
Self::new().unwrap()
}
}

View File

@@ -1,21 +0,0 @@
#[cfg(test)]
mod tests {
use multiplier::public::Multiplier;
#[test]
fn multiplier_proof() {
let mul = Multiplier::new().unwrap();
let mut output_data: Vec<u8> = Vec::new();
let _ = mul.prove(&mut output_data);
let proof_data = &output_data[..];
// XXX Pass as arg?
//let pvk = prepare_verifying_key(&mul.params.vk);
let verified = mul.verify(proof_data).unwrap();
assert!(verified);
}
}

View File

@@ -1,9 +0,0 @@
[package]
name = "private-settlement"
version = "0.3.0"
edition = "2021"
license = "MIT OR Apache-2.0"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]

View File

@@ -1,7 +0,0 @@
[tasks.build]
command = "cargo"
args = ["build", "--release"]
[tasks.test]
command = "cargo"
args = ["test", "--release"]

View File

@@ -1,11 +0,0 @@
# Private Settlement Module
This module is to provide APIs to manage, compute and verify [Private Settlement](https://rfc.vac.dev/spec/44/) zkSNARK proofs and primitives.
## Build and Test
To build and test, run the following commands within the module folder
```bash
cargo make build
cargo make test
```

View File

@@ -1 +0,0 @@

View File

@@ -1,11 +0,0 @@
#[cfg(test)]
mod tests {
#[cfg(test)]
mod tests {
#[test]
fn it_works() {
let result = 2 + 2;
assert_eq!(result, 4);
}
}
}

View File

@@ -37,7 +37,11 @@ fn main() -> Result<()> {
tree_config_input,
}) => {
let mut resources: Vec<Vec<u8>> = Vec::new();
for filename in ["rln.wasm", "rln_final.zkey", "verification_key.json"] {
#[cfg(feature = "arkzkey")]
let filenames = ["rln.wasm", "rln_final.arkzkey", "verification_key.json"];
#[cfg(not(feature = "arkzkey"))]
let filenames = ["rln.wasm", "rln_final.zkey", "verification_key.json"];
for filename in filenames {
let fullpath = config.join(Path::new(filename));
let mut file = File::open(&fullpath)?;
let metadata = std::fs::metadata(&fullpath)?;
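The hunk above switches the CLI's resource list based on the `arkzkey` feature. A minimal standalone sketch of the same selection pattern; the `resource_filenames` helper and `main` below are illustrative, not part of the crate:

```rust
// Illustrative sketch only: `resource_filenames` is a hypothetical helper
// mirroring the cfg switch added to the CLI above.
fn resource_filenames() -> [&'static str; 3] {
    #[cfg(feature = "arkzkey")]
    let filenames = ["rln.wasm", "rln_final.arkzkey", "verification_key.json"];
    #[cfg(not(feature = "arkzkey"))]
    let filenames = ["rln.wasm", "rln_final.zkey", "verification_key.json"];
    filenames
}

fn main() {
    // The CLI opens each of these files from the configured resources folder.
    for filename in resource_filenames() {
        println!("would load {filename}");
    }
}
```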

View File

@@ -3,6 +3,9 @@ name = "rln-wasm"
version = "0.0.13"
edition = "2021"
license = "MIT or Apache2"
autobenches = false
autotests = false
autobins = false
[lib]
crate-type = ["cdylib", "rlib"]
@@ -30,4 +33,3 @@ console_error_panic_hook = { version = "0.1.7", optional = true }
[dev-dependencies]
wasm-bindgen-test = "0.3.13"
wasm-bindgen-futures = "0.4.33"

View File

@@ -29,3 +29,7 @@ args = ["login"]
[tasks.publish]
command = "wasm-pack"
args = ["publish", "--access", "public", "--target", "web"]
[tasks.bench]
command = "echo"
args = ["'No benchmarks available for this project'"]

View File

@@ -1,6 +1,6 @@
[package]
name = "rln"
version = "0.4.1"
version = "0.4.3"
edition = "2021"
license = "MIT OR Apache-2.0"
description = "APIs to manage, compute and verify zkSNARK proofs and RLN primitives"
@@ -19,13 +19,20 @@ doctest = false
[dependencies]
# ZKP Generation
ark-ec = { version = "=0.4.1", default-features = false }
ark-ff = { version = "=0.4.1", default-features = false, features = [ "asm"] }
ark-ff = { version = "=0.4.1", default-features = false, features = ["asm"] }
ark-std = { version = "=0.4.0", default-features = false }
ark-bn254 = { version = "=0.4.0" }
ark-groth16 = { version = "=0.4.0", features = ["parallel"], default-features = false }
ark-relations = { version = "=0.4.0", default-features = false, features = [ "std" ] }
ark-groth16 = { version = "=0.4.0", features = [
"parallel",
], default-features = false }
ark-relations = { version = "=0.4.0", default-features = false, features = [
"std",
] }
ark-serialize = { version = "=0.4.1", default-features = false }
ark-circom = { version = "=0.1.0", default-features = false, features = ["circom-2"] }
ark-circom = { version = "=0.1.0", default-features = false, features = [
"circom-2",
] }
ark-zkey = { path = "../mopro/ark-zkey", optional = true, default-features = false }
# WASM
wasmer = { version = "=2.3.0", default-features = false }
@@ -36,13 +43,15 @@ thiserror = "=1.0.39"
# utilities
cfg-if = "=1.0"
num-bigint = { version = "=0.4.3", default-features = false, features = ["rand"] }
num-bigint = { version = "=0.4.3", default-features = false, features = [
"rand",
] }
num-traits = "=0.2.15"
once_cell = "=1.17.1"
rand = "=0.8.5"
rand_chacha = "=0.3.1"
tiny-keccak = { version = "=2.0.2", features = ["keccak"] }
utils = { package = "zerokit_utils", version = "=0.4.1", path = "../utils/", default-features = false }
utils = { package = "zerokit_utils", version = "=0.4.3", path = "../utils/", default-features = false }
# serialization
@@ -57,9 +66,16 @@ criterion = { version = "=0.4.0", features = ["html_reports"] }
[features]
default = ["parallel", "wasmer/sys-default", "pmtree-ft"]
parallel = ["ark-ec/parallel", "ark-ff/parallel", "ark-std/parallel", "ark-groth16/parallel", "utils/parallel"]
parallel = [
"ark-ec/parallel",
"ark-ff/parallel",
"ark-std/parallel",
"ark-groth16/parallel",
"utils/parallel",
]
wasm = ["wasmer/js", "wasmer/std"]
fullmerkletree = ["default"]
arkzkey = ["ark-zkey"]
# Note: pmtree feature is still experimental
pmtree-ft = ["utils/pmtree-ft"]
@@ -67,3 +83,7 @@ pmtree-ft = ["utils/pmtree-ft"]
[[bench]]
name = "pmtree_benchmark"
harness = false
[[bench]]
name = "circuit_loading_benchmark"
harness = false

View File

@@ -0,0 +1,15 @@
use criterion::{criterion_group, criterion_main, Criterion};
use rln::circuit::TEST_RESOURCES_FOLDER;
// Depending on the key type (enabled by the `--features arkzkey` flag)
// the upload speed from the `rln_final.zkey` or `rln_final.arkzkey` file is calculated
pub fn key_load_benchmark(c: &mut Criterion) {
c.bench_function("zkey::upload_from_folder", |b| {
b.iter(|| {
let _ = rln::circuit::zkey_from_folder(TEST_RESOURCES_FOLDER);
})
});
}
criterion_group!(benches, key_load_benchmark);
criterion_main!(benches);

View File

@@ -1,7 +1,6 @@
use criterion::{criterion_group, criterion_main, Criterion};
use utils::ZerokitMerkleTree;
use rln::{circuit::Fr, pm_tree_adapter::PmTree};
use utils::ZerokitMerkleTree;
pub fn pmtree_benchmark(c: &mut Criterion) {
let mut tree = PmTree::default(2).unwrap();

Binary file not shown.

View File

@@ -1,114 +0,0 @@
{
"protocol": "groth16",
"curve": "bn128",
"nPublic": 5,
"vk_alpha_1": [
"20491192805390485299153009773594534940189261866228447918068658471970481763042",
"9383485363053290200918347156157836566562967994039712273449902621266178545958",
"1"
],
"vk_beta_2": [
[
"6375614351688725206403948262868962793625744043794305715222011528459656738731",
"4252822878758300859123897981450591353533073413197771768651442665752259397132"
],
[
"10505242626370262277552901082094356697409835680220590971873171140371331206856",
"21847035105528745403288232691147584728191162732299865338377159692350059136679"
],
[
"1",
"0"
]
],
"vk_gamma_2": [
[
"10857046999023057135944570762232829481370756359578518086990519993285655852781",
"11559732032986387107991004021392285783925812861821192530917403151452391805634"
],
[
"8495653923123431417604973247489272438418190587263600148770280649306958101930",
"4082367875863433681332203403145435568316851327593401208105741076214120093531"
],
[
"1",
"0"
]
],
"vk_delta_2": [
[
"3689226096868373144622340732612563195789744807442014147637039988348252818659",
"18947459102520510468597269280688700807407684209892273827108603062925288762423"
],
[
"5816405977664254142436796931495067997250259145480168934320978750042633353708",
"14555486789839131710516067578112557185806110684461247253491378577062852578892"
],
[
"1",
"0"
]
],
"vk_alphabeta_12": [
[
[
"2029413683389138792403550203267699914886160938906632433982220835551125967885",
"21072700047562757817161031222997517981543347628379360635925549008442030252106"
],
[
"5940354580057074848093997050200682056184807770593307860589430076672439820312",
"12156638873931618554171829126792193045421052652279363021382169897324752428276"
],
[
"7898200236362823042373859371574133993780991612861777490112507062703164551277",
"7074218545237549455313236346927434013100842096812539264420499035217050630853"
]
],
[
[
"7077479683546002997211712695946002074877511277312570035766170199895071832130",
"10093483419865920389913245021038182291233451549023025229112148274109565435465"
],
[
"4595479056700221319381530156280926371456704509942304414423590385166031118820",
"19831328484489333784475432780421641293929726139240675179672856274388269393268"
],
[
"11934129596455521040620786944827826205713621633706285934057045369193958244500",
"8037395052364110730298837004334506829870972346962140206007064471173334027475"
]
]
],
"IC": [
[
"5412646265162057015134786739992128493053406364679846617542694915593022919217",
"9665511386935901867415947590751330959748921059696950821222365265700369811120",
"1"
],
[
"4294362651275803035824711662252687124584574009834787359330648404293309808795",
"1861758671717754835450145961645465880215655915164196594175485865489885224285",
"1"
],
[
"1911114017568107170522785254288953144010421698038439931935418407428234018676",
"13761363892532562822351086117281964648116890138564516558345965908415019790129",
"1"
],
[
"16312980235585837964428386585067529342038135099260965575497230302984635878053",
"20286500347141875536561618770383759234192052027362539966911091298688849002783",
"1"
],
[
"21038649368092225315431823433752123495654049075935052064397443455654061176031",
"6976971039866104284556300526186000690370678593992968176463280189048347216392",
"1"
],
[
"971745799362951123575710699973701411260115357326598060711339429906895409324",
"12959821343398475313407440786226277845673045139874184400082186049649123071798",
"1"
]
]
}

View File

@@ -4,14 +4,12 @@ use ark_bn254::{
Bn254, Fq as ArkFq, Fq2 as ArkFq2, Fr as ArkFr, G1Affine as ArkG1Affine,
G1Projective as ArkG1Projective, G2Affine as ArkG2Affine, G2Projective as ArkG2Projective,
};
use ark_circom::read_zkey;
use ark_groth16::{ProvingKey, VerifyingKey};
use ark_relations::r1cs::ConstraintMatrices;
use cfg_if::cfg_if;
use color_eyre::{Report, Result};
use num_bigint::BigUint;
use serde_json::Value;
use std::io::Cursor;
use std::str::FromStr;
cfg_if! {
@@ -25,16 +23,23 @@ cfg_if! {
}
}
cfg_if! {
if #[cfg(feature = "arkzkey")] {
use ark_zkey::read_arkzkey_from_bytes;
const ARKZKEY_FILENAME: &str = "rln_final.arkzkey";
} else {
use std::io::Cursor;
use ark_circom::read_zkey;
}
}
const ZKEY_FILENAME: &str = "rln_final.zkey";
const VK_FILENAME: &str = "verification_key.json";
const WASM_FILENAME: &str = "rln.wasm";
// These parameters are used for tests
// Note that the circuit and keys in TEST_RESOURCES_FOLDER are compiled for Merkle trees of height 20 & 32
// Changing these parameters to other values than these defaults will cause zkSNARK proof verification to fail
pub const TEST_PARAMETERS_INDEX: usize = 0;
pub const TEST_TREE_HEIGHT: usize = [20, 32][TEST_PARAMETERS_INDEX];
pub const TEST_RESOURCES_FOLDER: &str = ["tree_height_20", "tree_height_32"][TEST_PARAMETERS_INDEX];
pub const TEST_TREE_HEIGHT: usize = 20;
pub const TEST_RESOURCES_FOLDER: &str = "tree_height_20";
#[cfg(not(target_arch = "wasm32"))]
static RESOURCES_DIR: Dir<'_> = include_dir!("$CARGO_MANIFEST_DIR/resources");
@@ -53,8 +58,15 @@ pub type G2Projective = ArkG2Projective;
// Loads the proving key using a bytes vector
pub fn zkey_from_raw(zkey_data: &Vec<u8>) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
if !zkey_data.is_empty() {
let mut c = Cursor::new(zkey_data);
let proving_key_and_matrices = read_zkey(&mut c)?;
let proving_key_and_matrices = match () {
#[cfg(feature = "arkzkey")]
() => read_arkzkey_from_bytes(zkey_data.as_slice())?,
#[cfg(not(feature = "arkzkey"))]
() => {
let mut c = Cursor::new(zkey_data);
read_zkey(&mut c)?
}
};
Ok(proving_key_and_matrices)
} else {
Err(Report::msg("No proving key found!"))
@@ -66,10 +78,21 @@ pub fn zkey_from_raw(zkey_data: &Vec<u8>) -> Result<(ProvingKey<Curve>, Constrai
pub fn zkey_from_folder(
resources_folder: &str,
) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
#[cfg(feature = "arkzkey")]
let zkey = RESOURCES_DIR.get_file(Path::new(resources_folder).join(ARKZKEY_FILENAME));
#[cfg(not(feature = "arkzkey"))]
let zkey = RESOURCES_DIR.get_file(Path::new(resources_folder).join(ZKEY_FILENAME));
if let Some(zkey) = zkey {
let mut c = Cursor::new(zkey.contents());
let proving_key_and_matrices = read_zkey(&mut c)?;
let proving_key_and_matrices = match () {
#[cfg(feature = "arkzkey")]
() => read_arkzkey_from_bytes(zkey.contents())?,
#[cfg(not(feature = "arkzkey"))]
() => {
let mut c = Cursor::new(zkey.contents());
read_zkey(&mut c)?
}
};
Ok(proving_key_and_matrices)
} else {
Err(Report::msg("No proving key found!"))
@@ -77,7 +100,7 @@ pub fn zkey_from_folder(
}
// Loads the verification key from a bytes vector
pub fn vk_from_raw(vk_data: &Vec<u8>, zkey_data: &Vec<u8>) -> Result<VerifyingKey<Curve>> {
pub fn vk_from_raw(vk_data: &[u8], zkey_data: &Vec<u8>) -> Result<VerifyingKey<Curve>> {
let verifying_key: VerifyingKey<Curve>;
if !vk_data.is_empty() {
@@ -145,7 +168,7 @@ pub fn circom_from_folder(resources_folder: &str) -> Result<&'static Mutex<Witne
// Utilities to convert a json verification key in a groth16::VerificationKey
fn fq_from_str(s: &str) -> Result<Fq> {
Ok(Fq::try_from(BigUint::from_str(s)?)?)
Ok(Fq::from(BigUint::from_str(s)?))
}
// Extracts the element in G1 corresponding to its JSON serialization

View File

@@ -246,7 +246,8 @@ impl ZerokitMerkleTree for PmTree {
let data = self.tree.db.get(METADATA_KEY)?;
if data.is_none() {
return Err(Report::msg("metadata does not exist"));
// send empty Metadata
return Ok(Vec::new());
}
Ok(data.unwrap())
}
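With this change, an empty byte vector means "no metadata stored yet" rather than an error. A small illustrative sketch of caller-side handling; the `describe_metadata` helper is hypothetical:

```rust
// Hypothetical caller-side handling: after this change an empty vector means
// "no metadata has been set", while before the call returned an error.
fn describe_metadata(metadata: &[u8]) -> String {
    if metadata.is_empty() {
        "no metadata set for this tree".to_string()
    } else {
        format!("{} bytes of metadata", metadata.len())
    }
}

fn main() {
    assert_eq!(describe_metadata(&[]), "no metadata set for this tree");
    assert_eq!(describe_metadata(&[1, 2, 3]), "3 bytes of metadata");
}
```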

View File

@@ -58,7 +58,7 @@ impl RLN<'_> {
///
/// Input parameters are
/// - `tree_height`: the height of the internal Merkle tree
/// - `input_data`: a reader for the string path of the resource folder containing the ZK circuit (`rln.wasm`), the proving key (`rln_final.zkey`) and the verification key (`verification_key.json`).
/// - `input_data`: a reader for the string path of the resource folder containing the ZK circuit (`rln.wasm`), the proving key (`rln_final.zkey`) or (`rln_final.arkzkey`) and the verification key (`verification_key.json`).
///
/// Example:
/// ```
@@ -83,8 +83,8 @@ impl RLN<'_> {
let tree_config = rln_config["tree_config"].to_string();
let witness_calculator = circom_from_folder(resources_folder)?;
let proving_key = zkey_from_folder(resources_folder)?;
let verification_key = vk_from_folder(resources_folder)?;
let tree_config: <PoseidonTree as ZerokitMerkleTree>::Config = if tree_config.is_empty() {
@@ -115,7 +115,7 @@ impl RLN<'_> {
/// Input parameters are
/// - `tree_height`: the height of the internal Merkle tree
/// - `circom_vec`: a byte vector containing the ZK circuit (`rln.wasm`) as binary file
/// - `zkey_vec`: a byte vector containing to the proving key (`rln_final.zkey`) as binary file
/// - `zkey_vec`: a byte vector containing to the proving key (`rln_final.zkey`) or (`rln_final.arkzkey`) as binary file
/// - `vk_vec`: a byte vector containing to the verification key (`verification_key.json`) as binary file
/// - `tree_config`: a reader for a string containing a json with the merkle tree configuration
///
@@ -684,7 +684,7 @@ impl RLN<'_> {
/// Output values are:
/// - `output_data`: a writer receiving the serialization of the zkSNARK proof and the circuit evaluations outputs, i.e. `[ proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> | nullifier<32> | rln_identifier<32> ]`
///
/// Example
/// Example
/// ```
/// use rln::protocol::*:
/// use rln::utils::*;
@@ -829,7 +829,7 @@ impl RLN<'_> {
/// Note that contrary to [`verify_rln_proof`](crate::public::RLN::verify_rln_proof), this function does not check if the internal Merkle tree root corresponds to the root provided as input, but rather checks if the root provided as input in `input_data` corresponds to one of the roots serialized in `roots_data`.
///
/// If `roots_data` contains no root (is empty), root validation is skipped and the proof will be correctly verified only if the other proof values results valid (i.e., zk-proof, signal, x-coordinate, RLN identifier)
///
///
/// Example
/// ```
/// // proof_data is computed as in the example code snippet provided for rln::public::RLN::generate_rln_proof
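The updated docs describe `input_data` as a reader over a JSON configuration. A minimal construction sketch matching the call pattern used in the new tests further down; it assumes the `rln` and `serde_json` crates and the test resources bundled with the crate:

```rust
use std::io::Cursor;

use rln::circuit::{TEST_RESOURCES_FOLDER, TEST_TREE_HEIGHT};
use rln::public::RLN;
use serde_json::json;

fn main() {
    // JSON config read from `input_data`; only `resources_folder` is set here,
    // so the default tree configuration is used. The proving key picked up from
    // that folder is rln_final.zkey, or rln_final.arkzkey with `--features arkzkey`.
    let config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
    let rln = RLN::new(TEST_TREE_HEIGHT, Cursor::new(config)).unwrap();

    // A freshly created tree carries no metadata, so this writes zero bytes
    // (matching the new `test_empty_metadata` test below).
    let mut buffer = Cursor::new(Vec::<u8>::new());
    rln.get_metadata(&mut buffer).unwrap();
    assert!(buffer.into_inner().is_empty());
}
```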

View File

@@ -977,7 +977,7 @@ fn test_get_leaf() {
}
#[test]
fn test_metadata() {
fn test_valid_metadata() {
let tree_height = TEST_TREE_HEIGHT;
let input_buffer =
@@ -993,3 +993,18 @@ fn test_metadata() {
assert_eq!(arbitrary_metadata, received_metadata);
}
#[test]
fn test_empty_metadata() {
let tree_height = TEST_TREE_HEIGHT;
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let rln = RLN::new(tree_height, input_buffer).unwrap();
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_metadata(&mut buffer).unwrap();
let received_metadata = buffer.into_inner();
assert_eq!(received_metadata.len(), 0);
}

View File

@@ -8,7 +8,7 @@ use num_traits::Num;
use std::iter::Extend;
pub fn to_bigint(el: &Fr) -> Result<BigInt> {
let res: BigUint = (*el).try_into()?;
let res: BigUint = (*el).into();
Ok(res.into())
}
@@ -28,10 +28,10 @@ pub fn str_to_fr(input: &str, radix: u32) -> Result<Fr> {
input_clean = input_clean.trim().to_string();
if radix == 10 {
Ok(BigUint::from_str_radix(&input_clean, radix)?.try_into()?)
Ok(BigUint::from_str_radix(&input_clean, radix)?.into())
} else {
input_clean = input_clean.replace("0x", "");
Ok(BigUint::from_str_radix(&input_clean, radix)?.try_into()?)
Ok(BigUint::from_str_radix(&input_clean, radix)?.into())
}
}
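After this change, converting between `Fr` and `BigUint` relies on the infallible `From` impls from ark-ff, so only the string parsing step can still fail. A short sketch, assuming the `rln` and `num-bigint` crates:

```rust
use num_bigint::BigUint;
use rln::circuit::Fr;
use rln::utils::str_to_fr;

fn main() {
    // Fr <-> BigUint round trip with plain From/Into conversions.
    let x = Fr::from(42u64);
    let as_biguint: BigUint = x.into();
    let back: Fr = as_biguint.into();
    assert_eq!(x, back);

    // The only remaining failure mode of str_to_fr is parsing the string itself.
    assert_eq!(str_to_fr("0x2a", 16).unwrap(), x);
    assert!(str_to_fr("not a number", 10).is_err());
}
```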

View File

@@ -341,7 +341,6 @@ mod test {
fn test_merkle_proof_ffi() {
let tree_height = TEST_TREE_HEIGHT;
let leaf_index = 3;
let user_message_limit = 1;
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
@@ -373,7 +372,7 @@ mod test {
use ark_ff::BigInt;
if TEST_TREE_HEIGHT == 20 || TEST_TREE_HEIGHT == 32 {
if TEST_TREE_HEIGHT == 20 {
assert_eq!(
root,
BigInt([
@@ -619,7 +618,9 @@ mod test {
circom_file
.read_exact(&mut circom_buffer)
.expect("buffer overflow");
#[cfg(feature = "arkzkey")]
let zkey_path = format!("./resources/tree_height_{TEST_TREE_HEIGHT}/rln_final.arkzkey");
#[cfg(not(feature = "arkzkey"))]
let zkey_path = format!("./resources/tree_height_{TEST_TREE_HEIGHT}/rln_final.zkey");
let mut zkey_file = File::open(&zkey_path).expect("no file found");
let metadata = std::fs::metadata(&zkey_path).expect("unable to read metadata");
@@ -671,7 +672,7 @@ mod test {
fn test_rln_proof_ffi() {
let tree_height = TEST_TREE_HEIGHT;
let no_of_leaves = 256;
let user_message_limit = Fr::from(65535);
let user_message_limit = Fr::from(100);
// We generate a vector of random leaves
let mut leaves: Vec<Fr> = Vec::new();
@@ -704,7 +705,6 @@ mod test {
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (identity_secret_hash, read) = bytes_le_to_fr(&result_data);
let (id_commitment, _) = bytes_le_to_fr(&result_data[read..].to_vec());
let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit]);
let identity_index: usize = no_of_leaves;
@@ -717,7 +717,6 @@ mod test {
let rln_identifier = hash_to_field(b"test-rln-identifier");
let external_nullifier = utils_poseidon_hash(&[epoch, rln_identifier]);
let user_message_limit = Fr::from(100);
let message_id = Fr::from(0);
let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit]);
@@ -727,9 +726,6 @@ mod test {
let success = set_next_leaf(rln_pointer, input_buffer);
assert!(success, "set next leaf call failed");
// We generate a random rln_identifier
let rln_identifier = hash_to_field(b"test-rln-identifier");
// We prepare input for generate_rln_proof API
// input_data is [ identity_secret<32> | id_index<8> | user_message_limit<32> | message_id<32> | external_nullifier<32> | signal_len<8> | signal<var> ]
let mut serialized: Vec<u8> = Vec::new();
@@ -819,7 +815,6 @@ mod test {
let message_id = Fr::from(0);
// We set as leaf rate_commitment, its index would be equal to no_of_leaves
let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit]);
let leaf_ser = fr_to_bytes_le(&rate_commitment);
let input_buffer = &Buffer::from(leaf_ser.as_ref());
let success = set_next_leaf(rln_pointer, input_buffer);
@@ -1268,7 +1263,7 @@ mod test {
}
#[test]
fn test_metadata() {
fn test_valid_metadata() {
// We create a RLN instance
let tree_height = TEST_TREE_HEIGHT;
@@ -1294,4 +1289,25 @@ mod test {
assert_eq!(result_data, seed_bytes.to_vec());
}
#[test]
fn test_empty_metadata() {
// We create a RLN instance
let tree_height = TEST_TREE_HEIGHT;
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = get_metadata(rln_pointer, output_buffer.as_mut_ptr());
assert!(success, "get_metadata call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
assert_eq!(output_buffer.len, 0);
}
}

View File

@@ -1,9 +1,9 @@
#[cfg(test)]
mod test {
use ark_ff::BigInt;
use rln::circuit::zkey_from_folder;
use rln::circuit::{
circom_from_folder, vk_from_folder, zkey_from_folder, Fr, TEST_RESOURCES_FOLDER,
TEST_TREE_HEIGHT,
circom_from_folder, vk_from_folder, Fr, TEST_RESOURCES_FOLDER, TEST_TREE_HEIGHT,
};
use rln::hashers::{hash_to_field, poseidon_hash};
use rln::poseidon_tree::PoseidonTree;
@@ -92,7 +92,7 @@ mod test {
// We check correct computation of the root
let root = tree.root();
if TEST_TREE_HEIGHT == 20 || TEST_TREE_HEIGHT == 32 {
if TEST_TREE_HEIGHT == 20 {
assert_eq!(
root,
BigInt([

View File

@@ -46,15 +46,6 @@ mod test {
2328960363755461975
]))
);
} else if TEST_TREE_HEIGHT == 32 {
assert_eq!(
root,
str_to_fr(
"0x21947ffd0bce0c385f876e7c97d6a42eec5b1fe935aab2f01c1f8a8cbcc356d2",
16
)
.unwrap()
);
}
// We check correct computation of merkle proof
@@ -148,7 +139,7 @@ mod test {
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
// We add the remaining elements for the case TEST_TREE_HEIGHT = 20
if TEST_TREE_HEIGHT == 20 || TEST_TREE_HEIGHT == 32 {
if TEST_TREE_HEIGHT == 20 {
expected_path_elements.append(&mut vec![
str_to_fr(
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",

View File

@@ -1,50 +0,0 @@
[package]
name = "semaphore-wrapper"
version = "0.3.0"
edition = "2021"
license = "MIT OR Apache-2.0"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[features]
default = []
dylib = [ "wasmer/dylib", "wasmer-engine-dylib", "wasmer-compiler-cranelift" ]
[dependencies]
ark-bn254 = { version = "0.3.0" }
ark-circom = { git = "https://github.com/gakonst/ark-circom", features=["circom-2"], rev = "35ce5a9" }
ark-ec = { version = "0.3.0", default-features = false, features = ["parallel"] }
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", features = ["parallel"] }
ark-relations = { version = "0.3.0", default-features = false }
ark-std = { version = "0.3.0", default-features = false, features = ["parallel"] }
color-eyre = "0.6.2"
once_cell = "1.17.1"
rand = "0.8.5"
semaphore = { git = "https://github.com/worldcoin/semaphore-rs", rev = "ee658c2"}
ethers-core = { version = "2.0.10", default-features = false }
ruint = { version = "1.10.0", features = [ "serde", "num-bigint", "ark-ff" ] }
serde = "1.0"
thiserror = "1.0.39"
wasmer = { version = "2.3" }
[dev-dependencies]
rand_chacha = "0.3.1"
serde_json = "1.0.96"
[build-dependencies]
color-eyre = "0.6.2"
wasmer = { version = "2.3" }
wasmer-engine-dylib = { version = "2.3.0", optional = true }
wasmer-compiler-cranelift = { version = "3.3.0", optional = true }
[profile.release]
codegen-units = 1
lto = true
panic = "abort"
opt-level = 3
# Compilation profile for any non-workspace member.
# Dependencies are optimized, even in a dev build. This improves dev performance
# while having neglible impact on incremental build times.
[profile.dev.package."*"]
opt-level = 3

View File

@@ -1,7 +0,0 @@
[tasks.build]
command = "cargo"
args = ["build", "--release"]
[tasks.test]
command = "cargo"
args = ["test", "--release"]

View File

@@ -1,18 +0,0 @@
# Semaphore example package
This is basically a wrapper around/copy of
https://github.com/worldcoin/semaphore-rs to illustrate how e.g. RLN package
can be structured like.
Goal is also to provide a basic FFI around protocol.rs, which is currently not
in scope for that project.
See that project for more information.
## Build and Test
To build and test, run the following commands within the module folder
```bash
cargo make build
cargo make test
```

View File

@@ -1,111 +0,0 @@
// Adapted from semaphore-rs/build.rs
use color_eyre::eyre::{eyre, Result};
use std::{
path::{Component, Path, PathBuf},
process::Command,
};
const ZKEY_FILE: &str = "./vendor/semaphore/build/snark/semaphore_final.zkey";
const WASM_FILE: &str = "./vendor/semaphore/build/snark/semaphore.wasm";
// See <https://internals.rust-lang.org/t/path-to-lexical-absolute/14940>
fn absolute(path: &str) -> Result<PathBuf> {
let path = Path::new(path);
let mut absolute = if path.is_absolute() {
PathBuf::new()
} else {
std::env::current_dir()?
};
for component in path.components() {
match component {
Component::CurDir => {}
Component::ParentDir => {
absolute.pop();
}
component => absolute.push(component.as_os_str()),
}
}
Ok(absolute)
}
fn build_circuit() -> Result<()> {
println!("cargo:rerun-if-changed=./vendor/semaphore");
let run = |cmd: &[&str]| -> Result<()> {
// TODO: Use ExitCode::exit_ok() when stable.
Command::new(cmd[0])
.args(cmd[1..].iter())
.current_dir("./vendor/semaphore")
.status()?
.success()
.then_some(())
.ok_or(eyre!("procees returned failure"))?;
Ok(())
};
// Compute absolute paths
let zkey_file = absolute(ZKEY_FILE)?;
let wasm_file = absolute(WASM_FILE)?;
// Build circuits if not exists
// TODO: This does not rebuild if the semaphore submodule is changed.
// NOTE: This requires npm / nodejs to be installed.
if !(zkey_file.exists() && wasm_file.exists()) {
run(&["npm", "install"])?;
run(&["npm", "exec", "ts-node", "./scripts/compile-circuits.ts"])?;
}
assert!(zkey_file.exists());
assert!(wasm_file.exists());
// Export generated paths
println!("cargo:rustc-env=BUILD_RS_ZKEY_FILE={}", zkey_file.display());
println!("cargo:rustc-env=BUILD_RS_WASM_FILE={}", wasm_file.display());
Ok(())
}
#[cfg(feature = "dylib")]
fn build_dylib() -> Result<()> {
use enumset::enum_set;
use std::{env, str::FromStr};
use wasmer::{Module, Store, Target, Triple};
use wasmer_compiler_cranelift::Cranelift;
use wasmer_engine_dylib::Dylib;
let wasm_file = absolute(WASM_FILE)?;
assert!(wasm_file.exists());
let out_dir = env::var("OUT_DIR")?;
let out_dir = Path::new(&out_dir).to_path_buf();
let dylib_file = out_dir.join("semaphore.dylib");
println!(
"cargo:rustc-env=CIRCUIT_WASM_DYLIB={}",
dylib_file.display()
);
if dylib_file.exists() {
return Ok(());
}
// Create a WASM engine for the target that can compile
let triple = Triple::from_str(&env::var("TARGET")?).map_err(|e| eyre!(e))?;
let cpu_features = enum_set!();
let target = Target::new(triple, cpu_features);
let compiler_config = Cranelift::default();
let engine = Dylib::new(compiler_config).target(target).engine();
// Compile the WASM module
let store = Store::new(&engine);
let module = Module::from_file(&store, &wasm_file)?;
module.serialize_to_file(&dylib_file)?;
assert!(dylib_file.exists());
println!("cargo:warning=Circuit dylib is in {}", dylib_file.display());
Ok(())
}
fn main() -> Result<()> {
build_circuit()?;
#[cfg(feature = "dylib")]
build_dylib()?;
Ok(())
}

View File

@@ -1,79 +0,0 @@
// Adapted from semaphore-rs/src/circuit.rs
use ark_bn254::{Bn254, Fr};
use ark_circom::{read_zkey, WitnessCalculator};
use ark_groth16::ProvingKey;
use ark_relations::r1cs::ConstraintMatrices;
use core::include_bytes;
use once_cell::sync::{Lazy, OnceCell};
use std::{io::Cursor, sync::Mutex};
use wasmer::{Module, Store};
#[cfg(feature = "dylib")]
use std::{env, path::Path};
#[cfg(feature = "dylib")]
use wasmer::Dylib;
const ZKEY_BYTES: &[u8] = include_bytes!(env!("BUILD_RS_ZKEY_FILE"));
#[cfg(not(feature = "dylib"))]
const WASM: &[u8] = include_bytes!(env!("BUILD_RS_WASM_FILE"));
static ZKEY: Lazy<(ProvingKey<Bn254>, ConstraintMatrices<Fr>)> = Lazy::new(|| {
let mut reader = Cursor::new(ZKEY_BYTES);
read_zkey(&mut reader).expect("zkey should be valid")
});
static WITNESS_CALCULATOR: OnceCell<Mutex<WitnessCalculator>> = OnceCell::new();
/// Initialize the library.
#[cfg(feature = "dylib")]
pub fn initialize(dylib_path: &Path) {
WITNESS_CALCULATOR
.set(from_dylib(dylib_path))
.expect("Failed to initialize witness calculator");
// Force init of ZKEY
Lazy::force(&ZKEY);
}
#[cfg(feature = "dylib")]
fn from_dylib(path: &Path) -> Mutex<WitnessCalculator> {
let store = Store::new(&Dylib::headless().engine());
// The module must be exported using [`Module::serialize`].
let module = unsafe {
Module::deserialize_from_file(&store, path).expect("Failed to load wasm dylib module")
};
let result =
WitnessCalculator::from_module(module).expect("Failed to create witness calculator");
Mutex::new(result)
}
#[must_use]
pub fn zkey() -> &'static (ProvingKey<Bn254>, ConstraintMatrices<Fr>) {
&ZKEY
}
#[cfg(feature = "dylib")]
#[must_use]
pub fn witness_calculator() -> &'static Mutex<WitnessCalculator> {
WITNESS_CALCULATOR.get_or_init(|| {
let path = env::var("CIRCUIT_WASM_DYLIB").expect(
"Semaphore-rs is not initialized. The library needs to be initialized before use when \
build with the `cdylib` feature. You can initialize by calling `initialize` or \
seting the `CIRCUIT_WASM_DYLIB` environment variable.",
);
from_dylib(Path::new(&path))
})
}
#[cfg(not(feature = "dylib"))]
#[must_use]
pub fn witness_calculator() -> &'static Mutex<WitnessCalculator> {
WITNESS_CALCULATOR.get_or_init(|| {
let store = Store::default();
let module = Module::from_binary(&store, WASM).expect("wasm should be valid");
let result =
WitnessCalculator::from_module(module).expect("Failed to create witness calculator");
Mutex::new(result)
})
}

View File

@@ -1,7 +0,0 @@
#![allow(clippy::multiple_crate_versions)]
pub mod circuit;
pub mod protocol;
#[cfg(feature = "dylib")]
pub use circuit::initialize;

View File

@@ -1,215 +0,0 @@
// Adapted from semaphore-rs/src/protocol.rs
// For illustration purposes only as an example protocol
// Private module
use crate::circuit::{witness_calculator, zkey};
use ark_bn254::{Bn254, Parameters};
use ark_circom::CircomReduction;
use ark_ec::bn::Bn;
use ark_groth16::{
create_proof_with_reduction_and_matrices, prepare_verifying_key, Proof as ArkProof,
};
use ark_relations::r1cs::SynthesisError;
use ark_std::UniformRand;
use color_eyre::{Report, Result};
use ethers_core::types::U256;
use rand::{thread_rng, Rng};
use semaphore::{
identity::Identity,
merkle_tree::{self, Branch},
poseidon,
poseidon_tree::PoseidonHash,
Field,
};
use serde::{Deserialize, Serialize};
use std::time::Instant;
use thiserror::Error;
// Matches the private G1Tup type in ark-circom.
pub type G1 = (U256, U256);
// Matches the private G2Tup type in ark-circom.
pub type G2 = ([U256; 2], [U256; 2]);
/// Wrap a proof object so we have serde support
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Proof(G1, G2, G1);
impl From<ArkProof<Bn<Parameters>>> for Proof {
fn from(proof: ArkProof<Bn<Parameters>>) -> Self {
let proof = ark_circom::ethereum::Proof::from(proof);
let (a, b, c) = proof.as_tuple();
Self(a, b, c)
}
}
impl From<Proof> for ArkProof<Bn<Parameters>> {
fn from(proof: Proof) -> Self {
let eth_proof = ark_circom::ethereum::Proof {
a: ark_circom::ethereum::G1 {
x: proof.0 .0,
y: proof.0 .1,
},
#[rustfmt::skip] // Rustfmt inserts some confusing spaces
b: ark_circom::ethereum::G2 {
// The order of coefficients is flipped.
x: [proof.1.0[1], proof.1.0[0]],
y: [proof.1.1[1], proof.1.1[0]],
},
c: ark_circom::ethereum::G1 {
x: proof.2 .0,
y: proof.2 .1,
},
};
eth_proof.into()
}
}
/// Helper to merkle proof into a bigint vector
/// TODO: we should create a From trait for this
fn merkle_proof_to_vec(proof: &merkle_tree::Proof<PoseidonHash>) -> Vec<Field> {
proof
.0
.iter()
.map(|x| match x {
Branch::Left(value) | Branch::Right(value) => *value,
})
.collect()
}
/// Generates the nullifier hash
#[must_use]
pub fn generate_nullifier_hash(identity: &Identity, external_nullifier: Field) -> Field {
poseidon::hash2(external_nullifier, identity.nullifier)
}
#[derive(Error, Debug)]
pub enum ProofError {
#[error("Error reading circuit key: {0}")]
CircuitKeyError(#[from] std::io::Error),
#[error("Error producing witness: {0}")]
WitnessError(Report),
#[error("Error producing proof: {0}")]
SynthesisError(#[from] SynthesisError),
#[error("Error converting public input: {0}")]
ToFieldError(#[from] ruint::ToFieldError),
}
/// Generates a semaphore proof
///
/// # Errors
///
/// Returns a [`ProofError`] if proving fails.
pub fn generate_proof(
identity: &Identity,
merkle_proof: &merkle_tree::Proof<PoseidonHash>,
external_nullifier_hash: Field,
signal_hash: Field,
) -> Result<Proof, ProofError> {
generate_proof_rng(
identity,
merkle_proof,
external_nullifier_hash,
signal_hash,
&mut thread_rng(),
)
}
/// Generates a semaphore proof from entropy
///
/// # Errors
///
/// Returns a [`ProofError`] if proving fails.
pub fn generate_proof_rng(
identity: &Identity,
merkle_proof: &merkle_tree::Proof<PoseidonHash>,
external_nullifier_hash: Field,
signal_hash: Field,
rng: &mut impl Rng,
) -> Result<Proof, ProofError> {
generate_proof_rs(
identity,
merkle_proof,
external_nullifier_hash,
signal_hash,
ark_bn254::Fr::rand(rng),
ark_bn254::Fr::rand(rng),
)
}
fn generate_proof_rs(
identity: &Identity,
merkle_proof: &merkle_tree::Proof<PoseidonHash>,
external_nullifier_hash: Field,
signal_hash: Field,
r: ark_bn254::Fr,
s: ark_bn254::Fr,
) -> Result<Proof, ProofError> {
let inputs = [
("identityNullifier", vec![identity.nullifier]),
("identityTrapdoor", vec![identity.trapdoor]),
("treePathIndices", merkle_proof.path_index()),
("treeSiblings", merkle_proof_to_vec(merkle_proof)),
("externalNullifier", vec![external_nullifier_hash]),
("signalHash", vec![signal_hash]),
];
let inputs = inputs.into_iter().map(|(name, values)| {
(
name.to_string(),
values.iter().copied().map(Into::into).collect::<Vec<_>>(),
)
});
let now = Instant::now();
let full_assignment = witness_calculator()
.lock()
.expect("witness_calculator mutex should not get poisoned")
.calculate_witness_element::<Bn254, _>(inputs, false)
.map_err(ProofError::WitnessError)?;
println!("witness generation took: {:.2?}", now.elapsed());
let now = Instant::now();
let zkey = zkey();
let ark_proof = create_proof_with_reduction_and_matrices::<_, CircomReduction>(
&zkey.0,
r,
s,
&zkey.1,
zkey.1.num_instance_variables,
zkey.1.num_constraints,
full_assignment.as_slice(),
)?;
let proof = ark_proof.into();
println!("proof generation took: {:.2?}", now.elapsed());
Ok(proof)
}
/// Verifies a given semaphore proof
///
/// # Errors
///
/// Returns a [`ProofError`] if verifying fails. Verification failure does not
/// necessarily mean the proof is incorrect.
pub fn verify_proof(
root: Field,
nullifier_hash: Field,
signal_hash: Field,
external_nullifier_hash: Field,
proof: &Proof,
) -> Result<bool, ProofError> {
let zkey = zkey();
let pvk = prepare_verifying_key(&zkey.0.vk);
let public_inputs = [root, nullifier_hash, signal_hash, external_nullifier_hash]
.iter()
.map(ark_bn254::Fr::try_from)
.collect::<Result<Vec<_>, _>>()?;
let ark_proof = (*proof).into();
let result = ark_groth16::verify_proof(&pvk, &ark_proof, &public_inputs[..])?;
Ok(result)
}

View File

@@ -1,115 +0,0 @@
#[cfg(test)]
mod tests {
use ark_bn254::Parameters;
use ark_ec::bn::Bn;
use ark_groth16::Proof as ArkProof;
use rand::{Rng, SeedableRng as _};
use rand_chacha::ChaChaRng;
use semaphore::{hash_to_field, identity::Identity, poseidon_tree::PoseidonTree, Field};
use semaphore_wrapper::protocol::{
generate_nullifier_hash, generate_proof, generate_proof_rng, verify_proof, Proof,
};
use serde_json::json;
#[test]
fn test_semaphore() {
// generate identity
let id = Identity::from_seed(b"secret");
// generate merkle tree
let leaf = Field::from(0);
let mut tree = PoseidonTree::new(21, leaf);
tree.set(0, id.commitment());
let merkle_proof = tree.proof(0).expect("proof should exist");
let root = tree.root().into();
// change signal and external_nullifier here
let signal_hash = hash_to_field(b"xxx");
let external_nullifier_hash = hash_to_field(b"appId");
let nullifier_hash = generate_nullifier_hash(&id, external_nullifier_hash);
let proof =
generate_proof(&id, &merkle_proof, external_nullifier_hash, signal_hash).unwrap();
let success = verify_proof(
root,
nullifier_hash,
signal_hash,
external_nullifier_hash,
&proof,
)
.unwrap();
assert!(success);
}
fn arb_proof(seed: u64) -> Proof {
// Deterministic randomness for testing
let mut rng = ChaChaRng::seed_from_u64(seed);
// generate identity
let seed: [u8; 16] = rng.gen();
let id = Identity::from_seed(&seed);
// generate merkle tree
let leaf = Field::from(0);
let mut tree = PoseidonTree::new(21, leaf);
tree.set(0, id.commitment());
let merkle_proof = tree.proof(0).expect("proof should exist");
let external_nullifier: [u8; 16] = rng.gen();
let external_nullifier_hash = hash_to_field(&external_nullifier);
let signal: [u8; 16] = rng.gen();
let signal_hash = hash_to_field(&signal);
generate_proof_rng(
&id,
&merkle_proof,
external_nullifier_hash,
signal_hash,
&mut rng,
)
.unwrap()
}
#[test]
fn test_proof_cast_roundtrip() {
let proof = arb_proof(123);
let ark_proof: ArkProof<Bn<Parameters>> = proof.into();
let result: Proof = ark_proof.into();
assert_eq!(proof, result);
}
#[test]
fn test_proof_serialize() {
let proof = arb_proof(456);
let json = serde_json::to_value(&proof).unwrap();
assert_eq!(
json,
json!([
[
"0x249ae469686987ee9368da60dd177a8c42891c02f5760e955e590c79d55cfab2",
"0xf22e25870f49388459d388afb24dcf6ec11bb2d4def1e2ec26d6e42f373aad8"
],
[
[
"0x17bd25dbd7436c30ea5b8a3a47aadf11ed646c4b25cc14a84ff8cbe0252ff1f8",
"0x1c140668c56688367416534d57b4a14e5a825efdd5e121a6a2099f6dc4cd277b"
],
[
"0x26a8524759d969ea0682a092cf7a551697d81962d6c998f543f81e52d83e05e1",
"0x273eb3f796fd1807b9df9c6d769d983e3dabdc61677b75d48bb7691303b2c8dd"
]
],
[
"0x62715c53a0eb4c46dbb5f73f1fd7449b9c63d37c1ece65debc39b472065a90f",
"0x114f7becc66f1cd7a8b01c89db8233622372fc0b6fc037c4313bca41e2377fd9"
]
])
);
}
}

View File

@@ -1,6 +1,6 @@
[package]
name = "zerokit_utils"
version = "0.4.1"
version = "0.4.3"
edition = "2021"
license = "MIT OR Apache-2.0"
description = "Various utilities for Zerokit"

View File

@@ -1,5 +1,6 @@
use criterion::{criterion_group, criterion_main, Criterion};
use hex_literal::hex;
use std::{fmt::Display, str::FromStr};
use tiny_keccak::{Hasher as _, Keccak};
use zerokit_utils::{
FullMerkleConfig, FullMerkleTree, Hasher, OptimalMerkleConfig, OptimalMerkleTree,
@@ -9,34 +10,53 @@ use zerokit_utils::{
#[derive(Clone, Copy, Eq, PartialEq)]
struct Keccak256;
#[derive(Clone, Copy, Eq, PartialEq, Debug, Default)]
struct TestFr([u8; 32]);
impl Hasher for Keccak256 {
type Fr = [u8; 32];
type Fr = TestFr;
fn default_leaf() -> Self::Fr {
[0; 32]
TestFr([0; 32])
}
fn hash(inputs: &[Self::Fr]) -> Self::Fr {
let mut output = [0; 32];
let mut hasher = Keccak::v256();
for element in inputs {
hasher.update(element);
hasher.update(element.0.as_slice());
}
hasher.finalize(&mut output);
output
TestFr(output)
}
}
impl Display for TestFr {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", String::from_utf8_lossy(self.0.as_slice()))
}
}
impl FromStr for TestFr {
type Err = std::string::FromUtf8Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(TestFr(s.as_bytes().try_into().unwrap()))
}
}
pub fn optimal_merkle_tree_benchmark(c: &mut Criterion) {
let mut tree =
OptimalMerkleTree::<Keccak256>::new(2, [0; 32], OptimalMerkleConfig::default()).unwrap();
OptimalMerkleTree::<Keccak256>::new(2, TestFr([0; 32]), OptimalMerkleConfig::default())
.unwrap();
let leaves = [
hex!("0000000000000000000000000000000000000000000000000000000000000001"),
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
];
]
.map(|x| TestFr(x));
c.bench_function("OptimalMerkleTree::set", |b| {
b.iter(|| {
@@ -71,14 +91,15 @@ pub fn optimal_merkle_tree_benchmark(c: &mut Criterion) {
pub fn full_merkle_tree_benchmark(c: &mut Criterion) {
let mut tree =
FullMerkleTree::<Keccak256>::new(2, [0; 32], FullMerkleConfig::default()).unwrap();
FullMerkleTree::<Keccak256>::new(2, TestFr([0; 32]), FullMerkleConfig::default()).unwrap();
let leaves = [
hex!("0000000000000000000000000000000000000000000000000000000000000001"),
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
];
]
.map(|x| TestFr(x));
c.bench_function("FullMerkleTree::set", |b| {
b.iter(|| {

View File

@@ -21,7 +21,7 @@ use color_eyre::Result;
/// and the hash function used to initialize a Merkle Tree implementation
pub trait Hasher {
/// Type of the leaf and tree node
type Fr: Clone + Copy + Eq;
type Fr: Clone + Copy + Eq + Default + std::fmt::Debug + std::fmt::Display + FromStr;
/// Returns the default tree leaf
fn default_leaf() -> Self::Fr;
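The `Fr` associated type now also requires `Default`, `Debug`, `Display` and `FromStr` (the QoL traits from #238, zerokit_utils 0.4.3). A sketch of a downstream `Hasher` implementation satisfying the new bounds, using a placeholder XOR "hash" and the same `Display`/`FromStr` simplifications as the in-repo `TestFr`:

```rust
use std::fmt;
use std::str::FromStr;

use zerokit_utils::Hasher;

/// 32-byte newtype in the style of the `TestFr` wrapper from the updated tests.
#[derive(Clone, Copy, Eq, PartialEq, Debug, Default)]
struct MyFr([u8; 32]);

impl fmt::Display for MyFr {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", String::from_utf8_lossy(self.0.as_slice()))
    }
}

impl FromStr for MyFr {
    type Err = std::string::FromUtf8Error;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // Same simplification as TestFr: the string must be exactly 32 bytes.
        Ok(MyFr(s.as_bytes().try_into().unwrap()))
    }
}

/// Placeholder hasher: XOR-folds its inputs instead of a real hash function,
/// just to keep the sketch dependency-free.
struct XorHasher;

impl Hasher for XorHasher {
    type Fr = MyFr;

    fn default_leaf() -> Self::Fr {
        MyFr([0; 32])
    }

    fn hash(inputs: &[Self::Fr]) -> Self::Fr {
        let mut out = [0u8; 32];
        for input in inputs {
            for (o, i) in out.iter_mut().zip(input.0.iter()) {
                *o ^= i;
            }
        }
        MyFr(out)
    }
}

fn main() {
    let node = XorHasher::hash(&[XorHasher::default_leaf(), MyFr([1; 32])]);
    // The new bounds make tree nodes printable and parseable out of the box.
    println!("{:?}", node);
    let parsed: MyFr = "a".repeat(32).parse().unwrap();
    println!("{}", parsed);
}
```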

View File

@@ -1,6 +1,8 @@
// Tests adapted from https://github.com/worldcoin/semaphore-rs/blob/d462a4372f1fd9c27610f2acfe4841fab1d396aa/src/merkle_tree.rs
#[cfg(test)]
mod test {
pub mod test {
use std::{fmt::Display, str::FromStr};
use hex_literal::hex;
use tiny_keccak::{Hasher as _, Keccak};
use zerokit_utils::{
@@ -10,21 +12,38 @@ mod test {
#[derive(Clone, Copy, Eq, PartialEq)]
struct Keccak256;
#[derive(Clone, Copy, Eq, PartialEq, Debug, Default)]
struct TestFr([u8; 32]);
impl Hasher for Keccak256 {
type Fr = [u8; 32];
type Fr = TestFr;
fn default_leaf() -> Self::Fr {
[0; 32]
TestFr([0; 32])
}
fn hash(inputs: &[Self::Fr]) -> Self::Fr {
let mut output = [0; 32];
let mut hasher = Keccak::v256();
for element in inputs {
hasher.update(element);
hasher.update(element.0.as_slice());
}
hasher.finalize(&mut output);
output
TestFr(output)
}
}
impl Display for TestFr {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", String::from_utf8_lossy(self.0.as_slice()))
}
}
impl FromStr for TestFr {
type Err = std::string::FromUtf8Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(TestFr(s.as_bytes().try_into().unwrap()))
}
}
@@ -35,20 +54,24 @@ mod test {
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
];
]
.map(|x| TestFr(x));
let default_tree_root =
hex!("b4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30");
let default_tree_root = TestFr(hex!(
"b4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30"
));
let roots = [
hex!("c1ba1812ff680ce84c1d5b4f1087eeb08147a4d510f3496b2849df3a73f5af95"),
hex!("893760ec5b5bee236f29e85aef64f17139c3c1b7ff24ce64eb6315fca0f2485b"),
hex!("222ff5e0b5877792c2bc1670e2ccd0c2c97cd7bb1672a57d598db05092d3d72c"),
hex!("a9bb8c3f1f12e9aa903a50c47f314b57610a3ab32f2d463293f58836def38d36"),
];
]
.map(|x| TestFr(x));
let mut tree =
FullMerkleTree::<Keccak256>::new(2, [0; 32], FullMerkleConfig::default()).unwrap();
FullMerkleTree::<Keccak256>::new(2, TestFr([0; 32]), FullMerkleConfig::default())
.unwrap();
assert_eq!(tree.root(), default_tree_root);
for i in 0..leaves.len() {
tree.set(i, leaves[i]).unwrap();
@@ -56,7 +79,7 @@ mod test {
}
let mut tree =
OptimalMerkleTree::<Keccak256>::new(2, [0; 32], OptimalMerkleConfig::default())
OptimalMerkleTree::<Keccak256>::new(2, TestFr([0; 32]), OptimalMerkleConfig::default())
.unwrap();
assert_eq!(tree.root(), default_tree_root);
for i in 0..leaves.len() {
@@ -72,11 +95,13 @@ mod test {
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
];
]
.map(|x| TestFr(x));
// We thest the FullMerkleTree implementation
let mut tree =
FullMerkleTree::<Keccak256>::new(2, [0; 32], FullMerkleConfig::default()).unwrap();
FullMerkleTree::<Keccak256>::new(2, TestFr([0; 32]), FullMerkleConfig::default())
.unwrap();
for i in 0..leaves.len() {
// We set the leaves
tree.set(i, leaves[i]).unwrap();
@@ -101,7 +126,7 @@ mod test {
// We test the OptimalMerkleTree implementation
let mut tree =
OptimalMerkleTree::<Keccak256>::new(2, [0; 32], OptimalMerkleConfig::default())
OptimalMerkleTree::<Keccak256>::new(2, TestFr([0; 32]), OptimalMerkleConfig::default())
.unwrap();
for i in 0..leaves.len() {
// We set the leaves
@@ -133,10 +158,11 @@ mod test {
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
];
]
.map(|x| TestFr(x));
let mut tree =
OptimalMerkleTree::<Keccak256>::new(2, [0; 32], OptimalMerkleConfig::default())
OptimalMerkleTree::<Keccak256>::new(2, TestFr([0; 32]), OptimalMerkleConfig::default())
.unwrap();
// We set the leaves
@@ -145,7 +171,8 @@ mod test {
let new_leaves = [
hex!("0000000000000000000000000000000000000000000000000000000000000005"),
hex!("0000000000000000000000000000000000000000000000000000000000000006"),
];
]
.map(|x| TestFr(x));
let to_delete_indices: [usize; 2] = [0, 1];