Mirror of https://github.com/vacp2p/zerokit.git (synced 2026-01-10 14:18:18 -05:00)

Compare commits: zerokit_ut...v0.6.1 (10 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 88f8a80faa | |
| | 0cce9f92d9 | |
| | b9d27039c3 | |
| | 49e2517e15 | |
| | 6621efd0bb | |
| | 4a74ff0d6c | |
| | fc823e7187 | |
| | 0d5642492a | |
| | c4579e1917 | |
| | e6238fd722 | |
.github/workflows/ci.yml (vendored, 72 lines changed)
@@ -21,11 +21,11 @@ on:
|
||||
name: Tests
|
||||
|
||||
jobs:
|
||||
test:
|
||||
utils-test:
|
||||
strategy:
|
||||
matrix:
|
||||
platform: [ubuntu-latest, macos-latest]
|
||||
crate: [rln, utils]
|
||||
platform: [ ubuntu-latest, macos-latest ]
|
||||
crate: [ utils ]
|
||||
runs-on: ${{ matrix.platform }}
|
||||
timeout-minutes: 60
|
||||
|
||||
@@ -47,10 +47,37 @@ jobs:
|
||||
cargo make test --release
|
||||
working-directory: ${{ matrix.crate }}
|
||||
|
||||
rln-test:
|
||||
strategy:
|
||||
matrix:
|
||||
platform: [ ubuntu-latest, macos-latest ]
|
||||
crate: [ rln ]
|
||||
feature: [ "default", "arkzkey", "stateless" ]
|
||||
runs-on: ${{ matrix.platform }}
|
||||
timeout-minutes: 60
|
||||
|
||||
name: test - ${{ matrix.crate }} - ${{ matrix.platform }} - ${{ matrix.feature }}
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v3
|
||||
- name: Install stable toolchain
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
profile: minimal
|
||||
toolchain: stable
|
||||
override: true
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: Install dependencies
|
||||
run: make installdeps
|
||||
- name: cargo-make test
|
||||
run: |
|
||||
cargo make test_${{ matrix.feature }} --release
|
||||
working-directory: ${{ matrix.crate }}
|
||||
|
||||
rln-wasm:
|
||||
strategy:
|
||||
matrix:
|
||||
platform: [ubuntu-latest, macos-latest]
|
||||
platform: [ ubuntu-latest, macos-latest ]
|
||||
runs-on: ${{ matrix.platform }}
|
||||
timeout-minutes: 60
|
||||
|
||||
@@ -61,7 +88,8 @@ jobs:
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
profile: minimal
|
||||
toolchain: stable
|
||||
# TODO: Update to stable once wasmer supports it
|
||||
toolchain: 1.82.0
|
||||
override: true
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: Install Dependencies
|
||||
@@ -77,8 +105,8 @@ jobs:
|
||||
strategy:
|
||||
matrix:
|
||||
# we run lint tests only on ubuntu
|
||||
platform: [ubuntu-latest]
|
||||
crate: [rln, utils]
|
||||
platform: [ ubuntu-latest ]
|
||||
crate: [ rln, utils ]
|
||||
runs-on: ${{ matrix.platform }}
|
||||
timeout-minutes: 60
|
||||
|
||||
@@ -108,14 +136,15 @@ jobs:
|
||||
# We skip clippy on rln-wasm, since wasm target is managed by cargo make
|
||||
# Currently not treating warnings as error, too noisy
|
||||
# -- -D warnings
|
||||
benchmark:
|
||||
|
||||
benchmark-utils:
|
||||
# run only in pull requests
|
||||
if: github.event_name == 'pull_request'
|
||||
strategy:
|
||||
matrix:
|
||||
# we run benchmark tests only on ubuntu
|
||||
platform: [ubuntu-latest]
|
||||
crate: [rln, utils]
|
||||
platform: [ ubuntu-latest ]
|
||||
crate: [ utils ]
|
||||
runs-on: ${{ matrix.platform }}
|
||||
timeout-minutes: 60
|
||||
|
||||
@@ -128,3 +157,26 @@ jobs:
|
||||
with:
|
||||
branchName: ${{ github.base_ref }}
|
||||
cwd: ${{ matrix.crate }}
|
||||
|
||||
benchmark-rln:
|
||||
# run only in pull requests
|
||||
if: github.event_name == 'pull_request'
|
||||
strategy:
|
||||
matrix:
|
||||
# we run benchmark tests only on ubuntu
|
||||
platform: [ ubuntu-latest ]
|
||||
crate: [ rln ]
|
||||
feature: [ "default", "arkzkey" ]
|
||||
runs-on: ${{ matrix.platform }}
|
||||
timeout-minutes: 60
|
||||
|
||||
name: benchmark - ${{ matrix.platform }} - ${{ matrix.crate }} - ${{ matrix.feature }}
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v3
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- uses: boa-dev/criterion-compare-action@v3
|
||||
with:
|
||||
branchName: ${{ github.base_ref }}
|
||||
cwd: ${{ matrix.crate }}
|
||||
features: ${{ matrix.feature }}
|
||||
.github/workflows/nightly-release.yml (vendored, 29 lines changed)
@@ -8,11 +8,14 @@ jobs:
|
||||
linux:
|
||||
strategy:
|
||||
matrix:
|
||||
feature: ["default", "arkzkey"]
|
||||
feature: [ "default", "arkzkey", "stateless" ]
|
||||
target:
|
||||
- x86_64-unknown-linux-gnu
|
||||
- aarch64-unknown-linux-gnu
|
||||
- i686-unknown-linux-gnu
|
||||
include:
|
||||
- feature: stateless
|
||||
cargo_args: --exclude rln-cli
|
||||
name: Linux build
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
@@ -30,13 +33,13 @@ jobs:
|
||||
run: make installdeps
|
||||
- name: cross build
|
||||
run: |
|
||||
cross build --release --target ${{ matrix.target }} --features ${{ matrix.feature }} --workspace --exclude rln-wasm
|
||||
cross build --release --target ${{ matrix.target }} --features ${{ matrix.feature }} --workspace --exclude rln-wasm ${{ matrix.cargo_args }}
|
||||
mkdir release
|
||||
cp target/${{ matrix.target }}/release/librln* release/
|
||||
tar -czvf ${{ matrix.target }}-${{ matrix.feature }}-rln.tar.gz release/
|
||||
|
||||
- name: Upload archive artifact
|
||||
uses: actions/upload-artifact@v2
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.target }}-${{ matrix.feature }}-archive
|
||||
path: ${{ matrix.target }}-${{ matrix.feature }}-rln.tar.gz
|
||||
@@ -47,10 +50,13 @@ jobs:
|
||||
runs-on: macos-latest
|
||||
strategy:
|
||||
matrix:
|
||||
feature: ["default", "arkzkey"]
|
||||
feature: [ "default", "arkzkey", "stateless" ]
|
||||
target:
|
||||
- x86_64-apple-darwin
|
||||
- aarch64-apple-darwin
|
||||
include:
|
||||
- feature: stateless
|
||||
cargo_args: --exclude rln-cli
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v3
|
||||
@@ -66,13 +72,13 @@ jobs:
|
||||
run: make installdeps
|
||||
- name: cross build
|
||||
run: |
|
||||
cross build --release --target ${{ matrix.target }} --features ${{ matrix.feature }} --workspace --exclude rln-wasm
|
||||
cross build --release --target ${{ matrix.target }} --features ${{ matrix.feature }} --workspace --exclude rln-wasm ${{ matrix.cargo_args }}
|
||||
mkdir release
|
||||
cp target/${{ matrix.target }}/release/librln* release/
|
||||
tar -czvf ${{ matrix.target }}-${{ matrix.feature }}-rln.tar.gz release/
|
||||
|
||||
- name: Upload archive artifact
|
||||
uses: actions/upload-artifact@v2
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.target }}-${{ matrix.feature }}-archive
|
||||
path: ${{ matrix.target }}-${{ matrix.feature }}-rln.tar.gz
|
||||
@@ -88,7 +94,8 @@ jobs:
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
profile: minimal
|
||||
toolchain: stable
|
||||
# TODO: Update to stable once wasmer supports it
|
||||
toolchain: 1.82.0
|
||||
override: true
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: Install dependencies
|
||||
@@ -104,7 +111,7 @@ jobs:
|
||||
working-directory: rln-wasm
|
||||
|
||||
- name: Upload archive artifact
|
||||
uses: actions/upload-artifact@v2
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: browser-rln-wasm-archive
|
||||
path: rln-wasm/browser-rln-wasm.tar.gz
|
||||
@@ -112,7 +119,7 @@ jobs:
|
||||
|
||||
prepare-prerelease:
|
||||
name: Prepare pre-release
|
||||
needs: [linux, macos, browser-rln-wasm]
|
||||
needs: [ linux, macos, browser-rln-wasm ]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
@@ -120,7 +127,7 @@ jobs:
|
||||
with:
|
||||
ref: master
|
||||
- name: Download artifacts
|
||||
uses: actions/download-artifact@v2
|
||||
uses: actions/download-artifact@v4
|
||||
|
||||
- name: Delete tag
|
||||
uses: dev-drprasad/delete-tag-and-release@v0.2.1
|
||||
@@ -143,7 +150,7 @@ jobs:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Delete artifacts
|
||||
uses: geekyeggo/delete-artifact@v1
|
||||
uses: geekyeggo/delete-artifact@v5
|
||||
with:
|
||||
failOnError: false
|
||||
name: |
|
||||
|
||||
Cargo.lock (generated, 34 lines changed)
@@ -1,6 +1,6 @@
|
||||
# This file is automatically @generated by Cargo.
|
||||
# It is not intended for manual editing.
|
||||
version = 3
|
||||
version = 4
|
||||
|
||||
[[package]]
|
||||
name = "addr2line"
|
||||
@@ -71,9 +71,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "anstyle"
|
||||
version = "1.0.0"
|
||||
version = "1.0.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "41ed9a86bf92ae6580e0a31281f65a1b1d867c0cc68d5346e2ae128dddfa6a7d"
|
||||
checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1"
|
||||
|
||||
[[package]]
|
||||
name = "anstyle-parse"
|
||||
@@ -993,6 +993,15 @@ dependencies = [
|
||||
"subtle",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "document-features"
|
||||
version = "0.2.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cb6969eaabd2421f8a2775cfd2471a2b634372b4a25d41e3bd647b79912850a0"
|
||||
dependencies = [
|
||||
"litrs",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ecdsa"
|
||||
version = "0.16.7"
|
||||
@@ -1611,6 +1620,12 @@ version = "0.4.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "da2479e8c062e40bf0066ffa0bc823de0a9368974af99c9f6df941d2c231e03f"
|
||||
|
||||
[[package]]
|
||||
name = "litrs"
|
||||
version = "0.4.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b4ce301924b7887e9d637144fdade93f9dfff9b60981d4ac161db09720d39aa5"
|
||||
|
||||
[[package]]
|
||||
name = "lock_api"
|
||||
version = "0.4.9"
|
||||
@@ -1623,12 +1638,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "log"
|
||||
version = "0.4.17"
|
||||
version = "0.4.22"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
]
|
||||
checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
|
||||
|
||||
[[package]]
|
||||
name = "loupe"
|
||||
@@ -2270,7 +2282,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "rln"
|
||||
version = "0.5.1"
|
||||
version = "0.6.1"
|
||||
dependencies = [
|
||||
"ark-bn254",
|
||||
"ark-circom",
|
||||
@@ -2284,6 +2296,7 @@ dependencies = [
|
||||
"cfg-if",
|
||||
"color-eyre",
|
||||
"criterion 0.4.0",
|
||||
"document-features",
|
||||
"lazy_static 1.4.0",
|
||||
"num-bigint",
|
||||
"num-traits",
|
||||
@@ -2313,7 +2326,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "rln-wasm"
|
||||
version = "0.0.13"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"console_error_panic_hook",
|
||||
"getrandom",
|
||||
@@ -2327,6 +2340,7 @@ dependencies = [
|
||||
"wasm-bindgen-test",
|
||||
"wasmer",
|
||||
"web-sys",
|
||||
"zerokit_utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
||||
@@ -4,7 +4,7 @@ version = "0.3.0"
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
rln = { path = "../rln" }
|
||||
rln = { path = "../rln", default-features = true, features = ["arkzkey"] }
|
||||
clap = { version = "4.2.7", features = ["cargo", "derive", "env"]}
|
||||
clap_derive = { version = "=4.2.0" }
|
||||
color-eyre = "=0.6.2"
|
||||
|
||||
@@ -1,11 +1,8 @@
|
||||
[package]
|
||||
name = "rln-wasm"
|
||||
version = "0.0.13"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
license = "MIT or Apache2"
|
||||
autobenches = false
|
||||
autotests = false
|
||||
autobins = false
|
||||
|
||||
[lib]
|
||||
crate-type = ["cdylib", "rlib"]
|
||||
@@ -14,10 +11,16 @@ crate-type = ["cdylib", "rlib"]
|
||||
default = ["console_error_panic_hook"]
|
||||
|
||||
[dependencies]
|
||||
rln = { path = "../rln", default-features = false, features = ["wasm"] }
|
||||
num-bigint = { version = "0.4", default-features = false, features = ["rand", "serde"] }
|
||||
rln = { path = "../rln", default-features = false, features = [
|
||||
"wasm",
|
||||
"stateless",
|
||||
], version = "=0.6.1"}
|
||||
num-bigint = { version = "0.4", default-features = false, features = [
|
||||
"rand",
|
||||
"serde",
|
||||
] }
|
||||
wasmer = { version = "2.3", default-features = false, features = ["js", "std"] }
|
||||
web-sys = {version = "0.3", features=["console"]}
|
||||
web-sys = { version = "0.3", features = ["console"] }
|
||||
getrandom = { version = "0.2.7", default-features = false, features = ["js"] }
|
||||
wasm-bindgen = "0.2.63"
|
||||
serde-wasm-bindgen = "0.4"
|
||||
@@ -29,6 +32,7 @@ serde_json = "1.0.85"
|
||||
# all the `std::fmt` and `std::panicking` infrastructure, so isn't great for
|
||||
# code size when deploying.
|
||||
console_error_panic_hook = { version = "0.1.7", optional = true }
|
||||
zerokit_utils = { version = "0.5.1", path = "../utils" }
|
||||
|
||||
[dev-dependencies]
|
||||
wasm-bindgen-test = "0.3.13"
|
||||
|
||||
@@ -7,11 +7,7 @@ script = "sed -i.bak 's/rln-wasm/zerokit-rln-wasm/g' pkg/package.json && rm pkg/
|
||||
|
||||
[tasks.build]
|
||||
clear = true
|
||||
dependencies = [
|
||||
"pack-build",
|
||||
"pack-rename",
|
||||
"post-build"
|
||||
]
|
||||
dependencies = ["pack-build", "pack-rename", "post-build"]
|
||||
|
||||
[tasks.post-build]
|
||||
command = "wasm-strip"
|
||||
|
||||
@@ -182,96 +182,27 @@ impl<'a> ProcessArg for &'a [u8] {
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[wasm_bindgen(js_name = newRLN)]
|
||||
pub fn wasm_new(
|
||||
tree_height: usize,
|
||||
zkey: Uint8Array,
|
||||
vk: Uint8Array,
|
||||
) -> Result<*mut RLNWrapper, String> {
|
||||
let instance = RLN::new_with_params(tree_height, zkey.to_vec(), vk.to_vec())
|
||||
let instance = RLN::new_with_params(zkey.to_vec(), vk.to_vec())
|
||||
.map_err(|err| format!("{:#?}", err))?;
|
||||
let wrapper = RLNWrapper { instance };
|
||||
Ok(Box::into_raw(Box::new(wrapper)))
|
||||
}
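A minimal usage sketch (not part of the diff): with the stateless refactor, `newRLN` takes only the proving-key and verification-key bytes and no longer needs a `tree_height`. The helper below assumes it lives in the same crate, so `wasm_new` and `RLNWrapper` from this file are in scope.

```rust
use js_sys::Uint8Array;

// Build a stateless wasm RLN instance from raw key bytes; the Merkle tree is now
// maintained outside the instance (e.g. by the JS caller).
fn make_instance(zkey_bytes: &[u8], vk_bytes: &[u8]) -> Result<*mut RLNWrapper, String> {
    let zkey = Uint8Array::from(zkey_bytes);
    let vk = Uint8Array::from(vk_bytes);
    wasm_new(zkey, vk)
}
```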
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[wasm_bindgen(js_name = getSerializedRLNWitness)]
|
||||
pub fn wasm_get_serialized_rln_witness(
|
||||
ctx: *mut RLNWrapper,
|
||||
input: Uint8Array,
|
||||
) -> Result<Uint8Array, String> {
|
||||
let rln_witness = call!(ctx, get_serialized_rln_witness, &input.to_vec()[..])
|
||||
.map_err(|err| format!("{:#?}", err))?;
|
||||
Ok(Uint8Array::from(&rln_witness[..]))
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[wasm_bindgen(js_name = insertMember)]
|
||||
pub fn wasm_set_next_leaf(ctx: *mut RLNWrapper, input: Uint8Array) -> Result<(), String> {
|
||||
call_with_error_msg!(
|
||||
ctx,
|
||||
set_next_leaf,
|
||||
"could not insert member into merkle tree".to_string(),
|
||||
&input.to_vec()[..]
|
||||
)
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[wasm_bindgen(js_name = setLeavesFrom)]
|
||||
pub fn wasm_set_leaves_from(
|
||||
ctx: *mut RLNWrapper,
|
||||
index: usize,
|
||||
input: Uint8Array,
|
||||
) -> Result<(), String> {
|
||||
call_with_error_msg!(
|
||||
ctx,
|
||||
set_leaves_from,
|
||||
"could not set multiple leaves".to_string(),
|
||||
index,
|
||||
&*input.to_vec()
|
||||
)
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[wasm_bindgen(js_name = deleteLeaf)]
|
||||
pub fn wasm_delete_leaf(ctx: *mut RLNWrapper, index: usize) -> Result<(), String> {
|
||||
call_with_error_msg!(ctx, delete_leaf, "could not delete leaf".to_string(), index)
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[wasm_bindgen(js_name = setMetadata)]
|
||||
pub fn wasm_set_metadata(ctx: *mut RLNWrapper, input: Uint8Array) -> Result<(), String> {
|
||||
call_with_error_msg!(
|
||||
ctx,
|
||||
set_metadata,
|
||||
"could not set metadata".to_string(),
|
||||
&*input.to_vec()
|
||||
)
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[wasm_bindgen(js_name = getMetadata)]
|
||||
pub fn wasm_get_metadata(ctx: *mut RLNWrapper) -> Result<Uint8Array, String> {
|
||||
call_with_output_and_error_msg!(ctx, get_metadata, "could not get metadata".to_string())
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[wasm_bindgen(js_name = initTreeWithLeaves)]
|
||||
pub fn wasm_init_tree_with_leaves(ctx: *mut RLNWrapper, input: Uint8Array) -> Result<(), String> {
|
||||
call_with_error_msg!(
|
||||
ctx,
|
||||
init_tree_with_leaves,
|
||||
"could not init merkle tree".to_string(),
|
||||
&*input.to_vec()
|
||||
)
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[wasm_bindgen(js_name = RLNWitnessToJson)]
|
||||
pub fn rln_witness_to_json(
|
||||
ctx: *mut RLNWrapper,
|
||||
serialized_witness: Uint8Array,
|
||||
) -> Result<Object, String> {
|
||||
let inputs = call!(ctx, get_rln_witness_json, &serialized_witness.to_vec()[..])
|
||||
.map_err(|err| err.to_string())?;
|
||||
let inputs = call!(
|
||||
ctx,
|
||||
get_rln_witness_bigint_json,
|
||||
&serialized_witness.to_vec()[..]
|
||||
)
|
||||
.map_err(|err| err.to_string())?;
|
||||
let js_value = serde_wasm_bindgen::to_value(&inputs).map_err(|err| err.to_string())?;
|
||||
Object::from_entries(&js_value).map_err(|err| format!("{:#?}", err))
|
||||
}
|
||||
@@ -358,17 +289,6 @@ pub fn wasm_recover_id_secret(
|
||||
)
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[wasm_bindgen(js_name = verifyRLNProof)]
|
||||
pub fn wasm_verify_rln_proof(ctx: *const RLNWrapper, proof: Uint8Array) -> Result<bool, String> {
|
||||
call_bool_method_with_error_msg!(
|
||||
ctx,
|
||||
verify_rln_proof,
|
||||
"error while verifying rln proof".to_string(),
|
||||
&proof.to_vec()[..]
|
||||
)
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[wasm_bindgen(js_name = verifyWithRoots)]
|
||||
pub fn wasm_verify_with_roots(
|
||||
@@ -385,12 +305,6 @@ pub fn wasm_verify_with_roots(
|
||||
)
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[wasm_bindgen(js_name = getRoot)]
|
||||
pub fn wasm_get_root(ctx: *const RLNWrapper) -> Result<Uint8Array, String> {
|
||||
call_with_output_and_error_msg!(ctx, get_root, "could not obtain root")
|
||||
}
|
||||
|
||||
#[wasm_bindgen(js_name = hash)]
|
||||
pub fn wasm_hash(input: Uint8Array) -> Result<Uint8Array, String> {
|
||||
fn_call_with_output_and_error_msg!(hash, "could not generate hash", &input.to_vec()[..])
|
||||
|
||||
@@ -7,8 +7,13 @@ mod tests {
|
||||
use rln::hashers::{hash_to_field, poseidon_hash};
|
||||
use rln::utils::{bytes_le_to_fr, fr_to_bytes_le, normalize_usize};
|
||||
use rln_wasm::*;
|
||||
use rln::poseidon_tree::PoseidonTree;
|
||||
use rln::utils::vec_fr_to_bytes_le;
|
||||
use wasm_bindgen::{prelude::*, JsValue};
|
||||
use wasm_bindgen_test::wasm_bindgen_test;
|
||||
use zerokit_utils::merkle_tree::merkle_tree::ZerokitMerkleTree;
|
||||
use zerokit_utils::ZerokitMerkleProof;
|
||||
use rln::utils::vec_u8_to_bytes_le;
|
||||
|
||||
#[wasm_bindgen(module = "src/utils.js")]
|
||||
extern "C" {
|
||||
@@ -30,7 +35,9 @@ mod tests {
|
||||
let vk = read_file(&vk_path).unwrap();
|
||||
|
||||
// Creating an instance of RLN
|
||||
let rln_instance = wasm_new(tree_height, zkey, vk).unwrap();
|
||||
let rln_instance = wasm_new(zkey, vk).unwrap();
|
||||
|
||||
let mut tree = PoseidonTree::default(TEST_TREE_HEIGHT).unwrap();
|
||||
|
||||
// Creating membership key
|
||||
let mem_keys = wasm_key_gen(rln_instance).unwrap();
|
||||
@@ -40,7 +47,7 @@ mod tests {
|
||||
// Prepare the message
|
||||
let signal = b"Hello World";
|
||||
|
||||
let identity_index: usize = 0;
|
||||
let identity_index = tree.leaves_set();
|
||||
// Setting up the epoch and rln_identifier
|
||||
let epoch = hash_to_field(b"test-epoch");
|
||||
let rln_identifier = hash_to_field(b"test-rln-identifier");
|
||||
@@ -53,31 +60,28 @@ mod tests {
|
||||
|
||||
let (id_commitment_fr, _) = bytes_le_to_fr(&id_commitment.to_vec()[..]);
|
||||
let rate_commitment = poseidon_hash(&[id_commitment_fr, user_message_limit]);
|
||||
tree.update_next(rate_commitment).unwrap();
|
||||
|
||||
let x = hash_to_field(signal);
|
||||
let merkle_proof = tree.proof(identity_index).expect("proof should exist");
|
||||
let path_elements = merkle_proof.get_path_elements();
|
||||
let identity_path_index = merkle_proof.get_path_index();
|
||||
|
||||
// Insert PK
|
||||
wasm_set_next_leaf(
|
||||
rln_instance,
|
||||
Uint8Array::from(fr_to_bytes_le(&rate_commitment).as_slice()),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
// Serializing the message
|
||||
let mut serialized_vec: Vec<u8> = Vec::new();
|
||||
serialized_vec.append(&mut id_key.to_vec());
|
||||
serialized_vec.append(&mut normalize_usize(identity_index));
|
||||
serialized_vec.append(&mut fr_to_bytes_le(&user_message_limit).to_vec());
|
||||
serialized_vec.append(&mut message_id.to_vec());
|
||||
serialized_vec.append(&mut vec_fr_to_bytes_le(&path_elements).unwrap());
|
||||
serialized_vec.append(&mut vec_u8_to_bytes_le(&identity_path_index).unwrap());
|
||||
serialized_vec.append(&mut fr_to_bytes_le(&x));
|
||||
serialized_vec.append(&mut external_nullifier.to_vec());
|
||||
serialized_vec.append(&mut normalize_usize(signal.len()));
|
||||
serialized_vec.append(&mut signal.to_vec());
|
||||
let serialized_message = Uint8Array::from(&serialized_vec[..]);
|
||||
|
||||
let serialized_rln_witness =
|
||||
wasm_get_serialized_rln_witness(rln_instance, serialized_message).unwrap();
|
||||
|
||||
// Obtaining inputs that should be sent to circom witness calculator
|
||||
let json_inputs =
|
||||
rln_witness_to_json(rln_instance, serialized_rln_witness.clone()).unwrap();
|
||||
rln_witness_to_json(rln_instance, serialized_message.clone()).unwrap();
|
||||
|
||||
// Calculating witness with JS
|
||||
// (Using a JSON since wasm_bindgen does not like Result<Vec<JsBigInt>,JsValue>)
|
||||
@@ -97,7 +101,7 @@ mod tests {
|
||||
let proof = generate_rln_proof_with_witness(
|
||||
rln_instance,
|
||||
calculated_witness.into(),
|
||||
serialized_rln_witness,
|
||||
serialized_message,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
@@ -107,42 +111,13 @@ mod tests {
|
||||
proof_bytes.append(&mut signal.to_vec());
|
||||
let proof_with_signal = Uint8Array::from(&proof_bytes[..]);
|
||||
|
||||
// Validate Proof
|
||||
let is_proof_valid = wasm_verify_rln_proof(rln_instance, proof_with_signal);
|
||||
|
||||
assert!(
|
||||
is_proof_valid.unwrap(),
|
||||
"validating proof generated with wasm failed"
|
||||
);
|
||||
|
||||
// Validating Proof with Roots
|
||||
let root = wasm_get_root(rln_instance).unwrap();
|
||||
let roots = Uint8Array::from(&root.to_vec()[..]);
|
||||
let root = tree.root();
|
||||
let root_le = fr_to_bytes_le(&root);
|
||||
let roots = Uint8Array::from(&root_le[..]);
|
||||
let proof_with_signal = Uint8Array::from(&proof_bytes[..]);
|
||||
|
||||
let is_proof_valid = wasm_verify_with_roots(rln_instance, proof_with_signal, roots);
|
||||
assert!(is_proof_valid.unwrap(), "verifying proof with roots failed");
|
||||
}
|
||||
|
||||
#[wasm_bindgen_test]
|
||||
fn test_metadata() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let zkey_path = format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/rln_final.zkey");
|
||||
let vk_path =
|
||||
format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/verification_key.arkvkey");
|
||||
let zkey = read_file(&zkey_path).unwrap();
|
||||
let vk = read_file(&vk_path).unwrap();
|
||||
|
||||
// Creating an instance of RLN
|
||||
let rln_instance = wasm_new(tree_height, zkey, vk).unwrap();
|
||||
|
||||
let test_metadata = Uint8Array::new(&JsValue::from_str("test"));
|
||||
// Inserting random metadata
|
||||
wasm_set_metadata(rln_instance, test_metadata.clone()).unwrap();
|
||||
|
||||
// Getting metadata
|
||||
let metadata = wasm_get_metadata(rln_instance).unwrap();
|
||||
|
||||
assert_eq!(metadata.to_vec(), test_metadata.to_vec());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "rln"
|
||||
version = "0.5.1"
|
||||
version = "0.6.1"
|
||||
edition = "2021"
|
||||
license = "MIT OR Apache-2.0"
|
||||
description = "APIs to manage, compute and verify zkSNARK proofs and RLN primitives"
|
||||
@@ -59,6 +59,8 @@ utils = { package = "zerokit_utils", version = "=0.5.1", path = "../utils/", def
|
||||
serde_json = "=1.0.96"
|
||||
serde = { version = "=1.0.163", features = ["derive"] }
|
||||
|
||||
document-features = { version = "=0.2.10", optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
sled = "=0.34.7"
|
||||
criterion = { version = "=0.4.0", features = ["html_reports"] }
|
||||
@@ -75,6 +77,7 @@ parallel = [
|
||||
wasm = ["wasmer/js", "wasmer/std"]
|
||||
fullmerkletree = ["default"]
|
||||
arkzkey = ["ark-zkey"]
|
||||
stateless = []
|
||||
|
||||
# Note: pmtree feature is still experimental
|
||||
pmtree-ft = ["utils/pmtree-ft"]
|
||||
@@ -87,6 +90,11 @@ harness = false
|
||||
name = "circuit_loading_benchmark"
|
||||
harness = false
|
||||
|
||||
[[bench]]
|
||||
name = "circuit_loading_arkzkey_benchmark"
|
||||
harness = false
|
||||
required-features = ["arkzkey"]
|
||||
|
||||
[[bench]]
|
||||
name = "circuit_deser_benchmark"
|
||||
harness = false
|
||||
@@ -94,3 +102,6 @@ harness = false
|
||||
[[bench]]
|
||||
name = "poseidon_tree_benchmark"
|
||||
harness = false
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
all-features = true
|
||||
|
||||
@@ -2,10 +2,18 @@
|
||||
command = "cargo"
|
||||
args = ["build", "--release"]
|
||||
|
||||
[tasks.test]
|
||||
[tasks.test_default]
|
||||
command = "cargo"
|
||||
args = ["test", "--release"]
|
||||
|
||||
[tasks.test_stateless]
|
||||
command = "cargo"
|
||||
args = ["test", "--release", "--features", "stateless"]
|
||||
|
||||
[tasks.test_arkzkey]
|
||||
command = "cargo"
|
||||
args = ["test", "--release", "--features", "arkzkey"]
|
||||
|
||||
[tasks.bench]
|
||||
command = "cargo"
|
||||
args = ["bench"]
|
||||
|
||||
rln/benches/circuit_loading_arkzkey_benchmark.rs (new file, 43 lines)
@@ -0,0 +1,43 @@
|
||||
use criterion::{criterion_group, criterion_main, Criterion};
|
||||
use rln::circuit::{
|
||||
read_arkzkey_from_bytes_compressed, read_arkzkey_from_bytes_uncompressed, ARKZKEY_BYTES,
|
||||
ARKZKEY_BYTES_UNCOMPR,
|
||||
};
|
||||
|
||||
pub fn uncompressed_bench(c: &mut Criterion) {
|
||||
let arkzkey = ARKZKEY_BYTES_UNCOMPR.to_vec();
|
||||
let size = arkzkey.len() as f32;
|
||||
println!(
|
||||
"Size of uncompressed arkzkey: {:.2?} MB",
|
||||
size / 1024.0 / 1024.0
|
||||
);
|
||||
|
||||
c.bench_function("arkzkey::arkzkey_from_raw_uncompressed", |b| {
|
||||
b.iter(|| {
|
||||
let r = read_arkzkey_from_bytes_uncompressed(&arkzkey);
|
||||
assert_eq!(r.is_ok(), true);
|
||||
})
|
||||
});
|
||||
}
|
||||
pub fn compressed_bench(c: &mut Criterion) {
|
||||
let arkzkey = ARKZKEY_BYTES.to_vec();
|
||||
let size = arkzkey.len() as f32;
|
||||
println!(
|
||||
"Size of compressed arkzkey: {:.2?} MB",
|
||||
size / 1024.0 / 1024.0
|
||||
);
|
||||
|
||||
c.bench_function("arkzkey::arkzkey_from_raw_compressed", |b| {
|
||||
b.iter(|| {
|
||||
let r = read_arkzkey_from_bytes_compressed(&arkzkey);
|
||||
assert_eq!(r.is_ok(), true);
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
criterion_group! {
|
||||
name = benches;
|
||||
config = Criterion::default().measurement_time(std::time::Duration::from_secs(250));
|
||||
targets = uncompressed_bench, compressed_bench
|
||||
}
|
||||
criterion_main!(benches);
|
||||
@@ -1,14 +1,17 @@
|
||||
use ark_circom::read_zkey;
|
||||
use criterion::{criterion_group, criterion_main, Criterion};
|
||||
use rln::circuit::{zkey_from_raw, ZKEY_BYTES};
|
||||
use std::io::Cursor;
|
||||
|
||||
// Depending on the key type (enabled by the `--features arkzkey` flag)
|
||||
// the loading time of the `rln_final.zkey` or `rln_final.arkzkey` file is measured
|
||||
pub fn key_load_benchmark(c: &mut Criterion) {
|
||||
let zkey = ZKEY_BYTES.to_vec();
|
||||
pub fn zkey_load_benchmark(c: &mut Criterion) {
|
||||
let zkey = rln::circuit::ZKEY_BYTES.to_vec();
|
||||
let size = zkey.len() as f32;
|
||||
println!("Size of zkey: {:.2?} MB", size / 1024.0 / 1024.0);
|
||||
|
||||
c.bench_function("zkey::zkey_from_raw", |b| {
|
||||
b.iter(|| {
|
||||
let _ = zkey_from_raw(&zkey);
|
||||
let mut reader = Cursor::new(zkey.clone());
|
||||
let r = read_zkey(&mut reader);
|
||||
assert_eq!(r.is_ok(), true);
|
||||
})
|
||||
});
|
||||
}
|
||||
@@ -16,6 +19,6 @@ pub fn key_load_benchmark(c: &mut Criterion) {
|
||||
criterion_group! {
|
||||
name = benches;
|
||||
config = Criterion::default().measurement_time(std::time::Duration::from_secs(250));
|
||||
targets = key_load_benchmark
|
||||
targets = zkey_load_benchmark
|
||||
}
|
||||
criterion_main!(benches);
|
||||
|
||||
rln/resources/tree_height_20/rln_final_uncompr.arkzkey (new binary file, not shown)
@@ -19,13 +19,19 @@ use {
|
||||
};
|
||||
|
||||
#[cfg(feature = "arkzkey")]
|
||||
use ark_zkey::read_arkzkey_from_bytes;
|
||||
use {
|
||||
ark_zkey::{read_arkzkey_from_bytes, SerializableConstraintMatrices, SerializableProvingKey},
|
||||
color_eyre::eyre::WrapErr,
|
||||
};
|
||||
|
||||
#[cfg(not(feature = "arkzkey"))]
|
||||
use {ark_circom::read_zkey, std::io::Cursor};
|
||||
|
||||
#[cfg(feature = "arkzkey")]
|
||||
const ARKZKEY_BYTES: &[u8] = include_bytes!("../resources/tree_height_20/rln_final.arkzkey");
|
||||
pub const ARKZKEY_BYTES: &[u8] = include_bytes!("../resources/tree_height_20/rln_final.arkzkey");
|
||||
#[cfg(feature = "arkzkey")]
|
||||
pub const ARKZKEY_BYTES_UNCOMPR: &[u8] =
|
||||
include_bytes!("../resources/tree_height_20/rln_final_uncompr.arkzkey");
|
||||
|
||||
pub const ZKEY_BYTES: &[u8] = include_bytes!("../resources/tree_height_20/rln_final.zkey");
|
||||
pub const VK_BYTES: &[u8] = include_bytes!("../resources/tree_height_20/verification_key.arkvkey");
|
||||
@@ -37,7 +43,7 @@ lazy_static! {
|
||||
static ref ZKEY: (ProvingKey<Curve>, ConstraintMatrices<Fr>) = {
|
||||
cfg_if! {
|
||||
if #[cfg(feature = "arkzkey")] {
|
||||
read_arkzkey_from_bytes(ARKZKEY_BYTES).expect("Failed to read arkzkey")
|
||||
read_arkzkey_from_bytes_uncompressed(ARKZKEY_BYTES_UNCOMPR).expect("Failed to read arkzkey")
|
||||
} else {
|
||||
let mut reader = Cursor::new(ZKEY_BYTES);
|
||||
read_zkey(&mut reader).expect("Failed to read zkey")
|
||||
@@ -143,3 +149,77 @@ pub fn check_vk_from_zkey(verifying_key: VerifyingKey<Curve>) -> Result<()> {
|
||||
Err(Report::msg("verifying_keys are not equal"))
|
||||
}
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////
|
||||
// Functions from [arkz-key](https://github.com/zkmopro/ark-zkey/blob/main/src/lib.rs#L106)
|
||||
// without print and allow to choose between compressed and uncompressed arkzkey
|
||||
////////////////////////////////////////////////////////
|
||||
#[cfg(feature = "arkzkey")]
|
||||
pub fn read_arkzkey_from_bytes_uncompressed(
|
||||
arkzkey_data: &[u8],
|
||||
) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
|
||||
if arkzkey_data.is_empty() {
|
||||
return Err(Report::msg("No proving key found!"));
|
||||
}
|
||||
|
||||
let mut cursor = std::io::Cursor::new(arkzkey_data);
|
||||
|
||||
let serialized_proving_key =
|
||||
SerializableProvingKey::deserialize_uncompressed_unchecked(&mut cursor)
|
||||
.wrap_err("Failed to deserialize proving key")?;
|
||||
|
||||
let serialized_constraint_matrices =
|
||||
SerializableConstraintMatrices::deserialize_uncompressed_unchecked(&mut cursor)
|
||||
.wrap_err("Failed to deserialize constraint matrices")?;
|
||||
|
||||
// Get on right form for API
|
||||
let proving_key: ProvingKey<Bn254> = serialized_proving_key.0;
|
||||
let constraint_matrices: ConstraintMatrices<ark_bn254::Fr> = ConstraintMatrices {
|
||||
num_instance_variables: serialized_constraint_matrices.num_instance_variables,
|
||||
num_witness_variables: serialized_constraint_matrices.num_witness_variables,
|
||||
num_constraints: serialized_constraint_matrices.num_constraints,
|
||||
a_num_non_zero: serialized_constraint_matrices.a_num_non_zero,
|
||||
b_num_non_zero: serialized_constraint_matrices.b_num_non_zero,
|
||||
c_num_non_zero: serialized_constraint_matrices.c_num_non_zero,
|
||||
a: serialized_constraint_matrices.a.data,
|
||||
b: serialized_constraint_matrices.b.data,
|
||||
c: serialized_constraint_matrices.c.data,
|
||||
};
|
||||
|
||||
Ok((proving_key, constraint_matrices))
|
||||
}
|
||||
|
||||
#[cfg(feature = "arkzkey")]
|
||||
pub fn read_arkzkey_from_bytes_compressed(
|
||||
arkzkey_data: &[u8],
|
||||
) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
|
||||
if arkzkey_data.is_empty() {
|
||||
return Err(Report::msg("No proving key found!"));
|
||||
}
|
||||
|
||||
let mut cursor = std::io::Cursor::new(arkzkey_data);
|
||||
|
||||
let serialized_proving_key =
|
||||
SerializableProvingKey::deserialize_compressed_unchecked(&mut cursor)
|
||||
.wrap_err("Failed to deserialize proving key")?;
|
||||
|
||||
let serialized_constraint_matrices =
|
||||
SerializableConstraintMatrices::deserialize_compressed_unchecked(&mut cursor)
|
||||
.wrap_err("Failed to deserialize constraint matrices")?;
|
||||
|
||||
// Get on right form for API
|
||||
let proving_key: ProvingKey<Bn254> = serialized_proving_key.0;
|
||||
let constraint_matrices: ConstraintMatrices<ark_bn254::Fr> = ConstraintMatrices {
|
||||
num_instance_variables: serialized_constraint_matrices.num_instance_variables,
|
||||
num_witness_variables: serialized_constraint_matrices.num_witness_variables,
|
||||
num_constraints: serialized_constraint_matrices.num_constraints,
|
||||
a_num_non_zero: serialized_constraint_matrices.a_num_non_zero,
|
||||
b_num_non_zero: serialized_constraint_matrices.b_num_non_zero,
|
||||
c_num_non_zero: serialized_constraint_matrices.c_num_non_zero,
|
||||
a: serialized_constraint_matrices.a.data,
|
||||
b: serialized_constraint_matrices.b.data,
|
||||
c: serialized_constraint_matrices.c.data,
|
||||
};
|
||||
|
||||
Ok((proving_key, constraint_matrices))
|
||||
}
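A brief usage sketch (not part of the diff): loading the embedded compressed key with the reader defined above. It assumes the `arkzkey` feature is enabled (both `ARKZKEY_BYTES` and the reader are feature-gated) and uses the crate's `color_eyre` result type.

```rust
use rln::circuit::{read_arkzkey_from_bytes_compressed, ARKZKEY_BYTES};

fn main() -> color_eyre::Result<()> {
    // ARKZKEY_BYTES is the compressed arkzkey embedded via include_bytes! in circuit.rs.
    let (_proving_key, matrices) = read_arkzkey_from_bytes_compressed(ARKZKEY_BYTES)?;
    println!("constraints: {}", matrices.num_constraints);
    Ok(())
}
```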
|
||||
|
||||
@@ -157,10 +157,10 @@ impl ProcessArg for *mut RLN {
|
||||
}
|
||||
}
|
||||
|
||||
/// Buffer struct is taken from
|
||||
/// <https://github.com/celo-org/celo-threshold-bls-rs/blob/master/crates/threshold-bls-ffi/src/ffi.rs>
|
||||
///
|
||||
/// Also heavily inspired by <https://github.com/kilic/rln/blob/master/src/ffi.rs>
|
||||
///// Buffer struct is taken from
|
||||
///// <https://github.com/celo-org/celo-threshold-bls-rs/blob/master/crates/threshold-bls-ffi/src/ffi.rs>
|
||||
/////
|
||||
///// Also heavily inspired by <https://github.com/kilic/rln/blob/master/src/ffi.rs>
|
||||
|
||||
#[repr(C)]
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
@@ -192,6 +192,7 @@ impl<'a> From<&Buffer> for &'a [u8] {
|
||||
////////////////////////////////////////////////////////
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
#[no_mangle]
|
||||
pub extern "C" fn new(tree_height: usize, input_buffer: *const Buffer, ctx: *mut *mut RLN) -> bool {
|
||||
match RLN::new(tree_height, input_buffer.process()) {
|
||||
@@ -207,6 +208,23 @@ pub extern "C" fn new(tree_height: usize, input_buffer: *const Buffer, ctx: *mut
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[cfg(feature = "stateless")]
|
||||
#[no_mangle]
|
||||
pub extern "C" fn new(ctx: *mut *mut RLN) -> bool {
|
||||
match RLN::new() {
|
||||
Ok(rln) => {
|
||||
unsafe { *ctx = Box::into_raw(Box::new(rln)) };
|
||||
true
|
||||
}
|
||||
Err(err) => {
|
||||
eprintln!("could not instantiate rln: {err}");
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
#[no_mangle]
|
||||
pub extern "C" fn new_with_params(
|
||||
tree_height: usize,
|
||||
@@ -234,47 +252,79 @@ pub extern "C" fn new_with_params(
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[cfg(feature = "stateless")]
|
||||
#[no_mangle]
|
||||
pub extern "C" fn new_with_params(
|
||||
circom_buffer: *const Buffer,
|
||||
zkey_buffer: *const Buffer,
|
||||
vk_buffer: *const Buffer,
|
||||
ctx: *mut *mut RLN,
|
||||
) -> bool {
|
||||
match RLN::new_with_params(
|
||||
circom_buffer.process().to_vec(),
|
||||
zkey_buffer.process().to_vec(),
|
||||
vk_buffer.process().to_vec(),
|
||||
) {
|
||||
Ok(rln) => {
|
||||
unsafe { *ctx = Box::into_raw(Box::new(rln)) };
|
||||
true
|
||||
}
|
||||
Err(err) => {
|
||||
eprintln!("could not instantiate rln: {err}");
|
||||
false
|
||||
}
|
||||
}
|
||||
}
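An illustrative caller sketch (not part of the diff) showing how the stateless `new_with_params` export might be driven from Rust. Assumptions: the crate is built with the `stateless` feature, the FFI items are reachable as `rln::ffi::{Buffer, new_with_params}` and the type as `rln::public::RLN`, and `Buffer` implements `From<&[u8]>` upstream; none of these paths are shown in this diff.

```rust
use rln::ffi::{new_with_params, Buffer};
use rln::public::RLN;
use std::ptr;

// Stateless construction: circuit resources are passed in, but no tree height.
fn build_stateless(circom: &[u8], zkey: &[u8], vk: &[u8]) -> Option<*mut RLN> {
    let circom_buf = Buffer::from(circom);
    let zkey_buf = Buffer::from(zkey);
    let vk_buf = Buffer::from(vk);
    let mut ctx: *mut RLN = ptr::null_mut();
    // new_with_params returns true on success and writes the boxed instance into `ctx`.
    let ok = new_with_params(&circom_buf, &zkey_buf, &vk_buf, &mut ctx);
    ok.then_some(ctx)
}
```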
|
||||
|
||||
////////////////////////////////////////////////////////
|
||||
// Merkle tree APIs
|
||||
////////////////////////////////////////////////////////
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[no_mangle]
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub extern "C" fn set_tree(ctx: *mut RLN, tree_height: usize) -> bool {
|
||||
call!(ctx, set_tree, tree_height)
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[no_mangle]
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub extern "C" fn delete_leaf(ctx: *mut RLN, index: usize) -> bool {
|
||||
call!(ctx, delete_leaf, index)
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[no_mangle]
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub extern "C" fn set_leaf(ctx: *mut RLN, index: usize, input_buffer: *const Buffer) -> bool {
|
||||
call!(ctx, set_leaf, index, input_buffer)
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[no_mangle]
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub extern "C" fn get_leaf(ctx: *mut RLN, index: usize, output_buffer: *mut Buffer) -> bool {
|
||||
call_with_output_arg!(ctx, get_leaf, output_buffer, index)
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[no_mangle]
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub extern "C" fn leaves_set(ctx: *mut RLN) -> usize {
|
||||
ctx.process().leaves_set()
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[no_mangle]
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub extern "C" fn set_next_leaf(ctx: *mut RLN, input_buffer: *const Buffer) -> bool {
|
||||
call!(ctx, set_next_leaf, input_buffer)
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[no_mangle]
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub extern "C" fn set_leaves_from(
|
||||
ctx: *mut RLN,
|
||||
index: usize,
|
||||
@@ -285,12 +335,14 @@ pub extern "C" fn set_leaves_from(
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[no_mangle]
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub extern "C" fn init_tree_with_leaves(ctx: *mut RLN, input_buffer: *const Buffer) -> bool {
|
||||
call!(ctx, init_tree_with_leaves, input_buffer)
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[no_mangle]
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub extern "C" fn atomic_operation(
|
||||
ctx: *mut RLN,
|
||||
index: usize,
|
||||
@@ -302,6 +354,7 @@ pub extern "C" fn atomic_operation(
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[no_mangle]
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub extern "C" fn seq_atomic_operation(
|
||||
ctx: *mut RLN,
|
||||
leaves_buffer: *const Buffer,
|
||||
@@ -318,12 +371,14 @@ pub extern "C" fn seq_atomic_operation(
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[no_mangle]
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub extern "C" fn get_root(ctx: *const RLN, output_buffer: *mut Buffer) -> bool {
|
||||
call_with_output_arg!(ctx, get_root, output_buffer)
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[no_mangle]
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub extern "C" fn get_proof(ctx: *const RLN, index: usize, output_buffer: *mut Buffer) -> bool {
|
||||
call_with_output_arg!(ctx, get_proof, output_buffer, index)
|
||||
}
|
||||
@@ -353,6 +408,7 @@ pub extern "C" fn verify(
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[no_mangle]
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub extern "C" fn generate_rln_proof(
|
||||
ctx: *mut RLN,
|
||||
input_buffer: *const Buffer,
|
||||
@@ -378,6 +434,7 @@ pub extern "C" fn generate_rln_proof_with_witness(
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[no_mangle]
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub extern "C" fn verify_rln_proof(
|
||||
ctx: *const RLN,
|
||||
proof_buffer: *const Buffer,
|
||||
@@ -461,18 +518,21 @@ pub extern "C" fn recover_id_secret(
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[no_mangle]
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub extern "C" fn set_metadata(ctx: *mut RLN, input_buffer: *const Buffer) -> bool {
|
||||
call!(ctx, set_metadata, input_buffer)
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[no_mangle]
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub extern "C" fn get_metadata(ctx: *const RLN, output_buffer: *mut Buffer) -> bool {
|
||||
call_with_output_arg!(ctx, get_metadata, output_buffer)
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[no_mangle]
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub extern "C" fn flush(ctx: *mut RLN) -> bool {
|
||||
call!(ctx, flush)
|
||||
}
|
||||
|
||||
@@ -739,11 +739,11 @@ where
|
||||
a.map_err(serde::de::Error::custom)
|
||||
}
|
||||
|
||||
/// Converts a [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object to the corresponding JSON serialization.
|
||||
/// Converts a JSON value into [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// Returns an error if `message_id` is not within `user_message_limit`.
|
||||
/// Returns an error if `rln_witness.message_id` is not within `rln_witness.user_message_limit`.
|
||||
pub fn rln_witness_from_json(input_json: serde_json::Value) -> Result<RLNWitnessInput> {
|
||||
let rln_witness: RLNWitnessInput = serde_json::from_value(input_json).unwrap();
|
||||
message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;
|
||||
@@ -751,11 +751,11 @@ pub fn rln_witness_from_json(input_json: serde_json::Value) -> Result<RLNWitness
|
||||
Ok(rln_witness)
|
||||
}
|
||||
|
||||
/// Converts a JSON value into [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object.
|
||||
/// Converts a [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object to the corresponding JSON serialization.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// Returns an error if `rln_witness.message_id` is not within `rln_witness.user_message_limit`.
|
||||
/// Returns an error if `message_id` is not within `user_message_limit`.
|
||||
pub fn rln_witness_to_json(rln_witness: &RLNWitnessInput) -> Result<serde_json::Value> {
|
||||
message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;
|
||||
|
||||
@@ -763,6 +763,40 @@ pub fn rln_witness_to_json(rln_witness: &RLNWitnessInput) -> Result<serde_json::
|
||||
Ok(rln_witness_json)
|
||||
}
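A hedged round-trip sketch (not part of the diff) showing how the two converters above relate; `witness` is a hypothetical, already-constructed `RLNWitnessInput`, and the import path assumes the `protocol` module is public, as the doc links above suggest.

```rust
use rln::protocol::{rln_witness_from_json, rln_witness_to_json, RLNWitnessInput};

fn round_trip(witness: &RLNWitnessInput) -> color_eyre::Result<RLNWitnessInput> {
    // Both directions run the message_id range check documented above.
    let json = rln_witness_to_json(witness)?;
    rln_witness_from_json(json)
}
```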
|
||||
|
||||
/// Converts a [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object to the corresponding JSON serialization.
|
||||
/// Before serialization, the values are converted to big integers for further processing by the witness calculator.
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// Returns an error if `message_id` is not within `user_message_limit`.
|
||||
pub fn rln_witness_to_bigint_json(rln_witness: &RLNWitnessInput) -> Result<serde_json::Value> {
|
||||
message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;
|
||||
|
||||
let mut path_elements = Vec::new();
|
||||
|
||||
for v in rln_witness.path_elements.iter() {
|
||||
path_elements.push(to_bigint(v)?.to_str_radix(10));
|
||||
}
|
||||
|
||||
let mut identity_path_index = Vec::new();
|
||||
rln_witness
|
||||
.identity_path_index
|
||||
.iter()
|
||||
.for_each(|v| identity_path_index.push(BigInt::from(*v).to_str_radix(10)));
|
||||
|
||||
let inputs = serde_json::json!({
|
||||
"identitySecret": to_bigint(&rln_witness.identity_secret)?.to_str_radix(10),
|
||||
"userMessageLimit": to_bigint(&rln_witness.user_message_limit)?.to_str_radix(10),
|
||||
"messageId": to_bigint(&rln_witness.message_id)?.to_str_radix(10),
|
||||
"pathElements": path_elements,
|
||||
"identityPathIndex": identity_path_index,
|
||||
"x": to_bigint(&rln_witness.x)?.to_str_radix(10),
|
||||
"externalNullifier": to_bigint(&rln_witness.external_nullifier)?.to_str_radix(10),
|
||||
});
|
||||
|
||||
Ok(inputs)
|
||||
}
|
||||
|
||||
pub fn message_id_range_check(message_id: &Fr, user_message_limit: &Fr) -> Result<()> {
|
||||
if message_id > user_message_limit {
|
||||
return Err(color_eyre::Report::msg(
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
use crate::circuit::{vk_from_raw, zkey_from_raw, Curve, Fr};
|
||||
use crate::hashers::{hash_to_field, poseidon_hash as utils_poseidon_hash};
|
||||
use crate::poseidon_tree::PoseidonTree;
|
||||
use crate::protocol::*;
|
||||
use crate::utils::*;
|
||||
/// This is the main public API for RLN module. It is used by the FFI, and should be
|
||||
/// used by tests etc as well
|
||||
/// used by tests etc. as well
|
||||
use ark_groth16::Proof as ArkProof;
|
||||
use ark_groth16::{ProvingKey, VerifyingKey};
|
||||
use ark_relations::r1cs::ConstraintMatrices;
|
||||
@@ -20,6 +19,7 @@ cfg_if! {
|
||||
use std::sync::Mutex;
|
||||
use crate::circuit::{circom_from_folder, vk_from_folder, circom_from_raw, zkey_from_folder, TEST_TREE_HEIGHT};
|
||||
use ark_circom::WitnessCalculator;
|
||||
use crate::poseidon_tree::PoseidonTree;
|
||||
use serde_json::{json, Value};
|
||||
use utils::{Hasher};
|
||||
use std::sync::Arc;
|
||||
@@ -43,6 +43,7 @@ pub const RLN_IDENTIFIER: &[u8] = b"zerokit/rln/010203040506070809";
|
||||
pub struct RLN {
|
||||
proving_key: (ProvingKey<Curve>, ConstraintMatrices<Fr>),
|
||||
pub(crate) verification_key: VerifyingKey<Curve>,
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub(crate) tree: PoseidonTree,
|
||||
|
||||
// The witness calculator can't be loaded in zerokit. Since this struct
|
||||
@@ -60,17 +61,18 @@ impl RLN {
|
||||
/// Input parameters are
|
||||
/// - `tree_height`: the height of the internal Merkle tree
|
||||
/// - `input_data`: include `tree_config` a reader for a string containing a json with the merkle tree configuration
|
||||
///
|
||||
/// Example:
|
||||
/// ```
|
||||
/// use std::io::Cursor;
|
||||
///
|
||||
/// let tree_height = 20;
|
||||
/// let input = Cursor::new(json!({}).to_string());;
|
||||
/// let input = Cursor::new(json!({}).to_string());
|
||||
///
|
||||
/// // We create a new RLN instance
|
||||
/// let mut rln = RLN::new(tree_height, input);
|
||||
/// ```
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(not(target_arch = "wasm32"), not(feature = "stateless")))]
|
||||
pub fn new<R: Read>(tree_height: usize, mut input_data: R) -> Result<RLN> {
|
||||
// We read input
|
||||
let mut input: Vec<u8> = Vec::new();
|
||||
@@ -101,12 +103,38 @@ impl RLN {
|
||||
witness_calculator: witness_calculator.to_owned(),
|
||||
proving_key: proving_key.to_owned(),
|
||||
verification_key: verification_key.to_owned(),
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
tree,
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
_marker: PhantomData,
|
||||
})
|
||||
}
|
||||
|
||||
/// Creates a new stateless RLN object by loading circuit resources from a folder.
|
||||
///
|
||||
/// Example:
|
||||
///
|
||||
/// ```
|
||||
/// // We create a new RLN instance
|
||||
/// let mut rln = RLN::new();
|
||||
/// ```
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "stateless")))]
|
||||
#[cfg(all(not(target_arch = "wasm32"), feature = "stateless"))]
|
||||
pub fn new() -> Result<RLN> {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
let witness_calculator = circom_from_folder();
|
||||
let proving_key = zkey_from_folder();
|
||||
let verification_key = vk_from_folder();
|
||||
|
||||
Ok(RLN {
|
||||
witness_calculator: witness_calculator.to_owned(),
|
||||
proving_key: proving_key.to_owned(),
|
||||
verification_key: verification_key.to_owned(),
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
_marker: PhantomData,
|
||||
})
|
||||
}
|
||||
|
||||
/// Creates a new RLN object by passing circuit resources as byte vectors.
|
||||
///
|
||||
/// Input parameters are
|
||||
@@ -144,7 +172,7 @@ impl RLN {
|
||||
/// tree_config_input,
|
||||
/// );
|
||||
/// ```
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(not(target_arch = "wasm32"), not(feature = "stateless")))]
|
||||
pub fn new_with_params<R: Read>(
|
||||
tree_height: usize,
|
||||
circom_vec: Vec<u8>,
|
||||
@@ -179,27 +207,93 @@ impl RLN {
|
||||
witness_calculator,
|
||||
proving_key,
|
||||
verification_key,
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
tree,
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
_marker: PhantomData,
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
pub fn new_with_params(tree_height: usize, zkey_vec: Vec<u8>, vk_vec: Vec<u8>) -> Result<RLN> {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
let witness_calculator = circom_from_raw(circom_vec)?;
|
||||
/// Creates a new stateless RLN object by passing circuit resources as byte vectors.
|
||||
///
|
||||
/// Input parameters are
|
||||
/// - `circom_vec`: a byte vector containing the ZK circuit (`rln.wasm`) as binary file
|
||||
/// - `zkey_vec`: a byte vector containing the proving key (`rln_final.zkey`) or (`rln_final.arkzkey`) as a binary file
|
||||
/// - `vk_vec`: a byte vector containing the verification key (`verification_key.arkvkey`) as a binary file
|
||||
///
|
||||
/// Example:
|
||||
/// ```
|
||||
/// use std::fs::File;
|
||||
/// use std::io::Read;
|
||||
///
|
||||
/// let resources_folder = "./resources/tree_height_20/";
|
||||
///
|
||||
/// let mut resources: Vec<Vec<u8>> = Vec::new();
|
||||
/// for filename in ["rln.wasm", "rln_final.zkey", "verification_key.arkvkey"] {
|
||||
/// let fullpath = format!("{resources_folder}{filename}");
|
||||
/// let mut file = File::open(&fullpath).expect("no file found");
|
||||
/// let metadata = std::fs::metadata(&fullpath).expect("unable to read metadata");
|
||||
/// let mut buffer = vec![0; metadata.len() as usize];
|
||||
/// file.read_exact(&mut buffer).expect("buffer overflow");
|
||||
/// resources.push(buffer);
|
||||
/// }
|
||||
///
|
||||
/// let mut rln = RLN::new_with_params(
|
||||
/// resources[0].clone(),
|
||||
/// resources[1].clone(),
|
||||
/// resources[2].clone(),
|
||||
/// );
|
||||
/// ```
|
||||
#[cfg(all(not(target_arch = "wasm32"), feature = "stateless"))]
|
||||
pub fn new_with_params(circom_vec: Vec<u8>, zkey_vec: Vec<u8>, vk_vec: Vec<u8>) -> Result<RLN> {
|
||||
let witness_calculator = circom_from_raw(&circom_vec)?;
|
||||
|
||||
let proving_key = zkey_from_raw(&zkey_vec)?;
|
||||
let verification_key = vk_from_raw(&vk_vec, &zkey_vec)?;
|
||||
|
||||
// We compute a default empty tree
|
||||
let tree = PoseidonTree::default(tree_height)?;
|
||||
Ok(RLN {
|
||||
witness_calculator,
|
||||
proving_key,
|
||||
verification_key,
|
||||
})
|
||||
}
|
||||
|
||||
/// Creates a new stateless RLN object by passing circuit resources as byte vectors.
|
||||
///
|
||||
/// Input parameters are
|
||||
/// - `zkey_vec`: a byte vector containing the proving key (`rln_final.zkey`) or (`rln_final.arkzkey`) as a binary file
|
||||
/// - `vk_vec`: a byte vector containing the verification key (`verification_key.arkvkey`) as a binary file
|
||||
///
|
||||
/// Example:
|
||||
/// ```
|
||||
/// use std::fs::File;
|
||||
/// use std::io::Read;
|
||||
///
|
||||
/// let resources_folder = "./resources/tree_height_20/";
|
||||
///
|
||||
/// let mut resources: Vec<Vec<u8>> = Vec::new();
|
||||
/// for filename in ["rln_final.zkey", "verification_key.arkvkey"] {
|
||||
/// let fullpath = format!("{resources_folder}{filename}");
|
||||
/// let mut file = File::open(&fullpath).expect("no file found");
|
||||
/// let metadata = std::fs::metadata(&fullpath).expect("unable to read metadata");
|
||||
/// let mut buffer = vec![0; metadata.len() as usize];
|
||||
/// file.read_exact(&mut buffer).expect("buffer overflow");
|
||||
/// resources.push(buffer);
|
||||
/// }
|
||||
///
|
||||
/// let mut rln = RLN::new_with_params(
|
||||
/// resources[0].clone(),
|
||||
/// resources[1].clone(),
|
||||
/// );
|
||||
/// ```
|
||||
#[cfg(all(target_arch = "wasm32", feature = "stateless"))]
|
||||
pub fn new_with_params(zkey_vec: Vec<u8>, vk_vec: Vec<u8>) -> Result<RLN> {
|
||||
let proving_key = zkey_from_raw(&zkey_vec)?;
|
||||
let verification_key = vk_from_raw(&vk_vec, &zkey_vec)?;
|
||||
|
||||
Ok(RLN {
|
||||
proving_key,
|
||||
verification_key,
|
||||
tree,
|
||||
_marker: PhantomData,
|
||||
})
|
||||
}
|
||||
@@ -213,6 +307,7 @@ impl RLN {
|
||||
///
|
||||
/// Input values are:
|
||||
/// - `tree_height`: the height of the Merkle tree.
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn set_tree(&mut self, tree_height: usize) -> Result<()> {
|
||||
// We compute a default empty tree of desired height
|
||||
self.tree = PoseidonTree::default(tree_height)?;
|
||||
@@ -243,6 +338,7 @@ impl RLN {
|
||||
/// let mut buffer = Cursor::new(serialize_field_element(rate_commitment));
|
||||
/// rln.set_leaf(id_index, &mut buffer).unwrap();
|
||||
/// ```
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn set_leaf<R: Read>(&mut self, index: usize, mut input_data: R) -> Result<()> {
|
||||
// We read input
|
||||
let mut leaf_byte: Vec<u8> = Vec::new();
|
||||
@@ -272,6 +368,7 @@ impl RLN {
|
||||
/// let mut buffer = Cursor::new(Vec::<u8>::new());
|
||||
/// rln.get_leaf(id_index, &mut buffer).unwrap();
|
||||
/// let rate_commitment = deserialize_field_element(&buffer.into_inner()).unwrap();
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn get_leaf<W: Write>(&self, index: usize, mut output_data: W) -> Result<()> {
|
||||
// We get the leaf at input index
|
||||
let leaf = self.tree.get(index)?;
|
||||
@@ -314,6 +411,7 @@ impl RLN {
|
||||
/// let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves));
|
||||
/// rln.set_leaves_from(index, &mut buffer).unwrap();
|
||||
/// ```
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn set_leaves_from<R: Read>(&mut self, index: usize, mut input_data: R) -> Result<()> {
|
||||
// We read input
|
||||
let mut leaves_byte: Vec<u8> = Vec::new();
|
||||
@@ -334,6 +432,7 @@ impl RLN {
|
||||
///
|
||||
/// Input values are:
|
||||
/// - `input_data`: a reader for the serialization of multiple leaf values (serialization done with [`rln::utils::vec_fr_to_bytes_le`](crate::utils::vec_fr_to_bytes_le))
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn init_tree_with_leaves<R: Read>(&mut self, input_data: R) -> Result<()> {
|
||||
// reset the tree
|
||||
// NOTE: this requires the tree to be initialized with the correct height initially
|
||||
@@ -384,6 +483,7 @@ impl RLN {
|
||||
/// let mut indices_buffer = Cursor::new(vec_u8_to_bytes_le(&indices));
|
||||
/// rln.atomic_operation(index, &mut leaves_buffer, indices_buffer).unwrap();
|
||||
/// ```
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn atomic_operation<R: Read>(
|
||||
&mut self,
|
||||
index: usize,
|
||||
@@ -409,6 +509,7 @@ impl RLN {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn leaves_set(&mut self) -> usize {
|
||||
self.tree.leaves_set()
|
||||
}
|
||||
@@ -456,6 +557,7 @@ impl RLN {
|
||||
/// let mut buffer = Cursor::new(fr_to_bytes_le(&rate_commitment));
|
||||
/// rln.set_next_leaf(&mut buffer).unwrap();
|
||||
/// ```
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn set_next_leaf<R: Read>(&mut self, mut input_data: R) -> Result<()> {
|
||||
// We read input
|
||||
let mut leaf_byte: Vec<u8> = Vec::new();
|
||||
@@ -468,7 +570,7 @@ impl RLN {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Sets the value of the leaf at position index to the harcoded default value.
|
||||
/// Sets the value of the leaf at position index to the hardcoded default value.
|
||||
///
|
||||
/// This function does not change the internal Merkle tree `next_index` value.
|
||||
///
|
||||
@@ -481,6 +583,7 @@ impl RLN {
|
||||
/// let index = 10;
|
||||
/// rln.delete_leaf(index).unwrap();
|
||||
/// ```
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn delete_leaf(&mut self, index: usize) -> Result<()> {
|
||||
self.tree.delete(index)?;
|
||||
Ok(())
@@ -498,6 +601,7 @@ impl RLN {
/// let metadata = b"some metadata";
/// rln.set_metadata(metadata).unwrap();
/// ```
#[cfg(not(feature = "stateless"))]
pub fn set_metadata(&mut self, metadata: &[u8]) -> Result<()> {
self.tree.set_metadata(metadata)?;
Ok(())
@@ -517,6 +621,7 @@ impl RLN {
/// rln.get_metadata(&mut buffer).unwrap();
/// let metadata = buffer.into_inner();
/// ```
#[cfg(not(feature = "stateless"))]
pub fn get_metadata<W: Write>(&self, mut output_data: W) -> Result<()> {
let metadata = self.tree.metadata()?;
output_data.write_all(&metadata)?;
@@ -536,6 +641,7 @@ impl RLN {
/// rln.get_root(&mut buffer).unwrap();
/// let (root, _) = bytes_le_to_fr(&buffer.into_inner());
/// ```
#[cfg(not(feature = "stateless"))]
pub fn get_root<W: Write>(&self, mut output_data: W) -> Result<()> {
let root = self.tree.root();
output_data.write_all(&fr_to_bytes_le(&root))?;
@@ -558,6 +664,7 @@ impl RLN {
/// rln.get_subtree_root(level, index, &mut buffer).unwrap();
/// let (subroot, _) = bytes_le_to_fr(&buffer.into_inner());
/// ```
#[cfg(not(feature = "stateless"))]
pub fn get_subtree_root<W: Write>(
&self,
level: usize,
@@ -591,6 +698,7 @@ impl RLN {
/// let (path_elements, read) = bytes_le_to_vec_fr(&buffer_inner);
/// let (identity_path_index, _) = bytes_le_to_vec_u8(&buffer_inner[read..].to_vec());
/// ```
#[cfg(not(feature = "stateless"))]
pub fn get_proof<W: Write>(&self, index: usize, mut output_data: W) -> Result<()> {
let merkle_proof = self.tree.proof(index).expect("proof should exist");
let path_elements = merkle_proof.get_path_elements();
@@ -634,6 +742,7 @@ impl RLN {
/// let idxs = bytes_le_to_vec_usize(&buffer.into_inner()).unwrap();
/// assert_eq!(idxs, [0, 1, 2, 3, 4]);
/// ```
#[cfg(not(feature = "stateless"))]
pub fn get_empty_leaves_indices<W: Write>(&self, mut output_data: W) -> Result<()> {
let idxs = self.tree.get_empty_leaves_indices();
idxs.serialize_compressed(&mut output_data)?;
@@ -693,7 +802,7 @@ impl RLN {
///
/// Input values are:
/// - `input_data`: a reader for the serialization of the RLN zkSNARK proof concatenated with a serialization of the circuit output values,
/// i.e. `[ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32>]`, where <_> indicates the byte length.
/// i.e. `[ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32>]`, where <_> indicates the byte length.
///
/// The function returns true if the zkSNARK proof is valid with respect to the provided circuit output values, false otherwise.
///
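The serialization layout above is fixed-width apart from the proof, so splitting a `proof_data` buffer is a matter of byte offsets. A sketch (not an existing helper in the crate) that follows the documented layout and the `bytes_le_to_fr` helper used elsewhere in this diff:

```rust
use rln::circuit::Fr;
use rln::utils::bytes_le_to_fr;

// Output fields of an RLN proof, recovered from the documented byte layout.
struct ProofValuesView {
    zk_proof: Vec<u8>,
    root: Fr,
    external_nullifier: Fr,
    x: Fr,
    y: Fr,
    nullifier: Fr,
}

// Sketch: offsets follow the widths stated in the doc comment above,
// [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ].
fn split_proof_data(proof_data: &[u8]) -> ProofValuesView {
    let (root, _) = bytes_le_to_fr(&proof_data[128..160]);
    let (external_nullifier, _) = bytes_le_to_fr(&proof_data[160..192]);
    let (x, _) = bytes_le_to_fr(&proof_data[192..224]);
    let (y, _) = bytes_le_to_fr(&proof_data[224..256]);
    let (nullifier, _) = bytes_le_to_fr(&proof_data[256..288]);
    ProofValuesView {
        zk_proof: proof_data[..128].to_vec(),
        root,
        external_nullifier,
        x,
        y,
        nullifier,
    }
}
```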
@@ -792,7 +901,7 @@ impl RLN {
/// // proof_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32>]
/// let mut proof_data = output_buffer.into_inner();
/// ```
#[cfg(not(target_arch = "wasm32"))]
#[cfg(all(not(target_arch = "wasm32"), not(feature = "stateless")))]
pub fn generate_rln_proof<R: Read, W: Write>(
&mut self,
mut input_data: R,
@@ -865,11 +974,11 @@ impl RLN {
///
/// Input values are:
/// - `input_data`: a reader for the serialization of the RLN zkSNARK proof concatenated with a serialization of the circuit output values and the signal information,
/// i.e. `[ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> | signal_len<8> | signal<var>]`, where <_> indicates the byte length.
/// i.e. `[ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> | signal_len<8> | signal<var>]`, where <_> indicates the byte length.
///
/// The function returns true if the zkSNARK proof is valid with respect to the provided circuit output values and signal. Returns false otherwise.
///
/// Note that contrary to [`verify`](crate::public::RLN::verify), this function takes additionaly as input the signal and further verifies if
/// Note that contrary to [`verify`](crate::public::RLN::verify), this function takes additionally as input the signal and further verifies if
/// - the Merkle tree root corresponds to the root provided as input;
/// - the input signal corresponds to the Shamir's x coordinate provided as input
/// - the hardcoded application [RLN identifier](crate::public::RLN_IDENTIFIER) corresponds to the RLN identifier provided as input
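Compared with `verify`, the input here simply carries the raw signal after the proof values. A small sketch of building that input from a `generate_rln_proof` output, using the `normalize_usize` helper that the FFI tests later in this diff use for the 8-byte length field:

```rust
use rln::utils::normalize_usize;

// Sketch: append `signal_len<8>` and `signal<var>` to an existing proof buffer,
// matching the layout documented above.
fn append_signal(mut proof_data: Vec<u8>, signal: &[u8]) -> Vec<u8> {
    proof_data.append(&mut normalize_usize(signal.len()));
    proof_data.extend_from_slice(signal);
    proof_data
}
```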
@@ -889,6 +998,7 @@ impl RLN {
///
/// assert!(verified);
/// ```
#[cfg(not(feature = "stateless"))]
pub fn verify_rln_proof<R: Read>(&self, mut input_data: R) -> Result<bool> {
let mut serialized: Vec<u8> = Vec::new();
input_data.read_to_end(&mut serialized)?;
@@ -917,7 +1027,7 @@ impl RLN {
///
/// Input values are:
/// - `input_data`: a reader for the serialization of the RLN zkSNARK proof concatenated with a serialization of the circuit output values and the signal information, i.e. `[ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> | signal_len<8> | signal<var>]`
/// - `roots_data`: a reader for the serialization of a vector of roots, i.e. `[ number_of_roots<8> | root_1<32> | ... | root_n<32> ]` (number_of_roots is a uint64 in little-endian, roots are serialized using `rln::utils::fr_to_bytes_le`))
/// - `roots_data`: a reader for the serialization of a vector of roots, i.e. `[ number_of_roots<8> | root_1<32> | ... | root_n<32> ]` (number_of_roots is an uint64 in little-endian, roots are serialized using `rln::utils::fr_to_bytes_le`)
///
/// The function returns true if the zkSNARK proof is valid with respect to the provided circuit output values, signal and roots. Returns false otherwise.
///
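For `roots_data`, the doc above states a `[ number_of_roots<8> | root_1<32> | ... | root_n<32> ]` framing, while the FFI test later in this diff simply concatenates `fr_to_bytes_le` roots with no explicit count, so take the length prefix below as an assumption rather than a verified requirement:

```rust
use rln::circuit::Fr;
use rln::utils::{fr_to_bytes_le, normalize_usize};

// Sketch: serialize a set of acceptable roots following the documented layout.
fn serialize_roots(roots: &[Fr]) -> Vec<u8> {
    // 8-byte count prefix (encoding via normalize_usize is an assumption).
    let mut out = normalize_usize(roots.len());
    for root in roots {
        // Each root is a 32-byte little-endian field element.
        out.append(&mut fr_to_bytes_le(root));
    }
    out
}
```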
@@ -1062,7 +1172,7 @@ impl RLN {
/// Generated credentials are compatible with [Semaphore](https://semaphore.appliedzkp.org/docs/guides/identities)'s credentials.
///
/// Output values are:
/// - `output_data`: a writer receiving the serialization of the identity tapdoor, identity nullifier, identity secret and identity commitment (serialization done with `rln::utils::fr_to_bytes_le`)
/// - `output_data`: a writer receiving the serialization of the identity trapdoor, identity nullifier, identity secret and identity commitment (serialization done with `rln::utils::fr_to_bytes_le`)
///
/// Example
/// ```
@@ -1137,7 +1247,7 @@ impl RLN {
/// - `input_data`: a reader for the byte vector containing the seed
///
/// Output values are:
/// - `output_data`: a writer receiving the serialization of the identity tapdoor, identity nullifier, identity secret and identity commitment (serialization done with `rln::utils::fr_to_bytes_le`)
/// - `output_data`: a writer receiving the serialization of the identity trapdoor, identity nullifier, identity secret and identity commitment (serialization done with `rln::utils::fr_to_bytes_le`)
///
/// Example
/// ```
@@ -1175,8 +1285,8 @@ impl RLN {
///
/// Input values are:
/// - `input_proof_data_1`: a reader for the serialization of a RLN zkSNARK proof concatenated with a serialization of the circuit output values and -optionally- the signal information,
/// i.e. either `[proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32>]`
/// or `[ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> | signal_len<8> | signal<var> ]` (to maintain compatibility with both output of [`generate_rln_proof`](crate::public::RLN::generate_rln_proof) and input of [`verify_rln_proof`](crate::public::RLN::verify_rln_proof))
/// i.e. either `[proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32>]`
/// or `[ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> | signal_len<8> | signal<var> ]` (to maintain compatibility with both output of [`generate_rln_proof`](crate::public::RLN::generate_rln_proof) and input of [`verify_rln_proof`](crate::public::RLN::verify_rln_proof))
/// - `input_proof_data_2`: same as `input_proof_data_1`
///
/// Output values are:
@@ -1211,7 +1321,7 @@ impl RLN {
mut input_proof_data_2: R,
mut output_data: W,
) -> Result<()> {
// We serialize_compressed the two proofs and we get the corresponding RLNProofValues objects
// We serialize_compressed the two proofs, and we get the corresponding RLNProofValues objects
let mut serialized: Vec<u8> = Vec::new();
input_proof_data_1.read_to_end(&mut serialized)?;
// We skip deserialization of the zk-proof at the beginning
@@ -1227,7 +1337,7 @@ impl RLN {
// We continue only if the proof values are for the same external nullifier (which includes epoch and rln identifier)
// The idea is that proof values that go as input to this function are verified first (with zk-proof verify), hence ensuring validity of external nullifier and other fields.
// Only in case all fields are valid, an external_nullifier for the message will be stored (otherwise signal/proof will be simply discarded)
// If the nullifier matches one already seen, we can recovery of identity secret.
// If the nullifier matches one already seen, we can recover of identity secret.
if external_nullifier_1 == external_nullifier_2 {
// We extract the two shares
let share1 = (proof_values_1.x, proof_values_1.y);
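The recovery in this hunk rests on standard two-share Shamir reconstruction over the field: two proofs for the same external nullifier yield two points `(x, y)` on the same degree-one polynomial whose constant term is the identity secret hash. A sketch of just that arithmetic (not the crate's internal helper):

```rust
use ark_ff::Field;
use rln::circuit::Fr;

// Sketch: given two shares (x1, y1), (x2, y2) on y = a0 + a1 * x,
// recover the constant term a0 (the identity secret hash in RLN).
fn recover_secret(share1: (Fr, Fr), share2: (Fr, Fr)) -> Option<Fr> {
    let (x1, y1) = share1;
    let (x2, y2) = share2;
    // Shares with equal x coordinates leave the line underdetermined.
    let slope = (y2 - y1) * (x2 - x1).inverse()?;
    // Evaluate the interpolated line at x = 0.
    Some(y1 - slope * x1)
}
```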
@@ -1252,7 +1362,8 @@ impl RLN {
/// Input values are:
/// - `input_data`: a reader for the serialization of `[ identity_secret<32> | id_index<8> | user_message_limit<32> | message_id<32> | external_nullifier<32> | signal_len<8> | signal<var> ]`
///
/// The function returns the corresponding [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object serialized using [`rln::protocol::serialize_witness`](crate::protocol::serialize_witness)).
/// The function returns the corresponding [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object serialized using [`rln::protocol::serialize_witness`](crate::protocol::serialize_witness).
#[cfg(not(feature = "stateless"))]
pub fn get_serialized_rln_witness<R: Read>(&mut self, mut input_data: R) -> Result<Vec<u8>> {
// We read input RLN witness and we serialize_compressed it
let mut witness_byte: Vec<u8> = Vec::new();
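The input layout above can be assembled with the same helpers that appear elsewhere in this diff (`fr_to_bytes_le` for 32-byte field elements, `normalize_usize` for the 8-byte integers); a hedged sketch, not the crate's own constructor:

```rust
use rln::circuit::Fr;
use rln::utils::{fr_to_bytes_le, normalize_usize};

// Sketch: build the documented input for get_serialized_rln_witness /
// generate_rln_proof: [ identity_secret<32> | id_index<8> | user_message_limit<32>
// | message_id<32> | external_nullifier<32> | signal_len<8> | signal<var> ].
fn serialize_proof_input(
    identity_secret: &Fr,
    id_index: usize,
    user_message_limit: &Fr,
    message_id: &Fr,
    external_nullifier: &Fr,
    signal: &[u8],
) -> Vec<u8> {
    let mut out = Vec::new();
    out.append(&mut fr_to_bytes_le(identity_secret));
    out.append(&mut normalize_usize(id_index));
    out.append(&mut fr_to_bytes_le(user_message_limit));
    out.append(&mut fr_to_bytes_le(message_id));
    out.append(&mut fr_to_bytes_le(external_nullifier));
    out.append(&mut normalize_usize(signal.len()));
    out.extend_from_slice(signal);
    out
}
```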
@@ -1273,10 +1384,26 @@ impl RLN {
rln_witness_to_json(&rln_witness)
}

/// Converts a byte serialization of a [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object to the corresponding JSON serialization.
/// Before serialization the data will be translated into big int for further calculation in the witness calculator.
///
/// Input values are:
/// - `serialized_witness`: the byte serialization of a [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object (serialization done with [`rln::protocol::serialize_witness`](crate::protocol::serialize_witness)).
///
/// The function returns the corresponding JSON encoding of the input [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object.
pub fn get_rln_witness_bigint_json(
&mut self,
serialized_witness: &[u8],
) -> Result<serde_json::Value> {
let (rln_witness, _) = deserialize_witness(serialized_witness)?;
rln_witness_to_bigint_json(&rln_witness)
}

/// Closes the connection to the Merkle tree database.
/// This function should be called before the RLN object is dropped.
/// If not called, the connection will be closed when the RLN object is dropped.
/// This improves robustness of the tree.
#[cfg(not(feature = "stateless"))]
pub fn flush(&mut self) -> Result<()> {
self.tree.close_db_connection()
}
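A minimal shutdown sketch for the new `flush` API on a non-stateless build, assuming the `TEST_TREE_HEIGHT` constant and the `RLN::new(tree_height, config_reader)` constructor shown in the `Default` impl below:

```rust
use std::io::Cursor;

use rln::circuit::TEST_TREE_HEIGHT;
use rln::public::RLN;
use serde_json::json;

// Sketch: close the tree's database connection explicitly before dropping RLN,
// as the doc comment above recommends.
fn shutdown_cleanly() {
    let config = Cursor::new(json!({}).to_string());
    let mut rln = RLN::new(TEST_TREE_HEIGHT, config).unwrap();
    // ... use `rln` ...
    rln.flush().unwrap();
    // Dropping without flush() would still close the connection, just later.
    drop(rln);
}
```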
@@ -1285,9 +1412,14 @@ impl RLN {
#[cfg(not(target_arch = "wasm32"))]
impl Default for RLN {
fn default() -> Self {
let tree_height = TEST_TREE_HEIGHT;
let buffer = Cursor::new(json!({}).to_string());
Self::new(tree_height, buffer).unwrap()
#[cfg(not(feature = "stateless"))]
{
let tree_height = TEST_TREE_HEIGHT;
let buffer = Cursor::new(json!({}).to_string());
Self::new(tree_height, buffer).unwrap()
}
#[cfg(feature = "stateless")]
Self::new().unwrap()
}
}
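With this change `Default` works for both build flavours, so feature-agnostic callers can keep constructing the object the same way; roughly:

```rust
use rln::public::RLN;

// Sketch: the same call compiles with and without the `stateless` feature.
// Non-stateless builds get a TEST_TREE_HEIGHT-high tree with default config;
// stateless builds get a tree-less instance backed by RLN::new().
fn make_default_rln() -> RLN {
    RLN::default()
}
```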

File diff suppressed because it is too large.
477 rln/tests/ffi.rs
@@ -1,4 +1,5 @@
#[cfg(test)]
#[cfg(not(feature = "stateless"))]
mod test {
use ark_std::{rand::thread_rng, UniformRand};
use rand::Rng;
@@ -529,7 +530,7 @@ mod test {

#[test]
// Computes and verifies an RLN ZK proof by checking proof's root against an input roots buffer
fn test_verify_with_roots() {
fn test_verify_with_roots_ffi() {
// First part similar to test_rln_proof_ffi
let user_message_limit = Fr::from(100);

@@ -905,7 +906,7 @@ mod test {
}

#[test]
fn test_get_leaf() {
fn test_get_leaf_ffi() {
// We create a RLN instance
let no_of_leaves = 1 << TEST_TREE_HEIGHT;

@@ -945,7 +946,7 @@ mod test {
}

#[test]
fn test_valid_metadata() {
fn test_valid_metadata_ffi() {
// We create a RLN instance
let rln_pointer = create_rln_instance();

@@ -966,7 +967,7 @@ mod test {
}

#[test]
fn test_empty_metadata() {
fn test_empty_metadata_ffi() {
// We create a RLN instance
let rln_pointer = create_rln_instance();

@@ -978,3 +979,471 @@ mod test {
assert_eq!(output_buffer.len, 0);
}
}

#[cfg(test)]
#[cfg(feature = "stateless")]
mod stateless_test {
use ark_std::{rand::thread_rng, UniformRand};
use rand::Rng;
use rln::circuit::*;
use rln::ffi::generate_rln_proof_with_witness;
use rln::ffi::{hash as ffi_hash, poseidon_hash as ffi_poseidon_hash, *};
use rln::hashers::{hash_to_field, poseidon_hash as utils_poseidon_hash, ROUND_PARAMS};
use rln::poseidon_tree::PoseidonTree;
use rln::protocol::*;
use rln::public::RLN;
use rln::utils::*;
use std::mem::MaybeUninit;
use std::time::{Duration, Instant};
use utils::ZerokitMerkleTree;

type ConfigOf<T> = <T as ZerokitMerkleTree>::Config;

fn create_rln_instance() -> &'static mut RLN {
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let success = new(rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
unsafe { &mut *rln_pointer.assume_init() }
}

fn identity_pair_gen(rln_pointer: &mut RLN) -> (Fr, Fr) {
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = key_gen(rln_pointer, output_buffer.as_mut_ptr());
assert!(success, "key gen call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (identity_secret_hash, read) = bytes_le_to_fr(&result_data);
let (id_commitment, _) = bytes_le_to_fr(&result_data[read..].to_vec());
(identity_secret_hash, id_commitment)
}

fn rln_proof_gen_with_witness(rln_pointer: &mut RLN, serialized: &[u8]) -> Vec<u8> {
let input_buffer = &Buffer::from(serialized);
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success =
generate_rln_proof_with_witness(rln_pointer, input_buffer, output_buffer.as_mut_ptr());
assert!(success, "generate rln proof call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
<&[u8]>::from(&output_buffer).to_vec()
}

#[test]
fn test_recover_id_secret_stateless_ffi() {
let default_leaf = Fr::from(0);
let mut tree = PoseidonTree::new(
TEST_TREE_HEIGHT,
default_leaf,
ConfigOf::<PoseidonTree>::default(),
)
.unwrap();

let rln_pointer = create_rln_instance();

// We generate a new identity pair
let (identity_secret_hash, id_commitment) = identity_pair_gen(rln_pointer);

let user_message_limit = Fr::from(100);
let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit]);
tree.update_next(rate_commitment).unwrap();

// We generate a random epoch
let epoch = hash_to_field(b"test-epoch");
let rln_identifier = hash_to_field(b"test-rln-identifier");
let external_nullifier = utils_poseidon_hash(&[epoch, rln_identifier]);

// We generate two proofs using same epoch but different signals.
// We generate a random signal
let mut rng = thread_rng();
let signal1: [u8; 32] = rng.gen();
let x1 = hash_to_field(&signal1);

let signal2: [u8; 32] = rng.gen();
let x2 = hash_to_field(&signal2);

let identity_index = tree.leaves_set();
let merkle_proof = tree.proof(identity_index).expect("proof should exist");

// We prepare input for generate_rln_proof API
let rln_witness1 = rln_witness_from_values(
identity_secret_hash,
&merkle_proof,
x1,
external_nullifier,
user_message_limit,
Fr::from(1),
)
.unwrap();
let serialized1 = serialize_witness(&rln_witness1).unwrap();

let rln_witness2 = rln_witness_from_values(
identity_secret_hash,
&merkle_proof,
x2,
external_nullifier,
user_message_limit,
Fr::from(1),
)
.unwrap();
let serialized2 = serialize_witness(&rln_witness2).unwrap();

// We call generate_rln_proof for first proof values
// result_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]
let proof_data_1 = rln_proof_gen_with_witness(rln_pointer, serialized1.as_ref());

// We call generate_rln_proof
// result_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]
let proof_data_2 = rln_proof_gen_with_witness(rln_pointer, serialized2.as_ref());

let input_proof_buffer_1 = &Buffer::from(proof_data_1.as_ref());
let input_proof_buffer_2 = &Buffer::from(proof_data_2.as_ref());
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = recover_id_secret(
rln_pointer,
input_proof_buffer_1,
input_proof_buffer_2,
output_buffer.as_mut_ptr(),
);
assert!(success, "recover id secret call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
let serialized_identity_secret_hash = <&[u8]>::from(&output_buffer).to_vec();

// We passed two shares for the same secret, so recovery should be successful
// To check it, we ensure that recovered identity secret hash is empty
assert!(!serialized_identity_secret_hash.is_empty());

// We check if the recovered identity secret hash corresponds to the original one
let (recovered_identity_secret_hash, _) = bytes_le_to_fr(&serialized_identity_secret_hash);
assert_eq!(recovered_identity_secret_hash, identity_secret_hash);

// We now test that computing identity_secret_hash is unsuccessful if shares computed from two different identity secret hashes but within same epoch are passed

// We generate a new identity pair
let (identity_secret_hash_new, id_commitment_new) = identity_pair_gen(rln_pointer);
let rate_commitment_new = utils_poseidon_hash(&[id_commitment_new, user_message_limit]);
tree.update_next(rate_commitment_new).unwrap();

// We generate a random signals
let signal3: [u8; 32] = rng.gen();
let x3 = hash_to_field(&signal3);

let identity_index_new = tree.leaves_set();
let merkle_proof_new = tree.proof(identity_index_new).expect("proof should exist");

let rln_witness3 = rln_witness_from_values(
identity_secret_hash_new,
&merkle_proof_new,
x3,
external_nullifier,
user_message_limit,
Fr::from(1),
)
.unwrap();
let serialized3 = serialize_witness(&rln_witness3).unwrap();

// We call generate_rln_proof
// result_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]
let proof_data_3 = rln_proof_gen_with_witness(rln_pointer, serialized3.as_ref());

// We attempt to recover the secret using share1 (coming from identity_secret_hash) and share3 (coming from identity_secret_hash_new)

let input_proof_buffer_1 = &Buffer::from(proof_data_1.as_ref());
let input_proof_buffer_3 = &Buffer::from(proof_data_3.as_ref());
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = recover_id_secret(
rln_pointer,
input_proof_buffer_1,
input_proof_buffer_3,
output_buffer.as_mut_ptr(),
);
assert!(success, "recover id secret call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
let serialized_identity_secret_hash = <&[u8]>::from(&output_buffer).to_vec();
let (recovered_identity_secret_hash_new, _) =
bytes_le_to_fr(&serialized_identity_secret_hash);

// ensure that the recovered secret does not match with either of the
// used secrets in proof generation
assert_ne!(recovered_identity_secret_hash_new, identity_secret_hash_new);
}

#[test]
fn test_verify_with_roots_stateless_ffi() {
let default_leaf = Fr::from(0);
let mut tree = PoseidonTree::new(
TEST_TREE_HEIGHT,
default_leaf,
ConfigOf::<PoseidonTree>::default(),
)
.unwrap();

let rln_pointer = create_rln_instance();

// We generate a new identity pair
let (identity_secret_hash, id_commitment) = identity_pair_gen(rln_pointer);

let identity_index = tree.leaves_set();
let user_message_limit = Fr::from(100);
let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit]);
tree.update_next(rate_commitment).unwrap();

// We generate a random epoch
let epoch = hash_to_field(b"test-epoch");
let rln_identifier = hash_to_field(b"test-rln-identifier");
let external_nullifier = utils_poseidon_hash(&[epoch, rln_identifier]);

// We generate two proofs using same epoch but different signals.
// We generate a random signal
let mut rng = thread_rng();
let signal: [u8; 32] = rng.gen();
let x = hash_to_field(&signal);

let merkle_proof = tree.proof(identity_index).expect("proof should exist");

// We prepare input for generate_rln_proof API
let rln_witness = rln_witness_from_values(
identity_secret_hash,
&merkle_proof,
x,
external_nullifier,
user_message_limit,
Fr::from(1),
)
.unwrap();

let serialized = serialize_witness(&rln_witness).unwrap();
let mut proof_data = rln_proof_gen_with_witness(rln_pointer, serialized.as_ref());

proof_data.append(&mut normalize_usize(signal.len()));
proof_data.append(&mut signal.to_vec());

// If no roots is provided, proof validation is skipped and if the remaining proof values are valid, the proof will be correctly verified
let mut roots_data: Vec<u8> = Vec::new();

let input_buffer = &Buffer::from(proof_data.as_ref());
let roots_buffer = &Buffer::from(roots_data.as_ref());
let mut proof_is_valid: bool = false;
let proof_is_valid_ptr = &mut proof_is_valid as *mut bool;
let success =
verify_with_roots(rln_pointer, input_buffer, roots_buffer, proof_is_valid_ptr);
assert!(success, "verify call failed");
// Proof should be valid
assert_eq!(proof_is_valid, true);

// We serialize in the roots buffer some random values and we check that the proof is not verified since doesn't contain the correct root the proof refers to
for _ in 0..5 {
roots_data.append(&mut fr_to_bytes_le(&Fr::rand(&mut rng)));
}
let input_buffer = &Buffer::from(proof_data.as_ref());
let roots_buffer = &Buffer::from(roots_data.as_ref());
let mut proof_is_valid: bool = false;
let proof_is_valid_ptr = &mut proof_is_valid as *mut bool;
let success =
verify_with_roots(rln_pointer, input_buffer, roots_buffer, proof_is_valid_ptr);
assert!(success, "verify call failed");
// Proof should be invalid.
assert_eq!(proof_is_valid, false);

// We get the root of the tree obtained adding one leaf per time
let root = tree.root();

// We add the real root and we check if now the proof is verified
roots_data.append(&mut fr_to_bytes_le(&root));
let input_buffer = &Buffer::from(proof_data.as_ref());
let roots_buffer = &Buffer::from(roots_data.as_ref());
let mut proof_is_valid: bool = false;
let proof_is_valid_ptr = &mut proof_is_valid as *mut bool;
let success =
verify_with_roots(rln_pointer, input_buffer, roots_buffer, proof_is_valid_ptr);
assert!(success, "verify call failed");
// Proof should be valid.
assert_eq!(proof_is_valid, true);
}

#[test]
fn test_groth16_proofs_performance_stateless_ffi() {
// We create a RLN instance
let rln_pointer = create_rln_instance();

// We compute some benchmarks regarding proof and verify API calls
// Note that circuit loading requires some initial overhead.
// Once the circuit is loaded (i.e., when the RLN object is created), proof generation and verification times should be similar at each call.
let sample_size = 100;
let mut prove_time: u128 = 0;
let mut verify_time: u128 = 0;

for _ in 0..sample_size {
// We generate random witness instances and relative proof values
let rln_witness = random_rln_witness(TEST_TREE_HEIGHT);
let proof_values = proof_values_from_witness(&rln_witness).unwrap();

// We prepare id_commitment and we set the leaf at provided index
let rln_witness_ser = serialize_witness(&rln_witness).unwrap();
let input_buffer = &Buffer::from(rln_witness_ser.as_ref());
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let now = Instant::now();
let success = prove(rln_pointer, input_buffer, output_buffer.as_mut_ptr());
prove_time += now.elapsed().as_nanos();
assert!(success, "prove call failed");
let output_buffer = unsafe { output_buffer.assume_init() };

// We read the returned proof and we append proof values for verify
let serialized_proof = <&[u8]>::from(&output_buffer).to_vec();
let serialized_proof_values = serialize_proof_values(&proof_values);
let mut verify_data = Vec::<u8>::new();
verify_data.extend(&serialized_proof);
verify_data.extend(&serialized_proof_values);

// We prepare input proof values and we call verify
let input_buffer = &Buffer::from(verify_data.as_ref());
let mut proof_is_valid: bool = false;
let proof_is_valid_ptr = &mut proof_is_valid as *mut bool;
let now = Instant::now();
let success = verify(rln_pointer, input_buffer, proof_is_valid_ptr);
verify_time += now.elapsed().as_nanos();
assert!(success, "verify call failed");
assert_eq!(proof_is_valid, true);
}

println!(
"Average prove API call time: {:?}",
Duration::from_nanos((prove_time / sample_size).try_into().unwrap())
);
println!(
"Average verify API call time: {:?}",
Duration::from_nanos((verify_time / sample_size).try_into().unwrap())
);
}

#[test]
// Tests hash to field using FFI APIs
fn test_seeded_keygen_stateless_ffi() {
// We create a RLN instance
let rln_pointer = create_rln_instance();

// We generate a new identity pair from an input seed
let seed_bytes: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
let input_buffer = &Buffer::from(seed_bytes);
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = seeded_key_gen(rln_pointer, input_buffer, output_buffer.as_mut_ptr());
assert!(success, "seeded key gen call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (identity_secret_hash, read) = bytes_le_to_fr(&result_data);
let (id_commitment, _) = bytes_le_to_fr(&result_data[read..].to_vec());

// We check against expected values
let expected_identity_secret_hash_seed_bytes = str_to_fr(
"0x766ce6c7e7a01bdf5b3f257616f603918c30946fa23480f2859c597817e6716",
16,
);
let expected_id_commitment_seed_bytes = str_to_fr(
"0xbf16d2b5c0d6f9d9d561e05bfca16a81b4b873bb063508fae360d8c74cef51f",
16,
);

assert_eq!(
identity_secret_hash,
expected_identity_secret_hash_seed_bytes.unwrap()
);
assert_eq!(id_commitment, expected_id_commitment_seed_bytes.unwrap());
}

#[test]
// Tests hash to field using FFI APIs
fn test_seeded_extended_keygen_stateless_ffi() {
// We create a RLN instance
let rln_pointer = create_rln_instance();

// We generate a new identity tuple from an input seed
let seed_bytes: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
let input_buffer = &Buffer::from(seed_bytes);
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success =
seeded_extended_key_gen(rln_pointer, input_buffer, output_buffer.as_mut_ptr());
assert!(success, "seeded key gen call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (identity_trapdoor, identity_nullifier, identity_secret_hash, id_commitment) =
deserialize_identity_tuple(result_data);

// We check against expected values
let expected_identity_trapdoor_seed_bytes = str_to_fr(
"0x766ce6c7e7a01bdf5b3f257616f603918c30946fa23480f2859c597817e6716",
16,
);
let expected_identity_nullifier_seed_bytes = str_to_fr(
"0x1f18714c7bc83b5bca9e89d404cf6f2f585bc4c0f7ed8b53742b7e2b298f50b4",
16,
);
let expected_identity_secret_hash_seed_bytes = str_to_fr(
"0x2aca62aaa7abaf3686fff2caf00f55ab9462dc12db5b5d4bcf3994e671f8e521",
16,
);
let expected_id_commitment_seed_bytes = str_to_fr(
"0x68b66aa0a8320d2e56842581553285393188714c48f9b17acd198b4f1734c5c",
16,
);

assert_eq!(
identity_trapdoor,
expected_identity_trapdoor_seed_bytes.unwrap()
);
assert_eq!(
identity_nullifier,
expected_identity_nullifier_seed_bytes.unwrap()
);
assert_eq!(
identity_secret_hash,
expected_identity_secret_hash_seed_bytes.unwrap()
);
assert_eq!(id_commitment, expected_id_commitment_seed_bytes.unwrap());
}

#[test]
// Tests hash to field using FFI APIs
fn test_hash_to_field_stateless_ffi() {
let mut rng = rand::thread_rng();
let signal: [u8; 32] = rng.gen();

// We prepare id_commitment and we set the leaf at provided index
let input_buffer = &Buffer::from(signal.as_ref());
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = ffi_hash(input_buffer, output_buffer.as_mut_ptr());
assert!(success, "hash call failed");
let output_buffer = unsafe { output_buffer.assume_init() };

// We read the returned proof and we append proof values for verify
let serialized_hash = <&[u8]>::from(&output_buffer).to_vec();
let (hash1, _) = bytes_le_to_fr(&serialized_hash);

let hash2 = hash_to_field(&signal);

assert_eq!(hash1, hash2);
}

#[test]
// Test Poseidon hash FFI
fn test_poseidon_hash_stateless_ffi() {
// generate random number between 1..ROUND_PARAMS.len()
let mut rng = thread_rng();
let number_of_inputs = rng.gen_range(1..ROUND_PARAMS.len());
let mut inputs = Vec::with_capacity(number_of_inputs);
for _ in 0..number_of_inputs {
inputs.push(Fr::rand(&mut rng));
}
let inputs_ser = vec_fr_to_bytes_le(&inputs).unwrap();
let input_buffer = &Buffer::from(inputs_ser.as_ref());

let expected_hash = utils_poseidon_hash(inputs.as_ref());

let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = ffi_poseidon_hash(input_buffer, output_buffer.as_mut_ptr());
assert!(success, "poseidon hash call failed");

let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (received_hash, _) = bytes_le_to_fr(&result_data);

assert_eq!(received_hash, expected_hash);
}
}

@@ -12,6 +12,7 @@ mod test {

#[test]
// This test is similar to the one in lib, but uses only public API
#[cfg(not(feature = "stateless"))]
fn test_merkle_proof() {
let leaf_index = 3;
let user_message_limit = 1;

@@ -13,9 +13,11 @@ bench = false

[dependencies]
ark-ff = { version = "=0.4.1", default-features = false, features = ["asm"] }
num-bigint = { version = "=0.4.3", default-features = false, features = ["rand"] }
num-bigint = { version = "=0.4.3", default-features = false, features = [
"rand",
] }
color-eyre = "=0.6.2"
pmtree = { package = "vacp2p_pmtree", version = "=2.0.2", optional = true}
pmtree = { package = "vacp2p_pmtree", version = "=2.0.2", optional = true }
sled = "=0.34.7"
serde = "=1.0.163"
lazy_static = "1.4.0"

@@ -8,7 +8,7 @@ use std::{
};

////////////////////////////////////////////////////////////
/// Full Merkle Tree Implementation
///// Full Merkle Tree Implementation
////////////////////////////////////////////////////////////

/// Merkle tree with all leaf and intermediate hashes stored

@@ -6,7 +6,7 @@ use std::str::FromStr;
use std::{cmp::max, fmt::Debug};

////////////////////////////////////////////////////////////
/// Optimal Merkle Tree Implementation
///// Optimal Merkle Tree Implementation
////////////////////////////////////////////////////////////

/// The Merkle tree structure
@@ -55,7 +55,9 @@ impl FromStr for OptimalMerkleConfig {
}
}

/// Implementations
////////////////////////////////////////////////////////////
///// Implementations
////////////////////////////////////////////////////////////

impl<H: Hasher> ZerokitMerkleTree for OptimalMerkleTree<H>
where
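The comment-banner hunks above swap `///` headers for `////`-style ones. The likely motivation (not stated in the diff) is that rustdoc only treats exactly three leading slashes as a doc comment, so a `///` banner line gets attached to the documentation of the next item, while four or more slashes make it an ordinary comment:

```rust
/// Exactly three slashes: a doc comment, rendered into the next item's docs.
//// Four or more slashes: a plain comment, safe to use as a visual separator.
////////////////////////////////////////////////////////////
pub struct Example;
```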