Compare commits

...

20 Commits

Author SHA1 Message Date
Richard Ramos
7bc85dfa19 feat: expose hash, poseidon_hash and delete_leaf 2023-04-28 07:23:20 -04:00
tyshko-rostyslav
4f98fd8028 chore(rln): bring hash functions under a single module (#146) 2023-04-20 16:24:29 +05:30
tyshko-rostyslav
9931e901e5 most changes (#145)
Co-authored-by: tyshkor <tyshko1@gmail.com>
2023-04-13 06:45:12 +05:30
Aaryamann Challani
0fb7e0bbcb feat: abstract shared behaviour into ZerokitMerkleTree (#142)
* feat: abstract shared behaviour into ZerokitMerkleTree

* fix: tests
2023-04-11 16:46:13 +05:30
tyshko-rostyslav
672287b77b call_bool_method_with_error_msg (#144)
Co-authored-by: tyshkor <tyshko1@gmail.com>
2023-04-10 19:45:16 +05:30
Aaryamann Challani
2e868d6cbf fix(ci): force draft=false for nightly releases (#143) 2023-03-31 18:15:37 +05:30
tyshko-rostyslav
39bea35a6d Macro to call functions with an error message with output (#141)
Another variation of our call, this time when output is used
2023-03-31 14:44:04 +02:00
tyshko-rostyslav
6ff4eeb237 Macro to call functions with an error message (#140)
abstract out calls

---------

Co-authored-by: tyshkor <tyshko1@gmail.com>
2023-03-29 15:16:36 +02:00
Aaryamann Challani
1f983bb232 fix(rln): move std::path to cfg_if block (#138) 2023-03-24 09:31:01 +05:30
tyshko-rostyslav
13a2c61355 add wasm-strip to reduce size even more (#137)
* added wasm-strip fixed docs

* requested change

* fix installdeps

* fix ubuntu

* fix macos

---------

Co-authored-by: tyshkor <tyshko1@gmail.com>
2023-03-24 09:30:48 +05:30
tyshko-rostyslav
2bbb710e83 add Cargo.lock to the repo (#136)
add Cargo.lock to the repo
2023-03-23 07:45:36 +01:00
tyshko-rostyslav
8cd4baba8a leave our fork of ark-circom (#132)
* leave our fork of `ark-circom`

---------

Co-authored-by: tyshkor <tyshko1@gmail.com>
2023-03-22 07:01:24 +01:00
Aaryamann Challani
9045e31006 fix ci tag (#133)
* fix(ci): release tag

* fix: use 0.2.1
2023-03-20 17:47:46 +05:30
Aaryamann Challani
9e44bb64dc fix(semaphore): use fixed rev (#130) 2023-03-20 14:06:25 +05:30
Aaryamann Challani
bb7dfb80ee feat(ci): cross-compile release assets, cache deps (#128)
* feat(ci): cross-compile release assets, cache deps

chore(ci): add caching to regular tests

* fix(ci): include cross only in ci env, add note about release assets
2023-03-14 17:44:06 +05:30
Aaryamann Challani
c319f32a1e feat(rln): package rln w/ resources into a static lib (#118)
* feat(rln): package resources into lib

* fix(rln): use Path

* fix(rln): fmt

* fix(rln): trailing slash
2023-03-07 18:15:06 +05:30
tyshko-rostyslav
bf2aa16a71 chore(rln): ensure all dependencies have fixed revision (#127) 2023-03-07 09:11:08 +05:30
tyshko-rostyslav
c423bdea61 chore(rln): update pmtree implementation (#125)
* most changes

* fmt

* hide tests back under feature

* grooming

* changed `SledConfig`

* requested change: rm `dbpath`

---------

Co-authored-by: tyshkor <tyshko1@gmail.com>
2023-03-03 10:33:39 +05:30
Carlos Pérez
5eb98d4b33 change: Replace u64 for usize for length vars (#94)
* change: Replace `u64` for `usize` for length vars

Resolves: #39

* fix(rln): usize instead of u64 in tests

* fix(rln): linter

* fix: outlier u64 usage

* fix(rln|rln-wasm): serde of usize types

---------

Co-authored-by: Aaryamann Challani <43716372+rymnc@users.noreply.github.com>
2023-03-02 19:15:58 +05:30
Aaryamann Challani
b698153e28 fix(ci): nightly release (#124) 2023-03-02 07:41:49 +05:30
34 changed files with 5114 additions and 1015 deletions


@@ -46,10 +46,9 @@ jobs:
profile: minimal
toolchain: stable
override: true
- name: Update git submodules
run: git submodule update --init --recursive
- name: Install cargo-make
run: cargo install cargo-make
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
run: make installdeps
- name: cargo-make test
run: |
cargo make test --release
@@ -72,10 +71,9 @@ jobs:
profile: minimal
toolchain: stable
override: true
- name: Update git submodules
run: git submodule update --init --recursive
- name: Install cargo-make
run: cargo install cargo-make
- uses: Swatinem/rust-cache@v2
- name: Install Dependencies
run: make installdeps
- name: cargo-make test
run: |
cargo make test --release
@@ -97,10 +95,11 @@ jobs:
profile: minimal
toolchain: stable
override: true
- run: git submodule update --init --recursive
- uses: Swatinem/rust-cache@v2
- name: Install Dependencies
run: make installdeps
- name: Install wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
- run: cargo install cargo-make
- run: cargo make build
working-directory: rln-wasm
- run: cargo-make test
@@ -123,10 +122,9 @@ jobs:
profile: minimal
toolchain: stable
override: true
- name: Update git submodules
run: git submodule update --init --recursive
- name: Install cargo-make
run: cargo install cargo-make
- uses: Swatinem/rust-cache@v2
- name: Install Dependencies
run: make installdeps
- name: cargo-make test
run: |
cargo make test --release
@@ -149,10 +147,9 @@ jobs:
profile: minimal
toolchain: stable
override: true
- name: Update git submodules
run: git submodule update --init --recursive
- name: Install cargo-make
run: cargo install cargo-make
- uses: Swatinem/rust-cache@v2
- name: Install Dependencies
run: make installdeps
- name: cargo-make test
run: |
cargo make test --release
@@ -177,8 +174,9 @@ jobs:
toolchain: stable
override: true
components: rustfmt, clippy
- name: Update git submodules
run: git submodule update --init --recursive
- uses: Swatinem/rust-cache@v2
- name: Install Dependencies
run: make installdeps
- name: cargo fmt
if: success() || failure()
run: cargo fmt --all -- --check


@@ -6,6 +6,12 @@ on:
jobs:
linux:
strategy:
matrix:
target:
- x86_64-unknown-linux-gnu
- aarch64-unknown-linux-gnu
- i686-unknown-linux-gnu
name: Linux build
runs-on: ubuntu-latest
steps:
@@ -17,50 +23,57 @@ jobs:
profile: minimal
toolchain: stable
override: true
- name: Update git submodules
run: git submodule update --init --recursive
- name: cargo build
target: ${{ matrix.target }}
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
run: make installdeps
- name: cross build
run: |
cargo build --release --workspace --exclude rln-wasm
cross build --release --target ${{ matrix.target }} --workspace --exclude rln-wasm
mkdir release
cp target/release/librln* release/
tar -czvf linux-rln.tar.gz release/
cp target/${{ matrix.target }}/release/librln* release/
tar -czvf ${{ matrix.target }}-rln.tar.gz release/
- name: Upload archive artifact
uses: actions/upload-artifact@v2
with:
name: linux-archive
path: linux-rln.tar.gz
name: ${{ matrix.target }}-archive
path: ${{ matrix.target }}-rln.tar.gz
retention-days: 2
macos:
name: MacOS build
runs-on: macos-latest
strategy:
matrix:
target:
- x86_64-apple-darwin
- aarch64-apple-darwin
steps:
- name: Checkout sources
uses: actions/checkout@v2
with:
ref: master
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Update git submodules
run: git submodule update --init --recursive
- name: cargo build
target: ${{ matrix.target }}
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
run: make installdeps
- name: cross build
run: |
cargo build --release --workspace --exclude rln-wasm
cross build --release --target ${{ matrix.target }} --workspace --exclude rln-wasm
mkdir release
cp target/release/librln* release/
tar -czvf macos-rln.tar.gz release/
cp target/${{ matrix.target }}/release/librln* release/
tar -czvf ${{ matrix.target }}-rln.tar.gz release/
- name: Upload archive artifact
uses: actions/upload-artifact@v2
with:
name: macos-archive
path: macos-rln.tar.gz
name: ${{ matrix.target }}-archive
path: ${{ matrix.target }}-rln.tar.gz
retention-days: 2
browser-rln-wasm:
@@ -69,22 +82,20 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v2
with:
ref: master
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- run: git submodule update --init --recursive
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
run: make installdeps
- name: Install wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
- name: Install cargo-make
run: cargo install cargo-make
- name: cargo make build
- name: cross make build
run: |
cargo make build
cross make build
mkdir release
cp pkg/** release/
tar -czvf browser-rln-wasm.tar.gz release/
@@ -111,7 +122,7 @@ jobs:
uses: actions/download-artifact@v2
- name: Delete tag
uses: dev-drprasad/delete-tag-and-release@v0.2.0
uses: dev-drprasad/delete-tag-and-release@v0.2.1
with:
delete_release: true
tag_name: nightly
@@ -120,14 +131,13 @@ jobs:
- name: Create prerelease
run: |
start_tag=$(gh release list -L 2 --exclude-drafts | grep -v nightly | cut -d$'\t' -f3)
start_tag=$(gh release list -L 2 --exclude-drafts | grep -v nightly | cut -d$'\t' -f3 | sed -n '1p')
gh release create nightly --prerelease --target master \
--title 'Nightly build ("master" branch)' \
--generate-notes \
--draft=false \
--notes-start-tag $start_tag \
linux-archive/linux-rln.tar.gz \
macos-archive/macos-rln.tar.gz \
browser-rln-wasm-archive/browser-rln-wasm.tar.gz
*-archive/*.tar.gz \
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -136,6 +146,4 @@ jobs:
with:
failOnError: false
name: |
linux-archive
macos-archive
browser-rln-wasm-archive
*-archive

.gitignore (vendored): 5 changes

@@ -8,10 +8,7 @@ rln/pmtree_db
# will have compiled files and executables
debug/
target/
# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
Cargo.lock
wabt/
# These are backup files generated by rustfmt
**/*.rs.bk

Cargo.lock (generated, new file): 3803 changes
File diff suppressed because it is too large.

Cross.toml (new file): 32 changes

@@ -0,0 +1,32 @@
[target.x86_64-pc-windows-gnu]
image = "ghcr.io/cross-rs/x86_64-pc-windows-gnu:latest"
[target.aarch64-unknown-linux-gnu]
image = "ghcr.io/cross-rs/aarch64-unknown-linux-gnu:latest"
[target.x86_64-unknown-linux-gnu]
image = "ghcr.io/cross-rs/x86_64-unknown-linux-gnu:latest"
[target.arm-unknown-linux-gnueabi]
image = "ghcr.io/cross-rs/arm-unknown-linux-gnueabi:latest"
[target.i686-pc-windows-gnu]
image = "ghcr.io/cross-rs/i686-pc-windows-gnu:latest"
[target.i686-unknown-linux-gnu]
image = "ghcr.io/cross-rs/i686-unknown-linux-gnu:latest"
[target.arm-unknown-linux-gnueabihf]
image = "ghcr.io/cross-rs/arm-unknown-linux-gnueabihf:latest"
[target.mips-unknown-linux-gnu]
image = "ghcr.io/cross-rs/mips-unknown-linux-gnu:latest"
[target.mips64-unknown-linux-gnuabi64]
image = "ghcr.io/cross-rs/mips64-unknown-linux-gnuabi64:latest"
[target.mips64el-unknown-linux-gnuabi64]
image = "ghcr.io/cross-rs/mips64el-unknown-linux-gnuabi64:latest"
[target.mipsel-unknown-linux-gnu]
image = "ghcr.io/cross-rs/mipsel-unknown-linux-gnu:latest"


@@ -6,11 +6,21 @@ all: .pre-build build
@git submodule update --init --recursive
.pre-build: .fetch-submodules
ifeq (, $(shell which cargo-make))
@cargo install --force cargo-make
@cargo install cargo-make
ifdef CI
@cargo install cross --git https://github.com/cross-rs/cross --branch main
endif
installdeps: .pre-build
ifeq ($(shell uname),Darwin)
@brew update
@brew install cmake ninja
else ifeq ($(shell uname),Linux)
@sudo apt-get update
@sudo apt-get install -y cmake ninja-build
endif
@git clone --recursive https://github.com/WebAssembly/wabt.git
@cd wabt && mkdir build && cd build && cmake .. -GNinja && ninja && sudo ninja install
build: .pre-build
@cargo make build


@@ -29,3 +29,7 @@ To build and test all crates, run the following commands from the root folder
make build
make test
```
## Release assets
We use [`cross-rs`](https://github.com/cross-rs/cross) to cross-compile and generate release assets for rln.


@@ -22,7 +22,7 @@ ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f",
# ark-poly = { version = "^0.3.0", default-features = false, features = ["parallel"] }
ark-serialize = { version = "0.3.0", default-features = false }
ark-circom = { git = "https://github.com/gakonst/ark-circom", features = ["circom-2"] }
ark-circom = { git = "https://github.com/gakonst/ark-circom", features = ["circom-2"], rev = "35ce5a9" }
# error handling
color-eyre = "0.6.1"


@@ -1,6 +1,6 @@
[package]
name = "rln-wasm"
version = "0.0.7"
version = "0.0.8"
edition = "2021"
license = "MIT or Apache2"


@@ -9,9 +9,14 @@ script = "sed -i.bak 's/rln-wasm/zerokit-rln-wasm/g' pkg/package.json && rm pkg/
clear = true
dependencies = [
"pack-build",
"pack-rename"
"pack-rename",
"post-build"
]
[tasks.post-build]
command = "wasm-strip"
args = ["./pkg/rln_wasm_bg.wasm"]
[tasks.test]
command = "wasm-pack"
args = ["test", "--release", "--node"]


@@ -21,6 +21,11 @@ make installdeps
cd rln-wasm
cargo make build
```
4. Compile a slimmer version of zerokit for `wasm32-unknown-unknown`:
```
cd rln-wasm
cargo make post-build
```
## Running tests
```


@@ -3,9 +3,11 @@
extern crate wasm_bindgen;
extern crate web_sys;
use std::vec::Vec;
use js_sys::{BigInt as JsBigInt, Object, Uint8Array};
use num_bigint::BigInt;
use rln::public::RLN;
use rln::public::{hash, poseidon_hash, RLN};
use wasm_bindgen::prelude::*;
#[wasm_bindgen]
@@ -20,6 +22,163 @@ pub struct RLNWrapper {
instance: RLN<'static>,
}
// Macro to call methods with an arbitrary number of arguments,
// where the method's last argument is the output buffer.
// The first argument to the macro is the context,
// the second is the actual method on `RLN`,
// the third is the error message to attach on failure,
// and the rest are all other arguments to the method.
macro_rules! call_with_output_and_error_msg {
// this variant is needed for the case when
// there are zero other arguments
($instance:expr, $method:ident, $error_msg:expr) => {
{
let mut output_data: Vec<u8> = Vec::new();
let new_instance = $instance.process();
if let Err(err) = new_instance.instance.$method(&mut output_data) {
std::mem::forget(output_data);
Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
} else {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
}
}
};
($instance:expr, $method:ident, $error_msg:expr, $( $arg:expr ),* ) => {
{
let mut output_data: Vec<u8> = Vec::new();
let new_instance = $instance.process();
if let Err(err) = new_instance.instance.$method($($arg.process()),*, &mut output_data) {
std::mem::forget(output_data);
Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
} else {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
}
}
};
}
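For orientation (not part of the diff): at a call site later in this file, such as `call_with_output_and_error_msg!(ctx, get_root, "could not obtain root")`, the zero-extra-argument arm expands to roughly the following sketch:

```rust
// Illustrative expansion only; names taken from the call sites in this diff.
fn wasm_get_root_expanded(ctx: *const RLNWrapper) -> Result<Uint8Array, String> {
    let mut output_data: Vec<u8> = Vec::new();
    // ProcessArg (defined below) turns *const RLNWrapper into &'static RLNWrapper
    let new_instance = ctx.process();
    if let Err(err) = new_instance.instance.get_root(&mut output_data) {
        std::mem::forget(output_data);
        Err(format!("Msg: {:#?}, Error: {:#?}", "could not obtain root", err))
    } else {
        let result = Uint8Array::from(&output_data[..]); // copies into JS memory
        std::mem::forget(output_data);
        Ok(result)
    }
}
```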
// Macro to call methods (with no output buffer) with an arbitrary
// number of arguments, attaching an error message on failure.
// The first argument to the macro is the context,
// the second is the actual method on the wrapped `RLN` instance,
// the third is the error message,
// and the rest are all other arguments to the method.
macro_rules! call_with_error_msg {
($instance:expr, $method:ident, $error_msg:expr $(, $arg:expr)*) => {
{
let new_instance: &mut RLNWrapper = $instance.process();
if let Err(err) = new_instance.instance.$method($($arg.process()),*) {
Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
} else {
Ok(())
}
}
}
}
macro_rules! call {
($instance:expr, $method:ident $(, $arg:expr)*) => {
{
let new_instance: &mut RLNWrapper = $instance.process();
new_instance.instance.$method($($arg.process()),*)
}
}
}
macro_rules! call_bool_method_with_error_msg {
($instance:expr, $method:ident, $error_msg:expr $(, $arg:expr)*) => {
{
let new_instance: &RLNWrapper = $instance.process();
new_instance.instance.$method($($arg.process()),*).map_err(|err| format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
}
}
}
// Macro to execute a free function (rather than a method) with an arbitrary
// number of arguments, where the function's last argument is the output buffer.
// The first argument is the function to execute,
// the second is the error message,
// and the rest are all other arguments to the function.
macro_rules! fn_call_with_output_and_error_msg {
// this variant is needed for the case when
// there are zero other arguments
($func:ident, $error_msg:expr) => {
{
let mut output_data: Vec<u8> = Vec::new();
if let Err(err) = $func(&mut output_data) {
std::mem::forget(output_data);
Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
} else {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
}
}
};
($func:ident, $error_msg:expr, $( $arg:expr ),* ) => {
{
let mut output_data: Vec<u8> = Vec::new();
if let Err(err) = $func($($arg.process()),*, &mut output_data) {
std::mem::forget(output_data);
Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
} else {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
}
}
};
}
trait ProcessArg {
type ReturnType;
fn process(self) -> Self::ReturnType;
}
impl ProcessArg for usize {
type ReturnType = usize;
fn process(self) -> Self::ReturnType {
self
}
}
impl<T> ProcessArg for Vec<T> {
type ReturnType = Vec<T>;
fn process(self) -> Self::ReturnType {
self
}
}
impl<'a> ProcessArg for *const RLN<'a> {
type ReturnType = &'a RLN<'a>;
fn process(self) -> Self::ReturnType {
unsafe { &*self }
}
}
impl ProcessArg for *const RLNWrapper {
type ReturnType = &'static RLNWrapper;
fn process(self) -> Self::ReturnType {
unsafe { &*self }
}
}
impl ProcessArg for *mut RLNWrapper {
type ReturnType = &'static mut RLNWrapper;
fn process(self) -> Self::ReturnType {
unsafe { &mut *self }
}
}
impl<'a> ProcessArg for &'a [u8] {
type ReturnType = &'a [u8];
fn process(self) -> Self::ReturnType {
self
}
}
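The `ProcessArg` impls above are what let a single macro accept raw wrapper pointers and plain values alike: pointers are converted to references, everything else passes through unchanged. A small illustration under those impls:

```rust
// Sketch: how each macro argument is normalized before the underlying RLN call.
fn process_args_demo(ctx: *mut RLNWrapper, index: usize, bytes: &[u8]) {
    let wrapper: &mut RLNWrapper = ctx.process(); // *mut T -> &'static mut T (unsafe deref inside)
    let idx: usize = index.process();             // identity conversion
    let data: &[u8] = bytes.process();            // identity conversion
    let _ = (wrapper, idx, data);                 // what call! would hand to the RLN method
}
```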
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = newRLN)]
pub fn wasm_new(
@@ -39,24 +198,20 @@ pub fn wasm_get_serialized_rln_witness(
ctx: *mut RLNWrapper,
input: Uint8Array,
) -> Result<Uint8Array, String> {
let wrapper = unsafe { &mut *ctx };
let rln_witness = wrapper
.instance
.get_serialized_rln_witness(&input.to_vec()[..])
let rln_witness = call!(ctx, get_serialized_rln_witness, &input.to_vec()[..])
.map_err(|err| format!("{:#?}", err))?;
Ok(Uint8Array::from(&rln_witness[..]))
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = insertMember)]
pub fn wasm_set_next_leaf(ctx: *mut RLNWrapper, input: Uint8Array) -> Result<(), String> {
let wrapper = unsafe { &mut *ctx };
if wrapper.instance.set_next_leaf(&input.to_vec()[..]).is_ok() {
Ok(())
} else {
Err("could not insert member into merkle tree".into())
}
call_with_error_msg!(
ctx,
set_next_leaf,
"could not insert member into merkle tree".to_string(),
&input.to_vec()[..]
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
@@ -66,31 +221,30 @@ pub fn wasm_set_leaves_from(
index: usize,
input: Uint8Array,
) -> Result<(), String> {
let wrapper = unsafe { &mut *ctx };
if wrapper
.instance
.set_leaves_from(index as usize, &input.to_vec()[..])
.is_ok()
{
Ok(())
} else {
Err("could not set multiple leaves".into())
}
call_with_error_msg!(
ctx,
set_leaves_from,
"could not set multiple leaves".to_string(),
index,
&*input.to_vec()
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = deleteLeaf)]
pub fn wasm_delete_leaf(ctx: *mut RLNWrapper, index: usize) -> Result<(), String> {
call_with_error_msg!(ctx, delete_leaf, "could not delete leaf".to_string(), index)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = initTreeWithLeaves)]
pub fn wasm_init_tree_with_leaves(ctx: *mut RLNWrapper, input: Uint8Array) -> Result<(), String> {
let wrapper = unsafe { &mut *ctx };
if wrapper
.instance
.init_tree_with_leaves(&input.to_vec()[..])
.is_ok()
{
Ok(())
} else {
Err("could not init merkle tree".into())
}
call_with_error_msg!(
ctx,
init_tree_with_leaves,
"could not init merkle tree".to_string(),
&*input.to_vec()
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
@@ -99,12 +253,8 @@ pub fn rln_witness_to_json(
ctx: *mut RLNWrapper,
serialized_witness: Uint8Array,
) -> Result<Object, String> {
let wrapper = unsafe { &mut *ctx };
let inputs = wrapper
.instance
.get_rln_witness_json(&serialized_witness.to_vec()[..])
let inputs = call!(ctx, get_rln_witness_json, &serialized_witness.to_vec()[..])
.map_err(|err| err.to_string())?;
let js_value = serde_wasm_bindgen::to_value(&inputs).map_err(|err| err.to_string())?;
Object::from_entries(&js_value).map_err(|err| format!("{:#?}", err))
}
@@ -116,8 +266,6 @@ pub fn generate_rln_proof_with_witness(
calculated_witness: Vec<JsBigInt>,
serialized_witness: Uint8Array,
) -> Result<Uint8Array, String> {
let wrapper = unsafe { &mut *ctx };
let mut witness_vec: Vec<BigInt> = vec![];
for v in calculated_witness {
@@ -131,69 +279,36 @@ pub fn generate_rln_proof_with_witness(
);
}
let mut output_data: Vec<u8> = Vec::new();
if wrapper
.instance
.generate_rln_proof_with_witness(witness_vec, serialized_witness.to_vec(), &mut output_data)
.is_ok()
{
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
} else {
std::mem::forget(output_data);
Err("could not generate proof".into())
}
call_with_output_and_error_msg!(
ctx,
generate_rln_proof_with_witness,
"could not generate proof",
witness_vec,
serialized_witness.to_vec()
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateMembershipKey)]
pub fn wasm_key_gen(ctx: *const RLNWrapper) -> Result<Uint8Array, String> {
let wrapper = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
if wrapper.instance.key_gen(&mut output_data).is_ok() {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
} else {
std::mem::forget(output_data);
Err("could not generate membership keys".into())
}
call_with_output_and_error_msg!(ctx, key_gen, "could not generate membership keys")
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateExtendedMembershipKey)]
pub fn wasm_extended_key_gen(ctx: *const RLNWrapper) -> Result<Uint8Array, String> {
let wrapper = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
if wrapper.instance.extended_key_gen(&mut output_data).is_ok() {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
} else {
std::mem::forget(output_data);
Err("could not generate membership keys".into())
}
call_with_output_and_error_msg!(ctx, extended_key_gen, "could not generate membership keys")
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateSeededMembershipKey)]
pub fn wasm_seeded_key_gen(ctx: *const RLNWrapper, seed: Uint8Array) -> Result<Uint8Array, String> {
let wrapper = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
if wrapper
.instance
.seeded_key_gen(&seed.to_vec()[..], &mut output_data)
.is_ok()
{
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
} else {
std::mem::forget(output_data);
Err("could not generate membership key".into())
}
call_with_output_and_error_msg!(
ctx,
seeded_key_gen,
"could not generate membership key",
&seed.to_vec()[..]
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
@@ -202,20 +317,12 @@ pub fn wasm_seeded_extended_key_gen(
ctx: *const RLNWrapper,
seed: Uint8Array,
) -> Result<Uint8Array, String> {
let wrapper = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
if wrapper
.instance
.seeded_extended_key_gen(&seed.to_vec()[..], &mut output_data)
.is_ok()
{
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
} else {
std::mem::forget(output_data);
Err("could not generate membership key".into())
}
call_with_output_and_error_msg!(
ctx,
seeded_extended_key_gen,
"could not generate membership key",
&seed.to_vec()[..]
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
@@ -225,38 +332,24 @@ pub fn wasm_recover_id_secret(
input_proof_data_1: Uint8Array,
input_proof_data_2: Uint8Array,
) -> Result<Uint8Array, String> {
let wrapper = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
if wrapper
.instance
.recover_id_secret(
&input_proof_data_1.to_vec()[..],
&input_proof_data_2.to_vec()[..],
&mut output_data,
)
.is_ok()
{
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
} else {
std::mem::forget(output_data);
Err("could not recover id secret".into())
}
call_with_output_and_error_msg!(
ctx,
recover_id_secret,
"could not recover id secret",
&input_proof_data_1.to_vec()[..],
&input_proof_data_2.to_vec()[..]
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = verifyRLNProof)]
pub fn wasm_verify_rln_proof(ctx: *const RLNWrapper, proof: Uint8Array) -> Result<bool, String> {
let wrapper = unsafe { &*ctx };
if match wrapper.instance.verify_rln_proof(&proof.to_vec()[..]) {
Ok(verified) => verified,
Err(_) => return Err("error while verifying rln proof".into()),
} {
return Ok(true);
}
Ok(false)
call_bool_method_with_error_msg!(
ctx,
verify_rln_proof,
"error while verifying rln proof".to_string(),
&proof.to_vec()[..]
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
@@ -266,31 +359,31 @@ pub fn wasm_verify_with_roots(
proof: Uint8Array,
roots: Uint8Array,
) -> Result<bool, String> {
let wrapper = unsafe { &*ctx };
if match wrapper
.instance
.verify_with_roots(&proof.to_vec()[..], &roots.to_vec()[..])
{
Ok(verified) => verified,
Err(_) => return Err("error while verifying proof with roots".into()),
} {
return Ok(true);
}
Ok(false)
call_bool_method_with_error_msg!(
ctx,
verify_with_roots,
"error while verifying proof with roots".to_string(),
&proof.to_vec()[..],
&roots.to_vec()[..]
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = getRoot)]
pub fn wasm_get_root(ctx: *const RLNWrapper) -> Result<Uint8Array, String> {
let wrapper = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
if wrapper.instance.get_root(&mut output_data).is_ok() {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
} else {
std::mem::forget(output_data);
Err("could not obtain root".into())
}
call_with_output_and_error_msg!(ctx, get_root, "could not obtain root")
}
#[wasm_bindgen(js_name = hash)]
pub fn wasm_hash(input: Uint8Array) -> Result<Uint8Array, String> {
fn_call_with_output_and_error_msg!(hash, "could not generate hash", &input.to_vec()[..])
}
#[wasm_bindgen(js_name = poseidonHash)]
pub fn wasm_poseidon_hash(input: Uint8Array) -> Result<Uint8Array, String> {
fn_call_with_output_and_error_msg!(
poseidon_hash,
"could not generate poseidon hash",
&input.to_vec()[..]
)
}


@@ -4,6 +4,7 @@
mod tests {
use js_sys::{BigInt as JsBigInt, Object, Uint8Array};
use rln::circuit::TEST_TREE_HEIGHT;
use rln::utils::normalize_usize;
use rln_wasm::*;
use wasm_bindgen::prelude::*;
use wasm_bindgen::JsValue;
@@ -41,20 +42,19 @@ mod tests {
// Prepare the message
let signal = "Hello World".as_bytes();
let signal_len: u64 = signal.len() as u64;
// Setting up the epoch (With 0s for the test)
let epoch = Uint8Array::new_with_length(32);
epoch.fill(0, 0, 32);
let identity_index: u64 = 0;
let identity_index: usize = 0;
// Serializing the message
let mut serialized_vec: Vec<u8> = Vec::new();
serialized_vec.append(&mut idkey.to_vec());
serialized_vec.append(&mut identity_index.to_le_bytes().to_vec());
serialized_vec.append(&mut normalize_usize(identity_index));
serialized_vec.append(&mut epoch.to_vec());
serialized_vec.append(&mut signal_len.to_le_bytes().to_vec());
serialized_vec.append(&mut normalize_usize(signal.len()));
serialized_vec.append(&mut signal.to_vec());
let serialized_message = Uint8Array::from(&serialized_vec[..]);
@@ -88,7 +88,7 @@ mod tests {
// Add signal_len | signal
let mut proof_bytes = proof.to_vec();
proof_bytes.append(&mut signal_len.to_le_bytes().to_vec());
proof_bytes.append(&mut normalize_usize(signal.len()));
proof_bytes.append(&mut signal.to_vec());
let proof_with_signal = Uint8Array::from(&proof_bytes[..]);
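`normalize_usize` comes from `rln::utils`; its definition is not shown in this diff. Presumably it encodes a `usize` as a fixed 8 little-endian bytes, so the `id_index<8>` and `signal_len<8>` fields keep the same layout on 32-bit `wasm32` builds as on 64-bit hosts. A hypothetical sketch:

```rust
// Hypothetical sketch of rln::utils::normalize_usize (not shown in this diff):
// encode a usize as exactly 8 little-endian bytes, regardless of target width.
pub fn normalize_usize(input: usize) -> Vec<u8> {
    let mut bytes = input.to_le_bytes().to_vec(); // 4 bytes on wasm32, 8 on 64-bit
    bytes.resize(8, 0);                           // zero-pad to the fixed 8-byte field
    bytes
}
```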


@@ -5,7 +5,7 @@ edition = "2021"
license = "MIT OR Apache-2.0"
[lib]
crate-type = ["cdylib", "rlib", "staticlib"]
crate-type = ["rlib", "staticlib"]
# This flag disable cargo doctests, i.e. testing example code-snippets in documentation
doctest = false
@@ -13,39 +13,40 @@ doctest = false
[dependencies]
# ZKP Generation
ark-ec = { version = "0.3.0", default-features = false }
ark-ff = { version = "0.3.0", default-features = false, features = [ "asm"] }
ark-std = { version = "0.3.0", default-features = false }
ark-bn254 = { version = "0.3.0" }
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", default-features = false }
ark-relations = { version = "0.3.0", default-features = false, features = [ "std" ] }
ark-serialize = { version = "0.3.0", default-features = false }
ark-circom = { git = "https://github.com/vacp2p/ark-circom", branch = "wasm", default-features = false, features = ["circom-2"] }
#ark-circom = { git = "https://github.com/vacp2p/ark-circom", branch = "no-ethers-core", features = ["circom-2"] }
ark-ec = { version = "=0.4.1", default-features = false }
ark-ff = { version = "=0.4.1", default-features = false, features = [ "asm"] }
ark-std = { version = "=0.4.0", default-features = false }
ark-bn254 = { version = "=0.4.0" }
ark-groth16 = { version = "=0.4.0", features = ["parallel"], default-features = false }
ark-relations = { version = "=0.4.0", default-features = false, features = [ "std" ] }
ark-serialize = { version = "=0.4.1", default-features = false }
ark-circom = { git = "https://github.com/gakonst/ark-circom", default-features = false, features = ["circom-2"] }
# WASM
wasmer = { version = "2.3.0", default-features = false }
# error handling
color-eyre = "0.6.1"
thiserror = "1.0.0"
color-eyre = "=0.6.2"
thiserror = "=1.0.38"
# utilities
cfg-if = "1.0"
num-bigint = { version = "0.4.3", default-features = false, features = ["rand"] }
cfg-if = "=1.0"
num-bigint = { version = "=0.4.3", default-features = false, features = ["rand"] }
num-traits = "0.2.11"
once_cell = "1.14.0"
rand = "0.8"
rand_chacha = "0.3.1"
tiny-keccak = { version = "2.0.2", features = ["keccak"] }
rand = "=0.8.5"
rand_chacha = "=0.3.1"
tiny-keccak = { version = "=2.0.2", features = ["keccak"] }
utils = { path = "../utils/", default-features = false }
pmtree = { git = "https://github.com/Rate-Limiting-Nullifier/pmtree", rev = "f6d1a1fecad72cd39e6808e78085091d541dc882", optional = true}
# serialization
serde_json = "1.0.48"
include_dir = "=0.7.3"
[dev-dependencies]
pmtree = { git = "https://github.com/Rate-Limiting-Nullifier/pmtree" }
sled = "0.34.7"
sled = "=0.34.7"
[features]
default = ["parallel", "wasmer/sys-default"]
@@ -54,4 +55,4 @@ wasm = ["wasmer/js", "wasmer/std"]
fullmerkletree = ["default"]
# Note: pmtree feature is still experimental
pmtree = ["default"]
pmtree-ft = ["default", "pmtree"]


@@ -11,9 +11,7 @@ use cfg_if::cfg_if;
use color_eyre::{Report, Result};
use num_bigint::BigUint;
use serde_json::Value;
use std::fs::File;
use std::io::Cursor;
use std::path::Path;
use std::str::FromStr;
cfg_if! {
@@ -22,11 +20,13 @@ cfg_if! {
use once_cell::sync::OnceCell;
use std::sync::Mutex;
use wasmer::{Module, Store};
use include_dir::{include_dir, Dir};
use std::path::Path;
}
}
const ZKEY_FILENAME: &str = "rln_final.zkey";
const VK_FILENAME: &str = "verifying_key.json";
const VK_FILENAME: &str = "verification_key.json";
const WASM_FILENAME: &str = "rln.wasm";
// These parameters are used for tests
@@ -34,11 +34,11 @@ const WASM_FILENAME: &str = "rln.wasm";
// Changing these parameters to other values than these defaults will cause zkSNARK proof verification to fail
pub const TEST_PARAMETERS_INDEX: usize = 2;
pub const TEST_TREE_HEIGHT: usize = [15, 19, 20][TEST_PARAMETERS_INDEX];
pub const TEST_RESOURCES_FOLDER: &str = [
"./resources/tree_height_15/",
"./resources/tree_height_19/",
"./resources/tree_height_20/",
][TEST_PARAMETERS_INDEX];
pub const TEST_RESOURCES_FOLDER: &str =
["tree_height_15", "tree_height_19", "tree_height_20"][TEST_PARAMETERS_INDEX];
#[cfg(not(target_arch = "wasm32"))]
static RESOURCES_DIR: Dir<'_> = include_dir!("$CARGO_MANIFEST_DIR/resources");
// The following types define the pairing friendly elliptic curve, the underlying finite fields and groups default to this module
// Note that proofs are serialized assuming Fr to be 4x8 = 32 bytes in size. Hence, changing to a curve with different encoding will make proof verification to fail
@@ -63,13 +63,14 @@ pub fn zkey_from_raw(zkey_data: &Vec<u8>) -> Result<(ProvingKey<Curve>, Constrai
}
// Loads the proving key
#[cfg(not(target_arch = "wasm32"))]
pub fn zkey_from_folder(
resources_folder: &str,
) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
let zkey_path = format!("{resources_folder}{ZKEY_FILENAME}");
if Path::new(&zkey_path).exists() {
let mut file = File::open(&zkey_path)?;
let proving_key_and_matrices = read_zkey(&mut file)?;
let zkey = RESOURCES_DIR.get_file(Path::new(resources_folder).join(ZKEY_FILENAME));
if let Some(zkey) = zkey {
let mut c = Cursor::new(zkey.contents());
let proving_key_and_matrices = read_zkey(&mut c)?;
Ok(proving_key_and_matrices)
} else {
Err(Report::msg("No proving key found!"))
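The pattern here replaces filesystem reads with compile-time embedding: `include_dir!` bakes the whole `resources` tree into the library (the point of #118's self-contained static lib), and files are fetched by path relative to the embedded root. A minimal sketch of the lookup, assuming the `RESOURCES_DIR` static above:

```rust
use include_dir::{include_dir, Dir};
use std::path::Path;

// Embed the crate's resources folder into the binary at compile time.
static RESOURCES_DIR: Dir<'_> = include_dir!("$CARGO_MANIFEST_DIR/resources");

// Fetch an embedded file, e.g. read_embedded("tree_height_20", "rln_final.zkey").
fn read_embedded(folder: &str, filename: &str) -> Option<&'static [u8]> {
    RESOURCES_DIR
        .get_file(Path::new(folder).join(filename))
        .map(|f| f.contents())
}
```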
@@ -93,15 +94,18 @@ pub fn vk_from_raw(vk_data: &Vec<u8>, zkey_data: &Vec<u8>) -> Result<VerifyingKe
}
// Loads the verification key
#[cfg(not(target_arch = "wasm32"))]
pub fn vk_from_folder(resources_folder: &str) -> Result<VerifyingKey<Curve>> {
let vk_path = format!("{resources_folder}{VK_FILENAME}");
let zkey_path = format!("{resources_folder}{ZKEY_FILENAME}");
let vk = RESOURCES_DIR.get_file(Path::new(resources_folder).join(VK_FILENAME));
let zkey = RESOURCES_DIR.get_file(Path::new(resources_folder).join(ZKEY_FILENAME));
let verifying_key: VerifyingKey<Curve>;
if Path::new(&vk_path).exists() {
vk_from_json(&vk_path)
} else if Path::new(&zkey_path).exists() {
if let Some(vk) = vk {
verifying_key = vk_from_json(vk.contents_utf8().ok_or(Report::msg(
"Could not read verification key from JSON file!",
))?)?;
Ok(verifying_key)
} else if let Some(_zkey) = zkey {
let (proving_key, _matrices) = zkey_from_folder(resources_folder)?;
verifying_key = proving_key.vk;
Ok(verifying_key)
@@ -128,9 +132,14 @@ pub fn circom_from_raw(wasm_buffer: Vec<u8>) -> Result<&'static Mutex<WitnessCal
#[cfg(not(target_arch = "wasm32"))]
pub fn circom_from_folder(resources_folder: &str) -> Result<&'static Mutex<WitnessCalculator>> {
// We read the wasm file
let wasm_path = format!("{resources_folder}{WASM_FILENAME}");
let wasm_buffer = std::fs::read(wasm_path)?;
circom_from_raw(wasm_buffer)
let wasm = RESOURCES_DIR.get_file(Path::new(resources_folder).join(WASM_FILENAME));
if let Some(wasm) = wasm {
let wasm_buffer = wasm.contents();
circom_from_raw(wasm_buffer.to_vec())
} else {
Err(Report::msg("No wasm file found!"))
}
}
// The following function implementations are taken/adapted from https://github.com/gakonst/ark-circom/blob/1732e15d6313fe176b0b1abb858ac9e095d0dbd7/src/zkey.rs
@@ -231,10 +240,8 @@ fn to_verifying_key(json: serde_json::Value) -> Result<VerifyingKey<Curve>> {
}
// Computes the verification key from its JSON serialization
fn vk_from_json(vk_path: &str) -> Result<VerifyingKey<Curve>> {
let json = std::fs::read_to_string(vk_path)?;
let json: Value = serde_json::from_str(&json)?;
fn vk_from_json(vk: &str) -> Result<VerifyingKey<Curve>> {
let json: Value = serde_json::from_str(vk)?;
to_verifying_key(json)
}
@@ -247,6 +254,7 @@ fn vk_from_vector(vk: &[u8]) -> Result<VerifyingKey<Curve>> {
}
// Checks verification key to be correct with respect to proving key
#[cfg(not(target_arch = "wasm32"))]
pub fn check_vk_from_zkey(
resources_folder: &str,
verifying_key: VerifyingKey<Curve>,


@@ -1,7 +1,8 @@
// This crate instantiate the Poseidon hash algorithm
use crate::circuit::Fr;
use crate::{circuit::Fr, utils::bytes_le_to_fr};
use once_cell::sync::Lazy;
use tiny_keccak::{Hasher, Keccak};
use utils::poseidon::Poseidon;
// These indexed constants hardcodes the supported round parameters tuples (t, RF, RN, SKIP_MATRICES) for the Bn254 scalar field
@@ -26,3 +27,17 @@ pub fn poseidon_hash(input: &[Fr]) -> Fr {
.hash(input.to_vec())
.expect("hash with fixed input size can't fail")
}
// Hashes arbitrary signal to the underlying prime field
pub fn hash_to_field(signal: &[u8]) -> Fr {
// We hash the input signal using Keccak256
// (note that a bigger curve order might require a bigger hash blocksize)
let mut hash = [0; 32];
let mut hasher = Keccak::v256();
hasher.update(signal);
hasher.finalize(&mut hash);
// We export the hash as a field element
let (el, _) = bytes_le_to_fr(hash.as_ref());
el
}
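`hash_to_field` moves here verbatim from `protocol.rs` (removed further down in this diff). A usage sketch consistent with the tests elsewhere in this changeset:

```rust
use rln::hashers::hash_to_field;

fn main() {
    // Keccak256 of the signal, exported little-endian into the Bn254 scalar field
    let epoch = hash_to_field(b"test-epoch");
    println!("{}", epoch);
}
```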


@@ -1,7 +1,7 @@
#![allow(dead_code)]
pub mod circuit;
pub mod poseidon_hash;
pub mod hashers;
pub mod poseidon_tree;
pub mod protocol;
pub mod public;


@@ -3,10 +3,15 @@
// Implementation inspired by https://github.com/worldcoin/semaphore-rs/blob/d462a4372f1fd9c27610f2acfe4841fab1d396aa/src/poseidon_tree.rs (no differences)
use crate::circuit::Fr;
use crate::poseidon_hash::poseidon_hash;
use crate::hashers::poseidon_hash;
use cfg_if::cfg_if;
use utils::merkle_tree::*;
#[cfg(feature = "pmtree-ft")]
use crate::utils::{bytes_le_to_fr, fr_to_bytes_le};
#[cfg(feature = "pmtree-ft")]
use pmtree::*;
// The zerokit RLN default Merkle tree implementation is the OptimalMerkleTree.
// To switch to FullMerkleTree implementation, it is enough to enable the fullmerkletree feature
@@ -36,3 +41,26 @@ impl utils::merkle_tree::Hasher for PoseidonHash {
poseidon_hash(inputs)
}
}
#[cfg(feature = "pmtree-ft")]
// The pmtree Hasher trait used by pmtree Merkle tree
impl pmtree::Hasher for PoseidonHash {
type Fr = Fr;
fn default_leaf() -> Self::Fr {
Fr::from(0)
}
fn serialize(value: Self::Fr) -> Value {
fr_to_bytes_le(&value)
}
fn deserialize(value: Value) -> Self::Fr {
let (fr, _) = bytes_le_to_fr(&value);
fr
}
fn hash(inputs: &[Self::Fr]) -> Self::Fr {
poseidon_hash(inputs)
}
}
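A sketch of the round-trip property this impl gives pmtree for persisting Poseidon leaves (same module, relying only on the impl above):

```rust
#[cfg(feature = "pmtree-ft")]
#[test]
fn pmtree_hasher_roundtrip() {
    let leaf = <PoseidonHash as pmtree::Hasher>::default_leaf();     // Fr::from(0)
    let bytes = <PoseidonHash as pmtree::Hasher>::serialize(leaf);   // fr_to_bytes_le
    let back = <PoseidonHash as pmtree::Hasher>::deserialize(bytes); // bytes_le_to_fr
    assert_eq!(back, leaf);
}
```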


@@ -1,10 +1,7 @@
// This crate collects all the underlying primitives used to implement RLN
use ark_circom::{CircomReduction, WitnessCalculator};
use ark_groth16::{
create_proof_with_reduction_and_matrices, prepare_verifying_key,
verify_proof as ark_verify_proof, Proof as ArkProof, ProvingKey, VerifyingKey,
};
use ark_groth16::{prepare_verifying_key, Groth16, Proof as ArkProof, ProvingKey, VerifyingKey};
use ark_relations::r1cs::ConstraintMatrices;
use ark_relations::r1cs::SynthesisError;
use ark_std::{rand::thread_rng, UniformRand};
@@ -20,11 +17,13 @@ use thiserror::Error;
use tiny_keccak::{Hasher as _, Keccak};
use crate::circuit::{Curve, Fr};
use crate::poseidon_hash::poseidon_hash;
use crate::hashers::hash_to_field;
use crate::hashers::poseidon_hash;
use crate::poseidon_tree::*;
use crate::public::RLN_IDENTIFIER;
use crate::utils::*;
use cfg_if::cfg_if;
use utils::{ZerokitMerkleProof, ZerokitMerkleTree};
///////////////////////////////////////////////////////
// RLN Witness data structure and utility functions
@@ -156,18 +155,18 @@ pub fn proof_inputs_to_rln_witness(
let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let id_index = u64::from_le_bytes(serialized[all_read..all_read + 8].try_into()?);
let id_index = usize::from_le_bytes(serialized[all_read..all_read + 8].try_into()?);
all_read += 8;
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let signal_len = u64::from_le_bytes(serialized[all_read..all_read + 8].try_into()?);
let signal_len = usize::from_le_bytes(serialized[all_read..all_read + 8].try_into()?);
all_read += 8;
let signal: Vec<u8> = serialized[all_read..all_read + (signal_len as usize)].to_vec();
let signal: Vec<u8> = serialized[all_read..all_read + signal_len].to_vec();
let merkle_proof = tree.proof(id_index as usize).expect("proof should exist");
let merkle_proof = tree.proof(id_index).expect("proof should exist");
let path_elements = merkle_proof.get_path_elements();
let identity_path_index = merkle_proof.get_path_index();
@@ -358,31 +357,27 @@ pub fn prepare_prove_input(
id_index: usize,
epoch: Fr,
signal: &[u8],
) -> Result<Vec<u8>> {
let signal_len = u64::try_from(signal.len())?;
) -> Vec<u8> {
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret));
serialized.append(&mut id_index.to_le_bytes().to_vec());
serialized.append(&mut normalize_usize(id_index));
serialized.append(&mut fr_to_bytes_le(&epoch));
serialized.append(&mut signal_len.to_le_bytes().to_vec());
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut signal.to_vec());
Ok(serialized)
serialized
}
#[allow(clippy::redundant_clone)]
pub fn prepare_verify_input(proof_data: Vec<u8>, signal: &[u8]) -> Result<Vec<u8>> {
let signal_len = u64::try_from(signal.len())?;
pub fn prepare_verify_input(proof_data: Vec<u8>, signal: &[u8]) -> Vec<u8> {
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut proof_data.clone());
serialized.append(&mut signal_len.to_le_bytes().to_vec());
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut signal.to_vec());
Ok(serialized)
serialized
}
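With `normalize_usize` handling the length fields, both serializers are now infallible and drop the `Result` wrapper. A usage sketch of the new `prepare_prove_input` signature (layout per the doc comments elsewhere in this diff):

```rust
use rln::circuit::Fr;
use rln::protocol::prepare_prove_input;

// [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
fn prove_input_demo(identity_secret: Fr, epoch: Fr) {
    let signal = b"Hello World";
    let serialized = prepare_prove_input(identity_secret, 0, epoch, signal);
    assert_eq!(serialized.len(), 32 + 8 + 32 + 8 + signal.len());
}
```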
///////////////////////////////////////////////////////
@@ -489,20 +484,6 @@ pub fn extended_seeded_keygen(signal: &[u8]) -> (Fr, Fr, Fr, Fr) {
)
}
// Hashes arbitrary signal to the underlying prime field
pub fn hash_to_field(signal: &[u8]) -> Fr {
// We hash the input signal using Keccak256
// (note that a bigger curve order might require a bigger hash blocksize)
let mut hash = [0; 32];
let mut hasher = Keccak::v256();
hasher.update(signal);
hasher.finalize(&mut hash);
// We export the hash as a field element
let (el, _) = bytes_le_to_fr(hash.as_ref());
el
}
pub fn compute_id_secret(
share1: (Fr, Fr),
share2: (Fr, Fr),
@@ -545,9 +526,11 @@ pub enum ProofError {
SynthesisError(#[from] SynthesisError),
}
fn calculate_witness_element<E: ark_ec::PairingEngine>(witness: Vec<BigInt>) -> Result<Vec<E::Fr>> {
use ark_ff::{FpParameters, PrimeField};
let modulus = <<E::Fr as PrimeField>::Params as FpParameters>::MODULUS;
fn calculate_witness_element<E: ark_ec::pairing::Pairing>(
witness: Vec<BigInt>,
) -> Result<Vec<E::ScalarField>> {
use ark_ff::PrimeField;
let modulus = <E::ScalarField as PrimeField>::MODULUS;
// convert it to field elements
use num_traits::Signed;
@@ -562,7 +545,7 @@ fn calculate_witness_element<E: ark_ec::PairingEngine>(witness: Vec<BigInt>) ->
} else {
w.to_biguint().ok_or(Report::msg("not a biguint value"))?
};
witness_vec.push(E::Fr::from(w))
witness_vec.push(E::ScalarField::from(w))
}
Ok(witness_vec)
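This hunk is the arkworks 0.3 to 0.4 migration: `PairingEngine` becomes `ark_ec::pairing::Pairing`, `E::Fr` becomes `E::ScalarField`, and `FpParameters` disappears because the modulus is now an associated constant on `PrimeField`. A minimal sketch of the new idiom:

```rust
use ark_ec::pairing::Pairing;
use ark_ff::PrimeField;

// ark-ff 0.4: read the field modulus directly off PrimeField.
fn modulus_of<E: Pairing>() -> <E::ScalarField as PrimeField>::BigInt {
    <E::ScalarField as PrimeField>::MODULUS
}
```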
@@ -591,7 +574,7 @@ pub fn generate_proof_with_witness(
#[cfg(debug_assertions)]
let now = Instant::now();
let proof = create_proof_with_reduction_and_matrices::<_, CircomReduction>(
let proof = Groth16::<_, CircomReduction>::create_proof_with_reduction_and_matrices(
&proving_key.0,
r,
s,
@@ -685,7 +668,7 @@ pub fn generate_proof(
#[cfg(debug_assertions)]
let now = Instant::now();
let proof = create_proof_with_reduction_and_matrices::<_, CircomReduction>(
let proof = Groth16::<_, CircomReduction>::create_proof_with_reduction_and_matrices(
&proving_key.0,
r,
s,
@@ -730,7 +713,7 @@ pub fn verify_proof(
#[cfg(debug_assertions)]
let now = Instant::now();
let verified = ark_verify_proof(&pvk, proof, &inputs)?;
let verified = Groth16::<_, CircomReduction>::verify_proof(&pvk, proof, &inputs)?;
#[cfg(debug_assertions)]
println!("verify took: {:.2?}", now.elapsed());
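Similarly, the Groth16 free functions move onto the `Groth16` type, parameterized by the witness reduction. A sketch of the 0.4 call shapes used in this diff (function and argument names illustrative):

```rust
use ark_bn254::{Bn254, Fr};
use ark_circom::CircomReduction;
use ark_groth16::{Groth16, PreparedVerifyingKey, Proof, ProvingKey};
use ark_relations::r1cs::{ConstraintMatrices, SynthesisError};

fn prove_and_verify(
    pk: &ProvingKey<Bn254>,
    pvk: &PreparedVerifyingKey<Bn254>,
    matrices: &ConstraintMatrices<Fr>,
    (r, s): (Fr, Fr),
    full_assignment: &[Fr],
    public_inputs: &[Fr],
) -> Result<bool, SynthesisError> {
    let proof = Groth16::<Bn254, CircomReduction>::create_proof_with_reduction_and_matrices(
        pk, r, s, matrices,
        matrices.num_instance_variables,
        matrices.num_constraints,
        full_assignment,
    )?;
    Groth16::<Bn254, CircomReduction>::verify_proof(pvk, &proof, public_inputs)
}
```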


@@ -1,5 +1,5 @@
use crate::circuit::{vk_from_raw, zkey_from_raw, Curve, Fr};
use crate::poseidon_hash::poseidon_hash as utils_poseidon_hash;
use crate::hashers::{hash_to_field, poseidon_hash as utils_poseidon_hash};
use crate::poseidon_tree::PoseidonTree;
use crate::protocol::*;
use crate::utils::*;
@@ -13,6 +13,8 @@ use cfg_if::cfg_if;
use color_eyre::Result;
use num_bigint::BigInt;
use std::io::Cursor;
use utils::{ZerokitMerkleProof, ZerokitMerkleTree};
// use rkyv::Deserialize;
cfg_if! {
if #[cfg(not(target_arch = "wasm32"))] {
@@ -61,7 +63,7 @@ impl RLN<'_> {
/// use std::io::Cursor;
///
/// let tree_height = 20;
/// let resources = Cursor::new("./resources/tree_height_20/");
/// let resources = Cursor::new("tree_height_20");
///
/// // We create a new RLN instance
/// let mut rln = RLN::new(tree_height, resources);
@@ -407,7 +409,7 @@ impl RLN<'_> {
mut input_data: R,
mut output_data: W,
) -> Result<()> {
// We read input RLN witness and we deserialize it
// We read input RLN witness and we serialize_compressed it
let mut serialized: Vec<u8> = Vec::new();
input_data.read_to_end(&mut serialized)?;
let (rln_witness, _) = deserialize_witness(&serialized)?;
@@ -421,7 +423,7 @@ impl RLN<'_> {
let proof = generate_proof(self.witness_calculator, &self.proving_key, &rln_witness)?;
// Note: we export a serialization of ark-groth16::Proof not semaphore::Proof
proof.serialize(&mut output_data)?;
proof.serialize_compressed(&mut output_data)?;
Ok(())
}
@@ -467,7 +469,7 @@ impl RLN<'_> {
// [ proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> | nullifier<32> | rln_identifier<32> ]
let mut input_byte: Vec<u8> = Vec::new();
input_data.read_to_end(&mut input_byte)?;
let proof = ArkProof::deserialize(&mut Cursor::new(&input_byte[..128]))?;
let proof = ArkProof::deserialize_compressed(&mut Cursor::new(&input_byte[..128]))?;
let (proof_values, _) = deserialize_proof_values(&input_byte[128..]);
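ark-serialize 0.4 renames the canonical methods: `serialize` becomes `serialize_compressed` and `deserialize` becomes `deserialize_compressed`. A compressed Groth16 proof on Bn254 is still 128 bytes, which is why the `[..128]` slicing is unchanged. A round-trip sketch:

```rust
use ark_bn254::Bn254;
use ark_groth16::Proof;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use std::io::Cursor;

fn proof_roundtrip(proof: &Proof<Bn254>) -> Proof<Bn254> {
    let mut bytes: Vec<u8> = Vec::new();
    proof.serialize_compressed(&mut bytes).unwrap(); // 128 bytes on Bn254
    Proof::<Bn254>::deserialize_compressed(&mut Cursor::new(&bytes)).unwrap()
}
```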
@@ -500,8 +502,6 @@ impl RLN<'_> {
/// // We generate a random signal
/// let mut rng = rand::thread_rng();
/// let signal: [u8; 32] = rng.gen();
/// let signal_len = u64::try_from(signal.len()).unwrap();
///
/// // We generate a random epoch
/// let epoch = hash_to_field(b"test-epoch");
///
@@ -509,9 +509,9 @@ impl RLN<'_> {
/// // input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
/// let mut serialized: Vec<u8> = Vec::new();
/// serialized.append(&mut fr_to_bytes_le(&identity_secret_hash));
/// serialized.append(&mut identity_index.to_le_bytes().to_vec());
/// serialized.append(&mut normalize_usize(identity_index));
/// serialized.append(&mut fr_to_bytes_le(&epoch));
/// serialized.append(&mut signal_len.to_le_bytes().to_vec());
/// serialized.append(&mut normalize_usize(signal.len()));
/// serialized.append(&mut signal.to_vec());
///
/// let mut input_buffer = Cursor::new(serialized);
@@ -528,7 +528,7 @@ impl RLN<'_> {
mut input_data: R,
mut output_data: W,
) -> Result<()> {
// We read input RLN witness and we deserialize it
// We read input RLN witness and we serialize_compressed it
let mut witness_byte: Vec<u8> = Vec::new();
input_data.read_to_end(&mut witness_byte)?;
let (rln_witness, _) = proof_inputs_to_rln_witness(&mut self.tree, &witness_byte)?;
@@ -538,7 +538,7 @@ impl RLN<'_> {
// Note: we export a serialization of ark-groth16::Proof not semaphore::Proof
// This proof is compressed, i.e. 128 bytes long
proof.serialize(&mut output_data)?;
proof.serialize_compressed(&mut output_data)?;
output_data.write_all(&serialize_proof_values(&proof_values))?;
Ok(())
@@ -563,7 +563,7 @@ impl RLN<'_> {
// Note: we export a serialization of ark-groth16::Proof not semaphore::Proof
// This proof is compressed, i.e. 128 bytes long
proof.serialize(&mut output_data)?;
proof.serialize_compressed(&mut output_data)?;
output_data.write_all(&serialize_proof_values(&proof_values))?;
Ok(())
}
@@ -587,7 +587,7 @@ impl RLN<'_> {
/// // We prepare input for verify_rln_proof API
/// // input_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
/// // that is [ proof_data || signal_len<8> | signal<var> ]
/// proof_data.append(&mut signal_len.to_le_bytes().to_vec());
/// proof_data.append(&mut normalize_usize(signal_len));
/// proof_data.append(&mut signal.to_vec());
///
/// let mut input_buffer = Cursor::new(proof_data);
@@ -599,13 +599,13 @@ impl RLN<'_> {
let mut serialized: Vec<u8> = Vec::new();
input_data.read_to_end(&mut serialized)?;
let mut all_read = 0;
let proof = ArkProof::deserialize(&mut Cursor::new(&serialized[..128].to_vec()))?;
let proof =
ArkProof::deserialize_compressed(&mut Cursor::new(&serialized[..128].to_vec()))?;
all_read += 128;
let (proof_values, read) = deserialize_proof_values(&serialized[all_read..]);
all_read += read;
let signal_len =
u64::from_le_bytes(serialized[all_read..all_read + 8].try_into()?) as usize;
let signal_len = usize::from_le_bytes(serialized[all_read..all_read + 8].try_into()?);
all_read += 8;
let signal: Vec<u8> = serialized[all_read..all_read + signal_len].to_vec();
@@ -675,13 +675,13 @@ impl RLN<'_> {
let mut serialized: Vec<u8> = Vec::new();
input_data.read_to_end(&mut serialized)?;
let mut all_read = 0;
let proof = ArkProof::deserialize(&mut Cursor::new(&serialized[..128].to_vec()))?;
let proof =
ArkProof::deserialize_compressed(&mut Cursor::new(&serialized[..128].to_vec()))?;
all_read += 128;
let (proof_values, read) = deserialize_proof_values(&serialized[all_read..]);
all_read += read;
let signal_len =
u64::from_le_bytes(serialized[all_read..all_read + 8].try_into()?) as usize;
let signal_len = usize::from_le_bytes(serialized[all_read..all_read + 8].try_into()?);
all_read += 8;
let signal: Vec<u8> = serialized[all_read..all_read + signal_len].to_vec();
@@ -749,7 +749,7 @@ impl RLN<'_> {
/// let mut buffer = Cursor::new(Vec::<u8>::new());
/// rln.key_gen(&mut buffer).unwrap();
///
/// // We deserialize the keygen output
/// // We serialize_compressed the keygen output
/// let (identity_secret_hash, id_commitment) = deserialize_identity_pair(buffer.into_inner());
/// ```
pub fn key_gen<W: Write>(&self, mut output_data: W) -> Result<()> {
@@ -779,7 +779,7 @@ impl RLN<'_> {
/// let mut buffer = Cursor::new(Vec::<u8>::new());
/// rln.extended_key_gen(&mut buffer).unwrap();
///
/// // We deserialize the keygen output
/// // We serialize_compressed the keygen output
/// let (identity_trapdoor, identity_nullifier, identity_secret_hash, id_commitment) = deserialize_identity_tuple(buffer.into_inner());
/// ```
pub fn extended_key_gen<W: Write>(&self, mut output_data: W) -> Result<()> {
@@ -814,7 +814,7 @@ impl RLN<'_> {
/// rln.seeded_key_gen(&mut input_buffer, &mut output_buffer)
/// .unwrap();
///
/// // We deserialize the keygen output
/// // We serialize_compressed the keygen output
/// let (identity_secret_hash, id_commitment) = deserialize_identity_pair(output_buffer.into_inner());
/// ```
pub fn seeded_key_gen<R: Read, W: Write>(
@@ -857,7 +857,7 @@ impl RLN<'_> {
/// rln.seeded_key_gen(&mut input_buffer, &mut output_buffer)
/// .unwrap();
///
/// // We deserialize the keygen output
/// // We serialize_compressed the keygen output
/// let (identity_trapdoor, identity_nullifier, identity_secret_hash, id_commitment) = deserialize_identity_tuple(buffer.into_inner());
/// ```
pub fn seeded_extended_key_gen<R: Read, W: Write>(
@@ -916,7 +916,7 @@ impl RLN<'_> {
mut input_proof_data_2: R,
mut output_data: W,
) -> Result<()> {
// We deserialize the two proofs and we get the corresponding RLNProofValues objects
// We serialize_compressed the two proofs and we get the corresponding RLNProofValues objects
let mut serialized: Vec<u8> = Vec::new();
input_proof_data_1.read_to_end(&mut serialized)?;
// We skip deserialization of the zk-proof at the beginning
@@ -960,7 +960,7 @@ impl RLN<'_> {
///
/// The function returns the corresponding [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object serialized using [`rln::protocol::serialize_witness`](crate::protocol::serialize_witness)).
pub fn get_serialized_rln_witness<R: Read>(&mut self, mut input_data: R) -> Result<Vec<u8>> {
// We read input RLN witness and we deserialize it
// We read input RLN witness and we serialize_compressed it
let mut witness_byte: Vec<u8> = Vec::new();
input_data.read_to_end(&mut witness_byte)?;
let (rln_witness, _) = proof_inputs_to_rln_witness(&mut self.tree, &witness_byte)?;
@@ -1008,7 +1008,7 @@ impl Default for RLN<'_> {
/// hash(&mut input_buffer, &mut output_buffer)
/// .unwrap();
///
/// // We deserialize the keygen output
/// // We serialize_compressed the keygen output
/// let field_element = deserialize_field_element(output_buffer.into_inner());
/// ```
pub fn hash<R: Read, W: Write>(mut input_data: R, mut output_data: W) -> Result<()> {
@@ -1041,7 +1041,7 @@ pub fn hash<R: Read, W: Write>(mut input_data: R, mut output_data: W) -> Result<
/// poseidon_hash(&mut input_buffer, &mut output_buffer)
/// .unwrap();
///
/// // We deserialize the hash output
/// // We serialize_compressed the hash output
/// let hash_result = deserialize_field_element(output_buffer.into_inner());
/// ```
pub fn poseidon_hash<R: Read, W: Write>(mut input_data: R, mut output_data: W) -> Result<()> {
@@ -1060,6 +1060,8 @@ mod test {
use super::*;
use ark_std::{rand::thread_rng, UniformRand};
use rand::Rng;
use utils::ZerokitMerkleTree;
// use rkyv::Deserialize;
#[test]
// We test merkle batch Merkle tree additions
@@ -1284,7 +1286,7 @@ mod test {
let serialized_proof = output_buffer.into_inner();
// Before checking public verify API, we check that the (deserialized) proof generated by prove is actually valid
let proof = ArkProof::deserialize(&mut Cursor::new(&serialized_proof)).unwrap();
let proof = ArkProof::deserialize_compressed(&mut Cursor::new(&serialized_proof)).unwrap();
let verified = verify_proof(&rln.verification_key, &proof, &proof_values);
assert!(verified.unwrap());
@@ -1325,14 +1327,13 @@ mod test {
let (identity_secret_hash, id_commitment) = keygen();
// We set as leaf id_commitment after storing its index
let identity_index = u64::try_from(rln.tree.leaves_set()).unwrap();
let identity_index = rln.tree.leaves_set();
let mut buffer = Cursor::new(fr_to_bytes_le(&id_commitment));
rln.set_next_leaf(&mut buffer).unwrap();
// We generate a random signal
let mut rng = rand::thread_rng();
let signal: [u8; 32] = rng.gen();
let signal_len = u64::try_from(signal.len()).unwrap();
// We generate a random epoch
let epoch = hash_to_field(b"test-epoch");
@@ -1341,9 +1342,9 @@ mod test {
// input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret_hash));
serialized.append(&mut identity_index.to_le_bytes().to_vec());
serialized.append(&mut normalize_usize(identity_index));
serialized.append(&mut fr_to_bytes_le(&epoch));
serialized.append(&mut signal_len.to_le_bytes().to_vec());
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut signal.to_vec());
let mut input_buffer = Cursor::new(serialized);
@@ -1357,7 +1358,7 @@ mod test {
// We prepare input for verify_rln_proof API
// input_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
// that is [ proof_data || signal_len<8> | signal<var> ]
proof_data.append(&mut signal_len.to_le_bytes().to_vec());
proof_data.append(&mut normalize_usize(signal.len()));
proof_data.append(&mut signal.to_vec());
let mut input_buffer = Cursor::new(proof_data);
@@ -1390,14 +1391,13 @@ mod test {
let (identity_secret_hash, id_commitment) = keygen();
// We set as leaf id_commitment after storing its index
let identity_index = u64::try_from(rln.tree.leaves_set()).unwrap();
let identity_index = rln.tree.leaves_set();
let mut buffer = Cursor::new(fr_to_bytes_le(&id_commitment));
rln.set_next_leaf(&mut buffer).unwrap();
// We generate a random signal
let mut rng = rand::thread_rng();
let signal: [u8; 32] = rng.gen();
let signal_len = u64::try_from(signal.len()).unwrap();
// We generate a random epoch
let epoch = hash_to_field(b"test-epoch");
@@ -1406,14 +1406,14 @@ mod test {
// input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret_hash));
serialized.append(&mut identity_index.to_le_bytes().to_vec());
serialized.append(&mut normalize_usize(identity_index));
serialized.append(&mut fr_to_bytes_le(&epoch));
serialized.append(&mut signal_len.to_le_bytes().to_vec());
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut signal.to_vec());
let mut input_buffer = Cursor::new(serialized);
// We read input RLN witness and we deserialize it
// We read input RLN witness and we serialize_compressed it
let mut witness_byte: Vec<u8> = Vec::new();
input_buffer.read_to_end(&mut witness_byte).unwrap();
let (rln_witness, _) = proof_inputs_to_rln_witness(&mut rln.tree, &witness_byte).unwrap();
@@ -1453,7 +1453,7 @@ mod test {
// We prepare input for verify_rln_proof API
// input_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
// that is [ proof_data || signal_len<8> | signal<var> ]
proof_data.append(&mut signal_len.to_le_bytes().to_vec());
proof_data.append(&mut normalize_usize(signal.len()));
proof_data.append(&mut signal.to_vec());
let mut input_buffer = Cursor::new(proof_data);
@@ -1487,14 +1487,13 @@ mod test {
let (identity_secret_hash, id_commitment) = keygen();
// We set as leaf id_commitment after storing its index
let identity_index = u64::try_from(rln.tree.leaves_set()).unwrap();
let identity_index = rln.tree.leaves_set();
let mut buffer = Cursor::new(fr_to_bytes_le(&id_commitment));
rln.set_next_leaf(&mut buffer).unwrap();
// We generate a random signal
let mut rng = rand::thread_rng();
let signal: [u8; 32] = rng.gen();
let signal_len = u64::try_from(signal.len()).unwrap();
// We generate a random epoch
let epoch = hash_to_field(b"test-epoch");
@@ -1503,9 +1502,9 @@ mod test {
// input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret_hash));
serialized.append(&mut identity_index.to_le_bytes().to_vec());
serialized.append(&mut normalize_usize(identity_index));
serialized.append(&mut fr_to_bytes_le(&epoch));
serialized.append(&mut signal_len.to_le_bytes().to_vec());
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut signal.to_vec());
let mut input_buffer = Cursor::new(serialized);
@@ -1519,7 +1518,7 @@ mod test {
// We prepare input for verify_rln_proof API
// input_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
// that is [ proof_data || signal_len<8> | signal<var> ]
proof_data.append(&mut signal_len.to_le_bytes().to_vec());
proof_data.append(&mut normalize_usize(signal.len()));
proof_data.append(&mut signal.to_vec());
let input_buffer = Cursor::new(proof_data);
@@ -1570,17 +1569,15 @@ mod test {
let (identity_secret_hash, id_commitment) = keygen();
// We set as leaf id_commitment after storing its index
let identity_index = u64::try_from(rln.tree.leaves_set()).unwrap();
let identity_index = rln.tree.leaves_set();
let mut buffer = Cursor::new(fr_to_bytes_le(&id_commitment));
rln.set_next_leaf(&mut buffer).unwrap();
// We generate two random signals
let mut rng = rand::thread_rng();
let signal1: [u8; 32] = rng.gen();
let signal1_len = u64::try_from(signal1.len()).unwrap();
let signal2: [u8; 32] = rng.gen();
let signal2_len = u64::try_from(signal2.len()).unwrap();
// We generate a random epoch
let epoch = hash_to_field(b"test-epoch");
@@ -1591,18 +1588,18 @@ mod test {
// input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
let mut serialized1: Vec<u8> = Vec::new();
serialized1.append(&mut fr_to_bytes_le(&identity_secret_hash));
serialized1.append(&mut identity_index.to_le_bytes().to_vec());
serialized1.append(&mut normalize_usize(identity_index));
serialized1.append(&mut fr_to_bytes_le(&epoch));
// The first part is the same for both proof inputs, so we clone
let mut serialized2 = serialized1.clone();
// We attach the first signal to the first proof input
serialized1.append(&mut signal1_len.to_le_bytes().to_vec());
serialized1.append(&mut normalize_usize(signal1.len()));
serialized1.append(&mut signal1.to_vec());
// We attach the second signal to the second proof input
serialized2.append(&mut signal2_len.to_le_bytes().to_vec());
serialized2.append(&mut normalize_usize(signal2.len()));
serialized2.append(&mut signal2.to_vec());
// We generate the first proof
@@ -1644,21 +1641,20 @@ mod test {
let (identity_secret_hash_new, id_commitment_new) = keygen();
// We add it to the tree
let identity_index_new = u64::try_from(rln.tree.leaves_set()).unwrap();
let identity_index_new = rln.tree.leaves_set();
let mut buffer = Cursor::new(fr_to_bytes_le(&id_commitment_new));
rln.set_next_leaf(&mut buffer).unwrap();
// We generate a random signal
let signal3: [u8; 32] = rng.gen();
let signal3_len = u64::try_from(signal3.len()).unwrap();
// We prepare proof input. Note that epoch is the same as before
// input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
let mut serialized3: Vec<u8> = Vec::new();
serialized3.append(&mut fr_to_bytes_le(&identity_secret_hash_new));
serialized3.append(&mut identity_index_new.to_le_bytes().to_vec());
serialized3.append(&mut normalize_usize(identity_index_new));
serialized3.append(&mut fr_to_bytes_le(&epoch));
serialized3.append(&mut signal3_len.to_le_bytes().to_vec());
serialized3.append(&mut normalize_usize(signal3.len()));
serialized3.append(&mut signal3.to_vec());
// We generate the proof

View File

@@ -13,8 +13,8 @@ pub fn to_bigint(el: &Fr) -> Result<BigInt> {
}
pub fn fr_byte_size() -> usize {
let mbs = <Fr as PrimeField>::size_in_bits();
(mbs + 64 - (mbs % 64)) / 8
let mbs = <Fr as PrimeField>::MODULUS_BIT_SIZE;
((mbs + 64 - (mbs % 64)) / 8) as usize
}
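The rounding above pads the modulus bit size up to a whole number of 64-bit words. A worked example of the arithmetic, assuming a 254-bit modulus such as BN254's scalar field:

```rust
// Standalone sketch of the padding arithmetic:
// (254 + 64 - (254 % 64)) / 8 = (254 + 64 - 62) / 8 = 256 / 8 = 32 bytes
fn padded_byte_size(modulus_bits: u32) -> usize {
    ((modulus_bits + 64 - (modulus_bits % 64)) / 8) as usize
}

fn main() {
    assert_eq!(padded_byte_size(254), 32); // field elements occupy four 64-bit words
}
```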
pub fn str_to_fr(input: &str, radix: u32) -> Result<Fr> {
@@ -75,7 +75,8 @@ pub fn fr_to_bytes_be(input: &Fr) -> Vec<u8> {
pub fn vec_fr_to_bytes_le(input: &[Fr]) -> Result<Vec<u8>> {
let mut bytes: Vec<u8> = Vec::new();
// We store the vector length
bytes.extend(u64::try_from(input.len())?.to_le_bytes().to_vec());
bytes.extend(input.len().to_le_bytes().to_vec());
// We store each element
input.iter().for_each(|el| bytes.extend(fr_to_bytes_le(el)));
@@ -85,7 +86,8 @@ pub fn vec_fr_to_bytes_le(input: &[Fr]) -> Result<Vec<u8>> {
pub fn vec_fr_to_bytes_be(input: &[Fr]) -> Result<Vec<u8>> {
let mut bytes: Vec<u8> = Vec::new();
// We store the vector length
bytes.extend(u64::try_from(input.len())?.to_be_bytes().to_vec());
bytes.extend(input.len().to_be_bytes().to_vec());
// We store each element
input.iter().for_each(|el| bytes.extend(fr_to_bytes_be(el)));
@@ -95,15 +97,18 @@ pub fn vec_fr_to_bytes_be(input: &[Fr]) -> Result<Vec<u8>> {
pub fn vec_u8_to_bytes_le(input: &[u8]) -> Result<Vec<u8>> {
let mut bytes: Vec<u8> = Vec::new();
// We store the vector length
bytes.extend(u64::try_from(input.len())?.to_le_bytes().to_vec());
bytes.extend(input.len().to_le_bytes().to_vec());
bytes.extend(input);
Ok(bytes)
}
pub fn vec_u8_to_bytes_be(input: Vec<u8>) -> Result<Vec<u8>> {
let mut bytes: Vec<u8> = Vec::new();
// We store the vector length
let mut bytes: Vec<u8> = u64::try_from(input.len())?.to_be_bytes().to_vec();
bytes.extend(input.len().to_be_bytes().to_vec());
bytes.extend(input);
Ok(bytes)
@@ -112,7 +117,7 @@ pub fn vec_u8_to_bytes_be(input: Vec<u8>) -> Result<Vec<u8>> {
pub fn bytes_le_to_vec_u8(input: &[u8]) -> Result<(Vec<u8>, usize)> {
let mut read: usize = 0;
let len = u64::from_le_bytes(input[0..8].try_into()?) as usize;
let len = usize::from_le_bytes(input[0..8].try_into()?);
read += 8;
let res = input[8..8 + len].to_vec();
@@ -124,7 +129,7 @@ pub fn bytes_le_to_vec_u8(input: &[u8]) -> Result<(Vec<u8>, usize)> {
pub fn bytes_be_to_vec_u8(input: &[u8]) -> Result<(Vec<u8>, usize)> {
let mut read: usize = 0;
let len = u64::from_be_bytes(input[0..8].try_into()?) as usize;
let len = usize::from_be_bytes(input[0..8].try_into()?);
read += 8;
let res = input[8..8 + len].to_vec();
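A round-trip sketch of this length-prefixed encoding. One caveat: `usize::from_le_bytes` over an 8-byte slice only compiles where `usize` is 64 bits; on 32-bit or wasm targets, the fixed 8-byte prefix would need an explicit `u64` read, as sketched below:

```rust
// Minimal round-trip for the [ len<8> | data<var> ] encoding (sketch, not crate code)
fn encode(data: &[u8]) -> Vec<u8> {
    let mut out = (data.len() as u64).to_le_bytes().to_vec(); // 8-byte LE length prefix
    out.extend_from_slice(data);
    out
}

fn decode(bytes: &[u8]) -> Vec<u8> {
    let len = u64::from_le_bytes(bytes[0..8].try_into().unwrap()) as usize;
    bytes[8..8 + len].to_vec()
}

fn main() {
    let payload = vec![1u8, 2, 3];
    assert_eq!(decode(&encode(&payload)), payload);
}
```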
@@ -138,7 +143,7 @@ pub fn bytes_le_to_vec_fr(input: &[u8]) -> Result<(Vec<Fr>, usize)> {
let mut read: usize = 0;
let mut res: Vec<Fr> = Vec::new();
let len = u64::from_le_bytes(input[0..8].try_into()?) as usize;
let len = usize::from_le_bytes(input[0..8].try_into()?);
read += 8;
let el_size = fr_byte_size();
@@ -155,7 +160,7 @@ pub fn bytes_be_to_vec_fr(input: &[u8]) -> Result<(Vec<Fr>, usize)> {
let mut read: usize = 0;
let mut res: Vec<Fr> = Vec::new();
let len = u64::from_be_bytes(input[0..8].try_into()?) as usize;
let len = usize::from_be_bytes(input[0..8].try_into()?);
read += 8;
let el_size = fr_byte_size();
@@ -168,6 +173,12 @@ pub fn bytes_be_to_vec_fr(input: &[u8]) -> Result<(Vec<Fr>, usize)> {
Ok((res, read))
}
pub fn normalize_usize(input: usize) -> Vec<u8> {
let mut normalized_usize = input.to_le_bytes().to_vec();
normalized_usize.resize(8, 0);
normalized_usize
}
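This is the piece that keeps the wire format stable across architectures: `to_le_bytes` on `usize` yields 4 bytes on 32-bit targets and 8 on 64-bit ones, while `normalize_usize` always emits exactly 8. A behavior sketch (same body as above, shown standalone):

```rust
// The result is always 8 little-endian bytes, regardless of the platform's usize width
fn normalize_usize(input: usize) -> Vec<u8> {
    let mut normalized_usize = input.to_le_bytes().to_vec();
    normalized_usize.resize(8, 0);
    normalized_usize
}

fn main() {
    assert_eq!(normalize_usize(1), vec![1, 0, 0, 0, 0, 0, 0, 0]);
    assert_eq!(normalize_usize(usize::MAX).len(), 8);
}
```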
/* Old conversion utilities between different libraries data types
// Conversion Utilities between poseidon-rs Field and arkworks Fr (in order to call directly poseidon-rs' poseidon_hash)

View File

@@ -4,7 +4,7 @@ mod test {
use rand::Rng;
use rln::circuit::*;
use rln::ffi::{hash as ffi_hash, poseidon_hash as ffi_poseidon_hash, *};
use rln::poseidon_hash::{poseidon_hash as utils_poseidon_hash, ROUND_PARAMS};
use rln::hashers::{hash_to_field, poseidon_hash as utils_poseidon_hash, ROUND_PARAMS};
use rln::protocol::*;
use rln::public::RLN;
use rln::utils::*;
@@ -609,12 +609,11 @@ mod test {
let success = set_next_leaf(rln_pointer, input_buffer);
assert!(success, "set next leaf call failed");
let identity_index: u64 = no_of_leaves;
let identity_index: usize = no_of_leaves;
// We generate a random signal
let mut rng = rand::thread_rng();
let signal: [u8; 32] = rng.gen();
let signal_len = u64::try_from(signal.len()).unwrap();
// We generate a random epoch
let epoch = hash_to_field(b"test-epoch");
@@ -623,9 +622,9 @@ mod test {
// input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret_hash));
serialized.append(&mut identity_index.to_le_bytes().to_vec());
serialized.append(&mut normalize_usize(identity_index));
serialized.append(&mut fr_to_bytes_le(&epoch));
serialized.append(&mut signal_len.to_le_bytes().to_vec());
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut signal.to_vec());
// We call generate_rln_proof
@@ -640,7 +639,7 @@ mod test {
// We prepare input for verify_rln_proof API
// input_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
// that is [ proof_data | signal_len<8> | signal<var> ]
proof_data.append(&mut signal_len.to_le_bytes().to_vec());
proof_data.append(&mut normalize_usize(signal.len()));
proof_data.append(&mut signal.to_vec());
// We call verify_rln_proof
@@ -694,12 +693,11 @@ mod test {
let success = set_next_leaf(rln_pointer, input_buffer);
assert!(success, "set next leaf call failed");
let identity_index: u64 = no_of_leaves;
let identity_index: usize = no_of_leaves;
// We generate a random signal
let mut rng = rand::thread_rng();
let signal: [u8; 32] = rng.gen();
let signal_len = u64::try_from(signal.len()).unwrap();
// We generate a random epoch
let epoch = hash_to_field(b"test-epoch");
@@ -708,9 +706,9 @@ mod test {
// input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret_hash));
serialized.append(&mut identity_index.to_le_bytes().to_vec());
serialized.append(&mut normalize_usize(identity_index));
serialized.append(&mut fr_to_bytes_le(&epoch));
serialized.append(&mut signal_len.to_le_bytes().to_vec());
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut signal.to_vec());
// We call generate_rln_proof
@@ -725,7 +723,7 @@ mod test {
// We prepare input for verify_rln_proof API
// input_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
// that is [ proof_data | signal_len<8> | signal<var> ]
proof_data.append(&mut signal_len.to_le_bytes().to_vec());
proof_data.append(&mut normalize_usize(signal.len()));
proof_data.append(&mut signal.to_vec());
// We test verify_with_roots
@@ -807,18 +805,16 @@ mod test {
let success = set_next_leaf(rln_pointer, input_buffer);
assert!(success, "set next leaf call failed");
let identity_index: u64 = 0;
let identity_index: usize = 0;
// We generate two proofs using the same epoch but different signals.
// We generate two random signals
let mut rng = rand::thread_rng();
let signal1: [u8; 32] = rng.gen();
let signal1_len = u64::try_from(signal1.len()).unwrap();
// We generate two random signals
let signal2: [u8; 32] = rng.gen();
let signal2_len = u64::try_from(signal2.len()).unwrap();
// We generate a random epoch
let epoch = hash_to_field(b"test-epoch");
@@ -827,18 +823,18 @@ mod test {
// input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
let mut serialized1: Vec<u8> = Vec::new();
serialized1.append(&mut fr_to_bytes_le(&identity_secret_hash));
serialized1.append(&mut identity_index.to_le_bytes().to_vec());
serialized1.append(&mut normalize_usize(identity_index));
serialized1.append(&mut fr_to_bytes_le(&epoch));
// The first part is the same for both proof inputs, so we clone
let mut serialized2 = serialized1.clone();
// We attach the first signal to the first proof input
serialized1.append(&mut signal1_len.to_le_bytes().to_vec());
serialized1.append(&mut normalize_usize(signal1.len()));
serialized1.append(&mut signal1.to_vec());
// We attach the second signal to the second proof input
serialized2.append(&mut signal2_len.to_le_bytes().to_vec());
serialized2.append(&mut normalize_usize(signal2.len()));
serialized2.append(&mut signal2.to_vec());
// We call generate_rln_proof for first proof values
@@ -897,20 +893,19 @@ mod test {
let success = set_next_leaf(rln_pointer, input_buffer);
assert!(success, "set next leaf call failed");
let identity_index_new: u64 = 1;
let identity_index_new: usize = 1;
// We generate a random signal
let signal3: [u8; 32] = rng.gen();
let signal3_len = u64::try_from(signal3.len()).unwrap();
// We prepare input for generate_rln_proof API
// input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
// Note that epoch is the same as before
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret_hash_new));
serialized.append(&mut identity_index_new.to_le_bytes().to_vec());
serialized.append(&mut normalize_usize(identity_index_new));
serialized.append(&mut fr_to_bytes_le(&epoch));
serialized.append(&mut signal3_len.to_le_bytes().to_vec());
serialized.append(&mut normalize_usize(signal3.len()));
serialized.append(&mut signal3.to_vec());
// We call generate_rln_proof

View File

@@ -6,7 +6,7 @@
mod test {
use rln::circuit::*;
use rln::poseidon_tree::*;
use utils::{FullMerkleTree, OptimalMerkleTree};
use utils::{FullMerkleTree, OptimalMerkleTree, ZerokitMerkleProof, ZerokitMerkleTree};
#[test]
/// A basic performance comparison between the two supported Merkle Tree implementations
@@ -82,56 +82,38 @@ mod test {
// Test module for testing pmtree integration and features in zerokit
// enabled only if the pmtree feature is enabled
#[cfg(feature = "pmtree")]
#[cfg(feature = "pmtree-ft")]
#[cfg(test)]
mod pmtree_test {
use pmtree::*;
use rln::circuit::Fr;
use rln::poseidon_hash::poseidon_hash;
use rln::hashers::{hash_to_field, poseidon_hash};
use rln::poseidon_tree::PoseidonHash;
use rln::protocol::hash_to_field;
use rln::utils::{bytes_le_to_fr, fr_to_bytes_le, str_to_fr};
use rln::utils::str_to_fr;
use sled::Db as Sled;
use std::collections::HashMap;
use std::fs;
use std::path::Path;
use std::{collections::HashMap, path::PathBuf};
use utils::{FullMerkleTree, OptimalMerkleTree};
// The pmtree Hasher trait used by pmtree Merkle tree
impl pmtree::Hasher for PoseidonHash {
type Fr = Fr;
fn default_leaf() -> Self::Fr {
Fr::from(0)
}
fn serialize(value: Self::Fr) -> Value {
fr_to_bytes_le(&value)
}
fn deserialize(value: Value) -> Self::Fr {
let (fr, _) = bytes_le_to_fr(&value);
fr
}
fn hash(inputs: &[Self::Fr]) -> Self::Fr {
poseidon_hash(inputs)
}
}
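pmtree drives all node hashing and (de)serialization through this trait, so `serialize`/`deserialize` must round-trip exactly. A quick sanity check against the impl above (a sketch):

```rust
// Round-trip check for the pmtree Hasher impl
fn hasher_roundtrip() {
    let leaf = <PoseidonHash as pmtree::Hasher>::default_leaf();
    let bytes = <PoseidonHash as pmtree::Hasher>::serialize(leaf);
    assert_eq!(<PoseidonHash as pmtree::Hasher>::deserialize(bytes), leaf);
}
```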
// pmtree supports in-memory and on-disk databases (Database trait) for storing the Merkle tree state
// We implement Database for hashmaps, an in-memory database
struct MemoryDB(HashMap<DBKey, Value>);
#[derive(Default)]
struct MemoryDBConfig {}
impl Database for MemoryDB {
fn new(_dbpath: &str) -> Result<Self> {
type Config = MemoryDBConfig;
fn new(_config: Self::Config) -> Result<Self> {
Ok(MemoryDB(HashMap::new()))
}
fn load(_dbpath: &str) -> Result<Self> {
Err(Error("Cannot load in-memory DB".to_string()))
fn load(_config: Self::Config) -> Result<Self> {
Err(Box::new(Error("Cannot load in-memory DB".to_string())))
}
fn get(&self, key: DBKey) -> Result<Option<Value>> {
@@ -143,36 +125,46 @@ mod pmtree_test {
Ok(())
}
fn put_batch(&mut self, subtree: HashMap<DBKey, Value>) -> Result<()> {
self.0.extend(subtree);
Ok(())
}
}
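A minimal exercise of the `Database` trait against the in-memory backend. This sketch assumes pmtree's `DBKey` alias is a fixed 8-byte key and `Value` is `Vec<u8>`:

```rust
// Sketch: put/get round-trip through the Database trait
fn memory_db_roundtrip() -> Result<()> {
    let mut db = MemoryDB::new(MemoryDBConfig::default())?;
    db.put([0u8; 8], vec![42])?;
    assert_eq!(db.get([0u8; 8])?, Some(vec![42]));
    Ok(())
}
```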
// We implement Database for sled DB, an on-disk database
struct SledDB(Sled);
impl Database for SledDB {
fn new(dbpath: &str) -> Result<Self> {
if Path::new(dbpath).exists() {
match fs::remove_dir_all(dbpath) {
type Config = sled::Config;
fn new(config: Self::Config) -> Result<Self> {
let dbpath = &config.path;
if dbpath.exists() {
match fs::remove_dir_all(dbpath) {
Ok(x) => x,
Err(e) => return Err(Error(e.to_string())),
Err(e) => return Err(Box::new(Error(e.to_string()))),
}
}
let db: Sled = match sled::open(dbpath) {
let db: Sled = match config.open() {
Ok(db) => db,
Err(e) => return Err(Error(e.to_string())),
Err(e) => return Err(Box::new(Error(e.to_string()))),
};
Ok(SledDB(db))
}
fn load(dbpath: &str) -> Result<Self> {
let db: Sled = match sled::open(dbpath) {
fn load(config: Self::Config) -> Result<Self> {
let db: Sled = match sled::open(&config.path) {
Ok(db) => db,
Err(e) => return Err(Error(e.to_string())),
Err(e) => return Err(Box::new(Error(e.to_string()))),
};
if !db.was_recovered() {
return Err(Error("Trying to load non-existing database!".to_string()));
return Err(Box::new(Error(
"Trying to load non-existing database!".to_string(),
)));
}
Ok(SledDB(db))
@@ -181,16 +173,27 @@ mod pmtree_test {
fn get(&self, key: DBKey) -> Result<Option<Value>> {
match self.0.get(key) {
Ok(value) => Ok(value.map(|val| val.to_vec())),
Err(e) => Err(Error(e.to_string())),
Err(e) => Err(Box::new(Error(e.to_string()))),
}
}
fn put(&mut self, key: DBKey, value: Value) -> Result<()> {
match self.0.insert(key, value) {
Ok(_) => Ok(()),
Err(e) => Err(Error(e.to_string())),
Err(e) => Err(Box::new(Error(e.to_string()))),
}
}
fn put_batch(&mut self, subtree: HashMap<DBKey, Value>) -> Result<()> {
let mut batch = sled::Batch::default();
for (key, value) in subtree {
batch.insert(&key, value);
}
self.0.apply_batch(batch)?;
Ok(())
}
}
#[test]
@@ -341,6 +344,7 @@ mod pmtree_test {
"0x21947ffd0bce0c385f876e7c97d6a42eec5b1fe935aab2f01c1f8a8cbcc356d2",
16
)
.unwrap()
);
let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
@@ -353,83 +357,103 @@ mod pmtree_test {
str_to_fr(
"0x0000000000000000000000000000000000000000000000000000000000000000",
16,
),
)
.unwrap(),
str_to_fr(
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
16,
),
)
.unwrap(),
str_to_fr(
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
16,
),
)
.unwrap(),
str_to_fr(
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
16,
),
)
.unwrap(),
str_to_fr(
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
16,
),
)
.unwrap(),
str_to_fr(
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
16,
),
)
.unwrap(),
str_to_fr(
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
16,
),
)
.unwrap(),
str_to_fr(
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
16,
),
)
.unwrap(),
str_to_fr(
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
16,
),
)
.unwrap(),
str_to_fr(
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
16,
),
)
.unwrap(),
str_to_fr(
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
16,
),
)
.unwrap(),
str_to_fr(
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
16,
),
)
.unwrap(),
str_to_fr(
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
16,
),
)
.unwrap(),
str_to_fr(
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
16,
),
)
.unwrap(),
str_to_fr(
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
16,
),
)
.unwrap(),
str_to_fr(
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
16,
),
)
.unwrap(),
str_to_fr(
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
16,
),
)
.unwrap(),
str_to_fr(
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
16,
),
)
.unwrap(),
str_to_fr(
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
16,
),
)
.unwrap(),
str_to_fr(
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
16,
),
)
.unwrap(),
];
let expected_identity_path_index: Vec<u8> =
@@ -467,6 +491,7 @@ mod pmtree_test {
"0x21947ffd0bce0c385f876e7c97d6a42eec5b1fe935aab2f01c1f8a8cbcc356d2",
16
)
.unwrap()
);
let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
@@ -479,83 +504,103 @@ mod pmtree_test {
str_to_fr(
"0x0000000000000000000000000000000000000000000000000000000000000000",
16,
),
)
.unwrap(),
str_to_fr(
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
16,
),
)
.unwrap(),
str_to_fr(
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
16,
),
)
.unwrap(),
str_to_fr(
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
16,
),
)
.unwrap(),
str_to_fr(
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
16,
),
)
.unwrap(),
str_to_fr(
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
16,
),
)
.unwrap(),
str_to_fr(
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
16,
),
)
.unwrap(),
str_to_fr(
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
16,
),
)
.unwrap(),
str_to_fr(
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
16,
),
)
.unwrap(),
str_to_fr(
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
16,
),
)
.unwrap(),
str_to_fr(
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
16,
),
)
.unwrap(),
str_to_fr(
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
16,
),
)
.unwrap(),
str_to_fr(
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
16,
),
)
.unwrap(),
str_to_fr(
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
16,
),
)
.unwrap(),
str_to_fr(
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
16,
),
)
.unwrap(),
str_to_fr(
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
16,
),
)
.unwrap(),
str_to_fr(
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
16,
),
)
.unwrap(),
str_to_fr(
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
16,
),
)
.unwrap(),
str_to_fr(
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
16,
),
)
.unwrap(),
str_to_fr(
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
16,
),
)
.unwrap(),
];
let expected_identity_path_index: Vec<u8> =

View File

@@ -4,10 +4,11 @@ mod test {
circom_from_folder, vk_from_folder, zkey_from_folder, Fr, TEST_RESOURCES_FOLDER,
TEST_TREE_HEIGHT,
};
use rln::poseidon_hash::poseidon_hash;
use rln::hashers::{hash_to_field, poseidon_hash};
use rln::poseidon_tree::PoseidonTree;
use rln::protocol::*;
use rln::utils::str_to_fr;
use utils::{ZerokitMerkleProof, ZerokitMerkleTree};
// Input generated with https://github.com/oskarth/zk-kit/commit/b6a872f7160c7c14e10a0ea40acab99cbb23c9a8
const WITNESS_JSON_15: &str = r#"

View File

@@ -3,8 +3,8 @@ mod test {
use ark_std::{rand::thread_rng, UniformRand};
use rand::Rng;
use rln::circuit::{Fr, TEST_RESOURCES_FOLDER, TEST_TREE_HEIGHT};
use rln::poseidon_hash::{poseidon_hash as utils_poseidon_hash, ROUND_PARAMS};
use rln::protocol::{compute_tree_root, deserialize_identity_tuple, hash_to_field};
use rln::hashers::{hash_to_field, poseidon_hash as utils_poseidon_hash, ROUND_PARAMS};
use rln::protocol::{compute_tree_root, deserialize_identity_tuple};
use rln::public::{hash as public_hash, poseidon_hash as public_poseidon_hash, RLN};
use rln::utils::*;
use std::io::Cursor;

View File

@@ -12,7 +12,7 @@ dylib = [ "wasmer/dylib", "wasmer-engine-dylib", "wasmer-compiler-cranelift" ]
[dependencies]
ark-bn254 = { version = "0.3.0" }
ark-circom = { git = "https://github.com/gakonst/ark-circom", features=["circom-2"] }
ark-circom = { git = "https://github.com/gakonst/ark-circom", features=["circom-2"], rev = "35ce5a9" }
ark-ec = { version = "0.3.0", default-features = false, features = ["parallel"] }
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", features = ["parallel"] }
ark-relations = { version = "0.3.0", default-features = false }

View File

@@ -5,12 +5,12 @@ edition = "2021"
license = "MIT OR Apache-2.0"
[dependencies]
ark-ff = { version = "0.3.0", default-features = false, features = ["asm"] }
num-bigint = { version = "0.4.3", default-features = false, features = ["rand"] }
color-eyre = "0.6.1"
ark-ff = { version = "=0.4.1", default-features = false, features = ["asm"] }
num-bigint = { version = "=0.4.3", default-features = false, features = ["rand"] }
color-eyre = "=0.6.2"
[dev-dependencies]
ark-bn254 = { version = "0.3.0" }
ark-bn254 = "=0.4.0"
num-traits = "0.2.11"
hex-literal = "0.3.4"
tiny-keccak = { version = "2.0.2", features = ["keccak"] }

View File

@@ -0,0 +1,289 @@
use crate::merkle_tree::{Hasher, ZerokitMerkleProof, ZerokitMerkleTree};
use color_eyre::{Report, Result};
use std::{
cmp::max,
fmt::Debug,
iter::{once, repeat, successors},
};
////////////////////////////////////////////////////////////
/// Full Merkle Tree Implementation
////////////////////////////////////////////////////////////
/// Merkle tree with all leaf and intermediate hashes stored
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct FullMerkleTree<H: Hasher> {
/// The depth of the tree, i.e. the number of levels from leaf to root
depth: usize,
/// The nodes cached from the empty part of the tree (where leaves are set to default).
/// Since the rightmost part of the tree is usually changed much later than its creation,
/// we can prove accumulation of elements in the leftmost part, with no need to initialize the full tree
/// and by caching a few intermediate nodes up to the root computed from default leaves
cached_nodes: Vec<H::Fr>,
/// The tree nodes
nodes: Vec<H::Fr>,
// The next available (i.e., never used) tree index. Equivalently, the number of leaves added to the tree
// (deletions leave next_index unchanged)
next_index: usize,
}
/// Element of a Merkle proof
#[derive(Clone, Copy, PartialEq, Eq)]
pub enum FullMerkleBranch<H: Hasher> {
/// Left branch taken, value is the right sibling hash.
Left(H::Fr),
/// Right branch taken, value is the left sibling hash.
Right(H::Fr),
}
/// Merkle proof path, bottom to top.
#[derive(Clone, PartialEq, Eq)]
pub struct FullMerkleProof<H: Hasher>(pub Vec<FullMerkleBranch<H>>);
/// Implementations
impl<H: Hasher> ZerokitMerkleTree<H> for FullMerkleTree<H> {
type Proof = FullMerkleProof<H>;
fn default(depth: usize) -> Self {
FullMerkleTree::<H>::new(depth, H::default_leaf())
}
/// Creates a new `MerkleTree`
/// depth - the height of the tree made only of hash nodes. 2^depth is the maximum number of leaf nodes
fn new(depth: usize, initial_leaf: H::Fr) -> Self {
// Compute cache node values, leaf to root
let cached_nodes = successors(Some(initial_leaf), |prev| Some(H::hash(&[*prev, *prev])))
.take(depth + 1)
.collect::<Vec<_>>();
// Compute node values
let nodes = cached_nodes
.iter()
.rev()
.enumerate()
.flat_map(|(levels, hash)| repeat(hash).take(1 << levels))
.cloned()
.collect::<Vec<_>>();
debug_assert!(nodes.len() == (1 << (depth + 1)) - 1);
let next_index = 0;
Self {
depth,
cached_nodes,
nodes,
next_index,
}
}
// Returns the depth of the tree
fn depth(&self) -> usize {
self.depth
}
// Returns the capacity of the tree, i.e. the maximum number of accumulatable leaves
fn capacity(&self) -> usize {
1 << self.depth
}
// Returns the total number of leaves set
fn leaves_set(&mut self) -> usize {
self.next_index
}
#[must_use]
// Returns the root of the tree
fn root(&self) -> H::Fr {
self.nodes[0]
}
// Sets a leaf at the specified tree index
fn set(&mut self, leaf: usize, hash: H::Fr) -> Result<()> {
self.set_range(leaf, once(hash))?;
self.next_index = max(self.next_index, leaf + 1);
Ok(())
}
// Sets tree nodes, starting from start index
// Function specific to the FullMerkleTree implementation
fn set_range<I: IntoIterator<Item = H::Fr>>(&mut self, start: usize, hashes: I) -> Result<()> {
let index = self.capacity() + start - 1;
let mut count = 0;
// first count number of hashes, and check that they fit in the tree
// then insert into the tree
let hashes = hashes.into_iter().collect::<Vec<_>>();
if hashes.len() + start > self.capacity() {
return Err(Report::msg("provided hashes do not fit in the tree"));
}
hashes.into_iter().for_each(|hash| {
self.nodes[index + count] = hash;
count += 1;
});
if count != 0 {
self.update_nodes(index, index + (count - 1))?;
self.next_index = max(self.next_index, start + count);
}
Ok(())
}
// Sets a leaf at the next available index
fn update_next(&mut self, leaf: H::Fr) -> Result<()> {
self.set(self.next_index, leaf)?;
Ok(())
}
// Deletes a leaf at a certain index by setting it to its default value (next_index is not updated)
fn delete(&mut self, index: usize) -> Result<()> {
// We reset the leaf only if we previously set a leaf at that index
if index < self.next_index {
self.set(index, H::default_leaf())?;
}
Ok(())
}
// Computes a Merkle proof for the leaf at the specified index
fn proof(&self, leaf: usize) -> Result<FullMerkleProof<H>> {
if leaf >= self.capacity() {
return Err(Report::msg("index exceeds set size"));
}
let mut index = self.capacity() + leaf - 1;
let mut path = Vec::with_capacity(self.depth + 1);
while let Some(parent) = self.parent(index) {
// Add proof for node at index to parent
path.push(match index & 1 {
1 => FullMerkleBranch::Left(self.nodes[index + 1]),
0 => FullMerkleBranch::Right(self.nodes[index - 1]),
_ => unreachable!(),
});
index = parent;
}
Ok(FullMerkleProof(path))
}
// Verifies a Merkle proof with respect to the input leaf and the tree root
fn verify(&self, hash: &H::Fr, proof: &FullMerkleProof<H>) -> Result<bool> {
Ok(proof.compute_root_from(hash) == self.root())
}
}
impl<H: Hasher> FullMerkleTree<H>
where
H: Hasher,
{
// Utilities for updating the tree nodes
/// For a given node index, return the parent node index
/// Returns None if there is no parent (root node)
fn parent(&self, index: usize) -> Option<usize> {
if index == 0 {
None
} else {
Some(((index + 1) >> 1) - 1)
}
}
/// For a given node index, return index of the first (left) child.
fn first_child(&self, index: usize) -> usize {
(index << 1) + 1
}
fn levels(&self, index: usize) -> usize {
// `n.next_power_of_two()` will return `n` iff `n` is a power of two.
// The extra offset corrects this.
(index + 2).next_power_of_two().trailing_zeros() as usize - 1
}
fn update_nodes(&mut self, start: usize, end: usize) -> Result<()> {
if self.levels(start) != self.levels(end) {
return Err(Report::msg("self.levels(start) != self.levels(end)"));
}
if let (Some(start), Some(end)) = (self.parent(start), self.parent(end)) {
for parent in start..=end {
let child = self.first_child(parent);
self.nodes[parent] = H::hash(&[self.nodes[child], self.nodes[child + 1]]);
}
self.update_nodes(start, end)?;
}
Ok(())
}
}
impl<H: Hasher> ZerokitMerkleProof<H> for FullMerkleProof<H> {
type Index = u8;
#[must_use]
// Returns the length of a Merkle proof
fn length(&self) -> usize {
self.0.len()
}
/// Computes the leaf index corresponding to a Merkle proof
#[must_use]
fn leaf_index(&self) -> usize {
self.0.iter().rev().fold(0, |index, branch| match branch {
FullMerkleBranch::Left(_) => index << 1,
FullMerkleBranch::Right(_) => (index << 1) + 1,
})
}
#[must_use]
/// Returns the path elements forming a Merkle proof
fn get_path_elements(&self) -> Vec<H::Fr> {
self.0
.iter()
.map(|x| match x {
FullMerkleBranch::Left(value) | FullMerkleBranch::Right(value) => *value,
})
.collect()
}
/// Returns the path indexes forming a Merkle proof
#[must_use]
fn get_path_index(&self) -> Vec<Self::Index> {
self.0
.iter()
.map(|branch| match branch {
FullMerkleBranch::Left(_) => 0,
FullMerkleBranch::Right(_) => 1,
})
.collect()
}
/// Computes the Merkle root corresponding to a Merkle proof by iteratively hashing it with a given input leaf
#[must_use]
fn compute_root_from(&self, hash: &H::Fr) -> H::Fr {
self.0.iter().fold(*hash, |hash, branch| match branch {
FullMerkleBranch::Left(sibling) => H::hash(&[hash, *sibling]),
FullMerkleBranch::Right(sibling) => H::hash(&[*sibling, hash]),
})
}
}
// Debug formatting for printing a (Full) Merkle Proof Branch
impl<H> Debug for FullMerkleBranch<H>
where
H: Hasher,
H::Fr: Debug,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Left(arg0) => f.debug_tuple("Left").field(arg0).finish(),
Self::Right(arg0) => f.debug_tuple("Right").field(arg0).finish(),
}
}
}
// Debug formatting for printing a (Full) Merkle Proof
impl<H> Debug for FullMerkleProof<H>
where
H: Hasher,
H::Fr: Debug,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_tuple("Proof").field(&self.0).finish()
}
}
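End-to-end usage of the extracted implementation, with a toy hasher standing in for Poseidon (a sketch; real deployments use `PoseidonHash`):

```rust
use utils::{FullMerkleTree, Hasher, ZerokitMerkleTree};

// Toy hasher for illustration only: a wrapping sum over u64 nodes
struct SumHasher;

impl Hasher for SumHasher {
    type Fr = u64;
    fn default_leaf() -> u64 {
        0
    }
    fn hash(input: &[u64]) -> u64 {
        input.iter().fold(0u64, |acc, x| acc.wrapping_add(*x))
    }
}

fn main() -> color_eyre::Result<()> {
    let mut tree = FullMerkleTree::<SumHasher>::new(2, 0); // depth 2 => capacity 4
    tree.set(0, 7)?;
    let proof = tree.proof(0)?;
    assert!(tree.verify(&7, &proof)?);
    Ok(())
}
```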

View File

@@ -13,22 +13,13 @@
//! * Disk based storage backend (using mmaped files should be easy)
//! * Implement serialization for tree and Merkle proof
#![allow(dead_code)]
use std::collections::HashMap;
use std::{
cmp::max,
fmt::Debug,
iter::{once, repeat, successors},
};
use color_eyre::{Report, Result};
use color_eyre::Result;
/// In the Hasher trait we define the node type, the default leaf
/// and the hash function used to initialize a Merkle Tree implementation
pub trait Hasher {
/// Type of the leaf and tree node
type Fr: Copy + Clone + Eq;
type Fr: Clone + Copy + Eq;
/// Returns the default tree leaf
fn default_leaf() -> Self::Fr;
@@ -37,528 +28,39 @@ pub trait Hasher {
fn hash(input: &[Self::Fr]) -> Self::Fr;
}
////////////////////////////////////////////////////////////
/// Optimal Merkle Tree Implementation
////////////////////////////////////////////////////////////
/// The Merkle tree structure
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct OptimalMerkleTree<H>
/// The ZerokitMerkleTree trait defines the methods every Merkle tree implementation must provide,
/// including OptimalMerkleTree, FullMerkleTree and Pmtree
pub trait ZerokitMerkleTree<H: Hasher>
where
H: Hasher,
{
/// The depth of the tree, i.e. the number of levels from leaf to root
depth: usize,
type Proof;
/// The nodes cached from the empty part of the tree (where leaves are set to default).
/// Since the rightmost part of the tree is usually changed much later than its creation,
/// we can prove accumulation of elements in the leftmost part, with no need to initialize the full tree
/// and by caching a few intermediate nodes up to the root computed from default leaves
cached_nodes: Vec<H::Fr>,
/// The tree nodes
nodes: HashMap<(usize, usize), H::Fr>,
// The next available (i.e., never used) tree index. Equivalently, the number of leaves added to the tree
// (deletions leave next_index unchanged)
next_index: usize,
fn default(depth: usize) -> Self;
fn new(depth: usize, default_leaf: H::Fr) -> Self;
fn depth(&self) -> usize;
fn capacity(&self) -> usize;
fn leaves_set(&mut self) -> usize;
fn root(&self) -> H::Fr;
fn set(&mut self, index: usize, leaf: H::Fr) -> Result<()>;
fn set_range<I>(&mut self, start: usize, leaves: I) -> Result<()>
where
I: IntoIterator<Item = H::Fr>;
fn update_next(&mut self, leaf: H::Fr) -> Result<()>;
fn delete(&mut self, index: usize) -> Result<()>;
fn proof(&self, index: usize) -> Result<Self::Proof>;
fn verify(&self, leaf: &H::Fr, witness: &Self::Proof) -> Result<bool>;
}
/// The Merkle proof
/// Contains a vector of (node, branch_index) that defines the proof path elements and branch direction (1 or 0)
#[derive(Clone, PartialEq, Eq)]
pub struct OptimalMerkleProof<H: Hasher>(pub Vec<(H::Fr, u8)>);
/// Implementations
impl<H: Hasher> OptimalMerkleTree<H> {
pub fn default(depth: usize) -> Self {
OptimalMerkleTree::<H>::new(depth, H::default_leaf())
}
/// Creates a new `MerkleTree`
/// depth - the height of the tree made only of hash nodes. 2^depth is the maximum number of leaf nodes
pub fn new(depth: usize, default_leaf: H::Fr) -> Self {
let mut cached_nodes: Vec<H::Fr> = Vec::with_capacity(depth + 1);
cached_nodes.push(default_leaf);
for i in 0..depth {
cached_nodes.push(H::hash(&[cached_nodes[i]; 2]));
}
cached_nodes.reverse();
OptimalMerkleTree {
cached_nodes: cached_nodes.clone(),
depth,
nodes: HashMap::new(),
next_index: 0,
}
}
// Returns the depth of the tree
pub fn depth(&self) -> usize {
self.depth
}
// Returns the capacity of the tree, i.e. the maximum number of accumulatable leaves
pub fn capacity(&self) -> usize {
1 << self.depth
}
// Returns the total number of leaves set
pub fn leaves_set(&mut self) -> usize {
self.next_index
}
#[must_use]
// Returns the root of the tree
pub fn root(&self) -> H::Fr {
self.get_node(0, 0)
}
// Sets a leaf at the specified tree index
pub fn set(&mut self, index: usize, leaf: H::Fr) -> Result<()> {
if index >= self.capacity() {
return Err(Report::msg("index exceeds set size"));
}
self.nodes.insert((self.depth, index), leaf);
self.recalculate_from(index)?;
self.next_index = max(self.next_index, index + 1);
Ok(())
}
// Sets multiple leaves from the specified tree index
pub fn set_range<I: IntoIterator<Item = H::Fr>>(
&mut self,
start: usize,
leaves: I,
) -> Result<()> {
let leaves = leaves.into_iter().collect::<Vec<_>>();
// check if the range is valid
if start + leaves.len() > self.capacity() {
return Err(Report::msg("provided range exceeds set size"));
}
for (i, leaf) in leaves.iter().enumerate() {
self.nodes.insert((self.depth, start + i), *leaf);
self.recalculate_from(start + i)?;
}
self.next_index = max(self.next_index, start + leaves.len());
Ok(())
}
// Sets a leaf at the next available index
pub fn update_next(&mut self, leaf: H::Fr) -> Result<()> {
self.set(self.next_index, leaf)?;
Ok(())
}
// Deletes a leaf at a certain index by setting it to its default value (next_index is not updated)
pub fn delete(&mut self, index: usize) -> Result<()> {
// We reset the leaf only if we previously set a leaf at that index
if index < self.next_index {
self.set(index, H::default_leaf())?;
}
Ok(())
}
// Computes a Merkle proof for the leaf at the specified index
pub fn proof(&self, index: usize) -> Result<OptimalMerkleProof<H>> {
if index >= self.capacity() {
return Err(Report::msg("index exceeds set size"));
}
let mut witness = Vec::<(H::Fr, u8)>::with_capacity(self.depth);
let mut i = index;
let mut depth = self.depth;
loop {
i ^= 1;
witness.push((self.get_node(depth, i), (1 - (i & 1)).try_into().unwrap()));
i >>= 1;
depth -= 1;
if depth == 0 {
break;
}
}
if i != 0 {
Err(Report::msg("i != 0"))
} else {
Ok(OptimalMerkleProof(witness))
}
}
// Verifies a Merkle proof with respect to the input leaf and the tree root
pub fn verify(&self, leaf: &H::Fr, witness: &OptimalMerkleProof<H>) -> Result<bool> {
if witness.length() != self.depth {
return Err(Report::msg("witness length doesn't match tree depth"));
}
let expected_root = witness.compute_root_from(leaf);
Ok(expected_root.eq(&self.root()))
}
// Utilities for updating the tree nodes
fn get_node(&self, depth: usize, index: usize) -> H::Fr {
let node = *self
.nodes
.get(&(depth, index))
.unwrap_or_else(|| &self.cached_nodes[depth]);
node
}
fn get_leaf(&self, index: usize) -> H::Fr {
self.get_node(self.depth, index)
}
fn hash_couple(&mut self, depth: usize, index: usize) -> H::Fr {
let b = index & !1;
H::hash(&[self.get_node(depth, b), self.get_node(depth, b + 1)])
}
fn recalculate_from(&mut self, index: usize) -> Result<()> {
let mut i = index;
let mut depth = self.depth;
loop {
let h = self.hash_couple(depth, i);
i >>= 1;
depth -= 1;
self.nodes.insert((depth, i), h);
if depth == 0 {
break;
}
}
if depth != 0 {
return Err(Report::msg("did not reach the depth"));
}
if i != 0 {
return Err(Report::msg("did not go through all indexes"));
}
Ok(())
}
}
impl<H: Hasher> OptimalMerkleProof<H> {
#[must_use]
// Returns the length of a Merkle proof
pub fn length(&self) -> usize {
self.0.len()
}
/// Computes the leaf index corresponding to a Merkle proof
#[must_use]
pub fn leaf_index(&self) -> usize {
// In the current implementation, the path indexes in a proof correspond to the binary representation of the leaf index
let mut binary_repr = self.get_path_index();
binary_repr.reverse();
binary_repr
.into_iter()
.fold(0, |acc, digit| (acc << 1) + usize::from(digit))
}
#[must_use]
/// Returns the path elements forming a Merkle proof
pub fn get_path_elements(&self) -> Vec<H::Fr> {
self.0.iter().map(|x| x.0).collect()
}
/// Returns the path indexes forming a Merkle proof
#[must_use]
pub fn get_path_index(&self) -> Vec<u8> {
self.0.iter().map(|x| x.1).collect()
}
#[must_use]
/// Computes the Merkle root corresponding to a Merkle proof by iteratively hashing it with a given input leaf
pub fn compute_root_from(&self, leaf: &H::Fr) -> H::Fr {
let mut acc: H::Fr = *leaf;
for w in self.0.iter() {
if w.1 == 0 {
acc = H::hash(&[acc, w.0]);
} else {
acc = H::hash(&[w.0, acc]);
}
}
acc
}
}
// Debug formatting for printing a (Optimal) Merkle Proof
impl<H> Debug for OptimalMerkleProof<H>
pub trait ZerokitMerkleProof<H: Hasher>
where
H: Hasher,
H::Fr: Debug,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_tuple("Proof").field(&self.0).finish()
}
}
////////////////////////////////////////////////////////////
/// Full Merkle Tree Implementation
////////////////////////////////////////////////////////////
/// Merkle tree with all leaf and intermediate hashes stored
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct FullMerkleTree<H: Hasher> {
/// The depth of the tree, i.e. the number of levels from leaf to root
depth: usize,
/// The nodes cached from the empty part of the tree (where leaves are set to default).
/// Since the rightmost part of the tree is usually changed much later than its creation,
/// we can prove accumulation of elements in the leftmost part, with no need to initialize the full tree
/// and by caching a few intermediate nodes up to the root computed from default leaves
cached_nodes: Vec<H::Fr>,
/// The tree nodes
nodes: Vec<H::Fr>,
// The next available (i.e., never used) tree index. Equivalently, the number of leaves added to the tree
// (deletions leave next_index unchanged)
next_index: usize,
}
/// Element of a Merkle proof
#[derive(Clone, Copy, PartialEq, Eq)]
pub enum FullMerkleBranch<H: Hasher> {
/// Left branch taken, value is the right sibling hash.
Left(H::Fr),
/// Right branch taken, value is the left sibling hash.
Right(H::Fr),
}
/// Merkle proof path, bottom to top.
#[derive(Clone, PartialEq, Eq)]
pub struct FullMerkleProof<H: Hasher>(pub Vec<FullMerkleBranch<H>>);
/// Implementations
impl<H: Hasher> FullMerkleTree<H> {
pub fn default(depth: usize) -> Self {
FullMerkleTree::<H>::new(depth, H::default_leaf())
}
/// Creates a new `MerkleTree`
/// depth - the height of the tree made only of hash nodes. 2^depth is the maximum number of leaf nodes
pub fn new(depth: usize, initial_leaf: H::Fr) -> Self {
// Compute cache node values, leaf to root
let cached_nodes = successors(Some(initial_leaf), |prev| Some(H::hash(&[*prev, *prev])))
.take(depth + 1)
.collect::<Vec<_>>();
// Compute node values
let nodes = cached_nodes
.iter()
.rev()
.enumerate()
.flat_map(|(levels, hash)| repeat(hash).take(1 << levels))
.cloned()
.collect::<Vec<_>>();
debug_assert!(nodes.len() == (1 << (depth + 1)) - 1);
let next_index = 0;
Self {
depth,
cached_nodes,
nodes,
next_index,
}
}
// Returns the depth of the tree
pub fn depth(&self) -> usize {
self.depth
}
// Returns the capacity of the tree, i.e. the maximum number of accumulatable leaves
pub fn capacity(&self) -> usize {
1 << self.depth
}
// Returns the total number of leaves set
pub fn leaves_set(&mut self) -> usize {
self.next_index
}
#[must_use]
// Returns the root of the tree
pub fn root(&self) -> H::Fr {
self.nodes[0]
}
// Sets a leaf at the specified tree index
pub fn set(&mut self, leaf: usize, hash: H::Fr) -> Result<()> {
self.set_range(leaf, once(hash))?;
self.next_index = max(self.next_index, leaf + 1);
Ok(())
}
// Sets tree nodes, starting from start index
// Function specific to the FullMerkleTree implementation
fn set_range<I: IntoIterator<Item = H::Fr>>(&mut self, start: usize, hashes: I) -> Result<()> {
let index = self.capacity() + start - 1;
let mut count = 0;
// first count number of hashes, and check that they fit in the tree
// then insert into the tree
let hashes = hashes.into_iter().collect::<Vec<_>>();
if hashes.len() + start > self.capacity() {
return Err(Report::msg("provided hashes do not fit in the tree"));
}
hashes.into_iter().for_each(|hash| {
self.nodes[index + count] = hash;
count += 1;
});
if count != 0 {
self.update_nodes(index, index + (count - 1))?;
self.next_index = max(self.next_index, start + count);
}
Ok(())
}
// Sets a leaf at the next available index
pub fn update_next(&mut self, leaf: H::Fr) -> Result<()> {
self.set(self.next_index, leaf)?;
Ok(())
}
// Deletes a leaf at a certain index by setting it to its default value (next_index is not updated)
pub fn delete(&mut self, index: usize) -> Result<()> {
// We reset the leaf only if we previously set a leaf at that index
if index < self.next_index {
self.set(index, H::default_leaf())?;
}
Ok(())
}
// Computes a Merkle proof for the leaf at the specified index
pub fn proof(&self, leaf: usize) -> Result<FullMerkleProof<H>> {
if leaf >= self.capacity() {
return Err(Report::msg("index exceeds set size"));
}
let mut index = self.capacity() + leaf - 1;
let mut path = Vec::with_capacity(self.depth + 1);
while let Some(parent) = self.parent(index) {
// Add proof for node at index to parent
path.push(match index & 1 {
1 => FullMerkleBranch::Left(self.nodes[index + 1]),
0 => FullMerkleBranch::Right(self.nodes[index - 1]),
_ => unreachable!(),
});
index = parent;
}
Ok(FullMerkleProof(path))
}
// Verifies a Merkle proof with respect to the input leaf and the tree root
pub fn verify(&self, hash: &H::Fr, proof: &FullMerkleProof<H>) -> Result<bool> {
Ok(proof.compute_root_from(hash) == self.root())
}
// Utilities for updating the tree nodes
/// For a given node index, return the parent node index
/// Returns None if there is no parent (root node)
fn parent(&self, index: usize) -> Option<usize> {
if index == 0 {
None
} else {
Some(((index + 1) >> 1) - 1)
}
}
/// For a given node index, return index of the first (left) child.
fn first_child(&self, index: usize) -> usize {
(index << 1) + 1
}
fn levels(&self, index: usize) -> usize {
// `n.next_power_of_two()` will return `n` iff `n` is a power of two.
// The extra offset corrects this.
(index + 2).next_power_of_two().trailing_zeros() as usize - 1
}
fn update_nodes(&mut self, start: usize, end: usize) -> Result<()> {
if self.levels(start) != self.levels(end) {
return Err(Report::msg("self.levels(start) != self.levels(end)"));
}
if let (Some(start), Some(end)) = (self.parent(start), self.parent(end)) {
for parent in start..=end {
let child = self.first_child(parent);
self.nodes[parent] = H::hash(&[self.nodes[child], self.nodes[child + 1]]);
}
self.update_nodes(start, end)?;
}
Ok(())
}
}
impl<H: Hasher> FullMerkleProof<H> {
#[must_use]
// Returns the length of a Merkle proof
pub fn length(&self) -> usize {
self.0.len()
}
/// Computes the leaf index corresponding to a Merkle proof
#[must_use]
pub fn leaf_index(&self) -> usize {
self.0.iter().rev().fold(0, |index, branch| match branch {
FullMerkleBranch::Left(_) => index << 1,
FullMerkleBranch::Right(_) => (index << 1) + 1,
})
}
#[must_use]
/// Returns the path elements forming a Merkle proof
pub fn get_path_elements(&self) -> Vec<H::Fr> {
self.0
.iter()
.map(|x| match x {
FullMerkleBranch::Left(value) | FullMerkleBranch::Right(value) => *value,
})
.collect()
}
/// Returns the path indexes forming a Merkle proof
#[must_use]
pub fn get_path_index(&self) -> Vec<u8> {
self.0
.iter()
.map(|branch| match branch {
FullMerkleBranch::Left(_) => 0,
FullMerkleBranch::Right(_) => 1,
})
.collect()
}
/// Computes the Merkle root corresponding to a Merkle proof by iteratively hashing it with a given input leaf
#[must_use]
pub fn compute_root_from(&self, hash: &H::Fr) -> H::Fr {
self.0.iter().fold(*hash, |hash, branch| match branch {
FullMerkleBranch::Left(sibling) => H::hash(&[hash, *sibling]),
FullMerkleBranch::Right(sibling) => H::hash(&[*sibling, hash]),
})
}
}
// Debug formatting for printing a (Full) Merkle Proof Branch
impl<H> Debug for FullMerkleBranch<H>
where
H: Hasher,
H::Fr: Debug,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Left(arg0) => f.debug_tuple("Left").field(arg0).finish(),
Self::Right(arg0) => f.debug_tuple("Right").field(arg0).finish(),
}
}
}
// Debug formatting for printing a (Full) Merkle Proof
impl<H> Debug for FullMerkleProof<H>
where
H: Hasher,
H::Fr: Debug,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_tuple("Proof").field(&self.0).finish()
}
type Index;
fn length(&self) -> usize;
fn leaf_index(&self) -> usize;
fn get_path_elements(&self) -> Vec<H::Fr>;
fn get_path_index(&self) -> Vec<Self::Index>;
fn compute_root_from(&self, leaf: &H::Fr) -> H::Fr;
}
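The payoff of the trait split: code can now be written once against `ZerokitMerkleTree` and reused across backends. A minimal generic helper (a sketch, assuming it lives in this module where `color_eyre::Result` is in scope):

```rust
// Works for FullMerkleTree, OptimalMerkleTree, or any future backend
fn insert_and_prove<H, T>(tree: &mut T, leaf: H::Fr) -> Result<bool>
where
    H: Hasher,
    T: ZerokitMerkleTree<H>,
{
    tree.update_next(leaf)?;
    let index = tree.leaves_set() - 1;
    let proof = tree.proof(index)?;
    tree.verify(&leaf, &proof)
}
```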

View File

@@ -1,3 +1,7 @@
pub mod full_merkle_tree;
#[allow(clippy::module_inception)]
pub mod merkle_tree;
pub mod optimal_merkle_tree;
pub use self::full_merkle_tree::*;
pub use self::merkle_tree::*;
pub use self::optimal_merkle_tree::*;

View File

@@ -0,0 +1,266 @@
use crate::merkle_tree::{Hasher, ZerokitMerkleProof, ZerokitMerkleTree};
use color_eyre::{Report, Result};
use std::collections::HashMap;
use std::{cmp::max, fmt::Debug};
////////////////////////////////////////////////////////////
/// Optimal Merkle Tree Implementation
////////////////////////////////////////////////////////////
/// The Merkle tree structure
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct OptimalMerkleTree<H>
where
H: Hasher,
{
/// The depth of the tree, i.e. the number of levels from leaf to root
depth: usize,
/// The nodes cached from the empty part of the tree (where leaves are set to default).
/// Since the rightmost part of the tree is usually changed much later than its creation,
/// we can prove accumulation of elements in the leftmost part, with no need to initialize the full tree
/// and by caching a few intermediate nodes up to the root computed from default leaves
cached_nodes: Vec<H::Fr>,
/// The tree nodes
nodes: HashMap<(usize, usize), H::Fr>,
// The next available (i.e., never used) tree index. Equivalently, the number of leaves added to the tree
// (deletions leave next_index unchanged)
next_index: usize,
}
/// The Merkle proof
/// Contains a vector of (node, branch_index) that defines the proof path elements and branch direction (1 or 0)
#[derive(Clone, PartialEq, Eq)]
pub struct OptimalMerkleProof<H: Hasher>(pub Vec<(H::Fr, u8)>);
/// Implementations
impl<H: Hasher> ZerokitMerkleTree<H> for OptimalMerkleTree<H>
where
H: Hasher,
{
type Proof = OptimalMerkleProof<H>;
fn default(depth: usize) -> Self {
OptimalMerkleTree::<H>::new(depth, H::default_leaf())
}
/// Creates a new `MerkleTree`
/// depth - the height of the tree made only of hash nodes. 2^depth is the maximum number of leaf nodes
fn new(depth: usize, default_leaf: H::Fr) -> Self {
let mut cached_nodes: Vec<H::Fr> = Vec::with_capacity(depth + 1);
cached_nodes.push(default_leaf);
for i in 0..depth {
cached_nodes.push(H::hash(&[cached_nodes[i]; 2]));
}
cached_nodes.reverse();
OptimalMerkleTree {
cached_nodes: cached_nodes.clone(),
depth,
nodes: HashMap::new(),
next_index: 0,
}
}
// Returns the depth of the tree
fn depth(&self) -> usize {
self.depth
}
// Returns the capacity of the tree, i.e. the maximum number of accumulatable leaves
fn capacity(&self) -> usize {
1 << self.depth
}
// Returns the total number of leaves set
fn leaves_set(&mut self) -> usize {
self.next_index
}
#[must_use]
// Returns the root of the tree
fn root(&self) -> H::Fr {
self.get_node(0, 0)
}
// Sets a leaf at the specified tree index
fn set(&mut self, index: usize, leaf: H::Fr) -> Result<()> {
if index >= self.capacity() {
return Err(Report::msg("index exceeds set size"));
}
self.nodes.insert((self.depth, index), leaf);
self.recalculate_from(index)?;
self.next_index = max(self.next_index, index + 1);
Ok(())
}
// Sets multiple leaves from the specified tree index
fn set_range<I: IntoIterator<Item = H::Fr>>(&mut self, start: usize, leaves: I) -> Result<()> {
let leaves = leaves.into_iter().collect::<Vec<_>>();
// check if the range is valid
if start + leaves.len() > self.capacity() {
return Err(Report::msg("provided range exceeds set size"));
}
for (i, leaf) in leaves.iter().enumerate() {
self.nodes.insert((self.depth, start + i), *leaf);
self.recalculate_from(start + i)?;
}
self.next_index = max(self.next_index, start + leaves.len());
Ok(())
}
// Sets a leaf at the next available index
fn update_next(&mut self, leaf: H::Fr) -> Result<()> {
self.set(self.next_index, leaf)?;
Ok(())
}
// Deletes a leaf at a certain index by setting it to its default value (next_index is not updated)
fn delete(&mut self, index: usize) -> Result<()> {
// We reset the leaf only if we previously set a leaf at that index
if index < self.next_index {
self.set(index, H::default_leaf())?;
}
Ok(())
}
// Computes a Merkle proof for the leaf at the specified index
fn proof(&self, index: usize) -> Result<Self::Proof> {
if index >= self.capacity() {
return Err(Report::msg("index exceeds set size"));
}
let mut witness = Vec::<(H::Fr, u8)>::with_capacity(self.depth);
let mut i = index;
let mut depth = self.depth;
loop {
i ^= 1;
witness.push((self.get_node(depth, i), (1 - (i & 1)).try_into().unwrap()));
i >>= 1;
depth -= 1;
if depth == 0 {
break;
}
}
if i != 0 {
Err(Report::msg("i != 0"))
} else {
Ok(OptimalMerkleProof(witness))
}
}
// Verifies a Merkle proof with respect to the input leaf and the tree root
fn verify(&self, leaf: &H::Fr, witness: &Self::Proof) -> Result<bool> {
if witness.length() != self.depth {
return Err(Report::msg("witness length doesn't match tree depth"));
}
let expected_root = witness.compute_root_from(leaf);
Ok(expected_root.eq(&self.root()))
}
}
impl<H: Hasher> OptimalMerkleTree<H>
where
H: Hasher,
{
// Utilities for updating the tree nodes
fn get_node(&self, depth: usize, index: usize) -> H::Fr {
let node = *self
.nodes
.get(&(depth, index))
.unwrap_or_else(|| &self.cached_nodes[depth]);
node
}
pub fn get_leaf(&self, index: usize) -> H::Fr {
self.get_node(self.depth, index)
}
fn hash_couple(&mut self, depth: usize, index: usize) -> H::Fr {
let b = index & !1;
H::hash(&[self.get_node(depth, b), self.get_node(depth, b + 1)])
}
fn recalculate_from(&mut self, index: usize) -> Result<()> {
let mut i = index;
let mut depth = self.depth;
loop {
let h = self.hash_couple(depth, i);
i >>= 1;
depth -= 1;
self.nodes.insert((depth, i), h);
if depth == 0 {
break;
}
}
if depth != 0 {
return Err(Report::msg("did not reach the depth"));
}
if i != 0 {
return Err(Report::msg("did not go through all indexes"));
}
Ok(())
}
}
impl<H: Hasher> ZerokitMerkleProof<H> for OptimalMerkleProof<H>
where
H: Hasher,
{
type Index = u8;
#[must_use]
// Returns the length of a Merkle proof
fn length(&self) -> usize {
self.0.len()
}
/// Computes the leaf index corresponding to a Merkle proof
#[must_use]
fn leaf_index(&self) -> usize {
// In the current implementation, the path indexes in a proof correspond to the binary representation of the leaf index
let mut binary_repr = self.get_path_index();
binary_repr.reverse();
binary_repr
.into_iter()
.fold(0, |acc, digit| (acc << 1) + usize::from(digit))
}
#[must_use]
/// Returns the path elements forming a Merkle proof
fn get_path_elements(&self) -> Vec<H::Fr> {
self.0.iter().map(|x| x.0).collect()
}
/// Returns the path indexes forming a Merkle proof
#[must_use]
fn get_path_index(&self) -> Vec<u8> {
self.0.iter().map(|x| x.1).collect()
}
#[must_use]
/// Computes the Merkle root corresponding to a Merkle proof by iteratively hashing it with a given input leaf
fn compute_root_from(&self, leaf: &H::Fr) -> H::Fr {
let mut acc: H::Fr = *leaf;
for w in self.0.iter() {
if w.1 == 0 {
acc = H::hash(&[acc, w.0]);
} else {
acc = H::hash(&[w.0, acc]);
}
}
acc
}
}
// Debug formatting for printing a (Optimal) Merkle Proof
impl<H> Debug for OptimalMerkleProof<H>
where
H: Hasher,
H::Fr: Debug,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_tuple("Proof").field(&self.0).finish()
}
}
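Since both backends implement the same traits, a cheap consistency check is that they agree on the root for identical inputs. A sketch, generic over any `Hasher`:

```rust
use utils::{FullMerkleTree, Hasher, OptimalMerkleTree, ZerokitMerkleTree};

// Same hasher, depth and leaves => the two implementations should agree on the root
fn roots_agree<H: Hasher>(depth: usize, leaves: &[H::Fr]) -> bool {
    let mut full = FullMerkleTree::<H>::default(depth);
    let mut optimal = OptimalMerkleTree::<H>::default(depth);
    full.set_range(0, leaves.iter().copied()).unwrap();
    optimal.set_range(0, leaves.iter().copied()).unwrap();
    full.root() == optimal.root()
}
```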

View File

@@ -11,7 +11,7 @@
#![allow(dead_code)]
use ark_ff::{FpParameters, PrimeField};
use ark_ff::PrimeField;
use num_bigint::BigUint;
pub struct PoseidonGrainLFSR {
@@ -129,8 +129,8 @@ impl PoseidonGrainLFSR {
&mut self,
num_elems: usize,
) -> Vec<F> {
assert_eq!(F::Params::MODULUS_BITS as u64, self.prime_num_bits);
let modulus: BigUint = F::Params::MODULUS.into();
assert_eq!(F::MODULUS_BIT_SIZE as u64, self.prime_num_bits);
let modulus: BigUint = F::MODULUS.into();
let mut res = Vec::new();
for _ in 0..num_elems {
@@ -163,7 +163,7 @@ impl PoseidonGrainLFSR {
}
pub fn get_field_elements_mod_p<F: PrimeField>(&mut self, num_elems: usize) -> Vec<F> {
assert_eq!(F::Params::MODULUS_BITS as u64, self.prime_num_bits);
assert_eq!(F::MODULUS_BIT_SIZE as u64, self.prime_num_bits);
let mut res = Vec::new();
for _ in 0..num_elems {

View File

@@ -4,7 +4,7 @@
// and adapted to work over arkworks field traits and custom data structures
use crate::poseidon_constants::find_poseidon_ark_and_mds;
use ark_ff::{FpParameters, PrimeField};
use ark_ff::PrimeField;
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct RoundParamenters<F: PrimeField> {
@@ -32,7 +32,7 @@ impl<F: PrimeField> Poseidon<F> {
let (ark, mds) = find_poseidon_ark_and_mds::<F>(
1, // is_field = 1
0, // is_sbox_inverse = 0
F::Params::MODULUS_BITS as u64,
F::MODULUS_BIT_SIZE as u64,
t,
n_rounds_f as u64,
n_rounds_p as u64,

View File

@@ -3,7 +3,7 @@
mod test {
use hex_literal::hex;
use tiny_keccak::{Hasher as _, Keccak};
use utils::{FullMerkleTree, Hasher, OptimalMerkleTree};
use utils::{FullMerkleTree, Hasher, OptimalMerkleTree, ZerokitMerkleProof, ZerokitMerkleTree};
struct Keccak256;