Compare commits

...

81 Commits

Author SHA1 Message Date
rymnc
b51896c3a7 chore: Release 2023-08-25 05:37:09 +05:30
Aaryamann Challani
0c5ef6abcf fix(rln): use config.open instead of sled::open (#209)
fix: include retries & exponential backoff to catch WouldBlock errors
2023-08-25 05:34:20 +05:30
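
The retry logic referenced above isn't shown in this view; below is a minimal sketch of the retries-plus-exponential-backoff pattern the message describes. Function names, attempt counts, and delays are illustrative, not zerokit's actual code.

```rust
use std::{thread, time::Duration};

// Generic retry helper with exponential backoff. `is_transient` decides
// whether an error (e.g. a WouldBlock from an already-locked sled
// database) is worth retrying.
fn retry_with_backoff<T, E>(
    mut op: impl FnMut() -> Result<T, E>,
    is_transient: impl Fn(&E) -> bool,
    max_tries: u32,
) -> Result<T, E> {
    let mut delay = Duration::from_millis(10);
    let mut tries = 0;
    loop {
        match op() {
            Err(e) if is_transient(&e) && tries + 1 < max_tries => {
                tries += 1;
                thread::sleep(delay);
                delay *= 2; // double the wait after each failed attempt
            }
            other => return other,
        }
    }
}

fn main() {
    // Toy usage: fail twice with a would-block error, then succeed.
    let mut remaining_failures = 2;
    let result = retry_with_backoff(
        || {
            if remaining_failures > 0 {
                remaining_failures -= 1;
                Err("WouldBlock")
            } else {
                Ok(42)
            }
        },
        |e| *e == "WouldBlock",
        5,
    );
    assert_eq!(result, Ok(42));
}
```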
tyshko-rostyslav
a1c292cb2e add new path arg (#207) 2023-08-24 23:35:14 +05:30
Aaryamann Challani
c6c1bfde91 chore(rln): expose seq_atomic_operation api (#206) 2023-08-21 12:07:57 +05:30
Aaryamann Challani
bf3d1d3309 chore: use pmtree instead of vacp2p_pmtree (#203) 2023-08-16 16:01:39 +05:30
Aaryamann Challani
7110e00674 fix: building in ci (#201) 2023-08-09 14:35:47 +05:30
richΛrd
99966d1a6e feat: print to stderr any error obtained while executing functions via FFI (#200) 2023-08-09 14:35:33 +05:30
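
The pattern #200 describes is small enough to sketch: FFI entry points that return only a bool would otherwise swallow the underlying error, so it is printed to stderr before returning. A hedged illustration, not zerokit's actual FFI surface:

```rust
// Hypothetical fallible operation standing in for an RLN call.
fn do_work() -> Result<u32, String> {
    Err("something failed".into())
}

// C-ABI wrapper: the bool return keeps the ABI unchanged, while the
// error text is preserved on stderr for debugging.
#[no_mangle]
pub extern "C" fn ffi_do_work() -> bool {
    match do_work() {
        Ok(_) => true,
        Err(err) => {
            eprintln!("ffi_do_work failed: {err}");
            false
        }
    }
}

fn main() {
    assert!(!ffi_do_work());
}
```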
Richard Ramos
7d63912ace chore: accept tree_config in new_with_params 2023-08-07 08:48:41 -04:00
Aaryamann Challani
ef1da42d94 v0.3.1 (#198)
* fix(rln): missing fields from cargo.toml

* fix(utils): missing fields from cargo.toml

* chore: Release
2023-08-05 10:42:21 +05:30
Aaryamann Challani
ecb4d9307f chore: docs cleanup (#196) 2023-08-01 22:33:08 +05:30
Aaryamann Challani
d1414a44c5 fix(rln): atomic operation edge case (#195)
* fix(rln): atomic operation edge case

* fmt

* fix: bug

* test: new batching mechanism

* Revert "test: new batching mechanism"

This reverts commit 396c2ec342.

* fix: end should be max index + 1

* fix: optimization

* fix: apply cleanup

* fix: idiomatic leaf setting

* fix: abstract out logic

* fix: type aliasing for verbose types

* fix: remove_indices_and_set_leaves fn
2023-08-01 18:06:52 +05:30
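
The "end should be max index + 1" bullet above is a classic half-open-range fix: when a batch touches a set of leaf indices, the recomputed range must end one past the largest index, or the last leaf is silently dropped. A toy illustration (the names are not the actual zerokit API):

```rust
use std::ops::Range;

// Compute the half-open range of leaf indices affected by a batch.
fn affected_range(indices: &[usize]) -> Option<Range<usize>> {
    let start = *indices.iter().min()?;
    let end = *indices.iter().max()? + 1; // exclusive end: max index + 1
    Some(start..end)
}

fn main() {
    let touched = [4usize, 7, 5];
    let range = affected_range(&touched).unwrap();
    assert_eq!(range, 4..8); // index 7 stays inside the range
}
```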
Aaryamann Challani
6d58320077 fix(crates): version tags (#194)
* fix(crates): version tags

* fix commit

---------

Co-authored-by: Rostyslav Tyshko <tyshko.rostyslav@gmail.com>
2023-07-31 09:22:23 +02:00
tyshko-rostyslav
be2dccfdd0 Prepare for crates.io publication (#193)
* fix versions, use release ark-circom

* fix utils version

* fix lock file

* utils: renaming, use vacp2p_pmtree, description

* utils: fix  benches and tests

* fix lock and rln files
2023-07-28 12:25:34 +02:00
Aaryamann Challani
9d4ed68450 fix: rename close_db_connection to flush (#192) 2023-07-26 11:20:33 +05:30
Aaryamann Challani
5cf2b2e05e chore(utils): bump pmtree rev (#190) 2023-07-25 22:09:34 +05:30
Aaryamann Challani
36158e8d08 chore(utils): bump pmtree rev (#189)
* chore(utils): bump pmtree rev

* chore(utils): bump pmtree rev
2023-07-25 21:21:04 +05:30
Aaryamann Challani
c8cf033f32 chore(utils): bump pmtree rev (#188) 2023-07-25 17:26:43 +05:30
Aaryamann Challani
23d2331b78 feat(rln): close db connection before dropping rln object (#187) 2023-07-25 15:22:55 +05:30
Aaryamann Challani
c6b7a8c0a4 Revert "remove multiplier (#184)" (#185)
This reverts commit 4ec93c5e1f.
2023-07-24 13:44:49 +05:30
Aaryamann Challani
4ec93c5e1f remove multiplier (#184)
* chore: use crates.io dep of ethers-rs

* chore: remove multiplier package
2023-07-24 13:24:04 +05:30
Aaryamann Challani
c83c9902d7 chore: use crates.io dep of ethers-rs (#183) 2023-07-24 12:50:30 +05:30
Aaryamann Challani
131cacab35 chore: bump ethers-core (#182)
* fix: version

* fix: clippy
2023-07-24 12:21:00 +05:30
Aaryamann Challani
8a365f0c9e fix(ci): homebrew errors on github actions (#181) 2023-07-04 12:10:35 +05:30
rymnc
c561741339 fix: use fixed rev of cross 2023-07-04 10:28:45 +05:30
rymnc
90fdfb9d78 fix: version of ethers-core 2023-07-03 20:08:37 +05:30
Rostyslav Tyshko
56b9285fef fix versions 2023-06-22 23:17:32 +02:00
Aaryamann Challani
be88a432d7 fix(rln): tree_config parsing (#180)
* fix(rln): tree_config parsing

* fix(rln): clippy
2023-06-16 15:49:56 +05:30
Aaryamann Challani
8cfd83de54 feat(rln-wasm): set/get metadata api (#179)
* feat(rln-wasm): set/get metadata api

* fix(rln): imports
2023-06-16 09:48:26 +05:30
Aaryamann Challani
2793fe0e24 feat(rln): expose set_metadata and get_metadata public and ffi apis (#178) 2023-06-15 20:35:49 +05:30
Aaryamann Challani
0d35571215 feat(rln): get_leaf ffi and public api (#177) 2023-06-08 21:33:09 +05:30
Aaryamann Challani
9cc86e526e fix(rln): error out when temporary=true and path exists (#176)
* fix(rln): error out when temporary=true and path exists

* fix(rln): should error out when temp=true and path exists

* fix(rln): clippy
2023-06-07 16:58:39 +05:30
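
The guard #176 describes is straightforward to sketch: a tree opened with temporary=true must not point at an existing database path, otherwise a throwaway instance could clobber persisted state. A minimal illustration, not the actual zerokit code:

```rust
use std::path::Path;

// Refuse a temporary tree over a path that already exists.
fn check_tree_config(temporary: bool, path: &Path) -> Result<(), String> {
    if temporary && path.exists() {
        return Err(format!(
            "temporary=true but database path {:?} already exists",
            path
        ));
    }
    Ok(())
}

fn main() {
    assert!(check_tree_config(true, Path::new("/definitely/missing/db")).is_ok());
    assert!(check_tree_config(true, Path::new(".")).is_err()); // "." exists
}
```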
tyshko-rostyslav
ecd056884c CLI state between calls (#175)
* add serialization

* add config

* change state

* final touches
2023-06-07 16:48:01 +05:30
Aaryamann Challani
96497db7c5 test(rln): sled db config change (#174)
* test(rln): sled db config change

* fix: increase cache_capacity
2023-06-06 22:41:38 +05:30
tyshko-rostyslav
ba8f011cc1 MVP CLI Proposal implementation: proof/verify functionality (#168)
* next leaf command

* delete leaf command

* get root command

* next leaf call

* delete leaf call

* get root call

* GetProof command

* Prove command

* Verify command

* GenerateProof command

* VerifyWithRoots command

* GetProof call

* Prove call

* Verify call

* GenerateProof call

* VerifyWithRoots call

* fmt

* redundant

* output moved to stdout, better error msg
2023-06-05 15:24:17 +05:30
Aaryamann Challani
9dc92ec1ce fix(ci): run benchmarks only in PRs (#173) 2023-05-30 12:22:06 +05:30
Aaryamann Challani
75d760c179 feat(ci): benchmark compare (#171) 2023-05-29 23:42:30 +05:30
Aaryamann Challani
72a3ce1770 fix(cargo.toml): bench=false (#172) 2023-05-29 19:44:00 +05:30
Aaryamann Challani
b841e725a0 feat(rln): pmtree benchmarks (#169)
* feat(rln): pmtree benchmarks

* style(rln): lint
2023-05-29 18:51:36 +05:30
tyshko-rostyslav
3177e3ae74 MVP CLI Proposal implementation: leaf and root interactions (#167)
* next leaf command

* delete leaf command

* get root command

* next leaf call

* delete leaf call

* get root call

* better error comment

* to stdout

* fmt
2023-05-29 18:49:18 +05:30
Aaryamann Challani
2c4de0484a feat(utils): initialize benchmarks (#166) 2023-05-25 12:02:45 +05:30
tyshko-rostyslav
fcd4854037 Merkle tree operations (#164)
merkle tree operations added
2023-05-23 09:08:47 +02:00
Rahul Ghangas
d68dc1ad8e fix: wasm tests and ci (#165)
* fix: unwrap values in test

* fix: rln-wasm tests weren't being run in ci

* chore: cargo fmt
2023-05-18 11:33:45 +05:30
tyshko-rostyslav
8c3d60ed01 RLN CLI basic (#163)
* new crate

* toml file

* lock file

* cli

* state

* commands

* main

* fmt

* reorgs

* redundant

* fmt
2023-05-18 11:33:22 +05:30
Aaryamann Challani
c2d386cb74 feat(rln): public, ffi for atomic ops (#162) 2023-05-16 10:00:32 +05:30
Aaryamann Challani
8f2c9e3586 combined batch operations (insert + delete) (#160)
* fix(rln): clippy error

* feat: batch ops in ZerokitMerkleTree

* chore: bump pmtree

* fix: upstream root calc
2023-05-15 07:11:43 +05:30
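
The point of #160 is that inserts and deletes are applied as one batch, so derived state (the Merkle root) is recomputed once instead of per operation. A toy sketch of the shape of such an API, using a plain vector in place of the real ZerokitMerkleTree:

```rust
// Toy stand-in for a Merkle tree: a flat vector of leaves.
struct ToyTree {
    leaves: Vec<u64>,
}

impl ToyTree {
    // Set a contiguous run of leaves starting at `start` and clear the
    // leaves at `to_delete`, all in one batch. A real tree would
    // recompute the Merkle root exactly once, at the end.
    fn atomic_operation(&mut self, start: usize, new_leaves: &[u64], to_delete: &[usize]) {
        for (i, leaf) in new_leaves.iter().enumerate() {
            self.leaves[start + i] = *leaf;
        }
        for &i in to_delete {
            self.leaves[i] = 0; // 0 stands in for the default/empty leaf
        }
    }
}

fn main() {
    let mut tree = ToyTree { leaves: vec![0; 8] };
    tree.atomic_operation(2, &[10, 11, 12], &[0]);
    assert_eq!(tree.leaves, vec![0, 0, 10, 11, 12, 0, 0, 0]);
}
```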
Aaryamann Challani
584c2cf4c0 fix(rln): clippy error (#159) 2023-05-06 18:30:58 +05:30
Aaryamann Challani
2c4b399126 feat(rln): ability to pass rln tree config in ffi (#150)
* feat(rln): pass config in rln ffi

* fix: for rln-wasm

* fix: ffi tests

* fix: clippy

* fix: test cursor
2023-05-05 15:15:33 +05:30
RichΛrd
c4b699ddff fix: len().to_le_bytes().to_vec() contains 4 bytes on 32b and should contain 8 (#154) 2023-05-04 15:23:53 +05:30
Richard Ramos
33d3732922 fix: u64 to usize conversion
usize::from_le_bytes takes an array of 2, 4 or 8 bytes depending on the target pointer size. Since wasm uses 32-bit pointers, reading serialized values failed because the number of bytes read was incorrect. This also bumps rln-wasm to 0.0.9 (the npm package needs to be updated to include this fix).
2023-05-03 09:53:11 -04:00
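
Both #154 and this fix come down to the same portability rule: usize is 4 bytes on wasm32 and 8 bytes on 64-bit targets, so serializing lengths as usize desynchronizes readers and writers across targets. Writing a fixed-width u64 on the wire and converting at the boundary keeps the format identical everywhere. An illustrative sketch, not zerokit's serializers:

```rust
// Always encode lengths as 8 little-endian bytes, whatever the target.
fn write_len(out: &mut Vec<u8>, len: usize) {
    out.extend_from_slice(&(len as u64).to_le_bytes());
}

// Decode 8 bytes back into a usize (assumes the value fits the target).
fn read_len(input: &[u8]) -> usize {
    let mut bytes = [0u8; 8];
    bytes.copy_from_slice(&input[..8]);
    u64::from_le_bytes(bytes) as usize
}

fn main() {
    let mut buf = Vec::new();
    write_len(&mut buf, 1234);
    assert_eq!(buf.len(), 8); // same width on 32- and 64-bit targets
    assert_eq!(read_len(&buf), 1234);
}
```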
Aaryamann Challani
654c77dcf6 feat: make pmtree default (#149) 2023-05-02 13:31:26 +05:30
Richard Ramos
783f875d3b feat: expose hash, poseidon_hash and delete_leaf 2023-05-01 11:21:42 -04:00
Aaryamann Challani
fd7d7d9318 Integrate pmtree into rln (#147)
* feat: integrate pmtree with zerokit

* fix: tests
2023-04-28 10:02:21 +05:30
tyshko-rostyslav
4f98fd8028 chore(rln): bring hash functions under a single module (#146) 2023-04-20 16:24:29 +05:30
tyshko-rostyslav
9931e901e5 most changes (#145)
Co-authored-by: tyshkor <tyshko1@gmail.com>
2023-04-13 06:45:12 +05:30
Aaryamann Challani
0fb7e0bbcb feat: abstract shared behaviour into ZerokitMerkleTree (#142)
* feat: abstract shared behaviour into ZerokitMerkleTree

* fix: tests
2023-04-11 16:46:13 +05:30
tyshko-rostyslav
672287b77b call_bool_method_with_error_msg (#144)
Co-authored-by: tyshkor <tyshko1@gmail.com>
2023-04-10 19:45:16 +05:30
Aaryamann Challani
2e868d6cbf fix(ci): force draft=false for nightly releases (#143) 2023-03-31 18:15:37 +05:30
tyshko-rostyslav
39bea35a6d Macro to call functions with an error message with output (#141)
Another variation of our call, this time when output is used
2023-03-31 14:44:04 +02:00
tyshko-rostyslav
6ff4eeb237 Macro to call functions with an error message (#140)
abstract out calls

---------

Co-authored-by: tyshkor <tyshko1@gmail.com>
2023-03-29 15:16:36 +02:00
Aaryamann Challani
1f983bb232 fix(rln): move std::path to cfg_if block (#138) 2023-03-24 09:31:01 +05:30
tyshko-rostyslav
13a2c61355 add wasm-strip to reduce size even more (#137)
* added wasm-strip, fixed docs

* requested change

* fix installdeps

* fix ubuntu

* fix macos

---------

Co-authored-by: tyshkor <tyshko1@gmail.com>
2023-03-24 09:30:48 +05:30
tyshko-rostyslav
2bbb710e83 add Cargo.lock to the repo (#136)
add Cargo.lock to the repo
2023-03-23 07:45:36 +01:00
tyshko-rostyslav
8cd4baba8a leave our fork of ark-circom (#132)
* leave our fork of `ark-circom`

---------

Co-authored-by: tyshkor <tyshko1@gmail.com>
2023-03-22 07:01:24 +01:00
Aaryamann Challani
9045e31006 fix ci tag (#133)
* fix(ci): release tag

* fix: use 0.2.1
2023-03-20 17:47:46 +05:30
Aaryamann Challani
9e44bb64dc fix(semaphore): use fixed rev (#130) 2023-03-20 14:06:25 +05:30
Aaryamann Challani
bb7dfb80ee feat(ci): cross-compile release assets, cache deps (#128)
* feat(ci): cross-compile release assets, cache deps

chore(ci): add caching to regular tests

* fix(ci): include cross only in ci env, add note about release assets
2023-03-14 17:44:06 +05:30
Aaryamann Challani
c319f32a1e feat(rln): package rln w/ resources into a static lib (#118)
* feat(rln): package resources into lib

* fix(rln): use Path

* fix(rln): fmt

* fix(rln): trailing slash
2023-03-07 18:15:06 +05:30
tyshko-rostyslav
bf2aa16a71 chore(rln): ensure all dependencies have fixed revision (#127) 2023-03-07 09:11:08 +05:30
tyshko-rostyslav
c423bdea61 chore(rln): update pmtree implementation (#125)
* most changes

* fmt

* hide tests back under feature

* grooming

* changed `SledConfig`

* requested change: rm `dbpath`

---------

Co-authored-by: tyshkor <tyshko1@gmail.com>
2023-03-03 10:33:39 +05:30
Carlos Pérez
5eb98d4b33 change: Replace u64 for usize for length vars (#94)
* change: Replace `u64` for `usize` for length vars

Resolves: #39

* fix(rln): usize instead of u64 in tests

* fix(rln): linter

* fix: outlier u64 usage

* fix(rln|rln-wasm): serde of usize types

---------

Co-authored-by: Aaryamann Challani <43716372+rymnc@users.noreply.github.com>
2023-03-02 19:15:58 +05:30
Aaryamann Challani
b698153e28 fix(ci): nightly release (#124) 2023-03-02 07:41:49 +05:30
Aaryamann Challani
a6c8090c93 feat(v0.2): changelog (#122) 2023-02-28 16:25:18 +05:30
tyshko-rostyslav
7ee7675d52 Redundant dependencies (#111)
* most changes

* delete unused deps + update ark-circom

* fix build

* revert

* default features

* return

* revert 2

* try

* rm

---------

Co-authored-by: tyshkor <tyshko1@gmail.com>
2023-02-28 00:19:47 +05:30
Aaryamann Challani
062055dc5e fix(license): add licensing info to cargo.toml (#121) 2023-02-27 11:47:55 +05:30
tyshko-rostyslav
55b00fd653 Code quality (#114)
* to color_eyre::Result 1st part

* tests and second batch

* third batch

* rln fixes + multiplier

* rln-wasm, assert rln, multiplier

* io to color_eyre

* fmt + clippy

* fix lint

* temporary fix of `ark-circom`

* fix ci after merge

* fmt

* fix rln tests

* minor

* fix tests

* imports

* requested change

* report + commented line + requested change

* requested changes

* fix build

* lint fixes

* better comments

---------

Co-authored-by: tyshkor <tyshko1@gmail.com>
2023-02-27 11:46:16 +05:30
Rahul Ghangas
62018b4eba Update documentation for building and testing (#120)
* chore: fix Makefile pre-build

* chore: add Makefile command to install dependency cargo-make

* chore: update all READMEs with instructions to install dependencies, build and test

* chore: add target to fetch all submodules
2023-02-24 11:50:51 +05:30
oskarth
48fa1b9b3d chore: Add MIT/Apache dual license (#119) 2023-02-24 11:20:01 +08:00
Aaryamann Challani
a6145ab201 feat(rln): expose poseidon to ffi (#112) 2023-02-16 13:26:13 +05:30
Aaryamann Challani
e21e9954ac fix(semaphore): revert ark-circom dependency (#116) 2023-02-16 12:32:43 +05:30
Carlos Pérez
de5eb2066a change: Replace profile overwrites to Workspace Cargo.toml (#95)
Since profile info specified inside workspace members' `Cargo.toml`s
is ignored by Cargo, this moves those settings to the workspace-level
`Cargo.toml`.

NOTE that `panic` and `rpath` aren't supported by the override feature.
Therefore, the only remaining step (if considered necessary) is to
create a new profile that also enables these settings.

Resolves: #93
2023-02-08 12:38:42 +01:00
tyshko-rostyslav
7aba62ff51 Add rust-clippy to CI (#108)
Convert clippy warnings to errors, fix them 

---------

Co-authored-by: tyshkor <tyshko1@gmail.com>
2023-02-06 05:54:59 +01:00
68 changed files with 8548 additions and 2270 deletions

@@ -28,59 +28,32 @@ on:
name: Tests
jobs:
multiplier:
test:
strategy:
matrix:
platform: [ubuntu-latest, macos-latest]
crate: [multiplier, semaphore, rln, utils]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: multiplier - ${{ matrix.platform }}
name: test - ${{ matrix.crate }} - ${{ matrix.platform }}
steps:
- name: Checkout sources
uses: actions/checkout@v2
uses: actions/checkout@v3
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Update git submodules
run: git submodule update --init --recursive
- name: Install cargo-make
run: cargo install cargo-make
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
run: make installdeps
- name: cargo-make test
run: |
cargo make test --release
working-directory: multiplier
working-directory: ${{ matrix.crate }}
semaphore:
strategy:
matrix:
platform: [ubuntu-latest, macos-latest]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: semaphore - ${{ matrix.platform }}
steps:
- name: Checkout sources
uses: actions/checkout@v2
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Update git submodules
run: git submodule update --init --recursive
- name: Install cargo-make
run: cargo install cargo-make
- name: cargo-make test
run: |
cargo make test --release
working-directory: semaphore
rln-wasm:
strategy:
matrix:
@@ -88,88 +61,38 @@ jobs:
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: rln-wasm - ${{ matrix.platform }}
name: test - rln-wasm - ${{ matrix.platform }}
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- run: git submodule update --init --recursive
- uses: Swatinem/rust-cache@v2
- name: Install Dependencies
run: make installdeps
- name: Install wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
- run: cargo install cargo-make
- run: cargo make build
working-directory: rln-wasm
- run: cargo-make test
- run: cargo make test --release
working-directory: rln-wasm
rln:
strategy:
matrix:
platform: [ubuntu-latest, macos-latest]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: rln - ${{ matrix.platform }}
steps:
- name: Checkout sources
uses: actions/checkout@v2
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Update git submodules
run: git submodule update --init --recursive
- name: Install cargo-make
run: cargo install cargo-make
- name: cargo-make test
run: |
cargo make test --release
working-directory: rln
utils:
strategy:
matrix:
platform: [ubuntu-latest, macos-latest]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: utils - ${{ matrix.platform }}
steps:
- name: Checkout sources
uses: actions/checkout@v2
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Update git submodules
run: git submodule update --init --recursive
- name: Install cargo-make
run: cargo install cargo-make
- name: cargo-make test
run: |
cargo make test --release
working-directory: utils
lint:
strategy:
matrix:
# we run lint tests only on ubuntu
platform: [ubuntu-latest]
crate: [multiplier, semaphore, rln, utils]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: lint - ${{ matrix.platform }}
name: lint - ${{ matrix.crate }} - ${{ matrix.platform }}
steps:
- name: Checkout sources
uses: actions/checkout@v2
uses: actions/checkout@v3
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
@@ -177,31 +100,38 @@ jobs:
toolchain: stable
override: true
components: rustfmt, clippy
- name: Update git submodules
run: git submodule update --init --recursive
- uses: Swatinem/rust-cache@v2
- name: Install Dependencies
run: make installdeps
- name: cargo fmt
if: success() || failure()
run: cargo fmt --all -- --check
- name: multiplier - cargo clippy
run: cargo fmt -- --check
working-directory: ${{ matrix.crate }}
- name: cargo clippy
if: success() || failure()
run: |
cargo clippy --release
working-directory: multiplier
- name: semaphore - cargo clippy
if: success() || failure()
run: |
cargo clippy --release
working-directory: semaphore
- name: rln - cargo clippy
if: success() || failure()
run: |
cargo clippy --release
working-directory: rln
- name: utils - cargo clippy
if: success() || failure()
run: |
cargo clippy --release
working-directory: utils
cargo clippy --release -- -D warnings
working-directory: ${{ matrix.crate }}
# We skip clippy on rln-wasm, since wasm target is managed by cargo make
# Currently not treating warnings as error, too noisy
# -- -D warnings
# -- -D warnings
benchmark:
# run only in pull requests
if: github.event_name == 'pull_request'
strategy:
matrix:
# we run benchmark tests only on ubuntu
platform: [ubuntu-latest]
crate: [rln, utils]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: benchmark - ${{ matrix.platform }} - ${{ matrix.crate }}
steps:
- name: Checkout sources
uses: actions/checkout@v3
- uses: Swatinem/rust-cache@v2
- uses: boa-dev/criterion-compare-action@v3
with:
branchName: ${{ github.base_ref }}
cwd: ${{ matrix.crate }}

@@ -6,61 +6,74 @@ on:
jobs:
linux:
strategy:
matrix:
target:
- x86_64-unknown-linux-gnu
- aarch64-unknown-linux-gnu
- i686-unknown-linux-gnu
name: Linux build
runs-on: ubuntu-latest
steps:
- name: Checkout sources
uses: actions/checkout@v2
uses: actions/checkout@v3
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Update git submodules
run: git submodule update --init --recursive
- name: cargo build
target: ${{ matrix.target }}
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
run: make installdeps
- name: cross build
run: |
cargo build --release --workspace --exclude rln-wasm
cross build --release --target ${{ matrix.target }} --workspace --exclude rln-wasm
mkdir release
cp target/release/librln* release/
tar -czvf linux-rln.tar.gz release/
cp target/${{ matrix.target }}/release/librln* release/
tar -czvf ${{ matrix.target }}-rln.tar.gz release/
- name: Upload archive artifact
uses: actions/upload-artifact@v2
with:
name: linux-archive
path: linux-rln.tar.gz
name: ${{ matrix.target }}-archive
path: ${{ matrix.target }}-rln.tar.gz
retention-days: 2
macos:
name: MacOS build
runs-on: macos-latest
strategy:
matrix:
target:
- x86_64-apple-darwin
- aarch64-apple-darwin
steps:
- name: Checkout sources
uses: actions/checkout@v2
with:
ref: master
uses: actions/checkout@v3
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Update git submodules
run: git submodule update --init --recursive
- name: cargo build
target: ${{ matrix.target }}
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
run: make installdeps
- name: cross build
run: |
cargo build --release --workspace --exclude rln-wasm
cross build --release --target ${{ matrix.target }} --workspace --exclude rln-wasm
mkdir release
cp target/release/librln* release/
tar -czvf macos-rln.tar.gz release/
cp target/${{ matrix.target }}/release/librln* release/
tar -czvf ${{ matrix.target }}-rln.tar.gz release/
- name: Upload archive artifact
uses: actions/upload-artifact@v2
with:
name: macos-archive
path: macos-rln.tar.gz
name: ${{ matrix.target }}-archive
path: ${{ matrix.target }}-rln.tar.gz
retention-days: 2
browser-rln-wasm:
@@ -68,23 +81,21 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout sources
uses: actions/checkout@v2
with:
ref: master
uses: actions/checkout@v3
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- run: git submodule update --init --recursive
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
run: make installdeps
- name: Install wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
- name: Install cargo-make
run: cargo install cargo-make
- name: cargo make build
- name: cross make build
run: |
cargo make build
cross make build
mkdir release
cp pkg/** release/
tar -czvf browser-rln-wasm.tar.gz release/
@@ -104,14 +115,14 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
uses: actions/checkout@v3
with:
ref: master
- name: Download artifacts
uses: actions/download-artifact@v2
- name: Delete tag
uses: dev-drprasad/delete-tag-and-release@v0.2.0
uses: dev-drprasad/delete-tag-and-release@v0.2.1
with:
delete_release: true
tag_name: nightly
@@ -120,14 +131,13 @@ jobs:
- name: Create prerelease
run: |
start_tag=$(gh release list -L 2 --exclude-drafts | grep -v nightly | cut -d$'\t' -f3)
start_tag=$(gh release list -L 2 --exclude-drafts | grep -v nightly | cut -d$'\t' -f3 | sed -n '1p')
gh release create nightly --prerelease --target master \
--title 'Nightly build ("master" branch)' \
--generate-notes \
--draft=false \
--notes-start-tag $start_tag \
linux-archive/linux-rln.tar.gz \
macos-archive/macos-rln.tar.gz \
browser-rln-wasm-archive/browser-rln-wasm.tar.gz
*-archive/*.tar.gz \
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -136,6 +146,4 @@ jobs:
with:
failOnError: false
name: |
linux-archive
macos-archive
browser-rln-wasm-archive
*-archive

@@ -9,7 +9,7 @@ jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
- uses: micnncim/action-label-syncer@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

.gitignore

@@ -8,10 +8,7 @@ rln/pmtree_db
# will have compiled files and executables
debug/
target/
# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
Cargo.lock
wabt/
# These are backup files generated by rustfmt
**/*.rs.bk

@@ -1,18 +1,15 @@
## Upcoming release
## 2023-02-28 v0.2
Release highlights:
This release contains:
- Improved code quality
- Allows consumers of zerokit RLN to set leaves to the Merkle Tree from an arbitrary index. Useful for batching updates to the Merkle Tree.
- Improved performance for proof generation and verification
- rln_wasm which allows for the consumption of RLN through a WebAssembly interface
- Refactored to generate Semaphore-compatible credentials
- Dual License under Apache 2.0 and MIT
- RLN compiles as a static library, which can be consumed through a C FFI
The full list of changes is below.
### Features
- Creation of `set_leaves_from`, which allows consumers to add leaves to a tree from a given starting index. `init_tree_with_leaves` internally uses `set_leaves_from`, with index 0.
### Changes
- Renaming of `set_leaves` to `init_tree_with_leaves`, which is a more accurate representation of the function's utility.
### Fixes
- None
## 2022-09-19 v0.1

Cargo.lock (new generated file, 4276 lines; diff suppressed because it is too large)

@@ -4,6 +4,21 @@ members = [
"private-settlement",
"semaphore",
"rln",
"rln-cli",
"rln-wasm",
"utils",
]
resolver = "2"
# Compilation profile for any non-workspace member.
# Dependencies are optimized, even in a dev build. This improves dev performance
# while having negligible impact on incremental build times.
[profile.dev.package."*"]
opt-level = 3
[profile.release.package."rln-wasm"]
# Tell `rustc` to optimize for small code size.
opt-level = "s"
[profile.release.package."semaphore"]
codegen-units = 1

Cross.toml (new file, 32 lines)

@@ -0,0 +1,32 @@
[target.x86_64-pc-windows-gnu]
image = "ghcr.io/cross-rs/x86_64-pc-windows-gnu:latest"
[target.aarch64-unknown-linux-gnu]
image = "ghcr.io/cross-rs/aarch64-unknown-linux-gnu:latest"
[target.x86_64-unknown-linux-gnu]
image = "ghcr.io/cross-rs/x86_64-unknown-linux-gnu:latest"
[target.arm-unknown-linux-gnueabi]
image = "ghcr.io/cross-rs/arm-unknown-linux-gnueabi:latest"
[target.i686-pc-windows-gnu]
image = "ghcr.io/cross-rs/i686-pc-windows-gnu:latest"
[target.i686-unknown-linux-gnu]
image = "ghcr.io/cross-rs/i686-unknown-linux-gnu:latest"
[target.arm-unknown-linux-gnueabihf]
image = "ghcr.io/cross-rs/arm-unknown-linux-gnueabihf:latest"
[target.mips-unknown-linux-gnu]
image = "ghcr.io/cross-rs/mips-unknown-linux-gnu:latest"
[target.mips64-unknown-linux-gnuabi64]
image = "ghcr.io/cross-rs/mips64-unknown-linux-gnuabi64:latest"
[target.mips64el-unknown-linux-gnuabi64]
image = "ghcr.io/cross-rs/mips64el-unknown-linux-gnuabi64:latest"
[target.mipsel-unknown-linux-gnu]
image = "ghcr.io/cross-rs/mipsel-unknown-linux-gnu:latest"

LICENSE-APACHE (new file, 203 lines)

@@ -0,0 +1,203 @@
Copyright (c) 2022 Vac Research
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

LICENSE-MIT (new file, 25 lines)

@@ -0,0 +1,25 @@
Copyright (c) 2022 Vac Research
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

@@ -1,13 +1,31 @@
.PHONY: all test clean
.PHONY: all installdeps build test clean
all: .pre-build
@cargo make build
all: .pre-build build
.pre-build:
ifndef $(cargo make --help)
@cargo install --force cargo-make
.fetch-submodules:
@git submodule update --init --recursive
.pre-build: .fetch-submodules
@cargo install cargo-make
ifdef CI
@cargo install cross --git https://github.com/cross-rs/cross.git --rev 1511a28
endif
installdeps: .pre-build
ifeq ($(shell uname),Darwin)
# commented due to https://github.com/orgs/Homebrew/discussions/4612
# @brew update
@brew install cmake ninja
else ifeq ($(shell uname),Linux)
@sudo apt-get update
@sudo apt-get install -y cmake ninja-build
endif
@git clone --recursive https://github.com/WebAssembly/wabt.git
@cd wabt && mkdir build && cd build && cmake .. -GNinja && ninja && sudo ninja install
build: .pre-build
@cargo make build
test: .pre-build
@cargo make test

@@ -17,3 +17,19 @@ in Rust.
- [RLN library](https://github.com/kilic/rln) written in Rust based on Bellman.
- [semaphore-rs](https://github.com/worldcoin/semaphore-rs) written in Rust based on ark-circom.
## Build and Test
To install missing dependencies, run the following commands from the root folder
```bash
make installdeps
```
To build and test all crates, run the following commands from the root folder
```bash
make build
make test
```
## Release assets
We use [`cross-rs`](https://github.com/cross-rs/cross) to cross-compile and generate release assets for rln.

@@ -1,7 +1,8 @@
[package]
name = "multiplier"
version = "0.1.0"
version = "0.3.0"
edition = "2018"
license = "MIT OR Apache-2.0"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -12,23 +13,19 @@ edition = "2018"
# fnv = { version = "1.0.3", default-features = false }
# num = { version = "0.4.0" }
# num-traits = { version = "0.2.0", default-features = false }
num-bigint = { version = "0.4", default-features = false, features = ["rand"] }
# ZKP Generation
ark-ec = { version = "0.3.0", default-features = false, features = ["parallel"] }
# ark-ff = { version = "0.3.0", default-features = false, features = ["parallel", "asm"] }
ark-std = { version = "0.3.0", default-features = false, features = ["parallel"] }
ark-bn254 = { version = "0.3.0" }
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", features = ["parallel"] }
# ark-poly = { version = "^0.3.0", default-features = false, features = ["parallel"] }
ark-relations = { version = "0.3.0", default-features = false }
ark-serialize = { version = "0.3.0", default-features = false }
ark-circom = { git = "https://github.com/gakonst/ark-circom", features = ["circom-2"] }
ark-circom = { git = "https://github.com/gakonst/ark-circom", features = ["circom-2"], rev = "35ce5a9" }
# error handling
# thiserror = "1.0.26"
color-eyre = "0.5"
color-eyre = "0.6.1"
# decoding of data
# hex = "0.4.3"

@@ -3,7 +3,15 @@
Example wrapper around a basic Circom circuit to test Circom 2 integration
through ark-circom and FFI.
# FFI
## Build and Test
To build and test, run the following commands within the module folder
```bash
cargo make build
cargo make test
```
## FFI
To generate C or Nim bindings from Rust FFI, use `cbindgen` or `nbindgen`:

@@ -31,12 +31,12 @@ impl<'a> From<&Buffer> for &'a [u8] {
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn new_circuit(ctx: *mut *mut Multiplier) -> bool {
println!("multiplier ffi: new");
let mul = Multiplier::new();
unsafe { *ctx = Box::into_raw(Box::new(mul)) };
true
if let Ok(mul) = Multiplier::new() {
unsafe { *ctx = Box::into_raw(Box::new(mul)) };
true
} else {
false
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]

@@ -1,6 +1,6 @@
use ark_circom::{CircomBuilder, CircomConfig};
use ark_std::rand::thread_rng;
use color_eyre::Result;
use color_eyre::{Report, Result};
use ark_bn254::Bn254;
use ark_groth16::{
@@ -25,17 +25,18 @@ fn groth16_proof_example() -> Result<()> {
let circom = builder.build()?;
let inputs = circom.get_public_inputs().unwrap();
let inputs = circom
.get_public_inputs()
.ok_or(Report::msg("no public inputs"))?;
let proof = prove(circom, &params, &mut rng)?;
let pvk = prepare_verifying_key(&params.vk);
let verified = verify_proof(&pvk, &proof, &inputs)?;
assert!(verified);
Ok(())
match verify_proof(&pvk, &proof, &inputs) {
Ok(_) => Ok(()),
Err(_) => Err(Report::msg("not verified")),
}
}
fn main() {

@@ -7,9 +7,8 @@ use ark_groth16::{
Proof, ProvingKey,
};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
// , SerializationError};
use std::io::{self, Read, Write};
use color_eyre::{Report, Result};
use std::io::{Read, Write};
pub struct Multiplier {
circom: CircomCircuit<Bn254>,
@@ -18,12 +17,11 @@ pub struct Multiplier {
impl Multiplier {
// TODO Break this apart here
pub fn new() -> Multiplier {
pub fn new() -> Result<Multiplier> {
let cfg = CircomConfig::<Bn254>::new(
"./resources/circom2_multiplier2.wasm",
"./resources/circom2_multiplier2.r1cs",
)
.unwrap();
)?;
let mut builder = CircomBuilder::new(cfg);
builder.push_input("a", 3);
@@ -34,40 +32,41 @@ impl Multiplier {
let mut rng = thread_rng();
let params = generate_random_parameters::<Bn254, _, _>(circom, &mut rng).unwrap();
let params = generate_random_parameters::<Bn254, _, _>(circom, &mut rng)?;
let circom = builder.build().unwrap();
let circom = builder.build()?;
//let inputs = circom.get_public_inputs().unwrap();
Multiplier { circom, params }
Ok(Multiplier { circom, params })
}
// TODO Input Read
pub fn prove<W: Write>(&self, result_data: W) -> io::Result<()> {
pub fn prove<W: Write>(&self, result_data: W) -> Result<()> {
let mut rng = thread_rng();
// XXX: There's probably a better way to do this
let circom = self.circom.clone();
let params = self.params.clone();
let proof = prove(circom, &params, &mut rng).unwrap();
let proof = prove(circom, &params, &mut rng)?;
// XXX: Unclear if this is different from other serialization(s)
let _ = proof.serialize(result_data).unwrap();
proof.serialize(result_data)?;
Ok(())
}
pub fn verify<R: Read>(&self, input_data: R) -> io::Result<bool> {
let proof = Proof::deserialize(input_data).unwrap();
pub fn verify<R: Read>(&self, input_data: R) -> Result<bool> {
let proof = Proof::deserialize(input_data)?;
let pvk = prepare_verifying_key(&self.params.vk);
// XXX Part of input data?
let inputs = self.circom.get_public_inputs().unwrap();
let inputs = self
.circom
.get_public_inputs()
.ok_or(Report::msg("no public inputs"))?;
let verified = verify_proof(&pvk, &proof, &inputs).unwrap();
let verified = verify_proof(&pvk, &proof, &inputs)?;
Ok(verified)
}
@@ -75,6 +74,6 @@ impl Multiplier {
impl Default for Multiplier {
fn default() -> Self {
Self::new()
Self::new().unwrap()
}
}

@@ -4,8 +4,7 @@ mod tests {
#[test]
fn multiplier_proof() {
let mul = Multiplier::new();
//let inputs = mul.circom.get_public_inputs().unwrap();
let mul = Multiplier::new().unwrap();
let mut output_data: Vec<u8> = Vec::new();
let _ = mul.prove(&mut output_data);

@@ -1,7 +1,8 @@
[package]
name = "private-settlement"
version = "0.1.0"
version = "0.3.0"
edition = "2021"
license = "MIT OR Apache-2.0"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

@@ -2,3 +2,10 @@
This module is to provide APIs to manage, compute and verify [Private Settlement](https://rfc.vac.dev/spec/44/) zkSNARK proofs and primitives.
## Build and Test
To build and test, run the following commands within the module folder
```bash
cargo make build
cargo make test
```

rln-cli/Cargo.toml (new file, 13 lines)

@@ -0,0 +1,13 @@
[package]
name = "rln-cli"
version = "0.3.0"
edition = "2021"
[dependencies]
rln = { path = "../rln" }
clap = { version = "4.2.7", features = ["cargo", "derive", "env"]}
clap_derive = { version = "=4.2.0" }
color-eyre = "=0.6.2"
# serialization
serde_json = "1.0.48"
serde = { version = "1.0.130", features = ["derive"] }

rln-cli/src/commands.rs (new file, 67 lines)

@@ -0,0 +1,67 @@
use std::path::PathBuf;
use clap::Subcommand;
#[derive(Subcommand)]
pub(crate) enum Commands {
New {
tree_height: usize,
/// Sets a custom config file
#[arg(short, long)]
config: PathBuf,
},
NewWithParams {
tree_height: usize,
/// Sets a custom config file
#[arg(short, long)]
config: PathBuf,
#[arg(short, long)]
tree_config_input: PathBuf,
},
SetTree {
tree_height: usize,
},
SetLeaf {
index: usize,
#[arg(short, long)]
file: PathBuf,
},
SetMultipleLeaves {
index: usize,
#[arg(short, long)]
file: PathBuf,
},
ResetMultipleLeaves {
#[arg(short, long)]
file: PathBuf,
},
SetNextLeaf {
#[arg(short, long)]
file: PathBuf,
},
DeleteLeaf {
index: usize,
},
GetRoot,
GetProof {
index: usize,
},
Prove {
#[arg(short, long)]
input: PathBuf,
},
Verify {
#[arg(short, long)]
file: PathBuf,
},
GenerateProof {
#[arg(short, long)]
input: PathBuf,
},
VerifyWithRoots {
#[arg(short, long)]
input: PathBuf,
#[arg(short, long)]
roots: PathBuf,
},
}

rln-cli/src/config.rs (new file, 29 lines)

@@ -0,0 +1,29 @@
use color_eyre::Result;
use serde::{Deserialize, Serialize};
use std::{fs::File, io::Read, path::PathBuf};
pub const RLN_STATE_PATH: &str = "RLN_STATE_PATH";
#[derive(Default, Serialize, Deserialize)]
pub(crate) struct Config {
pub inner: Option<InnerConfig>,
}
#[derive(Default, Serialize, Deserialize)]
pub(crate) struct InnerConfig {
pub file: PathBuf,
pub tree_height: usize,
}
impl Config {
pub(crate) fn load_config() -> Result<Config> {
let path = PathBuf::from(std::env::var(RLN_STATE_PATH)?);
let mut file = File::open(path)?;
let mut contents = String::new();
file.read_to_string(&mut contents)?;
let state: Config = serde_json::from_str(&contents)?;
Ok(state)
}
}

rln-cli/src/main.rs (new file, 157 lines)

@@ -0,0 +1,157 @@
use std::{fs::File, io::Read, path::Path};
use clap::Parser;
use color_eyre::{Report, Result};
use commands::Commands;
use rln::public::RLN;
use state::State;
mod commands;
mod config;
mod state;
#[derive(Parser)]
#[command(author, version, about, long_about = None)]
struct Cli {
#[command(subcommand)]
command: Option<Commands>,
}
fn main() -> Result<()> {
let cli = Cli::parse();
let mut state = State::load_state()?;
match &cli.command {
Some(Commands::New {
tree_height,
config,
}) => {
let resources = File::open(&config)?;
state.rln = Some(RLN::new(*tree_height, resources)?);
Ok(())
}
Some(Commands::NewWithParams {
tree_height,
config,
tree_config_input,
}) => {
let mut resources: Vec<Vec<u8>> = Vec::new();
for filename in ["rln.wasm", "rln_final.zkey", "verification_key.json"] {
let fullpath = config.join(Path::new(filename));
let mut file = File::open(&fullpath)?;
let metadata = std::fs::metadata(&fullpath)?;
let mut buffer = vec![0; metadata.len() as usize];
file.read_exact(&mut buffer)?;
resources.push(buffer);
}
let tree_config_input_file = File::open(&tree_config_input)?;
state.rln = Some(RLN::new_with_params(
*tree_height,
resources[0].clone(),
resources[1].clone(),
resources[2].clone(),
tree_config_input_file,
)?);
Ok(())
}
Some(Commands::SetTree { tree_height }) => {
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.set_tree(*tree_height)?;
Ok(())
}
Some(Commands::SetLeaf { index, file }) => {
let input_data = File::open(&file)?;
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.set_leaf(*index, input_data)?;
Ok(())
}
Some(Commands::SetMultipleLeaves { index, file }) => {
let input_data = File::open(&file)?;
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.set_leaves_from(*index, input_data)?;
Ok(())
}
Some(Commands::ResetMultipleLeaves { file }) => {
let input_data = File::open(&file)?;
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.init_tree_with_leaves(input_data)?;
Ok(())
}
Some(Commands::SetNextLeaf { file }) => {
let input_data = File::open(&file)?;
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.set_next_leaf(input_data)?;
Ok(())
}
Some(Commands::DeleteLeaf { index }) => {
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.delete_leaf(*index)?;
Ok(())
}
Some(Commands::GetRoot) => {
let writer = std::io::stdout();
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.get_root(writer)?;
Ok(())
}
Some(Commands::GetProof { index }) => {
let writer = std::io::stdout();
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.get_proof(*index, writer)?;
Ok(())
}
Some(Commands::Prove { input }) => {
let input_data = File::open(&input)?;
let writer = std::io::stdout();
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.prove(input_data, writer)?;
Ok(())
}
Some(Commands::Verify { file }) => {
let input_data = File::open(&file)?;
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.verify(input_data)?;
Ok(())
}
Some(Commands::GenerateProof { input }) => {
let input_data = File::open(&input)?;
let writer = std::io::stdout();
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.generate_rln_proof(input_data, writer)?;
Ok(())
}
Some(Commands::VerifyWithRoots { input, roots }) => {
let input_data = File::open(&input)?;
let roots_data = File::open(&roots)?;
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.verify_with_roots(input_data, roots_data)?;
Ok(())
}
None => Ok(()),
}
}

rln-cli/src/state.rs (new file, 23 lines)

@@ -0,0 +1,23 @@
use color_eyre::Result;
use rln::public::RLN;
use std::fs::File;
use crate::config::{Config, InnerConfig};
#[derive(Default)]
pub(crate) struct State<'a> {
pub rln: Option<RLN<'a>>,
}
impl<'a> State<'a> {
pub(crate) fn load_state() -> Result<State<'a>> {
let config = Config::load_config()?;
let rln = if let Some(InnerConfig { file, tree_height }) = config.inner {
let resources = File::open(&file)?;
Some(RLN::new(tree_height, resources)?)
} else {
None
};
Ok(State { rln })
}
}

@@ -1,6 +1,6 @@
[package]
name = "rln-wasm"
version = "0.0.7"
version = "0.0.9"
edition = "2021"
license = "MIT or Apache2"
@@ -31,6 +31,3 @@ console_error_panic_hook = { version = "0.1.7", optional = true }
wasm-bindgen-test = "0.3.13"
wasm-bindgen-futures = "0.4.33"
[profile.release]
# Tell `rustc` to optimize for small code size.
opt-level = "s"

@@ -9,9 +9,14 @@ script = "sed -i.bak 's/rln-wasm/zerokit-rln-wasm/g' pkg/package.json && rm pkg/
clear = true
dependencies = [
"pack-build",
"pack-rename"
"pack-rename",
"post-build"
]
[tasks.post-build]
command = "wasm-strip"
args = ["./pkg/rln_wasm_bg.wasm"]
[tasks.test]
command = "wasm-pack"
args = ["test", "--release", "--node"]

@@ -10,11 +10,22 @@ curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh
```
cargo install cargo-make
```
OR
```
make installdeps
```
3. Compile zerokit for `wasm32-unknown-unknown`:
```
cd rln-wasm
cargo make build
```
4. Compile a slimmer version of zerokit for `wasm32-unknown-unknown`:
```
cd rln-wasm
cargo make post-build
```
## Running tests
```

@@ -3,9 +3,11 @@
extern crate wasm_bindgen;
extern crate web_sys;
use std::vec::Vec;
use js_sys::{BigInt as JsBigInt, Object, Uint8Array};
use num_bigint::BigInt;
use rln::public::RLN;
use rln::public::{hash, poseidon_hash, RLN};
use wasm_bindgen::prelude::*;
#[wasm_bindgen]
@@ -20,34 +22,196 @@ pub struct RLNWrapper {
instance: RLN<'static>,
}
// Macro to call methods with arbitrary amount of arguments,
// which have the last argument is output buffer pointer
// First argument to the macro is context,
// second is the actual method on `RLN`
// third is the aforementioned output buffer argument
// rest are all other arguments to the method
macro_rules! call_with_output_and_error_msg {
// this variant is needed for the case when
// there are zero other arguments
($instance:expr, $method:ident, $error_msg:expr) => {
{
let mut output_data: Vec<u8> = Vec::new();
let new_instance = $instance.process();
if let Err(err) = new_instance.instance.$method(&mut output_data) {
std::mem::forget(output_data);
Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
} else {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
}
}
};
($instance:expr, $method:ident, $error_msg:expr, $( $arg:expr ),* ) => {
{
let mut output_data: Vec<u8> = Vec::new();
let new_instance = $instance.process();
if let Err(err) = new_instance.instance.$method($($arg.process()),*, &mut output_data) {
std::mem::forget(output_data);
Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
} else {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
}
}
};
}
// Macro to call_with_error_msg methods with arbitrary amount of arguments,
// First argument to the macro is context,
// second is the actual method on `RLNWrapper`
// rest are all other arguments to the method
macro_rules! call_with_error_msg {
($instance:expr, $method:ident, $error_msg:expr $(, $arg:expr)*) => {
{
let new_instance: &mut RLNWrapper = $instance.process();
if let Err(err) = new_instance.instance.$method($($arg.process()),*) {
Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
} else {
Ok(())
}
}
}
}
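// Illustrative expansion (an assumption about what the macro emits, not
// compiler output): a call site such as
//     call_with_error_msg!(ctx, set_next_leaf, "msg".to_string(), &input[..])
// expands to roughly:
//     let new_instance: &mut RLNWrapper = ctx.process();
//     if let Err(err) = new_instance.instance.set_next_leaf((&input[..]).process()) {
//         Err(format!("Msg: {:#?}, Error: {:#?}", "msg".to_string(), err))
//     } else {
//         Ok(())
//     }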
macro_rules! call {
($instance:expr, $method:ident $(, $arg:expr)*) => {
{
let new_instance: &mut RLNWrapper = $instance.process();
new_instance.instance.$method($($arg.process()),*)
}
}
}
macro_rules! call_bool_method_with_error_msg {
($instance:expr, $method:ident, $error_msg:expr $(, $arg:expr)*) => {
{
let new_instance: &RLNWrapper = $instance.process();
new_instance.instance.$method($($arg.process()),*).map_err(|err| format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
}
}
}
// Macro to execute a function with arbitrary amount of arguments,
// First argument is the function to execute
// Rest are all other arguments to the method
macro_rules! fn_call_with_output_and_error_msg {
// this variant is needed for the case when
// there are zero other arguments
($func:ident, $error_msg:expr) => {
{
let mut output_data: Vec<u8> = Vec::new();
if let Err(err) = $func(&mut output_data) {
std::mem::forget(output_data);
Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
} else {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
}
}
};
($func:ident, $error_msg:expr, $( $arg:expr ),* ) => {
{
let mut output_data: Vec<u8> = Vec::new();
if let Err(err) = $func($($arg.process()),*, &mut output_data) {
std::mem::forget(output_data);
Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
} else {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
}
}
};
}
trait ProcessArg {
type ReturnType;
fn process(self) -> Self::ReturnType;
}
impl ProcessArg for usize {
type ReturnType = usize;
fn process(self) -> Self::ReturnType {
self
}
}
impl<T> ProcessArg for Vec<T> {
type ReturnType = Vec<T>;
fn process(self) -> Self::ReturnType {
self
}
}
impl<'a> ProcessArg for *const RLN<'a> {
type ReturnType = &'a RLN<'a>;
fn process(self) -> Self::ReturnType {
unsafe { &*self }
}
}
impl ProcessArg for *const RLNWrapper {
type ReturnType = &'static RLNWrapper;
fn process(self) -> Self::ReturnType {
unsafe { &*self }
}
}
impl ProcessArg for *mut RLNWrapper {
type ReturnType = &'static mut RLNWrapper;
fn process(self) -> Self::ReturnType {
unsafe { &mut *self }
}
}
impl<'a> ProcessArg for &'a [u8] {
type ReturnType = &'a [u8];
fn process(self) -> Self::ReturnType {
self
}
}
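The `ProcessArg` trait lets the macros above treat raw pointers and plain values uniformly: every argument goes through `.process()`, which (unsafely) dereferences pointers and passes values through unchanged. A minimal sketch of how a further argument type could opt in; the `MyArg` type is hypothetical:
// Hypothetical example: making a custom wrapper usable as a macro argument.
struct MyArg(u32);

impl ProcessArg for MyArg {
    type ReturnType = u32;
    fn process(self) -> Self::ReturnType {
        self.0
    }
}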
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = newRLN)]
// Before:
pub fn wasm_new(tree_height: usize, zkey: Uint8Array, vk: Uint8Array) -> *mut RLNWrapper {
    let instance = RLN::new_with_params(tree_height, zkey.to_vec(), vk.to_vec());
    let wrapper = RLNWrapper { instance };
    Box::into_raw(Box::new(wrapper))
}
// After:
pub fn wasm_new(
    tree_height: usize,
    zkey: Uint8Array,
    vk: Uint8Array,
) -> Result<*mut RLNWrapper, String> {
    let instance = RLN::new_with_params(tree_height, zkey.to_vec(), vk.to_vec())
        .map_err(|err| format!("{:#?}", err))?;
    let wrapper = RLNWrapper { instance };
    Ok(Box::into_raw(Box::new(wrapper)))
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = getSerializedRLNWitness)]
pub fn wasm_get_serialized_rln_witness(ctx: *mut RLNWrapper, input: Uint8Array) -> Uint8Array {
let wrapper = unsafe { &mut *ctx };
let rln_witness = wrapper
.instance
.get_serialized_rln_witness(&input.to_vec()[..]);
Uint8Array::from(&rln_witness[..])
pub fn wasm_get_serialized_rln_witness(
ctx: *mut RLNWrapper,
input: Uint8Array,
) -> Result<Uint8Array, String> {
let rln_witness = call!(ctx, get_serialized_rln_witness, &input.to_vec()[..])
.map_err(|err| format!("{:#?}", err))?;
Ok(Uint8Array::from(&rln_witness[..]))
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = insertMember)]
pub fn wasm_set_next_leaf(ctx: *mut RLNWrapper, input: Uint8Array) -> Result<(), String> {
let wrapper = unsafe { &mut *ctx };
if wrapper.instance.set_next_leaf(&input.to_vec()[..]).is_ok() {
Ok(())
} else {
Err("could not insert member into merkle tree".into())
}
call_with_error_msg!(
ctx,
set_next_leaf,
"could not insert member into merkle tree".to_string(),
&input.to_vec()[..]
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
@@ -57,45 +221,59 @@ pub fn wasm_set_leaves_from(
index: usize,
input: Uint8Array,
) -> Result<(), String> {
let wrapper = unsafe { &mut *ctx };
if wrapper
.instance
.set_leaves_from(index as usize, &input.to_vec()[..])
.is_ok()
{
Ok(())
} else {
Err("could not set multiple leaves".into())
}
call_with_error_msg!(
ctx,
set_leaves_from,
"could not set multiple leaves".to_string(),
index,
&*input.to_vec()
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = deleteLeaf)]
pub fn wasm_delete_leaf(ctx: *mut RLNWrapper, index: usize) -> Result<(), String> {
call_with_error_msg!(ctx, delete_leaf, "could not delete leaf".to_string(), index)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = setMetadata)]
pub fn wasm_set_metadata(ctx: *mut RLNWrapper, input: Uint8Array) -> Result<(), String> {
call_with_error_msg!(
ctx,
set_metadata,
"could not set metadata".to_string(),
&*input.to_vec()
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = getMetadata)]
pub fn wasm_get_metadata(ctx: *mut RLNWrapper) -> Result<Uint8Array, String> {
call_with_output_and_error_msg!(ctx, get_metadata, "could not get metadata".to_string())
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = initTreeWithLeaves)]
pub fn wasm_init_tree_with_leaves(ctx: *mut RLNWrapper, input: Uint8Array) -> Result<(), String> {
let wrapper = unsafe { &mut *ctx };
if wrapper
.instance
.init_tree_with_leaves(&input.to_vec()[..])
.is_ok()
{
Ok(())
} else {
Err("could not init merkle tree".into())
}
call_with_error_msg!(
ctx,
init_tree_with_leaves,
"could not init merkle tree".to_string(),
&*input.to_vec()
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = RLNWitnessToJson)]
pub fn rln_witness_to_json(ctx: *mut RLNWrapper, serialized_witness: Uint8Array) -> Object {
let wrapper = unsafe { &mut *ctx };
let inputs = wrapper
.instance
.get_rln_witness_json(&serialized_witness.to_vec()[..])
.unwrap();
let js_value = serde_wasm_bindgen::to_value(&inputs).unwrap();
let obj = Object::from_entries(&js_value);
obj.unwrap()
pub fn rln_witness_to_json(
ctx: *mut RLNWrapper,
serialized_witness: Uint8Array,
) -> Result<Object, String> {
let inputs = call!(ctx, get_rln_witness_json, &serialized_witness.to_vec()[..])
.map_err(|err| err.to_string())?;
let js_value = serde_wasm_bindgen::to_value(&inputs).map_err(|err| err.to_string())?;
Object::from_entries(&js_value).map_err(|err| format!("{:#?}", err))
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
@@ -105,83 +283,49 @@ pub fn generate_rln_proof_with_witness(
calculated_witness: Vec<JsBigInt>,
serialized_witness: Uint8Array,
) -> Result<Uint8Array, String> {
// Before:
    let wrapper = unsafe { &mut *ctx };
    let witness_vec: Vec<BigInt> = calculated_witness
        .iter()
        .map(|v| {
            v.to_string(10)
                .unwrap()
                .as_string()
                .unwrap()
                .parse::<BigInt>()
                .unwrap()
        })
        .collect();
    let mut output_data: Vec<u8> = Vec::new();
    if wrapper
        .instance
        .generate_rln_proof_with_witness(witness_vec, serialized_witness.to_vec(), &mut output_data)
        .is_ok()
    {
        let result = Uint8Array::from(&output_data[..]);
        std::mem::forget(output_data);
        Ok(result)
    } else {
        std::mem::forget(output_data);
        Err("could not generate proof".into())
    }
}
// After:
    let mut witness_vec: Vec<BigInt> = vec![];
    for v in calculated_witness {
        witness_vec.push(
            v.to_string(10)
                .map_err(|err| format!("{:#?}", err))?
                .as_string()
                .ok_or("not a string error")?
                .parse::<BigInt>()
                .map_err(|err| format!("{:#?}", err))?,
        );
    }
    call_with_output_and_error_msg!(
        ctx,
        generate_rln_proof_with_witness,
        "could not generate proof",
        witness_vec,
        serialized_witness.to_vec()
    )
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateMembershipKey)]
pub fn wasm_key_gen(ctx: *const RLNWrapper) -> Result<Uint8Array, String> {
let wrapper = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
if wrapper.instance.key_gen(&mut output_data).is_ok() {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
} else {
std::mem::forget(output_data);
Err("could not generate membership keys".into())
}
call_with_output_and_error_msg!(ctx, key_gen, "could not generate membership keys")
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateExtendedMembershipKey)]
pub fn wasm_extended_key_gen(ctx: *const RLNWrapper) -> Result<Uint8Array, String> {
let wrapper = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
if wrapper.instance.extended_key_gen(&mut output_data).is_ok() {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
} else {
std::mem::forget(output_data);
Err("could not generate membership keys".into())
}
call_with_output_and_error_msg!(ctx, extended_key_gen, "could not generate membership keys")
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateSeededMembershipKey)]
pub fn wasm_seeded_key_gen(ctx: *const RLNWrapper, seed: Uint8Array) -> Result<Uint8Array, String> {
let wrapper = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
if wrapper
.instance
.seeded_key_gen(&seed.to_vec()[..], &mut output_data)
.is_ok()
{
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
} else {
std::mem::forget(output_data);
Err("could not generate membership key".into())
}
call_with_output_and_error_msg!(
ctx,
seeded_key_gen,
"could not generate membership key",
&seed.to_vec()[..]
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
@@ -190,20 +334,12 @@ pub fn wasm_seeded_extended_key_gen(
ctx: *const RLNWrapper,
seed: Uint8Array,
) -> Result<Uint8Array, String> {
let wrapper = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
if wrapper
.instance
.seeded_extended_key_gen(&seed.to_vec()[..], &mut output_data)
.is_ok()
{
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
} else {
std::mem::forget(output_data);
Err("could not generate membership key".into())
}
call_with_output_and_error_msg!(
ctx,
seeded_extended_key_gen,
"could not generate membership key",
&seed.to_vec()[..]
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
@@ -213,38 +349,24 @@ pub fn wasm_recover_id_secret(
input_proof_data_1: Uint8Array,
input_proof_data_2: Uint8Array,
) -> Result<Uint8Array, String> {
let wrapper = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
if wrapper
.instance
.recover_id_secret(
&input_proof_data_1.to_vec()[..],
&input_proof_data_2.to_vec()[..],
&mut output_data,
)
.is_ok()
{
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
} else {
std::mem::forget(output_data);
Err("could not recover id secret".into())
}
call_with_output_and_error_msg!(
ctx,
recover_id_secret,
"could not recover id secret",
&input_proof_data_1.to_vec()[..],
&input_proof_data_2.to_vec()[..]
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = verifyRLNProof)]
pub fn wasm_verify_rln_proof(ctx: *const RLNWrapper, proof: Uint8Array) -> Result<bool, String> {
let wrapper = unsafe { &*ctx };
if match wrapper.instance.verify_rln_proof(&proof.to_vec()[..]) {
Ok(verified) => verified,
Err(_) => return Err("error while verifying rln proof".into()),
} {
return Ok(true);
}
Ok(false)
call_bool_method_with_error_msg!(
ctx,
verify_rln_proof,
"error while verifying rln proof".to_string(),
&proof.to_vec()[..]
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
@@ -254,31 +376,31 @@ pub fn wasm_verify_with_roots(
proof: Uint8Array,
roots: Uint8Array,
) -> Result<bool, String> {
let wrapper = unsafe { &*ctx };
if match wrapper
.instance
.verify_with_roots(&proof.to_vec()[..], &roots.to_vec()[..])
{
Ok(verified) => verified,
Err(_) => return Err("error while verifying proof with roots".into()),
} {
return Ok(true);
}
Ok(false)
call_bool_method_with_error_msg!(
ctx,
verify_with_roots,
"error while verifying proof with roots".to_string(),
&proof.to_vec()[..],
&roots.to_vec()[..]
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = getRoot)]
pub fn wasm_get_root(ctx: *const RLNWrapper) -> Result<Uint8Array, String> {
let wrapper = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
if wrapper.instance.get_root(&mut output_data).is_ok() {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
} else {
std::mem::forget(output_data);
Err("could not obtain root".into())
}
call_with_output_and_error_msg!(ctx, get_root, "could not obtain root")
}
#[wasm_bindgen(js_name = hash)]
pub fn wasm_hash(input: Uint8Array) -> Result<Uint8Array, String> {
fn_call_with_output_and_error_msg!(hash, "could not generate hash", &input.to_vec()[..])
}
#[wasm_bindgen(js_name = poseidonHash)]
pub fn wasm_poseidon_hash(input: Uint8Array) -> Result<Uint8Array, String> {
fn_call_with_output_and_error_msg!(
poseidon_hash,
"could not generate poseidon hash",
&input.to_vec()[..]
)
}

rln-wasm/tests/rln-wasm.rs

@@ -4,9 +4,9 @@
mod tests {
use js_sys::{BigInt as JsBigInt, Object, Uint8Array};
use rln::circuit::TEST_TREE_HEIGHT;
use rln::utils::normalize_usize;
use rln_wasm::*;
use wasm_bindgen::prelude::*;
use wasm_bindgen::JsValue;
use wasm_bindgen::{prelude::*, JsValue};
use wasm_bindgen_test::wasm_bindgen_test;
#[wasm_bindgen(module = "src/utils.js")]
@@ -29,7 +29,7 @@ mod tests {
let vk = read_file(&vk_path).unwrap();
// Creating an instance of RLN
let rln_instance = wasm_new(tree_height, zkey, vk);
let rln_instance = wasm_new(tree_height, zkey, vk).unwrap();
// Creating membership key
let mem_keys = wasm_key_gen(rln_instance).unwrap();
@@ -41,28 +41,28 @@ mod tests {
// Prepare the message
let signal = "Hello World".as_bytes();
let signal_len: u64 = signal.len() as u64;
// Setting up the epoch (With 0s for the test)
let epoch = Uint8Array::new_with_length(32);
epoch.fill(0, 0, 32);
let identity_index: u64 = 0;
let identity_index: usize = 0;
// Serializing the message
let mut serialized_vec: Vec<u8> = Vec::new();
serialized_vec.append(&mut idkey.to_vec());
serialized_vec.append(&mut identity_index.to_le_bytes().to_vec());
serialized_vec.append(&mut normalize_usize(identity_index));
serialized_vec.append(&mut epoch.to_vec());
serialized_vec.append(&mut signal_len.to_le_bytes().to_vec());
serialized_vec.append(&mut normalize_usize(signal.len()));
serialized_vec.append(&mut signal.to_vec());
let serialized_message = Uint8Array::from(&serialized_vec[..]);
let serialized_rln_witness =
wasm_get_serialized_rln_witness(rln_instance, serialized_message);
wasm_get_serialized_rln_witness(rln_instance, serialized_message).unwrap();
// Obtaining inputs that should be sent to circom witness calculator
let json_inputs = rln_witness_to_json(rln_instance, serialized_rln_witness.clone());
let json_inputs =
rln_witness_to_json(rln_instance, serialized_rln_witness.clone()).unwrap();
// Calculating witness with JS
// (Using a JSON since wasm_bindgen does not like Result<Vec<JsBigInt>,JsValue>)
@@ -88,7 +88,7 @@ mod tests {
// Add signal_len | signal
let mut proof_bytes = proof.to_vec();
proof_bytes.append(&mut signal_len.to_le_bytes().to_vec());
proof_bytes.append(&mut normalize_usize(signal.len()));
proof_bytes.append(&mut signal.to_vec());
let proof_with_signal = Uint8Array::from(&proof_bytes[..]);
@@ -108,4 +108,28 @@ mod tests {
let is_proof_valid = wasm_verify_with_roots(rln_instance, proof_with_signal, roots);
assert!(is_proof_valid.unwrap(), "verifying proof with roots failed");
}
#[wasm_bindgen_test]
fn test_metadata() {
let tree_height = TEST_TREE_HEIGHT;
let circom_path = format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/rln.wasm");
let zkey_path = format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/rln_final.zkey");
let vk_path =
format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/verification_key.json");
let zkey = read_file(&zkey_path).unwrap();
let vk = read_file(&vk_path).unwrap();
// Creating an instance of RLN
let rln_instance = wasm_new(tree_height, zkey, vk).unwrap();
let test_metadata = Uint8Array::new(&JsValue::from_str("test"));
// Inserting random metadata
wasm_set_metadata(rln_instance, test_metadata.clone()).unwrap();
// Getting metadata
let metadata = wasm_get_metadata(rln_instance).unwrap();
assert_eq!(metadata.to_vec(), test_metadata.to_vec());
}
}
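The byte layout the tests assemble by hand matches `prepare_prove_input` in protocol.rs (shown later in this page). A hedged sketch of the same construction, assuming `normalize_usize` (which replaces `u64::to_le_bytes` in this diff) zero-pads a `usize` into 8 little-endian bytes; the helper name is hypothetical:
// Hypothetical helper mirroring the test's manual serialization:
// [ identity_secret (32) | index (8, LE) | epoch (32) | signal_len (8, LE) | signal ]
fn serialize_prove_input(id_secret: &[u8], index: usize, epoch: &[u8], signal: &[u8]) -> Vec<u8> {
    let mut out = Vec::new();
    out.extend_from_slice(id_secret); // 32-byte field element
    out.extend_from_slice(&(index as u64).to_le_bytes()); // member index
    out.extend_from_slice(epoch); // 32-byte epoch
    out.extend_from_slice(&(signal.len() as u64).to_le_bytes()); // signal length
    out.extend_from_slice(signal); // raw signal bytes
    out
}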

rln/Cargo.toml

@@ -1,10 +1,16 @@
[package]
name = "rln"
version = "0.1.0"
version = "0.3.2"
edition = "2021"
license = "MIT OR Apache-2.0"
description = "APIs to manage, compute and verify zkSNARK proofs and RLN primitives"
documentation = "https://github.com/vacp2p/zerokit"
homepage = "https://vac.dev"
repository = "https://github.com/vacp2p/zerokit"
[lib]
crate-type = ["cdylib", "rlib", "staticlib"]
crate-type = ["rlib", "staticlib"]
bench = false
# This flag disable cargo doctests, i.e. testing example code-snippets in documentation
doctest = false
@@ -12,45 +18,51 @@ doctest = false
[dependencies]
# ZKP Generation
ark-ec = { version = "0.3.0", default-features = false }
ark-ff = { version = "0.3.0", default-features = false, features = [ "asm"] }
ark-std = { version = "0.3.0", default-features = false }
ark-bn254 = { version = "0.3.0" }
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", default-features = false }
ark-relations = { version = "0.3.0", default-features = false, features = [ "std" ] }
ark-serialize = { version = "0.3.0", default-features = false }
ark-circom = { git = "https://github.com/vacp2p/ark-circom", branch = "wasm", default-features = false, features = ["circom-2"] }
#ark-circom = { git = "https://github.com/vacp2p/ark-circom", branch = "no-ethers-core", features = ["circom-2"] }
ark-ec = { version = "=0.4.1", default-features = false }
ark-ff = { version = "=0.4.1", default-features = false, features = [ "asm"] }
ark-std = { version = "=0.4.0", default-features = false }
ark-bn254 = { version = "=0.4.0" }
ark-groth16 = { version = "=0.4.0", features = ["parallel"], default-features = false }
ark-relations = { version = "=0.4.0", default-features = false, features = [ "std" ] }
ark-serialize = { version = "=0.4.1", default-features = false }
ark-circom = { version = "=0.1.0", default-features = false, features = ["circom-2"] }
# WASM
wasmer = { version = "2.3.0", default-features = false }
wasmer = { version = "=2.3.0", default-features = false }
# error handling
color-eyre = "0.5.11"
thiserror = "1.0.0"
color-eyre = "=0.6.2"
thiserror = "=1.0.39"
# utilities
cfg-if = "1.0"
num-bigint = { version = "0.4.3", default-features = false, features = ["rand"] }
num-traits = "0.2.11"
once_cell = "1.14.0"
rand = "0.8"
rand_chacha = "0.3.1"
tiny-keccak = { version = "2.0.2", features = ["keccak"] }
utils = { path = "../utils/", default-features = false }
cfg-if = "=1.0"
num-bigint = { version = "=0.4.3", default-features = false, features = ["rand"] }
num-traits = "=0.2.15"
once_cell = "=1.17.1"
rand = "=0.8.5"
rand_chacha = "=0.3.1"
tiny-keccak = { version = "=2.0.2", features = ["keccak"] }
utils = { package = "zerokit_utils", version = "=0.3.2", path = "../utils/", default-features = false }
# serialization
serde_json = "1.0.48"
serde_json = "=1.0.96"
serde = { version = "=1.0.163", features = ["derive"] }
include_dir = "=0.7.3"
[dev-dependencies]
pmtree = { git = "https://github.com/Rate-Limiting-Nullifier/pmtree" }
sled = "0.34.7"
sled = "=0.34.7"
criterion = { version = "=0.4.0", features = ["html_reports"] }
[features]
default = ["parallel", "wasmer/sys-default"]
default = ["parallel", "wasmer/sys-default", "pmtree-ft"]
parallel = ["ark-ec/parallel", "ark-ff/parallel", "ark-std/parallel", "ark-groth16/parallel", "utils/parallel"]
wasm = ["wasmer/js", "wasmer/std"]
fullmerkletree = ["default"]
# Note: pmtree feature is still experimental
pmtree = ["default"]
pmtree-ft = ["utils/pmtree-ft"]
[[bench]]
name = "pmtree_benchmark"
harness = false

rln/Makefile.toml

@@ -5,3 +5,7 @@ args = ["build", "--release"]
[tasks.test]
command = "cargo"
args = ["test", "--release"]
[tasks.bench]
command = "cargo"
args = ["bench"]

rln/README.md

@@ -3,15 +3,21 @@
This module provides APIs to manage, compute and verify [RLN](https://rfc.vac.dev/spec/32/) zkSNARK proofs and RLN primitives.
## Pre-requisites
### Install
### Install dependencies and clone repo
```sh
make installdeps
git clone https://github.com/vacp2p/zerokit.git
cd zerokit/rln
```
Implemented tests can be executed by running within the module folder
`cargo test --release`
### Build and Test
To build and test, run the following commands within the module folder
```bash
cargo make build
cargo make test
```
### Compile ZK circuits

rln/benches/pmtree_benchmark.rs

@@ -0,0 +1,44 @@
use criterion::{criterion_group, criterion_main, Criterion};
use utils::ZerokitMerkleTree;
use rln::{circuit::Fr, pm_tree_adapter::PmTree};
pub fn pmtree_benchmark(c: &mut Criterion) {
let mut tree = PmTree::default(2).unwrap();
let leaves: Vec<Fr> = (0..4).map(|s| Fr::from(s)).collect();
c.bench_function("Pmtree::set", |b| {
b.iter(|| {
tree.set(0, leaves[0]).unwrap();
})
});
c.bench_function("Pmtree:delete", |b| {
b.iter(|| {
tree.delete(0).unwrap();
})
});
c.bench_function("Pmtree::override_range", |b| {
b.iter(|| {
tree.override_range(0, leaves.clone(), [0, 1, 2, 3])
.unwrap();
})
});
c.bench_function("Pmtree::compute_root", |b| {
b.iter(|| {
tree.compute_root().unwrap();
})
});
c.bench_function("Pmtree::get", |b| {
b.iter(|| {
tree.get(0).unwrap();
})
});
}
criterion_group!(benches, pmtree_benchmark);
criterion_main!(benches);

rln/src/circuit.rs

@@ -8,11 +8,10 @@ use ark_circom::read_zkey;
use ark_groth16::{ProvingKey, VerifyingKey};
use ark_relations::r1cs::ConstraintMatrices;
use cfg_if::cfg_if;
use color_eyre::{Report, Result};
use num_bigint::BigUint;
use serde_json::Value;
use std::fs::File;
use std::io::{Cursor, Error, ErrorKind, Result};
use std::path::Path;
use std::io::Cursor;
use std::str::FromStr;
cfg_if! {
@@ -21,11 +20,13 @@ cfg_if! {
use once_cell::sync::OnceCell;
use std::sync::Mutex;
use wasmer::{Module, Store};
use include_dir::{include_dir, Dir};
use std::path::Path;
}
}
const ZKEY_FILENAME: &str = "rln_final.zkey";
const VK_FILENAME: &str = "verifying_key.json";
const VK_FILENAME: &str = "verification_key.json";
const WASM_FILENAME: &str = "rln.wasm";
// These parameters are used for tests
@@ -33,11 +34,11 @@ const WASM_FILENAME: &str = "rln.wasm";
// Changing these parameters to other values than these defaults will cause zkSNARK proof verification to fail
pub const TEST_PARAMETERS_INDEX: usize = 2;
pub const TEST_TREE_HEIGHT: usize = [15, 19, 20][TEST_PARAMETERS_INDEX];
pub const TEST_RESOURCES_FOLDER: &str = [
"./resources/tree_height_15/",
"./resources/tree_height_19/",
"./resources/tree_height_20/",
][TEST_PARAMETERS_INDEX];
pub const TEST_RESOURCES_FOLDER: &str =
["tree_height_15", "tree_height_19", "tree_height_20"][TEST_PARAMETERS_INDEX];
#[cfg(not(target_arch = "wasm32"))]
static RESOURCES_DIR: Dir<'_> = include_dir!("$CARGO_MANIFEST_DIR/resources");
// The following types define the pairing friendly elliptic curve, the underlying finite fields and groups default to this module
// Note that proofs are serialized assuming Fr to be 4x8 = 32 bytes in size. Hence, changing to a curve with different encoding will make proof verification to fail
@@ -57,21 +58,22 @@ pub fn zkey_from_raw(zkey_data: &Vec<u8>) -> Result<(ProvingKey<Curve>, Constrai
let proving_key_and_matrices = read_zkey(&mut c)?;
Ok(proving_key_and_matrices)
} else {
Err(Error::new(ErrorKind::NotFound, "No proving key found!"))
Err(Report::msg("No proving key found!"))
}
}
// Loads the proving key
#[cfg(not(target_arch = "wasm32"))]
pub fn zkey_from_folder(
resources_folder: &str,
) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
let zkey_path = format!("{resources_folder}{ZKEY_FILENAME}");
if Path::new(&zkey_path).exists() {
let mut file = File::open(&zkey_path)?;
let proving_key_and_matrices = read_zkey(&mut file)?;
let zkey = RESOURCES_DIR.get_file(Path::new(resources_folder).join(ZKEY_FILENAME));
if let Some(zkey) = zkey {
let mut c = Cursor::new(zkey.contents());
let proving_key_and_matrices = read_zkey(&mut c)?;
Ok(proving_key_and_matrices)
} else {
Err(Error::new(ErrorKind::NotFound, "No proving key found!"))
Err(Report::msg("No proving key found!"))
}
}
@@ -80,39 +82,35 @@ pub fn vk_from_raw(vk_data: &Vec<u8>, zkey_data: &Vec<u8>) -> Result<VerifyingKe
let verifying_key: VerifyingKey<Curve>;
if !vk_data.is_empty() {
verifying_key = vk_from_vector(vk_data);
verifying_key = vk_from_vector(vk_data)?;
Ok(verifying_key)
} else if !zkey_data.is_empty() {
let (proving_key, _matrices) = zkey_from_raw(zkey_data)?;
verifying_key = proving_key.vk;
Ok(verifying_key)
} else {
Err(Error::new(
ErrorKind::NotFound,
"No proving/verification key found!",
))
Err(Report::msg("No proving/verification key found!"))
}
}
// Loads the verification key
#[cfg(not(target_arch = "wasm32"))]
pub fn vk_from_folder(resources_folder: &str) -> Result<VerifyingKey<Curve>> {
let vk_path = format!("{resources_folder}{VK_FILENAME}");
let zkey_path = format!("{resources_folder}{ZKEY_FILENAME}");
let vk = RESOURCES_DIR.get_file(Path::new(resources_folder).join(VK_FILENAME));
let zkey = RESOURCES_DIR.get_file(Path::new(resources_folder).join(ZKEY_FILENAME));
let verifying_key: VerifyingKey<Curve>;
if Path::new(&vk_path).exists() {
verifying_key = vk_from_json(&vk_path);
if let Some(vk) = vk {
verifying_key = vk_from_json(vk.contents_utf8().ok_or(Report::msg(
"Could not read verification key from JSON file!",
))?)?;
Ok(verifying_key)
} else if Path::new(&zkey_path).exists() {
} else if let Some(_zkey) = zkey {
let (proving_key, _matrices) = zkey_from_folder(resources_folder)?;
verifying_key = proving_key.vk;
Ok(verifying_key)
} else {
Err(Error::new(
ErrorKind::NotFound,
"No proving/verification key found!",
))
Err(Report::msg("No proving/verification key found!"))
}
}
@@ -121,129 +119,150 @@ static WITNESS_CALCULATOR: OnceCell<Mutex<WitnessCalculator>> = OnceCell::new();
// Initializes the witness calculator using a bytes vector
#[cfg(not(target_arch = "wasm32"))]
pub fn circom_from_raw(wasm_buffer: Vec<u8>) -> &'static Mutex<WitnessCalculator> {
WITNESS_CALCULATOR.get_or_init(|| {
pub fn circom_from_raw(wasm_buffer: Vec<u8>) -> Result<&'static Mutex<WitnessCalculator>> {
WITNESS_CALCULATOR.get_or_try_init(|| {
let store = Store::default();
let module = Module::new(&store, wasm_buffer).unwrap();
let result =
WitnessCalculator::from_module(module).expect("Failed to create witness calculator");
Mutex::new(result)
let module = Module::new(&store, wasm_buffer)?;
let result = WitnessCalculator::from_module(module)?;
Ok::<Mutex<WitnessCalculator>, Report>(Mutex::new(result))
})
}
// Initializes the witness calculator
#[cfg(not(target_arch = "wasm32"))]
pub fn circom_from_folder(resources_folder: &str) -> &'static Mutex<WitnessCalculator> {
pub fn circom_from_folder(resources_folder: &str) -> Result<&'static Mutex<WitnessCalculator>> {
// We read the wasm file
let wasm_path = format!("{resources_folder}{WASM_FILENAME}");
let wasm_buffer = std::fs::read(&wasm_path).unwrap();
circom_from_raw(wasm_buffer)
let wasm = RESOURCES_DIR.get_file(Path::new(resources_folder).join(WASM_FILENAME));
if let Some(wasm) = wasm {
let wasm_buffer = wasm.contents();
circom_from_raw(wasm_buffer.to_vec())
} else {
Err(Report::msg("No wasm file found!"))
}
}
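Since the calculator is cached in a static `OnceCell`, the wasm module is compiled once and every caller locks the same instance. A minimal usage sketch on non-wasm32 targets, using the `TEST_RESOURCES_FOLDER` constant defined earlier in this file:
// Sketch: obtaining and locking the shared witness calculator.
fn witness_calculator_example() -> Result<()> {
    let calculator = circom_from_folder(TEST_RESOURCES_FOLDER)?;
    let _witness_calculator = calculator
        .lock()
        .map_err(|_| Report::msg("witness calculator mutex poisoned"))?;
    // ... compute witnesses with the locked calculator ...
    Ok(())
}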
// The following function implementations are taken/adapted from https://github.com/gakonst/ark-circom/blob/1732e15d6313fe176b0b1abb858ac9e095d0dbd7/src/zkey.rs
// Utilities to convert a json verification key in a groth16::VerificationKey
fn fq_from_str(s: &str) -> Fq {
Fq::try_from(BigUint::from_str(s).unwrap()).unwrap()
fn fq_from_str(s: &str) -> Result<Fq> {
Ok(Fq::try_from(BigUint::from_str(s)?)?)
}
// Extracts the element in G1 corresponding to its JSON serialization
fn json_to_g1(json: &Value, key: &str) -> G1Affine {
fn json_to_g1(json: &Value, key: &str) -> Result<G1Affine> {
let els: Vec<String> = json
.get(key)
.unwrap()
.ok_or(Report::msg("no json value"))?
.as_array()
.unwrap()
.ok_or(Report::msg("value not an array"))?
.iter()
.map(|i| i.as_str().unwrap().to_string())
.collect();
G1Affine::from(G1Projective::new(
fq_from_str(&els[0]),
fq_from_str(&els[1]),
fq_from_str(&els[2]),
))
.map(|i| i.as_str().ok_or(Report::msg("element is not a string")))
.map(|x| x.map(|v| v.to_owned()))
.collect::<Result<Vec<String>>>()?;
Ok(G1Affine::from(G1Projective::new(
fq_from_str(&els[0])?,
fq_from_str(&els[1])?,
fq_from_str(&els[2])?,
)))
}
// Extracts the vector of G1 elements corresponding to its JSON serialization
fn json_to_g1_vec(json: &Value, key: &str) -> Vec<G1Affine> {
fn json_to_g1_vec(json: &Value, key: &str) -> Result<Vec<G1Affine>> {
let els: Vec<Vec<String>> = json
.get(key)
.unwrap()
.ok_or(Report::msg("no json value"))?
.as_array()
.unwrap()
.ok_or(Report::msg("value not an array"))?
.iter()
.map(|i| {
i.as_array()
.unwrap()
.iter()
.map(|x| x.as_str().unwrap().to_string())
.collect::<Vec<String>>()
.ok_or(Report::msg("element is not an array"))
.and_then(|array| {
array
.iter()
.map(|x| x.as_str().ok_or(Report::msg("element is not a string")))
.map(|x| x.map(|v| v.to_owned()))
.collect::<Result<Vec<String>>>()
})
})
.collect();
.collect::<Result<Vec<Vec<String>>>>()?;
els.iter()
.map(|coords| {
G1Affine::from(G1Projective::new(
fq_from_str(&coords[0]),
fq_from_str(&coords[1]),
fq_from_str(&coords[2]),
))
})
.collect()
let mut res = vec![];
for coords in els {
res.push(G1Affine::from(G1Projective::new(
fq_from_str(&coords[0])?,
fq_from_str(&coords[1])?,
fq_from_str(&coords[2])?,
)))
}
Ok(res)
}
// Extracts the element in G2 corresponding to its JSON serialization
fn json_to_g2(json: &Value, key: &str) -> G2Affine {
fn json_to_g2(json: &Value, key: &str) -> Result<G2Affine> {
let els: Vec<Vec<String>> = json
.get(key)
.unwrap()
.ok_or(Report::msg("no json value"))?
.as_array()
.unwrap()
.ok_or(Report::msg("value not an array"))?
.iter()
.map(|i| {
i.as_array()
.unwrap()
.iter()
.map(|x| x.as_str().unwrap().to_string())
.collect::<Vec<String>>()
.ok_or(Report::msg("element is not an array"))
.and_then(|array| {
array
.iter()
.map(|x| x.as_str().ok_or(Report::msg("element is not a string")))
.map(|x| x.map(|v| v.to_owned()))
.collect::<Result<Vec<String>>>()
})
})
.collect();
.collect::<Result<Vec<Vec<String>>>>()?;
let x = Fq2::new(fq_from_str(&els[0][0]), fq_from_str(&els[0][1]));
let y = Fq2::new(fq_from_str(&els[1][0]), fq_from_str(&els[1][1]));
let z = Fq2::new(fq_from_str(&els[2][0]), fq_from_str(&els[2][1]));
G2Affine::from(G2Projective::new(x, y, z))
let x = Fq2::new(fq_from_str(&els[0][0])?, fq_from_str(&els[0][1])?);
let y = Fq2::new(fq_from_str(&els[1][0])?, fq_from_str(&els[1][1])?);
let z = Fq2::new(fq_from_str(&els[2][0])?, fq_from_str(&els[2][1])?);
Ok(G2Affine::from(G2Projective::new(x, y, z)))
}
// Converts JSON to a VerifyingKey
fn to_verifying_key(json: serde_json::Value) -> VerifyingKey<Curve> {
VerifyingKey {
alpha_g1: json_to_g1(&json, "vk_alpha_1"),
beta_g2: json_to_g2(&json, "vk_beta_2"),
gamma_g2: json_to_g2(&json, "vk_gamma_2"),
delta_g2: json_to_g2(&json, "vk_delta_2"),
gamma_abc_g1: json_to_g1_vec(&json, "IC"),
}
fn to_verifying_key(json: serde_json::Value) -> Result<VerifyingKey<Curve>> {
Ok(VerifyingKey {
alpha_g1: json_to_g1(&json, "vk_alpha_1")?,
beta_g2: json_to_g2(&json, "vk_beta_2")?,
gamma_g2: json_to_g2(&json, "vk_gamma_2")?,
delta_g2: json_to_g2(&json, "vk_delta_2")?,
gamma_abc_g1: json_to_g1_vec(&json, "IC")?,
})
}
// Computes the verification key from its JSON serialization
fn vk_from_json(vk_path: &str) -> VerifyingKey<Curve> {
let json = std::fs::read_to_string(vk_path).unwrap();
let json: Value = serde_json::from_str(&json).unwrap();
fn vk_from_json(vk: &str) -> Result<VerifyingKey<Curve>> {
let json: Value = serde_json::from_str(vk)?;
to_verifying_key(json)
}
// Computes the verification key from a bytes vector containing its JSON serialization
fn vk_from_vector(vk: &[u8]) -> VerifyingKey<Curve> {
let json = String::from_utf8(vk.to_vec()).expect("Found invalid UTF-8");
let json: Value = serde_json::from_str(&json).unwrap();
fn vk_from_vector(vk: &[u8]) -> Result<VerifyingKey<Curve>> {
let json = String::from_utf8(vk.to_vec())?;
let json: Value = serde_json::from_str(&json)?;
to_verifying_key(json)
}
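Taken together, a verifying key can be recovered either from the embedded resources or from raw JSON bytes. A minimal sketch using the embedded test resources:
// Sketch: loading the verification key bundled for the test tree height.
fn example_vk() -> Result<VerifyingKey<Curve>> {
    vk_from_folder(TEST_RESOURCES_FOLDER)
}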
// Checks verification key to be correct with respect to proving key
pub fn check_vk_from_zkey(resources_folder: &str, verifying_key: VerifyingKey<Curve>) {
let (proving_key, _matrices) = zkey_from_folder(resources_folder).unwrap();
assert_eq!(proving_key.vk, verifying_key);
#[cfg(not(target_arch = "wasm32"))]
pub fn check_vk_from_zkey(
resources_folder: &str,
verifying_key: VerifyingKey<Curve>,
) -> Result<()> {
let (proving_key, _matrices) = zkey_from_folder(resources_folder)?;
if proving_key.vk == verifying_key {
Ok(())
} else {
Err(Report::msg("verifying_keys are not equal"))
}
}

rln/src/ffi.rs

@@ -2,7 +2,7 @@
use std::slice;
use crate::public::RLN;
use crate::public::{hash as public_hash, poseidon_hash as public_poseidon_hash, RLN};
// Macro to call methods with an arbitrary number of arguments.
// The first argument to the macro is the context,
@@ -12,7 +12,15 @@ macro_rules! call {
($instance:expr, $method:ident $(, $arg:expr)*) => {
{
let new_instance: &mut RLN = $instance.process();
new_instance.$method($($arg.process()),*).is_ok()
match new_instance.$method($($arg.process()),*) {
Ok(()) => {
true
}
Err(err) => {
eprintln!("execution error: {err}");
false
}
}
}
}
}
@@ -30,13 +38,17 @@ macro_rules! call_with_output_arg {
{
let mut output_data: Vec<u8> = Vec::new();
let new_instance = $instance.process();
if new_instance.$method(&mut output_data).is_ok() {
unsafe { *$output_arg = Buffer::from(&output_data[..]) };
std::mem::forget(output_data);
true
} else {
std::mem::forget(output_data);
false
match new_instance.$method(&mut output_data) {
Ok(()) => {
unsafe { *$output_arg = Buffer::from(&output_data[..]) };
std::mem::forget(output_data);
true
}
Err(err) => {
std::mem::forget(output_data);
eprintln!("execution error: {err}");
false
}
}
}
};
@@ -44,13 +56,43 @@ macro_rules! call_with_output_arg {
{
let mut output_data: Vec<u8> = Vec::new();
let new_instance = $instance.process();
if new_instance.$method($($arg.process()),*, &mut output_data).is_ok() {
unsafe { *$output_arg = Buffer::from(&output_data[..]) };
std::mem::forget(output_data);
true
} else {
std::mem::forget(output_data);
false
match new_instance.$method($($arg.process()),*, &mut output_data) {
Ok(()) => {
unsafe { *$output_arg = Buffer::from(&output_data[..]) };
std::mem::forget(output_data);
true
}
Err(err) => {
std::mem::forget(output_data);
eprintln!("execution error: {err}");
false
}
}
}
};
}
// Macro to call free functions with an arbitrary number of arguments,
// i.e. functions that are not methods on a ctx RLN object.
// The first argument is the function to call,
// the second is the output buffer argument,
// the remaining arguments are all other inputs to the function.
macro_rules! no_ctx_call_with_output_arg {
($method:ident, $output_arg:expr, $( $arg:expr ),* ) => {
{
let mut output_data: Vec<u8> = Vec::new();
match $method($($arg.process()),*, &mut output_data) {
Ok(()) => {
unsafe { *$output_arg = Buffer::from(&output_data[..]) };
std::mem::forget(output_data);
true
}
Err(err) => {
std::mem::forget(output_data);
eprintln!("execution error: {err}");
false
}
}
}
}
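// Note on the std::mem::forget calls above: assuming Buffer::from(&[u8])
// stores a raw pointer into the vector's allocation rather than copying,
// forgetting output_data prevents the Vec from being dropped (and its
// memory freed) while the caller still holds a Buffer pointing into it;
// releasing that memory then becomes the caller's responsibility.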
@@ -67,8 +109,11 @@ macro_rules! call_with_bool_arg {
{
let new_instance = $instance.process();
if match new_instance.$method($($arg.process()),*,) {
Ok(verified) => verified,
Err(_) => return false,
Ok(result) => result,
Err(err) => {
eprintln!("execution error: {err}");
return false
},
} {
unsafe { *$bool_arg = true };
} else {
@@ -149,9 +194,16 @@ impl<'a> From<&Buffer> for &'a [u8] {
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn new(tree_height: usize, input_buffer: *const Buffer, ctx: *mut *mut RLN) -> bool {
let rln = RLN::new(tree_height, input_buffer.process());
unsafe { *ctx = Box::into_raw(Box::new(rln)) };
true
match RLN::new(tree_height, input_buffer.process()) {
Ok(rln) => {
unsafe { *ctx = Box::into_raw(Box::new(rln)) };
true
}
Err(err) => {
eprintln!("could not instantiate rln: {err}");
false
}
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
@@ -161,16 +213,25 @@ pub extern "C" fn new_with_params(
circom_buffer: *const Buffer,
zkey_buffer: *const Buffer,
vk_buffer: *const Buffer,
tree_config: *const Buffer,
ctx: *mut *mut RLN,
) -> bool {
let rln = RLN::new_with_params(
match RLN::new_with_params(
tree_height,
circom_buffer.process().to_vec(),
zkey_buffer.process().to_vec(),
vk_buffer.process().to_vec(),
);
unsafe { *ctx = Box::into_raw(Box::new(rln)) };
true
tree_config.process(),
) {
Ok(rln) => {
unsafe { *ctx = Box::into_raw(Box::new(rln)) };
true
}
Err(err) => {
eprintln!("could not instantiate rln: {err}");
false
}
}
}
////////////////////////////////////////////////////////
@@ -194,6 +255,12 @@ pub extern "C" fn set_leaf(ctx: *mut RLN, index: usize, input_buffer: *const Buf
call!(ctx, set_leaf, index, input_buffer)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn get_leaf(ctx: *mut RLN, index: usize, output_buffer: *mut Buffer) -> bool {
call_with_output_arg!(ctx, get_leaf, output_buffer, index)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn set_next_leaf(ctx: *mut RLN, input_buffer: *const Buffer) -> bool {
@@ -216,6 +283,33 @@ pub extern "C" fn init_tree_with_leaves(ctx: *mut RLN, input_buffer: *const Buff
call!(ctx, init_tree_with_leaves, input_buffer)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn atomic_operation(
ctx: *mut RLN,
index: usize,
leaves_buffer: *const Buffer,
indices_buffer: *const Buffer,
) -> bool {
call!(ctx, atomic_operation, index, leaves_buffer, indices_buffer)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn seq_atomic_operation(
ctx: *mut RLN,
leaves_buffer: *const Buffer,
indices_buffer: *const Buffer,
) -> bool {
call!(
ctx,
atomic_operation,
ctx.process().leaves_set(),
leaves_buffer,
indices_buffer
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn get_root(ctx: *const RLN, output_buffer: *mut Buffer) -> bool {
@@ -340,12 +434,36 @@ pub extern "C" fn recover_id_secret(
)
}
////////////////////////////////////////////////////////
// Persistent metadata APIs
////////////////////////////////////////////////////////
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn hash(
ctx: *mut RLN,
input_buffer: *const Buffer,
output_buffer: *mut Buffer,
) -> bool {
call_with_output_arg!(ctx, hash, output_buffer, input_buffer)
pub extern "C" fn set_metadata(ctx: *mut RLN, input_buffer: *const Buffer) -> bool {
call!(ctx, set_metadata, input_buffer)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn get_metadata(ctx: *const RLN, output_buffer: *mut Buffer) -> bool {
call_with_output_arg!(ctx, get_metadata, output_buffer)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn flush(ctx: *mut RLN) -> bool {
call!(ctx, flush)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn hash(input_buffer: *const Buffer, output_buffer: *mut Buffer) -> bool {
no_ctx_call_with_output_arg!(public_hash, output_buffer, input_buffer)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn poseidon_hash(input_buffer: *const Buffer, output_buffer: *mut Buffer) -> bool {
no_ctx_call_with_output_arg!(public_poseidon_hash, output_buffer, input_buffer)
}
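A hedged caller-side sketch of the free-standing hash FFI; `Buffer: From<&[u8]>` is assumed here, as the inverse of the `From<&Buffer>` conversion shown earlier in this file:
// Illustrative Rust-side usage; a C caller goes through the generated header.
fn ffi_hash_example() {
    let input: &[u8] = b"signal to hash";
    let input_buffer = Buffer::from(input);
    let empty: &[u8] = &[];
    let mut output_buffer = Buffer::from(empty);
    if hash(&input_buffer, &mut output_buffer) {
        let hash_bytes: &[u8] = <&[u8]>::from(&output_buffer);
        assert_eq!(hash_bytes.len(), 32); // one little-endian Fr element
    }
}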

rln/src/hashers.rs (new file, 58 lines)

@@ -0,0 +1,58 @@
/// This crate instantiates the Poseidon hash algorithm.
use crate::{circuit::Fr, utils::bytes_le_to_fr};
use once_cell::sync::Lazy;
use tiny_keccak::{Hasher, Keccak};
use utils::poseidon::Poseidon;
/// These indexed constants hardcode the supported round parameters tuples (t, RF, RN, SKIP_MATRICES) for the Bn254 scalar field.
/// SKIP_MATRICES is the index of the randomly generated secure MDS matrix.
/// TODO: generate these parameters
pub const ROUND_PARAMS: [(usize, usize, usize, usize); 8] = [
(2, 8, 56, 0),
(3, 8, 57, 0),
(4, 8, 56, 0),
(5, 8, 60, 0),
(6, 8, 60, 0),
(7, 8, 63, 0),
(8, 8, 64, 0),
(9, 8, 63, 0),
];
/// Poseidon Hash wrapper over above implementation.
static POSEIDON: Lazy<Poseidon<Fr>> = Lazy::new(|| Poseidon::<Fr>::from(&ROUND_PARAMS));
pub fn poseidon_hash(input: &[Fr]) -> Fr {
POSEIDON
.hash(input.to_vec())
.expect("hash with fixed input size can't fail")
}
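A minimal usage sketch; input lengths are limited to the arities covered by ROUND_PARAMS above:
// Sketch: hashing two field elements with the shared Poseidon instance.
fn poseidon_example() -> Fr {
    poseidon_hash(&[Fr::from(1u64), Fr::from(2u64)])
}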
/// The zerokit RLN Merkle tree Hasher.
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct PoseidonHash;
/// The default Hasher trait used by Merkle tree implementation in utils.
impl utils::merkle_tree::Hasher for PoseidonHash {
type Fr = Fr;
fn default_leaf() -> Self::Fr {
Self::Fr::from(0)
}
fn hash(inputs: &[Self::Fr]) -> Self::Fr {
poseidon_hash(inputs)
}
}
/// Hashes arbitrary signal to the underlying prime field.
pub fn hash_to_field(signal: &[u8]) -> Fr {
// We hash the input signal using Keccak256
let mut hash = [0; 32];
let mut hasher = Keccak::v256();
hasher.update(signal);
hasher.finalize(&mut hash);
// We export the hash as a field element
let (el, _) = bytes_le_to_fr(hash.as_ref());
el
}
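Any byte string thus maps deterministically into the scalar field. A quick sketch:
// Sketch: distinct signals map to (almost surely) distinct field elements.
fn hash_to_field_example() {
    let a = hash_to_field(b"hello");
    let b = hash_to_field(b"world");
    assert_ne!(a, b);
}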

rln/src/lib.rs

@@ -1,7 +1,9 @@
#![allow(dead_code)]
pub mod circuit;
pub mod poseidon_hash;
pub mod hashers;
#[cfg(feature = "pmtree-ft")]
pub mod pm_tree_adapter;
pub mod poseidon_tree;
pub mod protocol;
pub mod public;

rln/src/pm_tree_adapter.rs (new file, 339 lines)

@@ -0,0 +1,339 @@
use std::fmt::Debug;
use std::path::PathBuf;
use std::str::FromStr;
use color_eyre::{Report, Result};
use serde_json::Value;
use utils::pmtree::{Database, Hasher};
use utils::*;
use crate::circuit::Fr;
use crate::hashers::{poseidon_hash, PoseidonHash};
use crate::utils::{bytes_le_to_fr, fr_to_bytes_le};
const METADATA_KEY: [u8; 8] = *b"metadata";
pub struct PmTree {
tree: pmtree::MerkleTree<SledDB, PoseidonHash>,
// metadata that an application may use to store additional information
metadata: Vec<u8>,
}
pub struct PmTreeProof {
proof: pmtree::tree::MerkleProof<PoseidonHash>,
}
pub type FrOf<H> = <H as Hasher>::Fr;
// The pmtree Hasher trait used by pmtree Merkle tree
impl Hasher for PoseidonHash {
type Fr = Fr;
fn serialize(value: Self::Fr) -> pmtree::Value {
fr_to_bytes_le(&value)
}
fn deserialize(value: pmtree::Value) -> Self::Fr {
let (fr, _) = bytes_le_to_fr(&value);
fr
}
fn default_leaf() -> Self::Fr {
Fr::from(0)
}
fn hash(inputs: &[Self::Fr]) -> Self::Fr {
poseidon_hash(inputs)
}
}
fn get_tmp_path() -> PathBuf {
std::env::temp_dir().join(format!("pmtree-{}", rand::random::<u64>()))
}
fn get_tmp() -> bool {
true
}
pub struct PmtreeConfig(Config);
impl FromStr for PmtreeConfig {
type Err = Report;
fn from_str(s: &str) -> Result<Self> {
let config: Value = serde_json::from_str(s)?;
let path = config["path"].as_str();
let path = path.map(PathBuf::from);
let temporary = config["temporary"].as_bool();
let cache_capacity = config["cache_capacity"].as_u64();
let flush_every_ms = config["flush_every_ms"].as_u64();
let mode = match config["mode"].as_str() {
Some("HighThroughput") => Mode::HighThroughput,
Some("LowSpace") => Mode::LowSpace,
_ => Mode::HighThroughput,
};
let use_compression = config["use_compression"].as_bool();
if temporary.is_some()
&& path.is_some()
&& temporary.unwrap()
&& path.as_ref().unwrap().exists()
{
return Err(Report::msg(format!(
"Path {:?} already exists, cannot use temporary",
path.unwrap()
)));
}
let config = Config::new()
.temporary(temporary.unwrap_or(get_tmp()))
.path(path.unwrap_or(get_tmp_path()))
.cache_capacity(cache_capacity.unwrap_or(1024 * 1024 * 1024))
.flush_every_ms(flush_every_ms)
.mode(mode)
.use_compression(use_compression.unwrap_or(false));
Ok(PmtreeConfig(config))
}
}
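The accepted JSON mirrors sled's `Config` knobs; every key is optional and falls back to the defaults below. A parsing sketch (the path is an example value):
// Sketch: building a persistent pmtree config from JSON.
fn example_config() -> Result<PmtreeConfig> {
    r#"{
        "path": "/tmp/rln-tree-db",
        "temporary": false,
        "cache_capacity": 100000,
        "flush_every_ms": 12000,
        "mode": "HighThroughput",
        "use_compression": false
    }"#
    .parse()
}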
impl Default for PmtreeConfig {
fn default() -> Self {
let tmp_path = get_tmp_path();
PmtreeConfig(
Config::new()
.temporary(true)
.path(tmp_path)
.cache_capacity(150_000)
.mode(Mode::HighThroughput)
.use_compression(false)
.flush_every_ms(Some(12_000)),
)
}
}
impl Debug for PmtreeConfig {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.0.fmt(f)
}
}
impl Clone for PmtreeConfig {
fn clone(&self) -> Self {
PmtreeConfig(self.0.clone())
}
}
impl ZerokitMerkleTree for PmTree {
type Proof = PmTreeProof;
type Hasher = PoseidonHash;
type Config = PmtreeConfig;
fn default(depth: usize) -> Result<Self> {
let default_config = PmtreeConfig::default();
PmTree::new(depth, Self::Hasher::default_leaf(), default_config)
}
fn new(depth: usize, _default_leaf: FrOf<Self::Hasher>, config: Self::Config) -> Result<Self> {
let tree_loaded = pmtree::MerkleTree::load(config.clone().0);
let tree = match tree_loaded {
Ok(tree) => tree,
Err(_) => pmtree::MerkleTree::new(depth, config.0)?,
};
Ok(PmTree {
tree,
metadata: Vec::new(),
})
}
fn depth(&self) -> usize {
self.tree.depth()
}
fn capacity(&self) -> usize {
self.tree.capacity()
}
fn leaves_set(&mut self) -> usize {
self.tree.leaves_set()
}
fn root(&self) -> FrOf<Self::Hasher> {
self.tree.root()
}
fn compute_root(&mut self) -> Result<FrOf<Self::Hasher>> {
Ok(self.tree.root())
}
fn set(&mut self, index: usize, leaf: FrOf<Self::Hasher>) -> Result<()> {
self.tree
.set(index, leaf)
.map_err(|e| Report::msg(e.to_string()))
}
fn set_range<I: IntoIterator<Item = FrOf<Self::Hasher>>>(
&mut self,
start: usize,
values: I,
) -> Result<()> {
self.tree
.set_range(start, values)
.map_err(|e| Report::msg(e.to_string()))
}
fn get(&self, index: usize) -> Result<FrOf<Self::Hasher>> {
self.tree.get(index).map_err(|e| Report::msg(e.to_string()))
}
fn override_range<I: IntoIterator<Item = FrOf<Self::Hasher>>, J: IntoIterator<Item = usize>>(
&mut self,
start: usize,
leaves: I,
indices: J,
) -> Result<()> {
let leaves = leaves.into_iter().collect::<Vec<_>>();
let mut indices = indices.into_iter().collect::<Vec<_>>();
indices.sort();
match (leaves.is_empty(), indices.is_empty()) {
(true, true) => Err(Report::msg("no leaves or indices to be removed")),
(false, true) => self.set_range_with_leaves(start, leaves),
(true, false) => self.remove_indices(indices),
(false, false) => self.remove_indices_and_set_leaves(start, leaves, indices),
}
}
fn update_next(&mut self, leaf: FrOf<Self::Hasher>) -> Result<()> {
self.tree
.update_next(leaf)
.map_err(|e| Report::msg(e.to_string()))
}
fn delete(&mut self, index: usize) -> Result<()> {
self.tree
.delete(index)
.map_err(|e| Report::msg(e.to_string()))
}
fn proof(&self, index: usize) -> Result<Self::Proof> {
let proof = self.tree.proof(index)?;
Ok(PmTreeProof { proof })
}
fn verify(&self, leaf: &FrOf<Self::Hasher>, witness: &Self::Proof) -> Result<bool> {
if self.tree.verify(leaf, &witness.proof) {
Ok(true)
} else {
Err(Report::msg("verify failed"))
}
}
fn set_metadata(&mut self, metadata: &[u8]) -> Result<()> {
self.tree.db.put(METADATA_KEY, metadata.to_vec())?;
self.metadata = metadata.to_vec();
Ok(())
}
fn metadata(&self) -> Result<Vec<u8>> {
if !self.metadata.is_empty() {
return Ok(self.metadata.clone());
}
// if empty, try searching the db
let data = self.tree.db.get(METADATA_KEY)?;
if data.is_none() {
return Err(Report::msg("metadata does not exist"));
}
Ok(data.unwrap())
}
fn close_db_connection(&mut self) -> Result<()> {
self.tree.db.close().map_err(|e| Report::msg(e.to_string()))
}
}
type PmTreeHasher = <PmTree as ZerokitMerkleTree>::Hasher;
type FrOfPmTreeHasher = FrOf<PmTreeHasher>;
impl PmTree {
fn set_range_with_leaves(&mut self, start: usize, leaves: Vec<FrOfPmTreeHasher>) -> Result<()> {
self.tree
.set_range(start, leaves)
.map_err(|e| Report::msg(e.to_string()))
}
fn remove_indices(&mut self, indices: Vec<usize>) -> Result<()> {
let start = indices[0];
let end = indices.last().unwrap() + 1;
let mut new_leaves: Vec<_> = (start..end)
.map(|i| self.tree.get(i))
.collect::<Result<_, _>>()?;
new_leaves
.iter_mut()
.take(indices.len())
.for_each(|leaf| *leaf = PmTreeHasher::default_leaf());
self.tree
.set_range(start, new_leaves)
.map_err(|e| Report::msg(e.to_string()))
}
fn remove_indices_and_set_leaves(
&mut self,
start: usize,
leaves: Vec<FrOfPmTreeHasher>,
indices: Vec<usize>,
) -> Result<()> {
let min_index = *indices.first().unwrap();
let max_index = start + leaves.len();
// Generate a placeholder of exactly the size needed,
// initialized with default values that are overridden throughout the method
let mut set_values = vec![PmTreeHasher::default_leaf(); max_index - min_index];
// If an index is not in the indices list, keep its original value
for i in min_index..start {
if !indices.contains(&i) {
let value = self.tree.get(i)?;
set_values[i - min_index] = value;
}
}
// Insert new leaves after 'start' position
for (i, &leaf) in leaves.iter().enumerate() {
set_values[start - min_index + i] = leaf;
}
self.tree
.set_range(min_index, set_values)
.map_err(|e| Report::msg(e.to_string()))
}
}
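// Worked example (sketch) for remove_indices_and_set_leaves above:
//   indices = [1, 2] (leaves to clear), start = 3, leaves = [x, y]
//   min_index = 1, max_index = 3 + 2 = 5, so set_values covers slots 1..5
//   - slots 1 and 2 are in `indices`, so they stay default_leaf()
//   - slots 3 and 4 receive x and y
//   One set_range(1, [default, default, x, y]) applies the whole batch.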
impl ZerokitMerkleProof for PmTreeProof {
type Index = u8;
type Hasher = PoseidonHash;
fn length(&self) -> usize {
self.proof.length()
}
fn leaf_index(&self) -> usize {
self.proof.leaf_index()
}
fn get_path_elements(&self) -> Vec<FrOf<Self::Hasher>> {
self.proof.get_path_elements()
}
fn get_path_index(&self) -> Vec<Self::Index> {
self.proof.get_path_index()
}
fn compute_root_from(&self, leaf: &FrOf<Self::Hasher>) -> FrOf<Self::Hasher> {
self.proof.compute_root_from(leaf)
}
}

rln/src/poseidon_hash.rs (deleted)

@@ -1,28 +0,0 @@
// This crate instantiate the Poseidon hash algorithm
use crate::circuit::Fr;
use once_cell::sync::Lazy;
use utils::poseidon::Poseidon;
// These indexed constants hardcodes the supported round parameters tuples (t, RF, RN, SKIP_MATRICES) for the Bn254 scalar field
// SKIP_MATRICES is the index of the randomly generated secure MDS matrix. See security note in the zerokit_utils::poseidon::poseidon_constants crate on this.
// TODO: generate these parameters
pub const ROUND_PARAMS: [(usize, usize, usize, usize); 8] = [
(2, 8, 56, 0),
(3, 8, 57, 0),
(4, 8, 56, 0),
(5, 8, 60, 0),
(6, 8, 60, 0),
(7, 8, 63, 0),
(8, 8, 64, 0),
(9, 8, 63, 0),
];
// Poseidon Hash wrapper over above implementation. Adapted from semaphore-rs poseidon hash wrapper.
static POSEIDON: Lazy<Poseidon<Fr>> = Lazy::new(|| Poseidon::<Fr>::from(&ROUND_PARAMS));
pub fn poseidon_hash(input: &[Fr]) -> Fr {
POSEIDON
.hash(input.to_vec())
.expect("hash with fixed input size can't fail")
}

rln/src/poseidon_tree.rs

@@ -2,10 +2,16 @@
// Implementation inspired by https://github.com/worldcoin/semaphore-rs/blob/d462a4372f1fd9c27610f2acfe4841fab1d396aa/src/poseidon_tree.rs (no differences)
use crate::circuit::Fr;
use crate::poseidon_hash::poseidon_hash;
use cfg_if::cfg_if;
use utils::merkle_tree::*;
cfg_if! {
if #[cfg(feature = "pmtree-ft")] {
use crate::pm_tree_adapter::*;
} else {
use crate::hashers::{PoseidonHash};
use utils::merkle_tree::*;
}
}
// The zerokit RLN default Merkle tree implementation is the OptimalMerkleTree.
// To switch to FullMerkleTree implementation, it is enough to enable the fullmerkletree feature
@@ -14,25 +20,11 @@ cfg_if! {
if #[cfg(feature = "fullmerkletree")] {
pub type PoseidonTree = FullMerkleTree<PoseidonHash>;
pub type MerkleProof = FullMerkleProof<PoseidonHash>;
} else if #[cfg(feature = "pmtree-ft")] {
pub type PoseidonTree = PmTree;
pub type MerkleProof = PmTreeProof;
} else {
pub type PoseidonTree = OptimalMerkleTree<PoseidonHash>;
pub type MerkleProof = OptimalMerkleProof<PoseidonHash>;
}
}
// The zerokit RLN Merkle tree Hasher
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct PoseidonHash;
// The default Hasher trait used by Merkle tree implementation in utils
impl utils::merkle_tree::Hasher for PoseidonHash {
type Fr = Fr;
fn default_leaf() -> Self::Fr {
Self::Fr::from(0)
}
fn hash(inputs: &[Self::Fr]) -> Self::Fr {
poseidon_hash(inputs)
}
}

rln/src/protocol.rs

@@ -1,14 +1,11 @@
// This crate collects all the underlying primitives used to implement RLN
use ark_circom::{CircomReduction, WitnessCalculator};
use ark_groth16::{
create_proof_with_reduction_and_matrices, prepare_verifying_key,
verify_proof as ark_verify_proof, Proof as ArkProof, ProvingKey, VerifyingKey,
};
use ark_groth16::{prepare_verifying_key, Groth16, Proof as ArkProof, ProvingKey, VerifyingKey};
use ark_relations::r1cs::ConstraintMatrices;
use ark_relations::r1cs::SynthesisError;
use ark_std::{rand::thread_rng, UniformRand};
use color_eyre::Result;
use color_eyre::{Report, Result};
use num_bigint::BigInt;
use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha20Rng;
@@ -20,11 +17,13 @@ use thiserror::Error;
use tiny_keccak::{Hasher as _, Keccak};
use crate::circuit::{Curve, Fr};
use crate::poseidon_hash::poseidon_hash;
use crate::hashers::hash_to_field;
use crate::hashers::poseidon_hash;
use crate::poseidon_tree::*;
use crate::public::RLN_IDENTIFIER;
use crate::utils::*;
use cfg_if::cfg_if;
use utils::{ZerokitMerkleProof, ZerokitMerkleTree};
///////////////////////////////////////////////////////
// RLN Witness data structure and utility functions
@@ -53,82 +52,84 @@ pub struct RLNProofValues {
}
pub fn serialize_field_element(element: Fr) -> Vec<u8> {
return fr_to_bytes_le(&element);
fr_to_bytes_le(&element)
}
pub fn deserialize_field_element(serialized: Vec<u8>) -> Fr {
let (element, _) = bytes_le_to_fr(&serialized);
return element;
element
}
pub fn deserialize_identity_pair(serialized: Vec<u8>) -> (Fr, Fr) {
let (identity_secret_hash, read) = bytes_le_to_fr(&serialized);
let (id_commitment, _) = bytes_le_to_fr(&serialized[read..].to_vec());
let (id_commitment, _) = bytes_le_to_fr(&serialized[read..]);
return (identity_secret_hash, id_commitment);
(identity_secret_hash, id_commitment)
}
pub fn deserialize_identity_tuple(serialized: Vec<u8>) -> (Fr, Fr, Fr, Fr) {
let mut all_read = 0;
let (identity_trapdoor, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (identity_trapdoor, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (identity_nullifier, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (identity_nullifier, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (identity_secret_hash, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (identity_secret_hash, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (identity_commitment, _) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (identity_commitment, _) = bytes_le_to_fr(&serialized[all_read..]);
return (
(
identity_trapdoor,
identity_nullifier,
identity_secret_hash,
identity_commitment,
);
)
}
pub fn serialize_witness(rln_witness: &RLNWitnessInput) -> Vec<u8> {
pub fn serialize_witness(rln_witness: &RLNWitnessInput) -> Result<Vec<u8>> {
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&rln_witness.identity_secret));
serialized.append(&mut vec_fr_to_bytes_le(&rln_witness.path_elements));
serialized.append(&mut vec_u8_to_bytes_le(&rln_witness.identity_path_index));
serialized.append(&mut vec_fr_to_bytes_le(&rln_witness.path_elements)?);
serialized.append(&mut vec_u8_to_bytes_le(&rln_witness.identity_path_index)?);
serialized.append(&mut fr_to_bytes_le(&rln_witness.x));
serialized.append(&mut fr_to_bytes_le(&rln_witness.epoch));
serialized.append(&mut fr_to_bytes_le(&rln_witness.rln_identifier));
serialized
Ok(serialized)
}
pub fn deserialize_witness(serialized: &[u8]) -> (RLNWitnessInput, usize) {
pub fn deserialize_witness(serialized: &[u8]) -> Result<(RLNWitnessInput, usize)> {
let mut all_read: usize = 0;
let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (path_elements, read) = bytes_le_to_vec_fr(&serialized[all_read..].to_vec());
let (path_elements, read) = bytes_le_to_vec_fr(&serialized[all_read..])?;
all_read += read;
let (identity_path_index, read) = bytes_le_to_vec_u8(&serialized[all_read..].to_vec());
let (identity_path_index, read) = bytes_le_to_vec_u8(&serialized[all_read..])?;
all_read += read;
let (x, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (x, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (rln_identifier, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (rln_identifier, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
// TODO: check rln_identifier against public::RLN_IDENTIFIER
assert_eq!(serialized.len(), all_read);
if serialized.len() != all_read {
return Err(Report::msg("serialized length is not equal to all_read"));
}
(
Ok((
RLNWitnessInput {
identity_secret,
path_elements,
@@ -138,7 +139,7 @@ pub fn deserialize_witness(serialized: &[u8]) -> (RLNWitnessInput, usize) {
rln_identifier,
},
all_read,
)
))
}
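The two functions are inverses, and the length check at the end rejects trailing bytes. A roundtrip sketch:
// Sketch: witness serialization roundtrip.
fn roundtrip(rln_witness: &RLNWitnessInput) -> Result<()> {
    let bytes = serialize_witness(rln_witness)?;
    let (_recovered, read) = deserialize_witness(&bytes)?;
    assert_eq!(read, bytes.len()); // the trailing-bytes check guarantees this
    Ok(())
}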
// This function deserializes input for kilic's rln generate_proof public API
@@ -148,24 +149,28 @@ pub fn deserialize_witness(serialized: &[u8]) -> (RLNWitnessInput, usize) {
pub fn proof_inputs_to_rln_witness(
tree: &mut PoseidonTree,
serialized: &[u8],
) -> (RLNWitnessInput, usize) {
) -> Result<(RLNWitnessInput, usize)> {
let mut all_read: usize = 0;
let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let id_index = u64::from_le_bytes(serialized[all_read..all_read + 8].try_into().unwrap());
let id_index = usize::try_from(u64::from_le_bytes(
serialized[all_read..all_read + 8].try_into()?,
))?;
all_read += 8;
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let signal_len = u64::from_le_bytes(serialized[all_read..all_read + 8].try_into().unwrap());
let signal_len = usize::try_from(u64::from_le_bytes(
serialized[all_read..all_read + 8].try_into()?,
))?;
all_read += 8;
let signal: Vec<u8> = serialized[all_read..all_read + (signal_len as usize)].to_vec();
let signal: Vec<u8> = serialized[all_read..all_read + signal_len].to_vec();
let merkle_proof = tree.proof(id_index as usize).expect("proof should exist");
let merkle_proof = tree.proof(id_index).expect("proof should exist");
let path_elements = merkle_proof.get_path_elements();
let identity_path_index = merkle_proof.get_path_index();
@@ -173,7 +178,7 @@ pub fn proof_inputs_to_rln_witness(
let rln_identifier = hash_to_field(RLN_IDENTIFIER);
(
Ok((
RLNWitnessInput {
identity_secret,
path_elements,
@@ -183,45 +188,48 @@ pub fn proof_inputs_to_rln_witness(
rln_identifier,
},
all_read,
)
))
}
pub fn rln_witness_from_json(input_json_str: &str) -> RLNWitnessInput {
pub fn rln_witness_from_json(input_json_str: &str) -> Result<RLNWitnessInput> {
let input_json: serde_json::Value =
serde_json::from_str(input_json_str).expect("JSON was not well-formatted");
let identity_secret = str_to_fr(&input_json["identity_secret"].to_string(), 10);
let identity_secret = str_to_fr(&input_json["identity_secret"].to_string(), 10)?;
let path_elements = input_json["path_elements"]
.as_array()
.unwrap()
.ok_or(Report::msg("not an array"))?
.iter()
.map(|v| str_to_fr(&v.to_string(), 10))
.collect();
.collect::<Result<_>>()?;
let identity_path_index = input_json["identity_path_index"]
let identity_path_index_array = input_json["identity_path_index"]
.as_array()
.unwrap()
.ok_or(Report::msg("not an array"))?;
.iter()
.map(|v| v.as_u64().unwrap() as u8)
.collect();
let mut identity_path_index: Vec<u8> = vec![];
for v in identity_path_index_array {
identity_path_index.push(v.as_u64().ok_or(Report::msg("not a u64 value"))? as u8);
}
let x = str_to_fr(&input_json["x"].to_string(), 10);
let x = str_to_fr(&input_json["x"].to_string(), 10)?;
let epoch = str_to_fr(&input_json["epoch"].to_string(), 16);
let epoch = str_to_fr(&input_json["epoch"].to_string(), 16)?;
let rln_identifier = str_to_fr(&input_json["rln_identifier"].to_string(), 10);
let rln_identifier = str_to_fr(&input_json["rln_identifier"].to_string(), 10)?;
// TODO: check rln_identifier against public::RLN_IDENTIFIER
RLNWitnessInput {
Ok(RLNWitnessInput {
identity_secret,
path_elements,
identity_path_index,
x,
epoch,
rln_identifier,
}
})
}
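For reference, a minimal sketch of the JSON shape rln_witness_from_json expects, with placeholder values (field names follow the accesses above; epoch is parsed with radix 16, the other fields with radix 10, and a real witness carries one path element per tree level):
let input = r#"{
    "identity_secret": "12345",
    "path_elements": ["0", "0"],
    "identity_path_index": [0, 1],
    "x": "42",
    "epoch": "0x0000000000000000000000000000000000000000000000000000000000000001",
    "rln_identifier": "1"
}"#;
let witness = rln_witness_from_json(input).unwrap();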
pub fn rln_witness_from_values(
@@ -317,22 +325,22 @@ pub fn serialize_proof_values(rln_proof_values: &RLNProofValues) -> Vec<u8> {
pub fn deserialize_proof_values(serialized: &[u8]) -> (RLNProofValues, usize) {
let mut all_read: usize = 0;
let (root, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (root, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (x, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (x, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (y, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (y, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (nullifier, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (nullifier, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (rln_identifier, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
let (rln_identifier, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
(
@@ -354,29 +362,26 @@ pub fn prepare_prove_input(
epoch: Fr,
signal: &[u8],
) -> Vec<u8> {
let signal_len = u64::try_from(signal.len()).unwrap();
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret));
serialized.append(&mut id_index.to_le_bytes().to_vec());
serialized.append(&mut normalize_usize(id_index));
serialized.append(&mut fr_to_bytes_le(&epoch));
serialized.append(&mut signal_len.to_le_bytes().to_vec());
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut signal.to_vec());
return serialized;
serialized
}
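A usage sketch under the serialized layout [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ], assuming the elided leading parameters are (identity_secret: Fr, id_index: usize) and that keygen() yields (identity_secret_hash, id_commitment) as in the tests below:
let (identity_secret_hash, _id_commitment) = keygen();
let epoch = hash_to_field(b"test-epoch");
let signal = b"example signal";
let input = prepare_prove_input(identity_secret_hash, 0, epoch, signal);
// 32 (secret) + 8 (index) + 32 (epoch) + 8 (len) + the signal itself
assert_eq!(input.len(), 32 + 8 + 32 + 8 + signal.len());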
#[allow(clippy::redundant_clone)]
pub fn prepare_verify_input(proof_data: Vec<u8>, signal: &[u8]) -> Vec<u8> {
let signal_len = u64::try_from(signal.len()).unwrap();
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut proof_data.clone());
serialized.append(&mut signal_len.to_le_bytes().to_vec());
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut signal.to_vec());
return serialized;
serialized
}
///////////////////////////////////////////////////////
@@ -483,20 +488,6 @@ pub fn extended_seeded_keygen(signal: &[u8]) -> (Fr, Fr, Fr, Fr) {
)
}
// Hashes arbitrary signal to the underlying prime field
pub fn hash_to_field(signal: &[u8]) -> Fr {
// We hash the input signal using Keccak256
// (note that a bigger curve order might require a bigger hash blocksize)
let mut hash = [0; 32];
let mut hasher = Keccak::v256();
hasher.update(signal);
hasher.finalize(&mut hash);
// We export the hash as a field element
let (el, _) = bytes_le_to_fr(hash.as_ref());
el
}
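This hunk removes hash_to_field from protocol.rs; the updated test imports further down pull it from rln::hashers instead, so the contract presumably stays the same: a deterministic map from arbitrary bytes to Fr. A quick sketch:
use rln::hashers::hash_to_field;
let epoch = hash_to_field(b"test-epoch");
assert_eq!(epoch, hash_to_field(b"test-epoch")); // same input, same element
assert_ne!(epoch, hash_to_field(b"another-epoch"));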
pub fn compute_id_secret(
share1: (Fr, Fr),
share2: (Fr, Fr),
@@ -519,9 +510,9 @@ pub fn compute_id_secret(
if a_1 == computed_a_1 {
// We successfully recovered the identity secret
return Ok(a_0);
Ok(a_0)
} else {
return Err("Cannot recover identity_secret_hash from provided shares".into());
Err("Cannot recover identity_secret_hash from provided shares".into())
}
}
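compute_id_secret is Lagrange interpolation on a degree-one polynomial y = a_0 + a_1·x: the slope recovered from two shares is a_1 = (y_1 − y_2)/(x_1 − x_2), the secret is a_0 = y_1 − a_1·x_1, and the comparison above rejects inconsistent shares. A toy integer check of the same algebra (the real function works over Fr):
let ((x1, y1), (x2, y2)) = ((1i64, 8i64), (2i64, 11i64)); // shares of y = 5 + 3x
let a_1 = (y1 - y2) / (x1 - x2); // slope: 3
let a_0 = y1 - a_1 * x1; // recovered secret: 5
assert_eq!((a_0, a_1), (5, 3));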
@@ -532,33 +523,36 @@ pub fn compute_id_secret(
#[derive(Error, Debug)]
pub enum ProofError {
#[error("Error reading circuit key: {0}")]
CircuitKeyError(#[from] std::io::Error),
CircuitKeyError(#[from] Report),
#[error("Error producing witness: {0}")]
WitnessError(color_eyre::Report),
WitnessError(Report),
#[error("Error producing proof: {0}")]
SynthesisError(#[from] SynthesisError),
}
fn calculate_witness_element<E: ark_ec::PairingEngine>(witness: Vec<BigInt>) -> Result<Vec<E::Fr>> {
use ark_ff::{FpParameters, PrimeField};
let modulus = <<E::Fr as PrimeField>::Params as FpParameters>::MODULUS;
fn calculate_witness_element<E: ark_ec::pairing::Pairing>(
witness: Vec<BigInt>,
) -> Result<Vec<E::ScalarField>> {
use ark_ff::PrimeField;
let modulus = <E::ScalarField as PrimeField>::MODULUS;
// convert it to field elements
use num_traits::Signed;
let witness = witness
.into_iter()
.map(|w| {
let w = if w.sign() == num_bigint::Sign::Minus {
// Need to negate the witness element if negative
modulus.into() - w.abs().to_biguint().unwrap()
} else {
w.to_biguint().unwrap()
};
E::Fr::from(w)
})
.collect::<Vec<_>>();
let mut witness_vec = vec![];
for w in witness.into_iter() {
let w = if w.sign() == num_bigint::Sign::Minus {
// Need to negate the witness element if negative
modulus.into()
- w.abs()
.to_biguint()
.ok_or(Report::msg("not a biguint value"))?
} else {
w.to_biguint().ok_or(Report::msg("not a biguint value"))?
};
witness_vec.push(E::ScalarField::from(w))
}
Ok(witness)
Ok(witness_vec)
}
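The sign handling above maps a negative witness value w to its canonical field representative p − |w|. A toy illustration with a stand-in modulus (the real code uses E::ScalarField::MODULUS):
let p: i64 = 97; // stand-in modulus, for intuition only
let w: i64 = -3;
let repr = if w < 0 { p - w.abs() } else { w };
assert_eq!(repr, 94); // -3 ≡ 94 (mod 97)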
pub fn generate_proof_with_witness(
@@ -569,9 +563,8 @@ pub fn generate_proof_with_witness(
#[cfg(debug_assertions)]
let now = Instant::now();
let full_assignment = calculate_witness_element::<Curve>(witness)
.map_err(ProofError::WitnessError)
.unwrap();
let full_assignment =
calculate_witness_element::<Curve>(witness).map_err(ProofError::WitnessError)?;
#[cfg(debug_assertions)]
println!("witness generation took: {:.2?}", now.elapsed());
@@ -585,7 +578,7 @@ pub fn generate_proof_with_witness(
#[cfg(debug_assertions)]
let now = Instant::now();
let proof = create_proof_with_reduction_and_matrices::<_, CircomReduction>(
let proof = Groth16::<_, CircomReduction>::create_proof_with_reduction_and_matrices(
&proving_key.0,
r,
s,
@@ -593,8 +586,7 @@ pub fn generate_proof_with_witness(
proving_key.1.num_instance_variables,
proving_key.1.num_constraints,
full_assignment.as_slice(),
)
.unwrap();
)?;
#[cfg(debug_assertions)]
println!("proof generation took: {:.2?}", now.elapsed());
@@ -602,14 +594,16 @@ pub fn generate_proof_with_witness(
Ok(proof)
}
pub fn inputs_for_witness_calculation(rln_witness: &RLNWitnessInput) -> [(&str, Vec<BigInt>); 6] {
pub fn inputs_for_witness_calculation(
rln_witness: &RLNWitnessInput,
) -> Result<[(&str, Vec<BigInt>); 6]> {
// We convert the path indexes to field elements
// TODO: check if necessary
let mut path_elements = Vec::new();
rln_witness
.path_elements
.iter()
.for_each(|v| path_elements.push(to_bigint(v)));
for v in rln_witness.path_elements.iter() {
path_elements.push(to_bigint(v)?);
}
let mut identity_path_index = Vec::new();
rln_witness
@@ -617,20 +611,20 @@ pub fn inputs_for_witness_calculation(rln_witness: &RLNWitnessInput) -> [(&str,
.iter()
.for_each(|v| identity_path_index.push(BigInt::from(*v)));
[
Ok([
(
"identity_secret",
vec![to_bigint(&rln_witness.identity_secret)],
vec![to_bigint(&rln_witness.identity_secret)?],
),
("path_elements", path_elements),
("identity_path_index", identity_path_index),
("x", vec![to_bigint(&rln_witness.x)]),
("epoch", vec![to_bigint(&rln_witness.epoch)]),
("x", vec![to_bigint(&rln_witness.x)?]),
("epoch", vec![to_bigint(&rln_witness.epoch)?]),
(
"rln_identifier",
vec![to_bigint(&rln_witness.rln_identifier)],
vec![to_bigint(&rln_witness.rln_identifier)?],
),
]
])
}
/// Generates a RLN proof
@@ -644,7 +638,7 @@ pub fn generate_proof(
proving_key: &(ProvingKey<Curve>, ConstraintMatrices<Fr>),
rln_witness: &RLNWitnessInput,
) -> Result<ArkProof<Curve>, ProofError> {
let inputs = inputs_for_witness_calculation(rln_witness)
let inputs = inputs_for_witness_calculation(rln_witness)?
.into_iter()
.map(|(name, values)| (name.to_string(), values));
@@ -678,7 +672,7 @@ pub fn generate_proof(
#[cfg(debug_assertions)]
let now = Instant::now();
let proof = create_proof_with_reduction_and_matrices::<_, CircomReduction>(
let proof = Groth16::<_, CircomReduction>::create_proof_with_reduction_and_matrices(
&proving_key.0,
r,
s,
@@ -723,7 +717,7 @@ pub fn verify_proof(
#[cfg(debug_assertions)]
let now = Instant::now();
let verified = ark_verify_proof(&pvk, proof, &inputs)?;
let verified = Groth16::<_, CircomReduction>::verify_proof(&pvk, proof, &inputs)?;
#[cfg(debug_assertions)]
println!("verify took: {:.2?}", now.elapsed());
@@ -735,12 +729,12 @@ pub fn verify_proof(
///
/// Returns a JSON object containing the inputs necessary to calculate
/// the witness with CIRCOM on javascript
pub fn get_json_inputs(rln_witness: &RLNWitnessInput) -> serde_json::Value {
pub fn get_json_inputs(rln_witness: &RLNWitnessInput) -> Result<serde_json::Value> {
let mut path_elements = Vec::new();
rln_witness
.path_elements
.iter()
.for_each(|v| path_elements.push(to_bigint(v).to_str_radix(10)));
for v in rln_witness.path_elements.iter() {
path_elements.push(to_bigint(v)?.to_str_radix(10));
}
let mut identity_path_index = Vec::new();
rln_witness
@@ -749,13 +743,13 @@ pub fn get_json_inputs(rln_witness: &RLNWitnessInput) -> serde_json::Value {
.for_each(|v| identity_path_index.push(BigInt::from(*v).to_str_radix(10)));
let inputs = serde_json::json!({
"identity_secret": to_bigint(&rln_witness.identity_secret).to_str_radix(10),
"identity_secret": to_bigint(&rln_witness.identity_secret)?.to_str_radix(10),
"path_elements": path_elements,
"identity_path_index": identity_path_index,
"x": to_bigint(&rln_witness.x).to_str_radix(10),
"epoch": format!("0x{:064x}", to_bigint(&rln_witness.epoch)),
"rln_identifier": to_bigint(&rln_witness.rln_identifier).to_str_radix(10),
"x": to_bigint(&rln_witness.x)?.to_str_radix(10),
"epoch": format!("0x{:064x}", to_bigint(&rln_witness.epoch)?),
"rln_identifier": to_bigint(&rln_witness.rln_identifier)?.to_str_radix(10),
});
inputs
Ok(inputs)
}

File diff suppressed because it is too large


@@ -2,22 +2,25 @@
use crate::circuit::Fr;
use ark_ff::PrimeField;
use color_eyre::{Report, Result};
use num_bigint::{BigInt, BigUint};
use num_traits::Num;
use std::iter::Extend;
pub fn to_bigint(el: &Fr) -> BigInt {
let res: BigUint = (*el).try_into().unwrap();
res.try_into().unwrap()
pub fn to_bigint(el: &Fr) -> Result<BigInt> {
let res: BigUint = (*el).try_into()?;
Ok(res.into())
}
pub fn fr_byte_size() -> usize {
let mbs = <Fr as PrimeField>::size_in_bits();
(mbs + 64 - (mbs % 64)) / 8
let mbs = <Fr as PrimeField>::MODULUS_BIT_SIZE;
((mbs + 64 - (mbs % 64)) / 8) as usize
}
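For the BN254 scalar field used here, MODULUS_BIT_SIZE is 254, so this rounds up to the next 64-bit boundary: (254 + 64 − 254 mod 64) / 8 = 256 / 8 = 32 bytes, matching the 32-byte Fr encodings assumed throughout the serialization helpers below.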
pub fn str_to_fr(input: &str, radix: u32) -> Fr {
assert!((radix == 10) || (radix == 16));
pub fn str_to_fr(input: &str, radix: u32) -> Result<Fr> {
if !(radix == 10 || radix == 16) {
return Err(Report::msg("wrong radix"));
}
// We remove any quote present and we trim
let single_quote: char = '\"';
@@ -25,16 +28,10 @@ pub fn str_to_fr(input: &str, radix: u32) -> Fr {
input_clean = input_clean.trim().to_string();
if radix == 10 {
BigUint::from_str_radix(&input_clean, radix)
.unwrap()
.try_into()
.unwrap()
Ok(BigUint::from_str_radix(&input_clean, radix)?.try_into()?)
} else {
input_clean = input_clean.replace("0x", "");
BigUint::from_str_radix(&input_clean, radix)
.unwrap()
.try_into()
.unwrap()
Ok(BigUint::from_str_radix(&input_clean, radix)?.try_into()?)
}
}
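A usage sketch of the now-fallible str_to_fr; quotes are stripped and hex inputs may carry a 0x prefix:
let a = str_to_fr("42", 10).unwrap();
let b = str_to_fr("\"0x2a\"", 16).unwrap(); // quotes and 0x are cleaned first
assert_eq!(a, b);
assert!(str_to_fr("42", 2).is_err()); // only radix 10 and 16 are accepted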
@@ -75,99 +72,111 @@ pub fn fr_to_bytes_be(input: &Fr) -> Vec<u8> {
res
}
pub fn vec_fr_to_bytes_le(input: &[Fr]) -> Vec<u8> {
pub fn vec_fr_to_bytes_le(input: &[Fr]) -> Result<Vec<u8>> {
let mut bytes: Vec<u8> = Vec::new();
// We store the vector length
bytes.extend(u64::try_from(input.len()).unwrap().to_le_bytes().to_vec());
bytes.extend(u64::try_from(input.len())?.to_le_bytes().to_vec());
// We store each element
input.iter().for_each(|el| bytes.extend(fr_to_bytes_le(el)));
bytes
Ok(bytes)
}
pub fn vec_fr_to_bytes_be(input: &[Fr]) -> Vec<u8> {
pub fn vec_fr_to_bytes_be(input: &[Fr]) -> Result<Vec<u8>> {
let mut bytes: Vec<u8> = Vec::new();
// We store the vector length
bytes.extend(u64::try_from(input.len()).unwrap().to_be_bytes().to_vec());
bytes.extend(u64::try_from(input.len())?.to_be_bytes().to_vec());
// We store each element
input.iter().for_each(|el| bytes.extend(fr_to_bytes_be(el)));
bytes
Ok(bytes)
}
pub fn vec_u8_to_bytes_le(input: &[u8]) -> Vec<u8> {
pub fn vec_u8_to_bytes_le(input: &[u8]) -> Result<Vec<u8>> {
let mut bytes: Vec<u8> = Vec::new();
// We store the vector length
bytes.extend(u64::try_from(input.len()).unwrap().to_le_bytes().to_vec());
bytes.extend(u64::try_from(input.len())?.to_le_bytes().to_vec());
bytes.extend(input);
bytes
Ok(bytes)
}
pub fn vec_u8_to_bytes_be(input: Vec<u8>) -> Vec<u8> {
pub fn vec_u8_to_bytes_be(input: Vec<u8>) -> Result<Vec<u8>> {
let mut bytes: Vec<u8> = Vec::new();
// We store the vector length
bytes.extend(u64::try_from(input.len()).unwrap().to_be_bytes().to_vec());
bytes.extend(u64::try_from(input.len())?.to_be_bytes().to_vec());
bytes.extend(input);
bytes
Ok(bytes)
}
pub fn bytes_le_to_vec_u8(input: &[u8]) -> (Vec<u8>, usize) {
pub fn bytes_le_to_vec_u8(input: &[u8]) -> Result<(Vec<u8>, usize)> {
let mut read: usize = 0;
let len = u64::from_le_bytes(input[0..8].try_into().unwrap()) as usize;
let len = usize::try_from(u64::from_le_bytes(input[0..8].try_into()?))?;
read += 8;
let res = input[8..8 + len].to_vec();
read += res.len();
(res, read)
Ok((res, read))
}
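A round-trip sketch for the length-prefixed little-endian encoding shared by these helpers:
let data = vec![1u8, 2, 3];
let encoded = vec_u8_to_bytes_le(&data).unwrap(); // 8-byte length prefix, then payload
let (decoded, read) = bytes_le_to_vec_u8(&encoded).unwrap();
assert_eq!(decoded, data);
assert_eq!(read, 8 + data.len());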
pub fn bytes_be_to_vec_u8(input: &[u8]) -> (Vec<u8>, usize) {
pub fn bytes_be_to_vec_u8(input: &[u8]) -> Result<(Vec<u8>, usize)> {
let mut read: usize = 0;
let len = u64::from_be_bytes(input[0..8].try_into().unwrap()) as usize;
let len = usize::try_from(u64::from_be_bytes(input[0..8].try_into()?))?;
read += 8;
let res = input[8..8 + len].to_vec();
read += res.len();
(res, read)
Ok((res, read))
}
pub fn bytes_le_to_vec_fr(input: &[u8]) -> (Vec<Fr>, usize) {
pub fn bytes_le_to_vec_fr(input: &[u8]) -> Result<(Vec<Fr>, usize)> {
let mut read: usize = 0;
let mut res: Vec<Fr> = Vec::new();
let len = u64::from_le_bytes(input[0..8].try_into().unwrap()) as usize;
let len = usize::try_from(u64::from_le_bytes(input[0..8].try_into()?))?;
read += 8;
let el_size = fr_byte_size();
for i in 0..len {
let (curr_el, _) = bytes_le_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)].to_vec());
let (curr_el, _) = bytes_le_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)]);
res.push(curr_el);
read += el_size;
}
(res, read)
Ok((res, read))
}
pub fn bytes_be_to_vec_fr(input: &[u8]) -> (Vec<Fr>, usize) {
pub fn bytes_be_to_vec_fr(input: &[u8]) -> Result<(Vec<Fr>, usize)> {
let mut read: usize = 0;
let mut res: Vec<Fr> = Vec::new();
let len = u64::from_be_bytes(input[0..8].try_into().unwrap()) as usize;
let len = usize::try_from(u64::from_be_bytes(input[0..8].try_into()?))?;
read += 8;
let el_size = fr_byte_size();
for i in 0..len {
let (curr_el, _) = bytes_be_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)].to_vec());
let (curr_el, _) = bytes_be_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)]);
res.push(curr_el);
read += el_size;
}
(res, read)
Ok((res, read))
}
pub fn normalize_usize(input: usize) -> Vec<u8> {
let mut normalized_usize = input.to_le_bytes().to_vec();
normalized_usize.resize(8, 0);
normalized_usize
}
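normalize_usize is the platform-independent index encoding the callers switch to in the hunks below: it pads the little-endian bytes of a usize to exactly 8 bytes, so serialized indexes look identical on 32- and 64-bit targets. A quick check:
let bytes = normalize_usize(3);
assert_eq!(bytes.len(), 8); // fixed width regardless of the native usize size
assert_eq!(bytes, vec![3, 0, 0, 0, 0, 0, 0, 0]);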
/* Old conversion utilities between different libraries data types


@@ -3,11 +3,12 @@ mod test {
use ark_std::{rand::thread_rng, UniformRand};
use rand::Rng;
use rln::circuit::*;
use rln::ffi::*;
use rln::poseidon_hash::poseidon_hash;
use rln::ffi::{hash as ffi_hash, poseidon_hash as ffi_poseidon_hash, *};
use rln::hashers::{hash_to_field, poseidon_hash as utils_poseidon_hash, ROUND_PARAMS};
use rln::protocol::*;
use rln::public::RLN;
use rln::utils::*;
use serde_json::json;
use std::fs::File;
use std::io::Read;
use std::mem::MaybeUninit;
@@ -28,7 +29,8 @@ mod test {
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let input_config = json!({ "resource_folder": TEST_RESOURCES_FOLDER }).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
@@ -78,7 +80,7 @@ mod test {
assert!(success, "set tree call failed");
// We add leaves in a batch into the tree
let leaves_ser = vec_fr_to_bytes_le(&leaves);
let leaves_ser = vec_fr_to_bytes_le(&leaves).unwrap();
let input_buffer = &Buffer::from(leaves_ser.as_ref());
let success = init_tree_with_leaves(rln_pointer, input_buffer);
assert!(success, "init tree with leaves call failed");
@@ -96,8 +98,7 @@ mod test {
// We now delete all leaves set and check if the root corresponds to the empty tree root
// delete calls over indexes higher than no_of_leaves are ignored and will not increase self.tree.next_index
let delete_range = 2 * no_of_leaves;
for i in 0..delete_range {
for i in 0..no_of_leaves {
let success = delete_leaf(rln_pointer, i);
assert!(success, "delete leaf call failed");
}
@@ -136,7 +137,8 @@ mod test {
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
@@ -153,7 +155,7 @@ mod test {
let set_index = rng.gen_range(0..no_of_leaves) as usize;
// We add leaves in a batch into the tree
let leaves_ser = vec_fr_to_bytes_le(&leaves);
let leaves_ser = vec_fr_to_bytes_le(&leaves).unwrap();
let input_buffer = &Buffer::from(leaves_ser.as_ref());
let success = init_tree_with_leaves(rln_pointer, input_buffer);
assert!(success, "init tree with leaves call failed");
@@ -170,13 +172,13 @@ mod test {
// `init_tree_with_leaves` resets the tree to the height it was initialized with, using `set_tree`
// We add leaves in a batch starting from index 0..set_index
let leaves_m = vec_fr_to_bytes_le(&leaves[0..set_index]);
let leaves_m = vec_fr_to_bytes_le(&leaves[0..set_index]).unwrap();
let buffer = &Buffer::from(leaves_m.as_ref());
let success = init_tree_with_leaves(rln_pointer, buffer);
assert!(success, "init tree with leaves call failed");
// We add the remaining n leaves in a batch starting from index set_index
let leaves_n = vec_fr_to_bytes_le(&leaves[set_index..]);
let leaves_n = vec_fr_to_bytes_le(&leaves[set_index..]).unwrap();
let buffer = &Buffer::from(leaves_n.as_ref());
let success = set_leaves_from(rln_pointer, set_index, buffer);
assert!(success, "set leaves from call failed");
@@ -216,6 +218,71 @@ mod test {
assert_eq!(root_batch_with_init, root_single_additions);
}
#[test]
// This test is similar to the one in public.rs but it uses the RLN object as a pointer
fn test_atomic_operation_ffi() {
let tree_height = TEST_TREE_HEIGHT;
let no_of_leaves = 256;
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
// We generate a vector of random leaves
let mut leaves: Vec<Fr> = Vec::new();
let mut rng = thread_rng();
for _ in 0..no_of_leaves {
leaves.push(Fr::rand(&mut rng));
}
// We add leaves in a batch into the tree
let leaves_ser = vec_fr_to_bytes_le(&leaves).unwrap();
let input_buffer = &Buffer::from(leaves_ser.as_ref());
let success = init_tree_with_leaves(rln_pointer, input_buffer);
assert!(success, "init tree with leaves call failed");
// We get the root of the tree obtained adding leaves in batch
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
assert!(success, "get root call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (root_after_insertion, _) = bytes_le_to_fr(&result_data);
let last_leaf = leaves.last().unwrap();
let last_leaf_index = no_of_leaves - 1;
let indices = vec![last_leaf_index as u8];
let last_leaf = vec![*last_leaf];
let indices = vec_u8_to_bytes_le(&indices).unwrap();
let indices_buffer = &Buffer::from(indices.as_ref());
let leaves = vec_fr_to_bytes_le(&last_leaf).unwrap();
let leaves_buffer = &Buffer::from(leaves.as_ref());
let success = atomic_operation(
rln_pointer,
last_leaf_index as usize,
leaves_buffer,
indices_buffer,
);
assert!(success, "atomic operation call failed");
// We get the root of the tree obtained after a no-op
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
assert!(success, "get root call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (root_after_noop, _) = bytes_le_to_fr(&result_data);
assert_eq!(root_after_insertion, root_after_noop);
}
#[test]
// This test is similar to the one in public.rs but it uses the RLN object as a pointer
fn test_set_leaves_bad_index_ffi() {
@@ -233,7 +300,8 @@ mod test {
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
@@ -248,7 +316,7 @@ mod test {
let (root_empty, _) = bytes_le_to_fr(&result_data);
// We add leaves in a batch into the tree
let leaves = vec_fr_to_bytes_le(&leaves);
let leaves = vec_fr_to_bytes_le(&leaves).unwrap();
let buffer = &Buffer::from(leaves.as_ref());
let success = set_leaves_from(rln_pointer, bad_index, buffer);
assert!(!success, "set leaves from call succeeded");
@@ -273,14 +341,15 @@ mod test {
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
// generate identity
let identity_secret_hash = hash_to_field(b"test-merkle-proof");
let id_commitment = poseidon_hash(&vec![identity_secret_hash]);
let id_commitment = utils_poseidon_hash(&vec![identity_secret_hash]);
// We prepare id_commitment and we set the leaf at provided index
let leaf_ser = fr_to_bytes_le(&id_commitment);
@@ -303,71 +372,86 @@ mod test {
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (path_elements, read) = bytes_le_to_vec_fr(&result_data);
let (identity_path_index, _) = bytes_le_to_vec_u8(&result_data[read..].to_vec());
let (path_elements, read) = bytes_le_to_vec_fr(&result_data).unwrap();
let (identity_path_index, _) = bytes_le_to_vec_u8(&result_data[read..].to_vec()).unwrap();
// We check correct computation of the path and indexes
let mut expected_path_elements = vec![
str_to_fr(
"0x0000000000000000000000000000000000000000000000000000000000000000",
16,
),
)
.unwrap(),
str_to_fr(
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
16,
),
)
.unwrap(),
str_to_fr(
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
16,
),
)
.unwrap(),
str_to_fr(
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
16,
),
)
.unwrap(),
str_to_fr(
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
16,
),
)
.unwrap(),
str_to_fr(
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
16,
),
)
.unwrap(),
str_to_fr(
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
16,
),
)
.unwrap(),
str_to_fr(
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
16,
),
)
.unwrap(),
str_to_fr(
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
16,
),
)
.unwrap(),
str_to_fr(
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
16,
),
)
.unwrap(),
str_to_fr(
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
16,
),
)
.unwrap(),
str_to_fr(
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
16,
),
)
.unwrap(),
str_to_fr(
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
16,
),
)
.unwrap(),
str_to_fr(
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
16,
),
)
.unwrap(),
str_to_fr(
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
16,
),
)
.unwrap(),
];
let mut expected_identity_path_index: Vec<u8> =
@@ -379,19 +463,23 @@ mod test {
str_to_fr(
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
16,
),
)
.unwrap(),
str_to_fr(
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
16,
),
)
.unwrap(),
str_to_fr(
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
16,
),
)
.unwrap(),
str_to_fr(
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
16,
),
)
.unwrap(),
]);
expected_identity_path_index.append(&mut vec![0, 0, 0, 0]);
}
@@ -400,7 +488,8 @@ mod test {
expected_path_elements.append(&mut vec![str_to_fr(
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
16,
)]);
)
.unwrap()]);
expected_identity_path_index.append(&mut vec![0]);
}
@@ -421,7 +510,8 @@ mod test {
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
@@ -439,7 +529,7 @@ mod test {
let proof_values = proof_values_from_witness(&rln_witness);
// We prepare id_commitment and we set the leaf at provided index
let rln_witness_ser = serialize_witness(&rln_witness);
let rln_witness_ser = serialize_witness(&rln_witness).unwrap();
let input_buffer = &Buffer::from(rln_witness_ser.as_ref());
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let now = Instant::now();
@@ -483,7 +573,8 @@ mod test {
// We create a RLN instance using a resource folder path
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
@@ -526,11 +617,14 @@ mod test {
// Creating a RLN instance passing the raw data
let mut rln_pointer_raw_bytes = MaybeUninit::<*mut RLN>::uninit();
let tree_config = "".to_string();
let tree_config_buffer = &Buffer::from(tree_config.as_bytes());
let success = new_with_params(
tree_height,
circom_data,
zkey_data,
vk_data,
tree_config_buffer,
rln_pointer_raw_bytes.as_mut_ptr(),
);
assert!(success, "RLN object creation failed");
@@ -563,13 +657,14 @@ mod test {
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
// We add leaves in a batch into the tree
let leaves_ser = vec_fr_to_bytes_le(&leaves);
let leaves_ser = vec_fr_to_bytes_le(&leaves).unwrap();
let input_buffer = &Buffer::from(leaves_ser.as_ref());
let success = init_tree_with_leaves(rln_pointer, input_buffer);
assert!(success, "init tree with leaves call failed");
@@ -589,12 +684,11 @@ mod test {
let success = set_next_leaf(rln_pointer, input_buffer);
assert!(success, "set next leaf call failed");
let identity_index: u64 = no_of_leaves;
let identity_index: usize = no_of_leaves;
// We generate a random signal
let mut rng = rand::thread_rng();
let signal: [u8; 32] = rng.gen();
let signal_len = u64::try_from(signal.len()).unwrap();
// We generate a random epoch
let epoch = hash_to_field(b"test-epoch");
@@ -603,9 +697,9 @@ mod test {
// input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret_hash));
serialized.append(&mut identity_index.to_le_bytes().to_vec());
serialized.append(&mut normalize_usize(identity_index));
serialized.append(&mut fr_to_bytes_le(&epoch));
serialized.append(&mut signal_len.to_le_bytes().to_vec());
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut signal.to_vec());
// We call generate_rln_proof
@@ -620,7 +714,7 @@ mod test {
// We prepare input for verify_rln_proof API
// input_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
// that is [ proof_data | signal_len<8> | signal<var> ]
proof_data.append(&mut signal_len.to_le_bytes().to_vec());
proof_data.append(&mut normalize_usize(signal.len()));
proof_data.append(&mut signal.to_vec());
// We call verify_rln_proof
@@ -648,13 +742,14 @@ mod test {
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
// We add leaves in a batch into the tree
let leaves_ser = vec_fr_to_bytes_le(&leaves);
let leaves_ser = vec_fr_to_bytes_le(&leaves).unwrap();
let input_buffer = &Buffer::from(leaves_ser.as_ref());
let success = init_tree_with_leaves(rln_pointer, input_buffer);
assert!(success, "set leaves call failed");
@@ -674,12 +769,11 @@ mod test {
let success = set_next_leaf(rln_pointer, input_buffer);
assert!(success, "set next leaf call failed");
let identity_index: u64 = no_of_leaves;
let identity_index: usize = no_of_leaves;
// We generate a random signal
let mut rng = rand::thread_rng();
let signal: [u8; 32] = rng.gen();
let signal_len = u64::try_from(signal.len()).unwrap();
// We generate a random epoch
let epoch = hash_to_field(b"test-epoch");
@@ -688,9 +782,9 @@ mod test {
// input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret_hash));
serialized.append(&mut identity_index.to_le_bytes().to_vec());
serialized.append(&mut normalize_usize(identity_index));
serialized.append(&mut fr_to_bytes_le(&epoch));
serialized.append(&mut signal_len.to_le_bytes().to_vec());
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut signal.to_vec());
// We call generate_rln_proof
@@ -705,7 +799,7 @@ mod test {
// We prepare input for verify_rln_proof API
// input_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
// that is [ proof_data | signal_len<8> | signal<var> ]
proof_data.append(&mut signal_len.to_le_bytes().to_vec());
proof_data.append(&mut normalize_usize(signal.len()));
proof_data.append(&mut signal.to_vec());
// We test verify_with_roots
@@ -767,7 +861,8 @@ mod test {
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
@@ -787,18 +882,16 @@ mod test {
let success = set_next_leaf(rln_pointer, input_buffer);
assert!(success, "set next leaf call failed");
let identity_index: u64 = 0;
let identity_index: usize = 0;
// We generate two proofs using the same epoch but different signals.
// We generate two random signals
let mut rng = rand::thread_rng();
let signal1: [u8; 32] = rng.gen();
let signal1_len = u64::try_from(signal1.len()).unwrap();
// We generate two random signals
let signal2: [u8; 32] = rng.gen();
let signal2_len = u64::try_from(signal2.len()).unwrap();
// We generate a random epoch
let epoch = hash_to_field(b"test-epoch");
@@ -807,18 +900,18 @@ mod test {
// input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
let mut serialized1: Vec<u8> = Vec::new();
serialized1.append(&mut fr_to_bytes_le(&identity_secret_hash));
serialized1.append(&mut identity_index.to_le_bytes().to_vec());
serialized1.append(&mut normalize_usize(identity_index));
serialized1.append(&mut fr_to_bytes_le(&epoch));
// The first part is the same for both proof input, so we clone
let mut serialized2 = serialized1.clone();
// We attach the first signal to the first proof input
serialized1.append(&mut signal1_len.to_le_bytes().to_vec());
serialized1.append(&mut normalize_usize(signal1.len()));
serialized1.append(&mut signal1.to_vec());
// We attach the second signal to the first proof input
serialized2.append(&mut signal2_len.to_le_bytes().to_vec());
serialized2.append(&mut normalize_usize(signal2.len()));
serialized2.append(&mut signal2.to_vec());
// We call generate_rln_proof for first proof values
@@ -877,20 +970,19 @@ mod test {
let success = set_next_leaf(rln_pointer, input_buffer);
assert!(success, "set next leaf call failed");
let identity_index_new: u64 = 1;
let identity_index_new: usize = 1;
// We generate a random signal
let signal3: [u8; 32] = rng.gen();
let signal3_len = u64::try_from(signal3.len()).unwrap();
// We prepare input for generate_rln_proof API
// input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
// Note that epoch is the same as before
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret_hash_new));
serialized.append(&mut identity_index_new.to_le_bytes().to_vec());
serialized.append(&mut normalize_usize(identity_index_new));
serialized.append(&mut fr_to_bytes_le(&epoch));
serialized.append(&mut signal3_len.to_le_bytes().to_vec());
serialized.append(&mut normalize_usize(signal3.len()));
serialized.append(&mut signal3.to_vec());
// We call generate_rln_proof
@@ -929,7 +1021,8 @@ mod test {
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
@@ -957,19 +1050,19 @@ mod test {
assert_eq!(
identity_secret_hash,
expected_identity_secret_hash_seed_bytes
expected_identity_secret_hash_seed_bytes.unwrap()
);
assert_eq!(id_commitment, expected_id_commitment_seed_bytes);
assert_eq!(id_commitment, expected_id_commitment_seed_bytes.unwrap());
}
#[test]
// Tests hash to field using FFI APIs
fn test_seeded_extended_keygen_ffi() {
let tree_height = TEST_TREE_HEIGHT;
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
@@ -1004,34 +1097,31 @@ mod test {
16,
);
assert_eq!(identity_trapdoor, expected_identity_trapdoor_seed_bytes);
assert_eq!(identity_nullifier, expected_identity_nullifier_seed_bytes);
assert_eq!(
identity_trapdoor,
expected_identity_trapdoor_seed_bytes.unwrap()
);
assert_eq!(
identity_nullifier,
expected_identity_nullifier_seed_bytes.unwrap()
);
assert_eq!(
identity_secret_hash,
expected_identity_secret_hash_seed_bytes
expected_identity_secret_hash_seed_bytes.unwrap()
);
assert_eq!(id_commitment, expected_id_commitment_seed_bytes);
assert_eq!(id_commitment, expected_id_commitment_seed_bytes.unwrap());
}
#[test]
// Tests hash to field using FFI APIs
fn test_hash_to_field_ffi() {
let tree_height = TEST_TREE_HEIGHT;
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
let mut rng = rand::thread_rng();
let signal: [u8; 32] = rng.gen();
// We prepare id_commitment and we set the leaf at provided index
let input_buffer = &Buffer::from(signal.as_ref());
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = hash(rln_pointer, input_buffer, output_buffer.as_mut_ptr());
let success = ffi_hash(input_buffer, output_buffer.as_mut_ptr());
assert!(success, "hash call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
@@ -1043,4 +1133,103 @@ mod test {
assert_eq!(hash1, hash2);
}
#[test]
// Test Poseidon hash FFI
fn test_poseidon_hash_ffi() {
// generate random number between 1..ROUND_PARAMS.len()
let mut rng = thread_rng();
let number_of_inputs = rng.gen_range(1..ROUND_PARAMS.len());
let mut inputs = Vec::with_capacity(number_of_inputs);
for _ in 0..number_of_inputs {
inputs.push(Fr::rand(&mut rng));
}
let inputs_ser = vec_fr_to_bytes_le(&inputs).unwrap();
let input_buffer = &Buffer::from(inputs_ser.as_ref());
let expected_hash = utils_poseidon_hash(inputs.as_ref());
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = ffi_poseidon_hash(input_buffer, output_buffer.as_mut_ptr());
assert!(success, "poseidon hash call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (received_hash, _) = bytes_le_to_fr(&result_data);
assert_eq!(received_hash, expected_hash);
}
#[test]
fn test_get_leaf() {
// We create a RLN instance
let tree_height = TEST_TREE_HEIGHT;
let no_of_leaves = 1 << TEST_TREE_HEIGHT;
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
// We generate a new identity tuple from an input seed
let seed_bytes: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
let input_buffer = &Buffer::from(seed_bytes);
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success =
seeded_extended_key_gen(rln_pointer, input_buffer, output_buffer.as_mut_ptr());
assert!(success, "seeded key gen call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (_, _, _, id_commitment) = deserialize_identity_tuple(result_data);
// We insert the id_commitment into the tree at a random index
let mut rng = thread_rng();
let index = rng.gen_range(0..no_of_leaves) as usize;
let leaf = fr_to_bytes_le(&id_commitment);
let input_buffer = &Buffer::from(leaf.as_ref());
let success = set_leaf(rln_pointer, index, input_buffer);
assert!(success, "set leaf call failed");
// We get the leaf at the same index
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = get_leaf(rln_pointer, index, output_buffer.as_mut_ptr());
assert!(success, "get leaf call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (received_id_commitment, _) = bytes_le_to_fr(&result_data);
// We check that the received id_commitment is the same as the one we inserted
assert_eq!(received_id_commitment, id_commitment);
}
#[test]
fn test_metadata() {
// We create a RLN instance
let tree_height = TEST_TREE_HEIGHT;
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
let seed_bytes: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
let input_buffer = &Buffer::from(seed_bytes);
let success = set_metadata(rln_pointer, input_buffer);
assert!(success, "set_metadata call failed");
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = get_metadata(rln_pointer, output_buffer.as_mut_ptr());
assert!(success, "get_metadata call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
assert_eq!(result_data, seed_bytes.to_vec());
}
}


@@ -5,8 +5,8 @@
#[cfg(test)]
mod test {
use rln::circuit::*;
use rln::poseidon_tree::*;
use utils::{FullMerkleTree, OptimalMerkleTree};
use rln::hashers::PoseidonHash;
use utils::{FullMerkleTree, OptimalMerkleTree, ZerokitMerkleProof, ZerokitMerkleTree};
#[test]
/// A basic performance comparison between the two supported Merkle Tree implementations
@@ -25,16 +25,16 @@ mod test {
for _ in 0..sample_size.try_into().unwrap() {
let now = Instant::now();
FullMerkleTree::<PoseidonHash>::default(tree_height);
FullMerkleTree::<PoseidonHash>::default(tree_height).unwrap();
gen_time_full += now.elapsed().as_nanos();
let now = Instant::now();
OptimalMerkleTree::<PoseidonHash>::default(tree_height);
OptimalMerkleTree::<PoseidonHash>::default(tree_height).unwrap();
gen_time_opt += now.elapsed().as_nanos();
}
let mut tree_full = FullMerkleTree::<PoseidonHash>::default(tree_height);
let mut tree_opt = OptimalMerkleTree::<PoseidonHash>::default(tree_height);
let mut tree_full = FullMerkleTree::<PoseidonHash>::default(tree_height).unwrap();
let mut tree_opt = OptimalMerkleTree::<PoseidonHash>::default(tree_height).unwrap();
for i in 0..sample_size.try_into().unwrap() {
let now = Instant::now();
@@ -78,495 +78,3 @@ mod test {
);
}
}
// Test module for testing pmtree integration and features in zerokit
// enabled only if the pmtree feature is enabled
#[cfg(feature = "pmtree")]
#[cfg(test)]
mod pmtree_test {
use pmtree::*;
use rln::circuit::Fr;
use rln::poseidon_hash::poseidon_hash;
use rln::poseidon_tree::PoseidonHash;
use rln::protocol::hash_to_field;
use rln::utils::{bytes_le_to_fr, fr_to_bytes_le, str_to_fr};
use sled::Db as Sled;
use std::collections::HashMap;
use std::fs;
use std::path::Path;
use utils::{FullMerkleTree, OptimalMerkleTree};
// The pmtree Hasher trait used by pmtree Merkle tree
impl pmtree::Hasher for PoseidonHash {
type Fr = Fr;
fn default_leaf() -> Self::Fr {
Fr::from(0)
}
fn serialize(value: Self::Fr) -> Value {
fr_to_bytes_le(&value)
}
fn deserialize(value: Value) -> Self::Fr {
let (fr, _) = bytes_le_to_fr(&value);
fr
}
fn hash(inputs: &[Self::Fr]) -> Self::Fr {
poseidon_hash(inputs)
}
}
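A small round-trip sketch for the Hasher impl above, assuming the 32-byte little-endian Fr encoding used throughout the crate:
let value = Fr::from(7u64);
let bytes = PoseidonHash::serialize(value);
assert_eq!(bytes.len(), 32);
assert_eq!(PoseidonHash::deserialize(bytes), value);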
// pmtree supports in-memory and on-disk databases (Database trait) for storing the Merkle tree state
// We implement Database for hashmaps, an in-memory database
struct MemoryDB(HashMap<DBKey, Value>);
impl Database for MemoryDB {
fn new(_dbpath: &str) -> Result<Self> {
Ok(MemoryDB(HashMap::new()))
}
fn load(_dbpath: &str) -> Result<Self> {
Err(Error("Cannot load in-memory DB".to_string()))
}
fn get(&self, key: DBKey) -> Result<Option<Value>> {
Ok(self.0.get(&key).cloned())
}
fn put(&mut self, key: DBKey, value: Value) -> Result<()> {
self.0.insert(key, value);
Ok(())
}
}
// We implement Database for sled DB, an on-disk database
struct SledDB(Sled);
impl Database for SledDB {
fn new(dbpath: &str) -> Result<Self> {
if Path::new(dbpath).exists() {
match fs::remove_dir_all(dbpath) {
Ok(x) => x,
Err(e) => return Err(Error(e.to_string())),
}
}
let db: Sled = match sled::open(dbpath) {
Ok(db) => db,
Err(e) => return Err(Error(e.to_string())),
};
Ok(SledDB(db))
}
fn load(dbpath: &str) -> Result<Self> {
let db: Sled = match sled::open(dbpath) {
Ok(db) => db,
Err(e) => return Err(Error(e.to_string())),
};
if !db.was_recovered() {
return Err(Error("Trying to load non-existing database!".to_string()));
}
Ok(SledDB(db))
}
fn get(&self, key: DBKey) -> Result<Option<Value>> {
match self.0.get(key) {
Ok(value) => Ok(value.map(|val| val.to_vec())),
Err(e) => Err(Error(e.to_string())),
}
}
fn put(&mut self, key: DBKey, value: Value) -> Result<()> {
match self.0.insert(key, value) {
Ok(_) => Ok(()),
Err(e) => Err(Error(e.to_string())),
}
}
}
#[test]
/// A basic performance comparison between the two supported Merkle Tree implementations and in-memory/on-disk pmtree implementations
fn test_zerokit_and_pmtree_merkle_implementations_performances() {
use std::time::{Duration, Instant};
let tree_height = 20;
let sample_size = 100;
let leaves: Vec<Fr> = (0..sample_size).map(|s| Fr::from(s)).collect();
let mut gen_time_full: u128 = 0;
let mut upd_time_full: u128 = 0;
let mut gen_time_opt: u128 = 0;
let mut upd_time_opt: u128 = 0;
let mut gen_time_pm_memory: u128 = 0;
let mut upd_time_pm_memory: u128 = 0;
let mut gen_time_pm_sled: u128 = 0;
let mut upd_time_pm_sled: u128 = 0;
for _ in 0..sample_size.try_into().unwrap() {
let now = Instant::now();
FullMerkleTree::<PoseidonHash>::default(tree_height);
gen_time_full += now.elapsed().as_nanos();
let now = Instant::now();
OptimalMerkleTree::<PoseidonHash>::default(tree_height);
gen_time_opt += now.elapsed().as_nanos();
let now = Instant::now();
pmtree::MerkleTree::<MemoryDB, PoseidonHash>::default(tree_height).unwrap();
gen_time_pm_memory += now.elapsed().as_nanos();
let now = Instant::now();
pmtree::MerkleTree::<SledDB, PoseidonHash>::default(tree_height).unwrap();
gen_time_pm_sled += now.elapsed().as_nanos();
}
let mut tree_full = FullMerkleTree::<PoseidonHash>::default(tree_height);
let mut tree_opt = OptimalMerkleTree::<PoseidonHash>::default(tree_height);
let mut tree_pm_memory =
pmtree::MerkleTree::<MemoryDB, PoseidonHash>::default(tree_height).unwrap();
let mut tree_pm_sled =
pmtree::MerkleTree::<SledDB, PoseidonHash>::default(tree_height).unwrap();
for i in 0..sample_size.try_into().unwrap() {
let now = Instant::now();
tree_full.set(i, leaves[i]).unwrap();
upd_time_full += now.elapsed().as_nanos();
let proof = tree_full.proof(i).expect("index should be set");
assert_eq!(proof.leaf_index(), i);
let now = Instant::now();
tree_opt.set(i, leaves[i]).unwrap();
upd_time_opt += now.elapsed().as_nanos();
let proof = tree_opt.proof(i).expect("index should be set");
assert_eq!(proof.leaf_index(), i);
let now = Instant::now();
tree_pm_memory.set(i, leaves[i]).unwrap();
upd_time_pm_memory += now.elapsed().as_nanos();
let proof = tree_pm_memory.proof(i).expect("index should be set");
assert_eq!(proof.leaf_index(), i);
let now = Instant::now();
tree_pm_sled.set(i, leaves[i]).unwrap();
upd_time_pm_sled += now.elapsed().as_nanos();
let proof = tree_pm_sled.proof(i).expect("index should be set");
assert_eq!(proof.leaf_index(), i);
}
// We check all roots are the same
let tree_full_root = tree_full.root();
let tree_opt_root = tree_opt.root();
let tree_pm_memory_root = tree_pm_memory.root();
let tree_pm_sled_root = tree_pm_sled.root();
assert_eq!(tree_full_root, tree_opt_root);
assert_eq!(tree_opt_root, tree_pm_memory_root);
assert_eq!(tree_pm_memory_root, tree_pm_sled_root);
println!(" Average tree generation time:");
println!(
" - Full Merkle Tree: {:?}",
Duration::from_nanos((gen_time_full / sample_size).try_into().unwrap())
);
println!(
" - Optimal Merkle Tree: {:?}",
Duration::from_nanos((gen_time_opt / sample_size).try_into().unwrap())
);
println!(
" - Pmtree-HashMap Merkle Tree: {:?}",
Duration::from_nanos((gen_time_pm_memory / sample_size).try_into().unwrap())
);
println!(
" - Pmtree-Sled Merkle Tree: {:?}",
Duration::from_nanos((gen_time_pm_sled / sample_size).try_into().unwrap())
);
println!(" Average update_next execution time:");
println!(
" - Full Merkle Tree: {:?}",
Duration::from_nanos((upd_time_full / sample_size).try_into().unwrap())
);
println!(
" - Optimal Merkle Tree: {:?}",
Duration::from_nanos((upd_time_opt / sample_size).try_into().unwrap())
);
println!(
" - Pmtree-HashMap Merkle Tree: {:?}",
Duration::from_nanos((upd_time_pm_memory / sample_size).try_into().unwrap())
);
println!(
" - Pmtree-Sled Merkle Tree: {:?}",
Duration::from_nanos((upd_time_pm_sled / sample_size).try_into().unwrap())
);
}
// The following two tests contain values that come from public::test_merkle_proof test
// We check that pmtree and zerokit Merkle tree implementations match.
#[test]
fn test_pmtree_hashmap() -> Result<()> {
let tree_height = 20;
let mut tree = pmtree::MerkleTree::<MemoryDB, PoseidonHash>::default(tree_height).unwrap();
let leaf_index = 3;
let identity_secret = hash_to_field(b"test-merkle-proof");
let id_commitment = poseidon_hash(&[identity_secret]);
// let default_leaf = Fr::from(0);
tree.set(leaf_index, id_commitment).unwrap();
// We check correct computation of the root
let root = tree.root();
assert_eq!(
root,
str_to_fr(
"0x21947ffd0bce0c385f876e7c97d6a42eec5b1fe935aab2f01c1f8a8cbcc356d2",
16
)
);
let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
let path_elements = merkle_proof.get_path_elements();
let identity_path_index = merkle_proof.get_path_index();
// We check correct computation of the path and indexes
// These values refer to tree height = 20
let expected_path_elements = vec![
str_to_fr(
"0x0000000000000000000000000000000000000000000000000000000000000000",
16,
),
str_to_fr(
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
16,
),
str_to_fr(
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
16,
),
str_to_fr(
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
16,
),
str_to_fr(
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
16,
),
str_to_fr(
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
16,
),
str_to_fr(
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
16,
),
str_to_fr(
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
16,
),
str_to_fr(
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
16,
),
str_to_fr(
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
16,
),
str_to_fr(
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
16,
),
str_to_fr(
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
16,
),
str_to_fr(
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
16,
),
str_to_fr(
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
16,
),
str_to_fr(
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
16,
),
str_to_fr(
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
16,
),
str_to_fr(
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
16,
),
str_to_fr(
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
16,
),
str_to_fr(
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
16,
),
str_to_fr(
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
16,
),
];
let expected_identity_path_index: Vec<u8> =
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
assert_eq!(path_elements, expected_path_elements);
assert_eq!(identity_path_index, expected_identity_path_index);
// We check correct verification of the proof
assert!(tree.verify(&id_commitment, &merkle_proof));
Ok(())
}
#[test]
fn test_pmtree_sled() -> Result<()> {
let tree_height = 20;
let mut tree = pmtree::MerkleTree::<SledDB, PoseidonHash>::default(tree_height).unwrap();
let leaf_index = 3;
let identity_secret = hash_to_field(b"test-merkle-proof");
let id_commitment = poseidon_hash(&[identity_secret]);
// let default_leaf = Fr::from(0);
tree.set(leaf_index, id_commitment).unwrap();
// We check correct computation of the root
let root = tree.root();
assert_eq!(
root,
str_to_fr(
"0x21947ffd0bce0c385f876e7c97d6a42eec5b1fe935aab2f01c1f8a8cbcc356d2",
16
)
);
let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
let path_elements = merkle_proof.get_path_elements();
let identity_path_index = merkle_proof.get_path_index();
// We check correct computation of the path and indexes
// These values refer to tree height = 20
let expected_path_elements = vec![
str_to_fr(
"0x0000000000000000000000000000000000000000000000000000000000000000",
16,
),
str_to_fr(
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
16,
),
str_to_fr(
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
16,
),
str_to_fr(
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
16,
),
str_to_fr(
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
16,
),
str_to_fr(
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
16,
),
str_to_fr(
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
16,
),
str_to_fr(
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
16,
),
str_to_fr(
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
16,
),
str_to_fr(
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
16,
),
str_to_fr(
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
16,
),
str_to_fr(
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
16,
),
str_to_fr(
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
16,
),
str_to_fr(
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
16,
),
str_to_fr(
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
16,
),
str_to_fr(
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
16,
),
str_to_fr(
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
16,
),
str_to_fr(
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
16,
),
str_to_fr(
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
16,
),
str_to_fr(
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
16,
),
];
let expected_identity_path_index: Vec<u8> =
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
assert_eq!(path_elements, expected_path_elements);
assert_eq!(identity_path_index, expected_identity_path_index);
// We check correct verification of the proof
assert!(tree.verify(&id_commitment, &merkle_proof));
Ok(())
}
}


@@ -4,10 +4,13 @@ mod test {
circom_from_folder, vk_from_folder, zkey_from_folder, Fr, TEST_RESOURCES_FOLDER,
TEST_TREE_HEIGHT,
};
use rln::poseidon_hash::poseidon_hash;
use rln::hashers::{hash_to_field, poseidon_hash};
use rln::poseidon_tree::PoseidonTree;
use rln::protocol::*;
use rln::utils::str_to_fr;
use utils::{ZerokitMerkleProof, ZerokitMerkleTree};
type ConfigOf<T> = <T as ZerokitMerkleTree>::Config;
// Input generated with https://github.com/oskarth/zk-kit/commit/b6a872f7160c7c14e10a0ea40acab99cbb23c9a8
const WITNESS_JSON_15: &str = r#"
@@ -171,7 +174,12 @@ mod test {
// generate merkle tree
let default_leaf = Fr::from(0);
let mut tree = PoseidonTree::new(tree_height, default_leaf);
let mut tree = PoseidonTree::new(
tree_height,
default_leaf,
ConfigOf::<PoseidonTree>::default(),
)
.unwrap();
tree.set(leaf_index, id_commitment.into()).unwrap();
// We check correct computation of the root
@@ -184,6 +192,7 @@ mod test {
"0x1984f2e01184aef5cb974640898a5f5c25556554e2b06d99d4841badb8b198cd",
16
)
.unwrap()
);
} else if TEST_TREE_HEIGHT == 19 {
assert_eq!(
@@ -192,6 +201,7 @@ mod test {
"0x219ceb53f2b1b7a6cf74e80d50d44d68ecb4a53c6cc65b25593c8d56343fb1fe",
16
)
.unwrap()
);
} else if TEST_TREE_HEIGHT == 20 {
assert_eq!(
@@ -200,6 +210,7 @@ mod test {
"0x21947ffd0bce0c385f876e7c97d6a42eec5b1fe935aab2f01c1f8a8cbcc356d2",
16
)
.unwrap()
);
}
@@ -213,63 +224,78 @@ mod test {
str_to_fr(
"0x0000000000000000000000000000000000000000000000000000000000000000",
16,
),
)
.unwrap(),
str_to_fr(
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
16,
),
)
.unwrap(),
str_to_fr(
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
16,
),
)
.unwrap(),
str_to_fr(
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
16,
),
)
.unwrap(),
str_to_fr(
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
16,
),
)
.unwrap(),
str_to_fr(
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
16,
),
)
.unwrap(),
str_to_fr(
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
16,
),
)
.unwrap(),
str_to_fr(
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
16,
),
)
.unwrap(),
str_to_fr(
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
16,
),
)
.unwrap(),
str_to_fr(
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
16,
),
)
.unwrap(),
str_to_fr(
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
16,
),
)
.unwrap(),
str_to_fr(
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
16,
),
)
.unwrap(),
str_to_fr(
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
16,
),
)
.unwrap(),
str_to_fr(
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
16,
),
)
.unwrap(),
str_to_fr(
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
16,
),
)
.unwrap(),
];
let mut expected_identity_path_index: Vec<u8> =
@@ -281,19 +307,23 @@ mod test {
str_to_fr(
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
16,
),
)
.unwrap(),
str_to_fr(
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
16,
),
)
.unwrap(),
str_to_fr(
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
16,
),
)
.unwrap(),
str_to_fr(
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
16,
),
)
.unwrap(),
]);
expected_identity_path_index.append(&mut vec![0, 0, 0, 0]);
}
@@ -302,7 +332,8 @@ mod test {
expected_path_elements.append(&mut vec![str_to_fr(
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
16,
)]);
)
.unwrap()]);
expected_identity_path_index.append(&mut vec![0]);
}
@@ -319,7 +350,7 @@ mod test {
// We generate all relevant keys
let proving_key = zkey_from_folder(TEST_RESOURCES_FOLDER).unwrap();
let verification_key = vk_from_folder(TEST_RESOURCES_FOLDER).unwrap();
let builder = circom_from_folder(TEST_RESOURCES_FOLDER);
let builder = circom_from_folder(TEST_RESOURCES_FOLDER).unwrap();
// We compute witness from the json input example
let mut witness_json: &str = "";
@@ -334,10 +365,12 @@ mod test {
let rln_witness = rln_witness_from_json(witness_json);
// Let's generate a zkSNARK proof
let proof = generate_proof(builder, &proving_key, &rln_witness).unwrap();
let rln_witness_unwrapped = rln_witness.unwrap();
let proof_values = proof_values_from_witness(&rln_witness);
// Let's generate a zkSNARK proof
let proof = generate_proof(builder, &proving_key, &rln_witness_unwrapped).unwrap();
let proof_values = proof_values_from_witness(&rln_witness_unwrapped);
// Let's verify the proof
let verified = verify_proof(&verification_key, &proof, &proof_values);
@@ -356,7 +389,12 @@ mod test {
//// generate merkle tree
let default_leaf = Fr::from(0);
let mut tree = PoseidonTree::new(tree_height, default_leaf);
let mut tree = PoseidonTree::new(
tree_height,
default_leaf,
ConfigOf::<PoseidonTree>::default(),
)
.unwrap();
tree.set(leaf_index, id_commitment.into()).unwrap();
let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
@@ -378,7 +416,7 @@ mod test {
// We generate all relevant keys
let proving_key = zkey_from_folder(TEST_RESOURCES_FOLDER).unwrap();
let verification_key = vk_from_folder(TEST_RESOURCES_FOLDER).unwrap();
let builder = circom_from_folder(TEST_RESOURCES_FOLDER);
let builder = circom_from_folder(TEST_RESOURCES_FOLDER).unwrap();
// Let's generate a zkSNARK proof
let proof = generate_proof(builder, &proving_key, &rln_witness).unwrap();
@@ -404,10 +442,10 @@ mod test {
witness_json = WITNESS_JSON_20;
}
let rln_witness = rln_witness_from_json(witness_json);
let rln_witness = rln_witness_from_json(witness_json).unwrap();
let ser = serialize_witness(&rln_witness);
let (deser, _) = deserialize_witness(&ser);
let ser = serialize_witness(&rln_witness).unwrap();
let (deser, _) = deserialize_witness(&ser).unwrap();
assert_eq!(rln_witness, deser);
// We test Proof values serialization
@@ -429,11 +467,13 @@ mod test {
let expected_identity_secret_hash_seed_phrase = str_to_fr(
"0x20df38f3f00496f19fe7c6535492543b21798ed7cb91aebe4af8012db884eda3",
16,
);
)
.unwrap();
let expected_id_commitment_seed_phrase = str_to_fr(
"0x1223a78a5d66043a7f9863e14507dc80720a5602b2a894923e5b5147d5a9c325",
16,
);
)
.unwrap();
assert_eq!(
identity_secret_hash,
@@ -449,11 +489,13 @@ mod test {
let expected_identity_secret_hash_seed_bytes = str_to_fr(
"0x766ce6c7e7a01bdf5b3f257616f603918c30946fa23480f2859c597817e6716",
16,
);
)
.unwrap();
let expected_id_commitment_seed_bytes = str_to_fr(
"0xbf16d2b5c0d6f9d9d561e05bfca16a81b4b873bb063508fae360d8c74cef51f",
16,
);
)
.unwrap();
assert_eq!(
identity_secret_hash,

View File

@@ -1,11 +1,13 @@
#[cfg(test)]
mod test {
use ark_std::{rand::thread_rng, UniformRand};
use rand::Rng;
use rln::circuit::{TEST_RESOURCES_FOLDER, TEST_TREE_HEIGHT};
use rln::poseidon_hash::poseidon_hash;
use rln::protocol::{compute_tree_root, deserialize_identity_tuple, hash_to_field};
use rln::public::RLN;
use rln::circuit::{Fr, TEST_RESOURCES_FOLDER, TEST_TREE_HEIGHT};
use rln::hashers::{hash_to_field, poseidon_hash as utils_poseidon_hash, ROUND_PARAMS};
use rln::protocol::{compute_tree_root, deserialize_identity_tuple};
use rln::public::{hash as public_hash, poseidon_hash as public_poseidon_hash, RLN};
use rln::utils::*;
use serde_json::json;
use std::io::Cursor;
#[test]
@@ -14,12 +16,13 @@ mod test {
let tree_height = TEST_TREE_HEIGHT;
let leaf_index = 3;
let input_buffer = Cursor::new(TEST_RESOURCES_FOLDER);
let mut rln = RLN::new(tree_height, input_buffer);
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
// generate identity
let identity_secret_hash = hash_to_field(b"test-merkle-proof");
let id_commitment = poseidon_hash(&vec![identity_secret_hash]);
let id_commitment = utils_poseidon_hash(&vec![identity_secret_hash]);
// We pass id_commitment as Read buffer to RLN's set_leaf
let mut buffer = Cursor::new(fr_to_bytes_le(&id_commitment));
@@ -37,6 +40,7 @@ mod test {
"0x1984f2e01184aef5cb974640898a5f5c25556554e2b06d99d4841badb8b198cd",
16
)
.unwrap()
);
} else if TEST_TREE_HEIGHT == 19 {
assert_eq!(
@@ -45,6 +49,7 @@ mod test {
"0x219ceb53f2b1b7a6cf74e80d50d44d68ecb4a53c6cc65b25593c8d56343fb1fe",
16
)
.unwrap()
);
} else if TEST_TREE_HEIGHT == 20 {
assert_eq!(
@@ -53,6 +58,7 @@ mod test {
"0x21947ffd0bce0c385f876e7c97d6a42eec5b1fe935aab2f01c1f8a8cbcc356d2",
16
)
.unwrap()
);
}
@@ -61,71 +67,86 @@ mod test {
rln.get_proof(leaf_index, &mut buffer).unwrap();
let buffer_inner = buffer.into_inner();
let (path_elements, read) = bytes_le_to_vec_fr(&buffer_inner);
let (identity_path_index, _) = bytes_le_to_vec_u8(&buffer_inner[read..].to_vec());
let (path_elements, read) = bytes_le_to_vec_fr(&buffer_inner).unwrap();
let (identity_path_index, _) = bytes_le_to_vec_u8(&buffer_inner[read..].to_vec()).unwrap();
// We check correct computation of the path and indexes
let mut expected_path_elements = vec![
str_to_fr(
"0x0000000000000000000000000000000000000000000000000000000000000000",
16,
),
)
.unwrap(),
str_to_fr(
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
16,
),
)
.unwrap(),
str_to_fr(
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
16,
),
)
.unwrap(),
str_to_fr(
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
16,
),
)
.unwrap(),
str_to_fr(
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
16,
),
)
.unwrap(),
str_to_fr(
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
16,
),
)
.unwrap(),
str_to_fr(
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
16,
),
)
.unwrap(),
str_to_fr(
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
16,
),
)
.unwrap(),
str_to_fr(
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
16,
),
)
.unwrap(),
str_to_fr(
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
16,
),
)
.unwrap(),
str_to_fr(
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
16,
),
)
.unwrap(),
str_to_fr(
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
16,
),
)
.unwrap(),
str_to_fr(
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
16,
),
)
.unwrap(),
str_to_fr(
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
16,
),
)
.unwrap(),
str_to_fr(
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
16,
),
)
.unwrap(),
];
let mut expected_identity_path_index: Vec<u8> =
@@ -137,19 +158,23 @@ mod test {
str_to_fr(
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
16,
),
)
.unwrap(),
str_to_fr(
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
16,
),
)
.unwrap(),
str_to_fr(
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
16,
),
)
.unwrap(),
str_to_fr(
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
16,
),
)
.unwrap(),
]);
expected_identity_path_index.append(&mut vec![0, 0, 0, 0]);
}
@@ -158,7 +183,8 @@ mod test {
expected_path_elements.append(&mut vec![str_to_fr(
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
16,
)]);
)
.unwrap()]);
expected_identity_path_index.append(&mut vec![0]);
}
@@ -192,11 +218,13 @@ mod test {
let expected_identity_secret_hash_seed_bytes = str_to_fr(
"0x766ce6c7e7a01bdf5b3f257616f603918c30946fa23480f2859c597817e6716",
16,
);
)
.unwrap();
let expected_id_commitment_seed_bytes = str_to_fr(
"0xbf16d2b5c0d6f9d9d561e05bfca16a81b4b873bb063508fae360d8c74cef51f",
16,
);
)
.unwrap();
assert_eq!(
identity_secret_hash,
@@ -225,19 +253,23 @@ mod test {
let expected_identity_trapdoor_seed_bytes = str_to_fr(
"0x766ce6c7e7a01bdf5b3f257616f603918c30946fa23480f2859c597817e6716",
16,
);
)
.unwrap();
let expected_identity_nullifier_seed_bytes = str_to_fr(
"0x1f18714c7bc83b5bca9e89d404cf6f2f585bc4c0f7ed8b53742b7e2b298f50b4",
16,
);
)
.unwrap();
let expected_identity_secret_hash_seed_bytes = str_to_fr(
"0x2aca62aaa7abaf3686fff2caf00f55ab9462dc12db5b5d4bcf3994e671f8e521",
16,
);
)
.unwrap();
let expected_id_commitment_seed_bytes = str_to_fr(
"0x68b66aa0a8320d2e56842581553285393188714c48f9b17acd198b4f1734c5c",
16,
);
)
.unwrap();
assert_eq!(identity_trapdoor, expected_identity_trapdoor_seed_bytes);
assert_eq!(identity_nullifier, expected_identity_nullifier_seed_bytes);
@@ -250,15 +282,13 @@ mod test {
#[test]
fn test_hash_to_field() {
let rln = RLN::default();
let mut rng = rand::thread_rng();
let mut rng = thread_rng();
let signal: [u8; 32] = rng.gen();
let mut input_buffer = Cursor::new(&signal);
let mut output_buffer = Cursor::new(Vec::<u8>::new());
rln.hash(&mut input_buffer, &mut output_buffer).unwrap();
public_hash(&mut input_buffer, &mut output_buffer).unwrap();
let serialized_hash = output_buffer.into_inner();
let (hash1, _) = bytes_le_to_fr(&serialized_hash);
@@ -266,4 +296,24 @@ mod test {
assert_eq!(hash1, hash2);
}
#[test]
fn test_poseidon_hash() {
let mut rng = thread_rng();
let number_of_inputs = rng.gen_range(1..ROUND_PARAMS.len());
let mut inputs = Vec::with_capacity(number_of_inputs);
for _ in 0..number_of_inputs {
inputs.push(Fr::rand(&mut rng));
}
let expected_hash = utils_poseidon_hash(&inputs);
let mut input_buffer = Cursor::new(vec_fr_to_bytes_le(&inputs).unwrap());
let mut output_buffer = Cursor::new(Vec::<u8>::new());
public_poseidon_hash(&mut input_buffer, &mut output_buffer).unwrap();
let serialized_hash = output_buffer.into_inner();
let (hash, _) = bytes_le_to_fr(&serialized_hash);
assert_eq!(hash, expected_hash);
}
}
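The `Cursor`-based pattern in these tests mirrors the FFI surface: values cross the boundary as little-endian bytes. A minimal sketch of the same round-trip for a single serialized field element, reusing only the imports already shown above; the input value is illustrative:

```rust
// Sketch: feed bytes through the public hash API and read the digest back
// as a field element, as test_hash_to_field does with a random signal.
fn hash_roundtrip() {
    let mut input = Cursor::new(fr_to_bytes_le(&Fr::from(42u64)));
    let mut output = Cursor::new(Vec::<u8>::new());
    public_hash(&mut input, &mut output).unwrap();
    let (digest, _) = bytes_le_to_fr(&output.into_inner());
    let _ = digest; // a single Fr, ready for further protocol use
}
```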

View File

@@ -1,7 +1,8 @@
[package]
name = "semaphore-wrapper"
version = "0.1.0"
version = "0.3.0"
edition = "2021"
license = "MIT OR Apache-2.0"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -11,17 +12,16 @@ dylib = [ "wasmer/dylib", "wasmer-engine-dylib", "wasmer-compiler-cranelift" ]
[dependencies]
ark-bn254 = { version = "0.3.0" }
ark-circom = { git = "https://github.com/gakonst/ark-circom", features=["circom-2"] }
ark-circom = { git = "https://github.com/gakonst/ark-circom", features=["circom-2"], rev = "35ce5a9" }
ark-ec = { version = "0.3.0", default-features = false, features = ["parallel"] }
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", features = ["parallel"] }
ark-relations = { version = "0.3.0", default-features = false }
ark-std = { version = "0.3.0", default-features = false, features = ["parallel"] }
color-eyre = "0.5"
num-bigint = { version = "0.4", default-features = false, features = ["rand"] }
color-eyre = "0.6.1"
once_cell = "1.8"
rand = "0.8.4"
semaphore = { git = "https://github.com/worldcoin/semaphore-rs", rev = "ee658c2"}
ethers-core = { git = "https://github.com/gakonst/ethers-rs", default-features = false }
ethers-core = { version = "2.0.8", default-features = false }
ruint = { version = "1.2.0", features = [ "serde", "num-bigint", "ark-ff" ] }
serde = "1.0"
thiserror = "1.0.0"
@@ -32,10 +32,10 @@ rand_chacha = "0.3.1"
serde_json = "1.0.79"
[build-dependencies]
color-eyre = "0.5"
color-eyre = "0.6.1"
wasmer = { version = "2.0" }
wasmer-engine-dylib = { version = "2.2.1", optional = true }
wasmer-compiler-cranelift = { version = "2.2.1", optional = true }
wasmer-compiler-cranelift = { version = "3.1.1", optional = true }
[profile.release]
codegen-units = 1

View File

@@ -8,3 +8,11 @@ Goal is also to provide a basic FFI around protocol.rs, which is currently not
in scope for that project.
See that project for more information.
## Build and Test
To build and test, run the following commands within the module folder
```bash
cargo make build
cargo make test
```

View File

@@ -37,7 +37,7 @@ fn build_circuit() -> Result<()> {
.current_dir("./vendor/semaphore")
.status()?
.success()
.then(|| ())
.then_some(())
.ok_or(eyre!("procees returned failure"))?;
Ok(())
};

View File

@@ -50,7 +50,7 @@ fn from_dylib(path: &Path) -> Mutex<WitnessCalculator> {
#[must_use]
pub fn zkey() -> &'static (ProvingKey<Bn254>, ConstraintMatrices<Fr>) {
&*ZKEY
&ZKEY
}
#[cfg(feature = "dylib")]

View File

@@ -12,7 +12,7 @@ use ark_groth16::{
};
use ark_relations::r1cs::SynthesisError;
use ark_std::UniformRand;
use color_eyre::Result;
use color_eyre::{Report, Result};
use ethers_core::types::U256;
use rand::{thread_rng, Rng};
use semaphore::{
@@ -89,7 +89,7 @@ pub enum ProofError {
#[error("Error reading circuit key: {0}")]
CircuitKeyError(#[from] std::io::Error),
#[error("Error producing witness: {0}")]
WitnessError(color_eyre::Report),
WitnessError(Report),
#[error("Error producing proof: {0}")]
SynthesisError(#[from] SynthesisError),
#[error("Error converting public input: {0}")]

View File

@@ -1,18 +1,36 @@
[package]
name = "utils"
version = "0.1.0"
name = "zerokit_utils"
version = "0.3.2"
edition = "2021"
license = "MIT OR Apache-2.0"
description = "Various utilities for Zerokit"
documentation = "https://github.com/vacp2p/zerokit"
homepage = "https://vac.dev"
repository = "https://github.com/vacp2p/zerokit"
[lib]
bench = false
[dependencies]
ark-ff = { version = "0.3.0", default-features = false, features = ["asm"] }
num-bigint = { version = "0.4.3", default-features = false, features = ["rand"] }
ark-ff = { version = "=0.4.1", default-features = false, features = ["asm"] }
num-bigint = { version = "=0.4.3", default-features = false, features = ["rand"] }
color-eyre = "=0.6.2"
pmtree = { package = "pmtree", version = "=2.0.0", optional = true}
sled = "=0.34.7"
serde = "=1.0.163"
[dev-dependencies]
ark-bn254 = { version = "0.3.0" }
num-traits = "0.2.11"
hex-literal = "0.3.4"
tiny-keccak = { version = "2.0.2", features = ["keccak"] }
ark-bn254 = "=0.4.0"
num-traits = "=0.2.15"
hex-literal = "=0.3.4"
tiny-keccak = { version = "=2.0.2", features = ["keccak"] }
criterion = { version = "=0.4.0", features = ["html_reports"] }
[features]
default = ["parallel"]
parallel = ["ark-ff/parallel"]
pmtree-ft = ["pmtree"]
[[bench]]
name = "merkle_tree_benchmark"
harness = false

View File

@@ -5,3 +5,7 @@ args = ["build", "--release"]
[tasks.test]
command = "cargo"
args = ["test", "--release"]
[tasks.bench]
command = "cargo"
args = ["bench"]

15
utils/README.md Normal file
View File

@@ -0,0 +1,15 @@
# Utils crate
## Building
1. `cargo build`
## Testing
1. `cargo test`
## Benchmarking
1. `cargo bench`
To view the results of the benchmark, open the `target/criterion/report/index.html` file generated after the benchmark run

View File

@@ -0,0 +1,119 @@
use criterion::{criterion_group, criterion_main, Criterion};
use hex_literal::hex;
use tiny_keccak::{Hasher as _, Keccak};
use zerokit_utils::{
FullMerkleConfig, FullMerkleTree, Hasher, OptimalMerkleConfig, OptimalMerkleTree,
ZerokitMerkleTree,
};
#[derive(Clone, Copy, Eq, PartialEq)]
struct Keccak256;
impl Hasher for Keccak256 {
type Fr = [u8; 32];
fn default_leaf() -> Self::Fr {
[0; 32]
}
fn hash(inputs: &[Self::Fr]) -> Self::Fr {
let mut output = [0; 32];
let mut hasher = Keccak::v256();
for element in inputs {
hasher.update(element);
}
hasher.finalize(&mut output);
output
}
}
pub fn optimal_merkle_tree_benchmark(c: &mut Criterion) {
let mut tree =
OptimalMerkleTree::<Keccak256>::new(2, [0; 32], OptimalMerkleConfig::default()).unwrap();
let leaves = [
hex!("0000000000000000000000000000000000000000000000000000000000000001"),
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
];
c.bench_function("OptimalMerkleTree::set", |b| {
b.iter(|| {
tree.set(0, leaves[0]).unwrap();
})
});
c.bench_function("OptimalMerkleTree::delete", |b| {
b.iter(|| {
tree.delete(0).unwrap();
})
});
c.bench_function("OptimalMerkleTree::override_range", |b| {
b.iter(|| {
tree.override_range(0, leaves, [0, 1, 2, 3]).unwrap();
})
});
c.bench_function("OptimalMerkleTree::compute_root", |b| {
b.iter(|| {
tree.compute_root().unwrap();
})
});
c.bench_function("OptimalMerkleTree::get", |b| {
b.iter(|| {
tree.get(0).unwrap();
})
});
}
pub fn full_merkle_tree_benchmark(c: &mut Criterion) {
let mut tree =
FullMerkleTree::<Keccak256>::new(2, [0; 32], FullMerkleConfig::default()).unwrap();
let leaves = [
hex!("0000000000000000000000000000000000000000000000000000000000000001"),
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
];
c.bench_function("FullMerkleTree::set", |b| {
b.iter(|| {
tree.set(0, leaves[0]).unwrap();
})
});
c.bench_function("FullMerkleTree::delete", |b| {
b.iter(|| {
tree.delete(0).unwrap();
})
});
c.bench_function("FullMerkleTree::override_range", |b| {
b.iter(|| {
tree.override_range(0, leaves, [0, 1, 2, 3]).unwrap();
})
});
c.bench_function("FullMerkleTree::compute_root", |b| {
b.iter(|| {
tree.compute_root().unwrap();
})
});
c.bench_function("FullMerkleTree::get", |b| {
b.iter(|| {
tree.get(0).unwrap();
})
});
}
criterion_group!(
benches,
optimal_merkle_tree_benchmark,
full_merkle_tree_benchmark
);
criterion_main!(benches);
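Beyond timing, the same `Keccak256` hasher gives a quick consistency check across the two implementations; a minimal sketch, assuming the imports at the top of this benchmark file:

```rust
// Sketch: identical insertions into both tree flavors must produce the same
// root, since they implement the same ZerokitMerkleTree semantics.
fn roots_agree() -> color_eyre::Result<()> {
    let leaf = [1u8; 32]; // illustrative leaf value
    let mut full = FullMerkleTree::<Keccak256>::new(2, [0; 32], FullMerkleConfig::default())?;
    let mut optimal =
        OptimalMerkleTree::<Keccak256>::new(2, [0; 32], OptimalMerkleConfig::default())?;
    full.set(0, leaf)?;
    optimal.set(0, leaf)?;
    assert_eq!(full.root(), optimal.root());
    Ok(())
}
```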

View File

@@ -3,3 +3,8 @@ pub use self::poseidon::*;
pub mod merkle_tree;
pub use self::merkle_tree::*;
#[cfg(feature = "pmtree-ft")]
pub mod pm_tree;
#[cfg(feature = "pmtree-ft")]
pub use self::pm_tree::*;

View File

@@ -0,0 +1,373 @@
use crate::merkle_tree::{FrOf, Hasher, ZerokitMerkleProof, ZerokitMerkleTree};
use color_eyre::{Report, Result};
use std::{
cmp::max,
fmt::Debug,
iter::{once, repeat, successors},
str::FromStr,
};
////////////////////////////////////////////////////////////
/// Full Merkle Tree Implementation
////////////////////////////////////////////////////////////
/// Merkle tree with all leaf and intermediate hashes stored
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct FullMerkleTree<H: Hasher> {
/// The depth of the tree, i.e. the number of levels from leaf to root
depth: usize,
/// The nodes cached from the empty part of the tree (where leaves are set to default).
/// Since the rightmost part of the tree is usually changed much later than its creation,
/// we can prove accumulation of elements in the leftmost part, with no need to initialize the full tree
/// and by caching a few intermediate nodes up to the root computed from default leaves
cached_nodes: Vec<H::Fr>,
/// The tree nodes
nodes: Vec<H::Fr>,
// The next available (i.e., never used) tree index. Equivalently, the number of leaves added to the tree
// (deletions leave next_index unchanged)
next_index: usize,
// metadata that an application may use to store additional information
metadata: Vec<u8>,
}
/// Element of a Merkle proof
#[derive(Clone, Copy, PartialEq, Eq)]
pub enum FullMerkleBranch<H: Hasher> {
/// Left branch taken, value is the right sibling hash.
Left(H::Fr),
/// Right branch taken, value is the left sibling hash.
Right(H::Fr),
}
/// Merkle proof path, bottom to top.
#[derive(Clone, PartialEq, Eq)]
pub struct FullMerkleProof<H: Hasher>(pub Vec<FullMerkleBranch<H>>);
#[derive(Default)]
pub struct FullMerkleConfig(());
impl FromStr for FullMerkleConfig {
type Err = Report;
fn from_str(_s: &str) -> Result<Self> {
Ok(FullMerkleConfig::default())
}
}
/// Implementations
impl<H: Hasher> ZerokitMerkleTree for FullMerkleTree<H>
where
H: Hasher,
{
type Proof = FullMerkleProof<H>;
type Hasher = H;
type Config = FullMerkleConfig;
fn default(depth: usize) -> Result<Self> {
FullMerkleTree::<H>::new(depth, Self::Hasher::default_leaf(), Self::Config::default())
}
/// Creates a new `MerkleTree`
/// depth - the height of the tree made only of hash nodes; 2^depth is the maximum number of leaf hash nodes
fn new(depth: usize, initial_leaf: FrOf<Self::Hasher>, _config: Self::Config) -> Result<Self> {
// Compute cache node values, leaf to root
let cached_nodes = successors(Some(initial_leaf), |prev| Some(H::hash(&[*prev, *prev])))
.take(depth + 1)
.collect::<Vec<_>>();
// Compute node values
let nodes = cached_nodes
.iter()
.rev()
.enumerate()
.flat_map(|(levels, hash)| repeat(hash).take(1 << levels))
.cloned()
.collect::<Vec<_>>();
debug_assert!(nodes.len() == (1 << (depth + 1)) - 1);
let next_index = 0;
Ok(Self {
depth,
cached_nodes,
nodes,
next_index,
metadata: Vec::new(),
})
}
fn close_db_connection(&mut self) -> Result<()> {
Ok(())
}
// Returns the depth of the tree
fn depth(&self) -> usize {
self.depth
}
// Returns the capacity of the tree, i.e. the maximum number of accumulatable leaves
fn capacity(&self) -> usize {
1 << self.depth
}
// Returns the total number of leaves set
fn leaves_set(&mut self) -> usize {
self.next_index
}
#[must_use]
// Returns the root of the tree
fn root(&self) -> FrOf<Self::Hasher> {
self.nodes[0]
}
// Sets a leaf at the specified tree index
fn set(&mut self, leaf: usize, hash: FrOf<Self::Hasher>) -> Result<()> {
self.set_range(leaf, once(hash))?;
self.next_index = max(self.next_index, leaf + 1);
Ok(())
}
// Get a leaf from the specified tree index
fn get(&self, leaf: usize) -> Result<FrOf<Self::Hasher>> {
if leaf >= self.capacity() {
return Err(Report::msg("leaf index out of bounds"));
}
Ok(self.nodes[self.capacity() + leaf - 1])
}
// Sets tree nodes, starting from start index
// Function specific to the FullMerkleTree implementation
fn set_range<I: IntoIterator<Item = FrOf<Self::Hasher>>>(
&mut self,
start: usize,
hashes: I,
) -> Result<()> {
let index = self.capacity() + start - 1;
let mut count = 0;
// first count number of hashes, and check that they fit in the tree
// then insert into the tree
let hashes = hashes.into_iter().collect::<Vec<_>>();
if hashes.len() + start > self.capacity() {
return Err(Report::msg("provided hashes do not fit in the tree"));
}
hashes.into_iter().for_each(|hash| {
self.nodes[index + count] = hash;
count += 1;
});
if count != 0 {
self.update_nodes(index, index + (count - 1))?;
self.next_index = max(self.next_index, start + count);
}
Ok(())
}
fn override_range<I, J>(&mut self, start: usize, leaves: I, to_remove_indices: J) -> Result<()>
where
I: IntoIterator<Item = FrOf<Self::Hasher>>,
J: IntoIterator<Item = usize>,
{
let index = self.capacity() + start - 1;
let mut count = 0;
let leaves = leaves.into_iter().collect::<Vec<_>>();
let to_remove_indices = to_remove_indices.into_iter().collect::<Vec<_>>();
// first count number of hashes, and check that they fit in the tree
// then insert into the tree
if leaves.len() + start - to_remove_indices.len() > self.capacity() {
return Err(Report::msg("provided hashes do not fit in the tree"));
}
// remove leaves
for i in &to_remove_indices {
self.delete(*i)?;
}
// insert new leaves
for hash in leaves {
self.nodes[index + count] = hash;
count += 1;
}
if count != 0 {
self.update_nodes(index, index + (count - 1))?;
self.next_index = max(self.next_index, start + count - to_remove_indices.len());
}
Ok(())
}
// Sets a leaf at the next available index
fn update_next(&mut self, leaf: FrOf<Self::Hasher>) -> Result<()> {
self.set(self.next_index, leaf)?;
Ok(())
}
// Deletes a leaf at a certain index by setting it to its default value (next_index is not updated)
fn delete(&mut self, index: usize) -> Result<()> {
// We reset the leaf only if we previously set a leaf at that index
if index < self.next_index {
self.set(index, H::default_leaf())?;
}
Ok(())
}
// Computes a Merkle proof for the leaf at the specified index
fn proof(&self, leaf: usize) -> Result<FullMerkleProof<H>> {
if leaf >= self.capacity() {
return Err(Report::msg("index exceeds set size"));
}
let mut index = self.capacity() + leaf - 1;
let mut path = Vec::with_capacity(self.depth + 1);
while let Some(parent) = self.parent(index) {
// Add proof for node at index to parent
path.push(match index & 1 {
1 => FullMerkleBranch::Left(self.nodes[index + 1]),
0 => FullMerkleBranch::Right(self.nodes[index - 1]),
_ => unreachable!(),
});
index = parent;
}
Ok(FullMerkleProof(path))
}
// Verifies a Merkle proof with respect to the input leaf and the tree root
fn verify(&self, hash: &FrOf<Self::Hasher>, proof: &FullMerkleProof<H>) -> Result<bool> {
Ok(proof.compute_root_from(hash) == self.root())
}
fn compute_root(&mut self) -> Result<FrOf<Self::Hasher>> {
Ok(self.root())
}
fn set_metadata(&mut self, metadata: &[u8]) -> Result<()> {
self.metadata = metadata.to_vec();
Ok(())
}
fn metadata(&self) -> Result<Vec<u8>> {
Ok(self.metadata.to_vec())
}
}
impl<H: Hasher> FullMerkleTree<H>
where
H: Hasher,
{
// Utilities for updating the tree nodes
/// For a given node index, return the parent node index
/// Returns None if there is no parent (root node)
fn parent(&self, index: usize) -> Option<usize> {
if index == 0 {
None
} else {
Some(((index + 1) >> 1) - 1)
}
}
/// For a given node index, return index of the first (left) child.
fn first_child(&self, index: usize) -> usize {
(index << 1) + 1
}
fn levels(&self, index: usize) -> usize {
// `n.next_power_of_two()` will return `n` iff `n` is a power of two.
// The extra offset corrects this.
(index + 2).next_power_of_two().trailing_zeros() as usize - 1
}
fn update_nodes(&mut self, start: usize, end: usize) -> Result<()> {
if self.levels(start) != self.levels(end) {
return Err(Report::msg("self.levels(start) != self.levels(end)"));
}
if let (Some(start), Some(end)) = (self.parent(start), self.parent(end)) {
for parent in start..=end {
let child = self.first_child(parent);
self.nodes[parent] = H::hash(&[self.nodes[child], self.nodes[child + 1]]);
}
self.update_nodes(start, end)?;
}
Ok(())
}
}
impl<H: Hasher> ZerokitMerkleProof for FullMerkleProof<H> {
type Index = u8;
type Hasher = H;
#[must_use]
// Returns the length of a Merkle proof
fn length(&self) -> usize {
self.0.len()
}
/// Computes the leaf index corresponding to a Merkle proof
#[must_use]
fn leaf_index(&self) -> usize {
self.0.iter().rev().fold(0, |index, branch| match branch {
FullMerkleBranch::Left(_) => index << 1,
FullMerkleBranch::Right(_) => (index << 1) + 1,
})
}
#[must_use]
/// Returns the path elements forming a Merkle proof
fn get_path_elements(&self) -> Vec<FrOf<Self::Hasher>> {
self.0
.iter()
.map(|x| match x {
FullMerkleBranch::Left(value) | FullMerkleBranch::Right(value) => *value,
})
.collect()
}
/// Returns the path indexes forming a Merkle proof
#[must_use]
fn get_path_index(&self) -> Vec<Self::Index> {
self.0
.iter()
.map(|branch| match branch {
FullMerkleBranch::Left(_) => 0,
FullMerkleBranch::Right(_) => 1,
})
.collect()
}
/// Computes the corresponding Merkle root by iteratively hashing the proof path with a given input leaf
#[must_use]
fn compute_root_from(&self, hash: &FrOf<Self::Hasher>) -> FrOf<Self::Hasher> {
self.0.iter().fold(*hash, |hash, branch| match branch {
FullMerkleBranch::Left(sibling) => H::hash(&[hash, *sibling]),
FullMerkleBranch::Right(sibling) => H::hash(&[*sibling, hash]),
})
}
}
// Debug formatting for printing a (Full) Merkle Proof Branch
impl<H> Debug for FullMerkleBranch<H>
where
H: Hasher,
H::Fr: Debug,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Left(arg0) => f.debug_tuple("Left").field(arg0).finish(),
Self::Right(arg0) => f.debug_tuple("Right").field(arg0).finish(),
}
}
}
// Debug formatting for printing a (Full) Merkle Proof
impl<H> Debug for FullMerkleProof<H>
where
H: Hasher,
H::Fr: Debug,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_tuple("Proof").field(&self.0).finish()
}
}
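As a worked illustration of the branch encoding above (assuming the `Keccak256` test hasher from the benchmark and both zerokit traits in scope): leaf 2 of a depth-2 tree sits at node 5, a left child, whose parent node 2 is a right child, so the path indexes read `[0, 1]` and fold back to leaf index 2 (binary `10`):

```rust
// Sketch: Left branches encode to 0, Right branches to 1, and leaf_index
// reconstructs the position by folding the path top to bottom.
fn branch_encoding() -> color_eyre::Result<()> {
    let tree = FullMerkleTree::<Keccak256>::new(2, [0; 32], FullMerkleConfig::default())?;
    let proof = tree.proof(2)?;
    assert_eq!(proof.get_path_index(), vec![0, 1]);
    assert_eq!(proof.leaf_index(), 2);
    Ok(())
}
```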

View File

@@ -13,21 +13,15 @@
//! * Disk based storage backend (using mmaped files should be easy)
//! * Implement serialization for tree and Merkle proof
#![allow(dead_code)]
use std::str::FromStr;
use std::collections::HashMap;
use std::io;
use std::{
cmp::max,
fmt::Debug,
iter::{once, repeat, successors},
};
use color_eyre::Result;
/// In the Hasher trait we define the node type, the default leaf
/// and the hash function used to initialize a Merkle Tree implementation
pub trait Hasher {
/// Type of the leaf and tree node
type Fr: Copy + Clone + Eq;
type Fr: Clone + Copy + Eq;
/// Returns the default tree leaf
fn default_leaf() -> Self::Fr;
@@ -36,539 +30,51 @@ pub trait Hasher {
fn hash(input: &[Self::Fr]) -> Self::Fr;
}
////////////////////////////////////////////////////////////
/// Optimal Merkle Tree Implementation
////////////////////////////////////////////////////////////
pub type FrOf<H> = <H as Hasher>::Fr;
/// The Merkle tree structure
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct OptimalMerkleTree<H>
where
H: Hasher,
{
/// The depth of the tree, i.e. the number of levels from leaf to root
depth: usize,
/// In the ZerokitMerkleTree trait we define the methods that are required to be implemented by a Merkle tree
/// Including, OptimalMerkleTree, FullMerkleTree
pub trait ZerokitMerkleTree {
type Proof: ZerokitMerkleProof;
type Hasher: Hasher;
type Config: Default + FromStr;
/// The nodes cached from the empty part of the tree (where leaves are set to default).
/// Since the rightmost part of the tree is usually changed much later than its creation,
/// we can prove accumulation of elements in the leftmost part, with no need to initialize the full tree
/// and by caching a few intermediate nodes up to the root computed from default leaves
cached_nodes: Vec<H::Fr>,
/// The tree nodes
nodes: HashMap<(usize, usize), H::Fr>,
// The next available (i.e., never used) tree index. Equivalently, the number of leaves added to the tree
// (deletions leave next_index unchanged)
next_index: usize,
fn default(depth: usize) -> Result<Self>
where
Self: Sized;
fn new(depth: usize, default_leaf: FrOf<Self::Hasher>, config: Self::Config) -> Result<Self>
where
Self: Sized;
fn depth(&self) -> usize;
fn capacity(&self) -> usize;
fn leaves_set(&mut self) -> usize;
fn root(&self) -> FrOf<Self::Hasher>;
fn compute_root(&mut self) -> Result<FrOf<Self::Hasher>>;
fn set(&mut self, index: usize, leaf: FrOf<Self::Hasher>) -> Result<()>;
fn set_range<I>(&mut self, start: usize, leaves: I) -> Result<()>
where
I: IntoIterator<Item = FrOf<Self::Hasher>>;
fn get(&self, index: usize) -> Result<FrOf<Self::Hasher>>;
fn override_range<I, J>(&mut self, start: usize, leaves: I, to_remove_indices: J) -> Result<()>
where
I: IntoIterator<Item = FrOf<Self::Hasher>>,
J: IntoIterator<Item = usize>;
fn update_next(&mut self, leaf: FrOf<Self::Hasher>) -> Result<()>;
fn delete(&mut self, index: usize) -> Result<()>;
fn proof(&self, index: usize) -> Result<Self::Proof>;
fn verify(&self, leaf: &FrOf<Self::Hasher>, witness: &Self::Proof) -> Result<bool>;
fn set_metadata(&mut self, metadata: &[u8]) -> Result<()>;
fn metadata(&self) -> Result<Vec<u8>>;
fn close_db_connection(&mut self) -> Result<()>;
}
/// The Merkle proof
/// Contains a vector of (node, branch_index) that defines the proof path elements and branch direction (1 or 0)
#[derive(Clone, PartialEq, Eq)]
pub struct OptimalMerkleProof<H: Hasher>(pub Vec<(H::Fr, u8)>);
pub trait ZerokitMerkleProof {
type Index;
type Hasher: Hasher;
/// Implementations
impl<H: Hasher> OptimalMerkleTree<H> {
pub fn default(depth: usize) -> Self {
OptimalMerkleTree::<H>::new(depth, H::default_leaf())
}
/// Creates a new `MerkleTree`
/// depth - the height of the tree made only of hash nodes; 2^depth is the maximum number of leaf hash nodes
pub fn new(depth: usize, default_leaf: H::Fr) -> Self {
let mut cached_nodes: Vec<H::Fr> = Vec::with_capacity(depth + 1);
cached_nodes.push(default_leaf);
for i in 0..depth {
cached_nodes.push(H::hash(&[cached_nodes[i]; 2]));
}
cached_nodes.reverse();
OptimalMerkleTree {
cached_nodes: cached_nodes.clone(),
depth: depth,
nodes: HashMap::new(),
next_index: 0,
}
}
// Returns the depth of the tree
pub fn depth(&self) -> usize {
self.depth
}
// Returns the capacity of the tree, i.e. the maximum number of accumulatable leaves
pub fn capacity(&self) -> usize {
1 << self.depth
}
// Returns the total number of leaves set
pub fn leaves_set(&mut self) -> usize {
self.next_index
}
#[must_use]
// Returns the root of the tree
pub fn root(&self) -> H::Fr {
self.get_node(0, 0)
}
// Sets a leaf at the specified tree index
pub fn set(&mut self, index: usize, leaf: H::Fr) -> io::Result<()> {
if index >= self.capacity() {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"index exceeds set size",
));
}
self.nodes.insert((self.depth, index), leaf);
self.recalculate_from(index);
self.next_index = max(self.next_index, index + 1);
Ok(())
}
// Sets multiple leaves from the specified tree index
pub fn set_range<I: IntoIterator<Item = H::Fr>>(
&mut self,
start: usize,
leaves: I,
) -> io::Result<()> {
let leaves = leaves.into_iter().collect::<Vec<_>>();
// check if the range is valid
if start + leaves.len() > self.capacity() {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"provided range exceeds set size",
));
}
for (i, leaf) in leaves.iter().enumerate() {
self.nodes.insert((self.depth, start + i), *leaf);
self.recalculate_from(start + i);
}
self.next_index = max(self.next_index, start + leaves.len());
Ok(())
}
// Sets a leaf at the next available index
pub fn update_next(&mut self, leaf: H::Fr) -> io::Result<()> {
self.set(self.next_index, leaf)?;
Ok(())
}
// Deletes a leaf at a certain index by setting it to its default value (next_index is not updated)
pub fn delete(&mut self, index: usize) -> io::Result<()> {
// We reset the leaf only if we previously set a leaf at that index
if index < self.next_index {
self.set(index, H::default_leaf())?;
}
Ok(())
}
// Computes a Merkle proof for the leaf at the specified index
pub fn proof(&self, index: usize) -> io::Result<OptimalMerkleProof<H>> {
if index >= self.capacity() {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"index exceeds set size",
));
}
let mut witness = Vec::<(H::Fr, u8)>::with_capacity(self.depth);
let mut i = index;
let mut depth = self.depth;
loop {
i ^= 1;
witness.push((self.get_node(depth, i), (1 - (i & 1)).try_into().unwrap()));
i >>= 1;
depth -= 1;
if depth == 0 {
break;
}
}
assert_eq!(i, 0);
Ok(OptimalMerkleProof(witness))
}
// Verifies a Merkle proof with respect to the input leaf and the tree root
pub fn verify(&self, leaf: &H::Fr, witness: &OptimalMerkleProof<H>) -> io::Result<bool> {
if witness.length() != self.depth {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"witness length doesn't match tree depth",
));
}
let expected_root = witness.compute_root_from(leaf);
Ok(expected_root.eq(&self.root()))
}
// Utilities for updating the tree nodes
fn get_node(&self, depth: usize, index: usize) -> H::Fr {
let node = *self
.nodes
.get(&(depth, index))
.unwrap_or_else(|| &self.cached_nodes[depth]);
node
}
fn get_leaf(&self, index: usize) -> H::Fr {
self.get_node(self.depth, index)
}
fn hash_couple(&mut self, depth: usize, index: usize) -> H::Fr {
let b = index & !1;
H::hash(&[self.get_node(depth, b), self.get_node(depth, b + 1)])
}
fn recalculate_from(&mut self, index: usize) {
let mut i = index;
let mut depth = self.depth;
loop {
let h = self.hash_couple(depth, i);
i >>= 1;
depth -= 1;
self.nodes.insert((depth, i), h);
if depth == 0 {
break;
}
}
assert_eq!(depth, 0);
assert_eq!(i, 0);
}
}
impl<H: Hasher> OptimalMerkleProof<H> {
#[must_use]
// Returns the length of a Merkle proof
pub fn length(&self) -> usize {
self.0.len()
}
/// Computes the leaf index corresponding to a Merkle proof
#[must_use]
pub fn leaf_index(&self) -> usize {
// In current implementation the path indexes in a proof correspond to the binary representation of the leaf index
let mut binary_repr = self.get_path_index();
binary_repr.reverse();
binary_repr
.into_iter()
.fold(0, |acc, digit| (acc << 1) + usize::from(digit))
}
#[must_use]
/// Returns the path elements forming a Merkle proof
pub fn get_path_elements(&self) -> Vec<H::Fr> {
self.0.iter().map(|x| x.0).collect()
}
/// Returns the path indexes forming a Merkle proof
#[must_use]
pub fn get_path_index(&self) -> Vec<u8> {
self.0.iter().map(|x| x.1).collect()
}
#[must_use]
/// Computes the corresponding Merkle root by iteratively hashing the proof path with a given input leaf
pub fn compute_root_from(&self, leaf: &H::Fr) -> H::Fr {
let mut acc: H::Fr = *leaf;
for w in self.0.iter() {
if w.1 == 0 {
acc = H::hash(&[acc, w.0]);
} else {
acc = H::hash(&[w.0, acc]);
}
}
acc
}
}
// Debug formatting for printing a (Optimal) Merkle Proof
impl<H> Debug for OptimalMerkleProof<H>
where
H: Hasher,
H::Fr: Debug,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_tuple("Proof").field(&self.0).finish()
}
}
////////////////////////////////////////////////////////////
/// Full Merkle Tree Implementation
////////////////////////////////////////////////////////////
/// Merkle tree with all leaf and intermediate hashes stored
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct FullMerkleTree<H: Hasher> {
/// The depth of the tree, i.e. the number of levels from leaf to root
depth: usize,
/// The nodes cached from the empty part of the tree (where leaves are set to default).
/// Since the rightmost part of the tree is usually changed much later than its creation,
/// we can prove accumulation of elements in the leftmost part, with no need to initialize the full tree
/// and by caching a few intermediate nodes up to the root computed from default leaves
cached_nodes: Vec<H::Fr>,
/// The tree nodes
nodes: Vec<H::Fr>,
// The next available (i.e., never used) tree index. Equivalently, the number of leaves added to the tree
// (deletions leave next_index unchanged)
next_index: usize,
}
/// Element of a Merkle proof
#[derive(Clone, Copy, PartialEq, Eq)]
pub enum FullMerkleBranch<H: Hasher> {
/// Left branch taken, value is the right sibling hash.
Left(H::Fr),
/// Right branch taken, value is the left sibling hash.
Right(H::Fr),
}
/// Merkle proof path, bottom to top.
#[derive(Clone, PartialEq, Eq)]
pub struct FullMerkleProof<H: Hasher>(pub Vec<FullMerkleBranch<H>>);
/// Implementations
impl<H: Hasher> FullMerkleTree<H> {
pub fn default(depth: usize) -> Self {
FullMerkleTree::<H>::new(depth, H::default_leaf())
}
/// Creates a new `MerkleTree`
/// depth - the height of the tree made only of hash nodes; 2^depth is the maximum number of leaf hash nodes
pub fn new(depth: usize, initial_leaf: H::Fr) -> Self {
// Compute cache node values, leaf to root
let cached_nodes = successors(Some(initial_leaf), |prev| Some(H::hash(&[*prev, *prev])))
.take(depth + 1)
.collect::<Vec<_>>();
// Compute node values
let nodes = cached_nodes
.iter()
.rev()
.enumerate()
.flat_map(|(levels, hash)| repeat(hash).take(1 << levels))
.cloned()
.collect::<Vec<_>>();
debug_assert!(nodes.len() == (1 << (depth + 1)) - 1);
let next_index = 0;
Self {
depth,
cached_nodes,
nodes,
next_index,
}
}
// Returns the depth of the tree
pub fn depth(&self) -> usize {
self.depth
}
// Returns the capacity of the tree, i.e. the maximum number of accumulatable leaves
pub fn capacity(&self) -> usize {
1 << self.depth
}
// Returns the total number of leaves set
pub fn leaves_set(&mut self) -> usize {
self.next_index
}
#[must_use]
// Returns the root of the tree
pub fn root(&self) -> H::Fr {
self.nodes[0]
}
// Sets a leaf at the specified tree index
pub fn set(&mut self, leaf: usize, hash: H::Fr) -> io::Result<()> {
self.set_range(leaf, once(hash))?;
self.next_index = max(self.next_index, leaf + 1);
Ok(())
}
// Sets tree nodes, starting from start index
// Function specific to the FullMerkleTree implementation
fn set_range<I: IntoIterator<Item = H::Fr>>(
&mut self,
start: usize,
hashes: I,
) -> io::Result<()> {
let index = self.capacity() + start - 1;
let mut count = 0;
// first count number of hashes, and check that they fit in the tree
// then insert into the tree
let hashes = hashes.into_iter().collect::<Vec<_>>();
if hashes.len() + start > self.capacity() {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"provided hashes do not fit in the tree",
));
}
hashes.into_iter().for_each(|hash| {
self.nodes[index + count] = hash;
count += 1;
});
if count != 0 {
self.update_nodes(index, index + (count - 1));
self.next_index = max(self.next_index, start + count);
}
Ok(())
}
// Sets a leaf at the next available index
pub fn update_next(&mut self, leaf: H::Fr) -> io::Result<()> {
self.set(self.next_index, leaf)?;
Ok(())
}
// Deletes a leaf at a certain index by setting it to its default value (next_index is not updated)
pub fn delete(&mut self, index: usize) -> io::Result<()> {
// We reset the leaf only if we previously set a leaf at that index
if index < self.next_index {
self.set(index, H::default_leaf())?;
}
Ok(())
}
// Computes a Merkle proof for the leaf at the specified index
pub fn proof(&self, leaf: usize) -> io::Result<FullMerkleProof<H>> {
if leaf >= self.capacity() {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"index exceeds set size",
));
}
let mut index = self.capacity() + leaf - 1;
let mut path = Vec::with_capacity(self.depth + 1);
while let Some(parent) = self.parent(index) {
// Add proof for node at index to parent
path.push(match index & 1 {
1 => FullMerkleBranch::Left(self.nodes[index + 1]),
0 => FullMerkleBranch::Right(self.nodes[index - 1]),
_ => unreachable!(),
});
index = parent;
}
Ok(FullMerkleProof(path))
}
// Verifies a Merkle proof with respect to the input leaf and the tree root
pub fn verify(&self, hash: &H::Fr, proof: &FullMerkleProof<H>) -> io::Result<bool> {
Ok(proof.compute_root_from(hash) == self.root())
}
// Utilities for updating the tree nodes
/// For a given node index, return the parent node index
/// Returns None if there is no parent (root node)
fn parent(&self, index: usize) -> Option<usize> {
if index == 0 {
None
} else {
Some(((index + 1) >> 1) - 1)
}
}
/// For a given node index, return index of the first (left) child.
fn first_child(&self, index: usize) -> usize {
(index << 1) + 1
}
fn levels(&self, index: usize) -> usize {
// `n.next_power_of_two()` will return `n` iff `n` is a power of two.
// The extra offset corrects this.
(index + 2).next_power_of_two().trailing_zeros() as usize - 1
}
fn update_nodes(&mut self, start: usize, end: usize) {
debug_assert_eq!(self.levels(start), self.levels(end));
if let (Some(start), Some(end)) = (self.parent(start), self.parent(end)) {
for parent in start..=end {
let child = self.first_child(parent);
self.nodes[parent] = H::hash(&[self.nodes[child], self.nodes[child + 1]]);
}
self.update_nodes(start, end);
}
}
}
impl<H: Hasher> FullMerkleProof<H> {
#[must_use]
// Returns the length of a Merkle proof
pub fn length(&self) -> usize {
self.0.len()
}
/// Computes the leaf index corresponding to a Merkle proof
#[must_use]
pub fn leaf_index(&self) -> usize {
self.0.iter().rev().fold(0, |index, branch| match branch {
FullMerkleBranch::Left(_) => index << 1,
FullMerkleBranch::Right(_) => (index << 1) + 1,
})
}
#[must_use]
/// Returns the path elements forming a Merkle proof
pub fn get_path_elements(&self) -> Vec<H::Fr> {
self.0
.iter()
.map(|x| match x {
FullMerkleBranch::Left(value) | FullMerkleBranch::Right(value) => *value,
})
.collect()
}
/// Returns the path indexes forming a Merkle proof
#[must_use]
pub fn get_path_index(&self) -> Vec<u8> {
self.0
.iter()
.map(|branch| match branch {
FullMerkleBranch::Left(_) => 0,
FullMerkleBranch::Right(_) => 1,
})
.collect()
}
/// Computes the corresponding Merkle root by iteratively hashing the proof path with a given input leaf
#[must_use]
pub fn compute_root_from(&self, hash: &H::Fr) -> H::Fr {
self.0.iter().fold(*hash, |hash, branch| match branch {
FullMerkleBranch::Left(sibling) => H::hash(&[hash, *sibling]),
FullMerkleBranch::Right(sibling) => H::hash(&[*sibling, hash]),
})
}
}
// Debug formatting for printing a (Full) Merkle Proof Branch
impl<H> Debug for FullMerkleBranch<H>
where
H: Hasher,
H::Fr: Debug,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Left(arg0) => f.debug_tuple("Left").field(arg0).finish(),
Self::Right(arg0) => f.debug_tuple("Right").field(arg0).finish(),
}
}
}
// Debug formatting for printing a (Full) Merkle Proof
impl<H> Debug for FullMerkleProof<H>
where
H: Hasher,
H::Fr: Debug,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_tuple("Proof").field(&self.0).finish()
}
fn length(&self) -> usize;
fn leaf_index(&self) -> usize;
fn get_path_elements(&self) -> Vec<FrOf<Self::Hasher>>;
fn get_path_index(&self) -> Vec<Self::Index>;
fn compute_root_from(&self, leaf: &FrOf<Self::Hasher>) -> FrOf<Self::Hasher>;
}
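Since both implementations now sit behind these traits, callers can stay generic over the backend; a minimal sketch of such a helper (the function name is illustrative):

```rust
// Sketch: append a leaf at the next free index on any ZerokitMerkleTree
// implementation and return the freshly recomputed root.
fn append_and_root<T: ZerokitMerkleTree>(
    tree: &mut T,
    leaf: FrOf<T::Hasher>,
) -> Result<FrOf<T::Hasher>> {
    tree.update_next(leaf)?;
    tree.compute_root()
}
```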

View File

@@ -1,2 +1,7 @@
pub mod full_merkle_tree;
#[allow(clippy::module_inception)]
pub mod merkle_tree;
pub mod optimal_merkle_tree;
pub use self::full_merkle_tree::*;
pub use self::merkle_tree::*;
pub use self::optimal_merkle_tree::*;

View File

@@ -0,0 +1,343 @@
use crate::merkle_tree::{Hasher, ZerokitMerkleProof, ZerokitMerkleTree};
use crate::FrOf;
use color_eyre::{Report, Result};
use std::collections::HashMap;
use std::str::FromStr;
use std::{cmp::max, fmt::Debug};
////////////////////////////////////////////////////////////
/// Optimal Merkle Tree Implementation
////////////////////////////////////////////////////////////
/// The Merkle tree structure
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct OptimalMerkleTree<H>
where
H: Hasher,
{
/// The depth of the tree, i.e. the number of levels from leaf to root
depth: usize,
/// The nodes cached from the empty part of the tree (where leaves are set to default).
/// Since the rightmost part of the tree is usually changed much later than its creation,
/// we can prove accumulation of elements in the leftmost part, with no need to initialize the full tree
/// and by caching a few intermediate nodes up to the root computed from default leaves
cached_nodes: Vec<H::Fr>,
/// The tree nodes
nodes: HashMap<(usize, usize), H::Fr>,
// The next available (i.e., never used) tree index. Equivalently, the number of leaves added to the tree
// (deletions leave next_index unchanged)
next_index: usize,
// metadata that an application may use to store additional information
metadata: Vec<u8>,
}
/// The Merkle proof
/// Contains a vector of (node, branch_index) that defines the proof path elements and branch direction (1 or 0)
#[derive(Clone, PartialEq, Eq)]
pub struct OptimalMerkleProof<H: Hasher>(pub Vec<(H::Fr, u8)>);
#[derive(Default)]
pub struct OptimalMerkleConfig(());
impl FromStr for OptimalMerkleConfig {
type Err = Report;
fn from_str(_s: &str) -> Result<Self> {
Ok(OptimalMerkleConfig::default())
}
}
/// Implementations
impl<H: Hasher> ZerokitMerkleTree for OptimalMerkleTree<H>
where
H: Hasher,
{
type Proof = OptimalMerkleProof<H>;
type Hasher = H;
type Config = OptimalMerkleConfig;
fn default(depth: usize) -> Result<Self> {
OptimalMerkleTree::<H>::new(depth, H::default_leaf(), Self::Config::default())
}
/// Creates a new `MerkleTree`
/// depth - the height of the tree made only of hash nodes; 2^depth is the maximum number of leaf hash nodes
fn new(depth: usize, default_leaf: H::Fr, _config: Self::Config) -> Result<Self> {
let mut cached_nodes: Vec<H::Fr> = Vec::with_capacity(depth + 1);
cached_nodes.push(default_leaf);
for i in 0..depth {
cached_nodes.push(H::hash(&[cached_nodes[i]; 2]));
}
cached_nodes.reverse();
Ok(OptimalMerkleTree {
cached_nodes: cached_nodes.clone(),
depth,
nodes: HashMap::new(),
next_index: 0,
metadata: Vec::new(),
})
}
fn close_db_connection(&mut self) -> Result<()> {
Ok(())
}
// Returns the depth of the tree
fn depth(&self) -> usize {
self.depth
}
// Returns the capacity of the tree, i.e. the maximum number of accumulatable leaves
fn capacity(&self) -> usize {
1 << self.depth
}
// Returns the total number of leaves set
fn leaves_set(&mut self) -> usize {
self.next_index
}
#[must_use]
// Returns the root of the tree
fn root(&self) -> H::Fr {
self.get_node(0, 0)
}
// Sets a leaf at the specified tree index
fn set(&mut self, index: usize, leaf: H::Fr) -> Result<()> {
if index >= self.capacity() {
return Err(Report::msg("index exceeds set size"));
}
self.nodes.insert((self.depth, index), leaf);
self.recalculate_from(index)?;
self.next_index = max(self.next_index, index + 1);
Ok(())
}
// Get a leaf from the specified tree index
fn get(&self, index: usize) -> Result<H::Fr> {
if index >= self.capacity() {
return Err(Report::msg("index exceeds set size"));
}
Ok(self.get_node(self.depth, index))
}
// Sets multiple leaves from the specified tree index
fn set_range<I: IntoIterator<Item = H::Fr>>(&mut self, start: usize, leaves: I) -> Result<()> {
let leaves = leaves.into_iter().collect::<Vec<_>>();
// check if the range is valid
if start + leaves.len() > self.capacity() {
return Err(Report::msg("provided range exceeds set size"));
}
for (i, leaf) in leaves.iter().enumerate() {
self.nodes.insert((self.depth, start + i), *leaf);
self.recalculate_from(start + i)?;
}
self.next_index = max(self.next_index, start + leaves.len());
Ok(())
}
fn override_range<I, J>(&mut self, start: usize, leaves: I, to_remove_indices: J) -> Result<()>
where
I: IntoIterator<Item = FrOf<Self::Hasher>>,
J: IntoIterator<Item = usize>,
{
let leaves = leaves.into_iter().collect::<Vec<_>>();
let to_remove_indices = to_remove_indices.into_iter().collect::<Vec<_>>();
// check if the range is valid
if leaves.len() + start - to_remove_indices.len() > self.capacity() {
return Err(Report::msg("provided range exceeds set size"));
}
// remove leaves
for i in &to_remove_indices {
self.delete(*i)?;
}
// add leaves
for (i, leaf) in leaves.iter().enumerate() {
self.nodes.insert((self.depth, start + i), *leaf);
self.recalculate_from(start + i)?;
}
self.next_index = max(
self.next_index,
start + leaves.len() - to_remove_indices.len(),
);
Ok(())
}
// Sets a leaf at the next available index
fn update_next(&mut self, leaf: H::Fr) -> Result<()> {
self.set(self.next_index, leaf)?;
Ok(())
}
// Deletes a leaf at a certain index by setting it to its default value (next_index is not updated)
fn delete(&mut self, index: usize) -> Result<()> {
// We reset the leaf only if we previously set a leaf at that index
if index < self.next_index {
self.set(index, H::default_leaf())?;
}
Ok(())
}
// Computes a Merkle proof for the leaf at the specified index
fn proof(&self, index: usize) -> Result<Self::Proof> {
if index >= self.capacity() {
return Err(Report::msg("index exceeds set size"));
}
let mut witness = Vec::<(H::Fr, u8)>::with_capacity(self.depth);
let mut i = index;
let mut depth = self.depth;
loop {
i ^= 1;
witness.push((self.get_node(depth, i), (1 - (i & 1)).try_into().unwrap()));
i >>= 1;
depth -= 1;
if depth == 0 {
break;
}
}
if i != 0 {
Err(Report::msg("i != 0"))
} else {
Ok(OptimalMerkleProof(witness))
}
}
// Verifies a Merkle proof with respect to the input leaf and the tree root
fn verify(&self, leaf: &H::Fr, witness: &Self::Proof) -> Result<bool> {
if witness.length() != self.depth {
return Err(Report::msg("witness length doesn't match tree depth"));
}
let expected_root = witness.compute_root_from(leaf);
Ok(expected_root.eq(&self.root()))
}
fn compute_root(&mut self) -> Result<FrOf<Self::Hasher>> {
self.recalculate_from(0)?;
Ok(self.root())
}
fn set_metadata(&mut self, metadata: &[u8]) -> Result<()> {
self.metadata = metadata.to_vec();
Ok(())
}
fn metadata(&self) -> Result<Vec<u8>> {
Ok(self.metadata.to_vec())
}
}
impl<H: Hasher> OptimalMerkleTree<H>
where
H: Hasher,
{
// Utilities for updating the tree nodes
fn get_node(&self, depth: usize, index: usize) -> H::Fr {
let node = *self
.nodes
.get(&(depth, index))
.unwrap_or_else(|| &self.cached_nodes[depth]);
node
}
pub fn get_leaf(&self, index: usize) -> H::Fr {
self.get_node(self.depth, index)
}
fn hash_couple(&mut self, depth: usize, index: usize) -> H::Fr {
let b = index & !1;
H::hash(&[self.get_node(depth, b), self.get_node(depth, b + 1)])
}
fn recalculate_from(&mut self, index: usize) -> Result<()> {
let mut i = index;
let mut depth = self.depth;
loop {
let h = self.hash_couple(depth, i);
i >>= 1;
depth -= 1;
self.nodes.insert((depth, i), h);
if depth == 0 {
break;
}
}
if depth != 0 {
return Err(Report::msg("did not reach the depth"));
}
if i != 0 {
return Err(Report::msg("did not go through all indexes"));
}
Ok(())
}
}
impl<H: Hasher> ZerokitMerkleProof for OptimalMerkleProof<H>
where
H: Hasher,
{
type Index = u8;
type Hasher = H;
#[must_use]
// Returns the length of a Merkle proof
fn length(&self) -> usize {
self.0.len()
}
/// Computes the leaf index corresponding to a Merkle proof
#[must_use]
fn leaf_index(&self) -> usize {
// In current implementation the path indexes in a proof correspond to the binary representation of the leaf index
let mut binary_repr = self.get_path_index();
binary_repr.reverse();
binary_repr
.into_iter()
.fold(0, |acc, digit| (acc << 1) + usize::from(digit))
}
#[must_use]
/// Returns the path elements forming a Merkle proof
fn get_path_elements(&self) -> Vec<H::Fr> {
self.0.iter().map(|x| x.0).collect()
}
/// Returns the path indexes forming a Merkle proof
#[must_use]
fn get_path_index(&self) -> Vec<u8> {
self.0.iter().map(|x| x.1).collect()
}
#[must_use]
/// Computes the corresponding Merkle root by iteratively hashing the proof path with a given input leaf
fn compute_root_from(&self, leaf: &H::Fr) -> H::Fr {
let mut acc: H::Fr = *leaf;
for w in self.0.iter() {
if w.1 == 0 {
acc = H::hash(&[acc, w.0]);
} else {
acc = H::hash(&[w.0, acc]);
}
}
acc
}
}
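Why leaf_index reverses the bits: proof pushes the leaf index's bits least-significant first, so folding them back most-significant first recovers the index. A tiny worked example (values ours) for leaf 6 in a depth-3 tree:

fn main() {
    let path_index: Vec<u8> = vec![0, 1, 1]; // LSB-first bits of 6 (binary 110)
    let mut bits = path_index;
    bits.reverse(); // MSB first: [1, 1, 0]
    let leaf_index = bits
        .into_iter()
        .fold(0usize, |acc, b| (acc << 1) + usize::from(b));
    assert_eq!(leaf_index, 6);
}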
// Debug formatting for printing an (Optimal) Merkle proof
impl<H> Debug for OptimalMerkleProof<H>
where
H: Hasher,
H::Fr: Debug,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_tuple("Proof").field(&self.0).finish()
}
}

utils/src/pm_tree/mod.rs (new file)

@@ -0,0 +1,4 @@
pub mod sled_adapter;
pub use self::sled_adapter::*;
pub use pmtree;
pub use sled::*;

utils/src/pm_tree/sled_adapter.rs (new file)

@@ -0,0 +1,105 @@
use pmtree::*;
use sled::Db as Sled;
use std::collections::HashMap;
use std::thread;
use std::time::Duration;
pub struct SledDB(Sled);
impl SledDB {
fn new_with_tries(config: <SledDB as Database>::Config, tries: u32) -> PmtreeResult<Self> {
// If we've already tried 10 times, we give up and return an error.
if tries >= 10 {
return Err(PmtreeErrorKind::DatabaseError(
DatabaseErrorKind::CustomError(format!(
"Cannot create database: exceeded maximum retry attempts. {config:#?}"
)),
));
}
match config.open() {
Ok(db) => Ok(SledDB(db)),
Err(e) if e.to_string().contains("WouldBlock") => {
// try till the fd is freed
// sleep for 10^tries milliseconds, then recursively try again
thread::sleep(Duration::from_millis(10u64.pow(tries)));
Self::new_with_tries(config, tries + 1)
}
Err(e) => {
// On any other error, we return immediately.
Err(PmtreeErrorKind::DatabaseError(
DatabaseErrorKind::CustomError(format!(
"Cannot create database: {e} {config:#?}"
)),
))
}
}
}
}
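A quick back-of-the-envelope check (ours, not in the diff) of the schedule above: attempt t sleeps 10^t milliseconds, so the ten attempts can accumulate roughly 1.1e9 ms, close to 13 days, before the helper gives up.

fn main() {
    // Worst-case cumulative sleep of the retry loop: 1 + 10 + ... + 10^9 ms.
    let total_ms: u64 = (0u32..10).map(|t| 10u64.pow(t)).sum();
    assert_eq!(total_ms, 1_111_111_111);
}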
impl Database for SledDB {
type Config = sled::Config;
fn new(config: Self::Config) -> PmtreeResult<Self> {
let db = Self::new_with_tries(config, 0)?;
Ok(db)
}
fn load(config: Self::Config) -> PmtreeResult<Self> {
let db = match config.open() {
Ok(db) => db,
Err(e) => {
return Err(PmtreeErrorKind::DatabaseError(
DatabaseErrorKind::CustomError(format!("Cannot load database: {e}")),
))
}
};
if !db.was_recovered() {
return Err(PmtreeErrorKind::DatabaseError(
DatabaseErrorKind::CustomError(format!(
"Database was not recovered: {}",
config.path.display()
)),
));
}
Ok(SledDB(db))
}
fn close(&mut self) -> PmtreeResult<()> {
let _ = self.0.flush().map_err(|_| {
PmtreeErrorKind::DatabaseError(DatabaseErrorKind::CustomError(
"Cannot flush database".to_string(),
))
})?;
Ok(())
}
fn get(&self, key: DBKey) -> PmtreeResult<Option<Value>> {
match self.0.get(key) {
Ok(value) => Ok(value.map(|val| val.to_vec())),
Err(_e) => Err(PmtreeErrorKind::TreeError(TreeErrorKind::InvalidKey)),
}
}
fn put(&mut self, key: DBKey, value: Value) -> PmtreeResult<()> {
match self.0.insert(key, value) {
Ok(_) => Ok(()),
Err(_e) => Err(PmtreeErrorKind::TreeError(TreeErrorKind::InvalidKey)),
}
}
fn put_batch(&mut self, subtree: HashMap<DBKey, Value>) -> PmtreeResult<()> {
let mut batch = sled::Batch::default();
for (key, value) in subtree {
batch.insert(&key, value);
}
self.0
.apply_batch(batch)
.map_err(|_| PmtreeErrorKind::TreeError(TreeErrorKind::InvalidKey))?;
Ok(())
}
}
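A minimal usage sketch of the adapter, assuming it sits next to the code above (so pmtree's DBKey = [u8; 8] and Value = Vec<u8> aliases are in scope) and using sled's temporary mode so nothing is left on disk; the function name demo is ours:

use pmtree::Database;

fn demo() -> PmtreeResult<()> {
    let config = sled::Config::new().temporary(true);
    let mut db = SledDB::new(config)?; // retries on WouldBlock, as above
    let key: DBKey = 42u64.to_be_bytes();
    db.put(key, vec![1, 2, 3])?; // single write
    assert_eq!(db.get(key)?, Some(vec![1, 2, 3]));
    db.close() // flushes sled before the handle is dropped
}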

utils/src/poseidon_constants.rs

@@ -11,7 +11,7 @@
#![allow(dead_code)]
-use ark_ff::{FpParameters, PrimeField};
+use ark_ff::PrimeField;
use num_bigint::BigUint;
pub struct PoseidonGrainLFSR {
@@ -36,20 +36,12 @@ impl PoseidonGrainLFSR {
assert!(is_field == 1);
// b0, b1 describes the field
-if is_field == 1 {
-state[1] = true;
-} else {
-state[1] = false;
-}
+state[1] = is_field == 1;
assert!(is_sbox_an_inverse == 0 || is_sbox_an_inverse == 1);
// b2, ..., b5 describes the S-BOX
-if is_sbox_an_inverse == 1 {
-state[5] = true;
-} else {
-state[5] = false;
-}
+state[5] = is_sbox_an_inverse == 1;
// b6, ..., b17 are the binary representation of n (prime_num_bits)
{
@@ -88,8 +80,8 @@ impl PoseidonGrainLFSR {
}
// b50, ..., b79 are set to 1
-for i in 50..=79 {
-state[i] = true;
+for item in state.iter_mut().skip(50) {
+*item = true;
}
let head = 0;
@@ -111,7 +103,7 @@ impl PoseidonGrainLFSR {
let mut new_bit = self.update();
// Loop until the first bit is true
-while new_bit == false {
+while !new_bit {
// Discard the second bit
let _ = self.update();
// Obtain another first bit
@@ -129,8 +121,8 @@ impl PoseidonGrainLFSR {
&mut self,
num_elems: usize,
) -> Vec<F> {
-assert_eq!(F::Params::MODULUS_BITS as u64, self.prime_num_bits);
-let modulus: BigUint = F::Params::MODULUS.into();
+assert_eq!(F::MODULUS_BIT_SIZE as u64, self.prime_num_bits);
+let modulus: BigUint = F::MODULUS.into();
let mut res = Vec::new();
for _ in 0..num_elems {
@@ -163,7 +155,7 @@ impl PoseidonGrainLFSR {
}
pub fn get_field_elements_mod_p<F: PrimeField>(&mut self, num_elems: usize) -> Vec<F> {
-assert_eq!(F::Params::MODULUS_BITS as u64, self.prime_num_bits);
+assert_eq!(F::MODULUS_BIT_SIZE as u64, self.prime_num_bits);
let mut res = Vec::new();
for _ in 0..num_elems {
@@ -263,8 +255,8 @@ pub fn find_poseidon_ark_and_mds<F: PrimeField>(
let ys = lfsr.get_field_elements_mod_p::<F>(rate);
for i in 0..(rate) {
-for j in 0..(rate) {
-mds[i][j] = (xs[i] + &ys[j]).inverse().unwrap();
+for (j, ys_item) in ys.iter().enumerate().take(rate) {
+mds[i][j] = (xs[i] + ys_item).inverse().unwrap();
}
}
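The edits in this file track the ark-ff 0.3 to 0.4 migration: FpParameters is gone and the modulus constants now live on PrimeField itself. A hedged sketch of the new spelling, assuming ark_bn254::Fr as the concrete field:

use ark_bn254::Fr;
use ark_ff::PrimeField;
use num_bigint::BigUint;

fn main() {
    // ark-ff 0.4: associated constants directly on PrimeField.
    // (0.3.x spelled these F::Params::MODULUS_BITS / F::Params::MODULUS.)
    let bits = Fr::MODULUS_BIT_SIZE as u64; // 254 for BN254's scalar field
    let modulus: BigUint = Fr::MODULUS.into();
    println!("{bits} bits, modulus = {modulus}");
}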

utils/src/poseidon_hash.rs

@@ -4,7 +4,7 @@
// and adapted to work over arkworks field traits and custom data structures
use crate::poseidon_constants::find_poseidon_ark_and_mds;
-use ark_ff::{FpParameters, PrimeField};
+use ark_ff::PrimeField;
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct RoundParamenters<F: PrimeField> {
@@ -28,22 +28,21 @@ impl<F: PrimeField> Poseidon<F> {
pub fn from(poseidon_params: &[(usize, usize, usize, usize)]) -> Self {
let mut read_params = Vec::<RoundParamenters<F>>::new();
-for i in 0..poseidon_params.len() {
-let (t, n_rounds_f, n_rounds_p, skip_matrices) = poseidon_params[i];
+for &(t, n_rounds_f, n_rounds_p, skip_matrices) in poseidon_params {
let (ark, mds) = find_poseidon_ark_and_mds::<F>(
1, // is_field = 1
0, // is_sbox_inverse = 0
-F::Params::MODULUS_BITS as u64,
+F::MODULUS_BIT_SIZE as u64,
t,
n_rounds_f as u64,
n_rounds_p as u64,
skip_matrices,
);
let rp = RoundParamenters {
-t: t,
-n_rounds_p: n_rounds_p,
-n_rounds_f: n_rounds_f,
-skip_matrices: skip_matrices,
+t,
+n_rounds_p,
+n_rounds_f,
+skip_matrices,
c: ark,
m: mds,
};
@@ -67,11 +66,11 @@ impl<F: PrimeField> Poseidon<F> {
pub fn sbox(&self, n_rounds_f: usize, n_rounds_p: usize, state: &mut [F], i: usize) {
if (i < n_rounds_f / 2) || (i >= n_rounds_f / 2 + n_rounds_p) {
-for j in 0..state.len() {
-let aux = state[j];
-state[j] *= state[j];
-state[j] *= state[j];
-state[j] *= aux;
+for current_state in &mut state.iter_mut() {
+let aux = *current_state;
+*current_state *= *current_state;
+*current_state *= *current_state;
+*current_state *= aux;
}
} else {
let aux = state[0];
@@ -85,9 +84,9 @@ impl<F: PrimeField> Poseidon<F> {
let mut new_state: Vec<F> = Vec::new();
for i in 0..state.len() {
new_state.push(F::zero());
-for j in 0..state.len() {
+for (j, state_item) in state.iter().enumerate() {
let mut mij = m[i][j];
-mij *= state[j];
+mij *= state_item;
new_state[i] += mij;
}
}
@@ -116,7 +115,7 @@ impl<F: PrimeField> Poseidon<F> {
self.ark(
&mut state,
&self.round_params[param_index].c,
-(i as usize) * self.round_params[param_index].t,
+i * self.round_params[param_index].t,
);
self.sbox(
self.round_params[param_index].n_rounds_f,
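The full-round branch of sbox computes x^5 with two squarings and one multiplication. The same sequence on plain integers (toy code, ours; the real version works over a prime field):

fn pow5(x: u128) -> u128 {
    let aux = x;
    let mut acc = x;
    acc *= acc; // x^2
    acc *= acc; // x^4
    acc * aux // x^5
}

fn main() {
    assert_eq!(pow5(3), 243);
}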

(Merkle tree tests)

@@ -3,8 +3,11 @@
mod test {
use hex_literal::hex;
use tiny_keccak::{Hasher as _, Keccak};
-use utils::{FullMerkleTree, Hasher, OptimalMerkleTree};
+use zerokit_utils::{
+FullMerkleConfig, FullMerkleTree, Hasher, OptimalMerkleConfig, OptimalMerkleTree,
+ZerokitMerkleProof, ZerokitMerkleTree,
+};
#[derive(Clone, Copy, Eq, PartialEq)]
struct Keccak256;
impl Hasher for Keccak256 {
@@ -44,14 +47,17 @@ mod test {
hex!("a9bb8c3f1f12e9aa903a50c47f314b57610a3ab32f2d463293f58836def38d36"),
];
-let mut tree = FullMerkleTree::<Keccak256>::new(2, [0; 32]);
+let mut tree =
+FullMerkleTree::<Keccak256>::new(2, [0; 32], FullMerkleConfig::default()).unwrap();
assert_eq!(tree.root(), default_tree_root);
for i in 0..leaves.len() {
tree.set(i, leaves[i]).unwrap();
assert_eq!(tree.root(), roots[i]);
}
-let mut tree = OptimalMerkleTree::<Keccak256>::new(2, [0; 32]);
+let mut tree =
+OptimalMerkleTree::<Keccak256>::new(2, [0; 32], OptimalMerkleConfig::default())
+.unwrap();
assert_eq!(tree.root(), default_tree_root);
for i in 0..leaves.len() {
tree.set(i, leaves[i]).unwrap();
@@ -69,7 +75,8 @@ mod test {
];
// We test the FullMerkleTree implementation
-let mut tree = FullMerkleTree::<Keccak256>::new(2, [0; 32]);
+let mut tree =
+FullMerkleTree::<Keccak256>::new(2, [0; 32], FullMerkleConfig::default()).unwrap();
for i in 0..leaves.len() {
// We set the leaves
tree.set(i, leaves[i]).unwrap();
@@ -93,7 +100,9 @@ mod test {
}
// We test the OptimalMerkleTree implementation
-let mut tree = OptimalMerkleTree::<Keccak256>::new(2, [0; 32]);
+let mut tree =
+OptimalMerkleTree::<Keccak256>::new(2, [0; 32], OptimalMerkleConfig::default())
+.unwrap();
for i in 0..leaves.len() {
// We set the leaves
tree.set(i, leaves[i]).unwrap();
@@ -116,4 +125,41 @@ mod test {
.unwrap());
}
}
+#[test]
+fn test_override_range() {
+let initial_leaves = [
+hex!("0000000000000000000000000000000000000000000000000000000000000001"),
+hex!("0000000000000000000000000000000000000000000000000000000000000002"),
+hex!("0000000000000000000000000000000000000000000000000000000000000003"),
+hex!("0000000000000000000000000000000000000000000000000000000000000004"),
+];
+let mut tree =
+OptimalMerkleTree::<Keccak256>::new(2, [0; 32], OptimalMerkleConfig::default())
+.unwrap();
+// We set the leaves
+tree.set_range(0, initial_leaves.iter().cloned()).unwrap();
+let new_leaves = [
+hex!("0000000000000000000000000000000000000000000000000000000000000005"),
+hex!("0000000000000000000000000000000000000000000000000000000000000006"),
+];
+let to_delete_indices: [usize; 2] = [0, 1];
+// We override the leaves
+tree.override_range(
+0, // start writing the new leaves at index 0
+new_leaves.iter().cloned(),
+to_delete_indices.iter().cloned(),
+)
+.unwrap();
+// ensure that the leaves are set correctly
+for i in 0..new_leaves.len() {
+assert_eq!(tree.get_leaf(i), new_leaves[i]);
+}
+}
}
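A hedged model (plain vectors and our names, not the tree API) of the semantics test_override_range exercises: override_range(start, leaves, indices) first clears the given indices, then writes the new leaves from start, and leaves everything else untouched.

fn main() {
    let mut leaves = vec![1u8, 2, 3, 4];
    let (start, new_leaves, to_delete) = (0usize, [5u8, 6], [0usize, 1]);
    for &i in &to_delete {
        leaves[i] = 0; // reset to the default leaf
    }
    for (offset, &leaf) in new_leaves.iter().enumerate() {
        leaves[start + offset] = leaf;
    }
    assert_eq!(leaves, vec![5, 6, 3, 4]); // untouched leaves keep their values
}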

(Poseidon hash tests)

@@ -3,7 +3,7 @@ mod test {
use ark_bn254::Fr;
use num_bigint::BigUint;
use num_traits::Num;
-use utils::poseidon_hash::Poseidon;
+use zerokit_utils::poseidon_hash::Poseidon;
const ROUND_PARAMS: [(usize, usize, usize, usize); 8] = [
(2, 8, 56, 0),