Compare commits


9 Commits

Author  SHA1  Message  Date
Rahul Ghangas  936b7fc3cb  test: add helper function to wrap init  2023-02-24 11:56:01 +05:30
Rahul Ghangas  cf7e2de380  test: call init and init_thread_pool before tests start  2023-02-24 11:55:46 +05:30
Rahul Ghangas  4fbb1525ea  chore: use browser for testing, since node is not supported  2023-02-24 11:54:44 +05:30
Rahul Ghangas  a7f6d63060  chore: add new line at the end of rust-toolchain file  2023-02-02 01:07:08 +05:30
Rahul Ghangas  95f895c64c  feat: target toolchain and add rustflags  2023-02-02 01:03:48 +05:30
Rahul Ghangas  228ccba90a  chore: remove unused dependency in rln that's causing wasm issues  2023-02-02 01:02:16 +05:30
Rahul Ghangas  e04fa2c7a5  feat: rexport init_thread_pool  2023-02-02 01:01:50 +05:30
Rahul Ghangas  6539bb4d5e  chore: enable parallel feature for rln  2023-02-02 00:59:32 +05:30
Rahul Ghangas  5394050c0b  chore: add wasm rayon for threading  2023-02-02 00:58:46 +05:30
71 changed files with 2299 additions and 8555 deletions
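
Taken together, these commits wire wasm-bindgen-rayon into rln-wasm and make the tests bring up the wasm module and its thread pool before any test body runs. A minimal sketch of that initialization order, based on the rln-wasm test diff further down (`initWasm` is the JS helper added in src/utils.js; `init_thread_pool` is the wasm-bindgen-rayon re-export):

```rust
use wasm_bindgen::prelude::*;
use wasm_bindgen_rayon::init_thread_pool;

#[wasm_bindgen(module = "/src/utils.js")]
extern "C" {
    #[wasm_bindgen(catch)]
    async fn initWasm() -> Result<(), JsValue>;
}

// Helper wrapping init: instantiate the wasm module first, then spawn
// the rayon worker pool before any test runs.
pub async fn init_for_tests() {
    initWasm().await.unwrap();
    // Spawn the rayon pool on 4 Web Workers.
    wasm_bindgen_futures::JsFuture::from(init_thread_pool(4))
        .await
        .unwrap();
}
```

Node lacks the Web Worker setup this relies on, which is why one of the commits moves the test runner to a headless browser.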

View File

@@ -28,32 +28,59 @@ on:
name: Tests
jobs:
test:
multiplier:
strategy:
matrix:
platform: [ubuntu-latest, macos-latest]
crate: [multiplier, semaphore, rln, utils]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: test - ${{ matrix.crate }} - ${{ matrix.platform }}
name: multiplier - ${{ matrix.platform }}
steps:
- name: Checkout sources
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
run: make installdeps
- name: Update git submodules
run: git submodule update --init --recursive
- name: Install cargo-make
run: cargo install cargo-make
- name: cargo-make test
run: |
cargo make test --release
working-directory: ${{ matrix.crate }}
working-directory: multiplier
semaphore:
strategy:
matrix:
platform: [ubuntu-latest, macos-latest]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: semaphore - ${{ matrix.platform }}
steps:
- name: Checkout sources
uses: actions/checkout@v2
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Update git submodules
run: git submodule update --init --recursive
- name: Install cargo-make
run: cargo install cargo-make
- name: cargo-make test
run: |
cargo make test --release
working-directory: semaphore
rln-wasm:
strategy:
matrix:
@@ -61,38 +88,88 @@ jobs:
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: test - rln-wasm - ${{ matrix.platform }}
name: rln-wasm - ${{ matrix.platform }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v2
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- uses: Swatinem/rust-cache@v2
- name: Install Dependencies
run: make installdeps
- run: git submodule update --init --recursive
- name: Install wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
- run: cargo install cargo-make
- run: cargo make build
working-directory: rln-wasm
- run: cargo make test --release
- run: cargo-make test
working-directory: rln-wasm
rln:
strategy:
matrix:
platform: [ubuntu-latest, macos-latest]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: rln - ${{ matrix.platform }}
steps:
- name: Checkout sources
uses: actions/checkout@v2
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Update git submodules
run: git submodule update --init --recursive
- name: Install cargo-make
run: cargo install cargo-make
- name: cargo-make test
run: |
cargo make test --release
working-directory: rln
utils:
strategy:
matrix:
platform: [ubuntu-latest, macos-latest]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: utils - ${{ matrix.platform }}
steps:
- name: Checkout sources
uses: actions/checkout@v2
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Update git submodules
run: git submodule update --init --recursive
- name: Install cargo-make
run: cargo install cargo-make
- name: cargo-make test
run: |
cargo make test --release
working-directory: utils
lint:
strategy:
matrix:
# we run lint tests only on ubuntu
platform: [ubuntu-latest]
crate: [multiplier, semaphore, rln, utils]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: lint - ${{ matrix.crate }} - ${{ matrix.platform }}
name: lint - ${{ matrix.platform }}
steps:
- name: Checkout sources
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
@@ -100,38 +177,31 @@ jobs:
toolchain: stable
override: true
components: rustfmt, clippy
- uses: Swatinem/rust-cache@v2
- name: Install Dependencies
run: make installdeps
- name: Update git submodules
run: git submodule update --init --recursive
- name: cargo fmt
if: success() || failure()
run: cargo fmt -- --check
working-directory: ${{ matrix.crate }}
- name: cargo clippy
run: cargo fmt --all -- --check
- name: multiplier - cargo clippy
if: success() || failure()
run: |
cargo clippy --release -- -D warnings
working-directory: ${{ matrix.crate }}
cargo clippy --release
working-directory: multiplier
- name: semaphore - cargo clippy
if: success() || failure()
run: |
cargo clippy --release
working-directory: semaphore
- name: rln - cargo clippy
if: success() || failure()
run: |
cargo clippy --release
working-directory: rln
- name: utils - cargo clippy
if: success() || failure()
run: |
cargo clippy --release
working-directory: utils
# We skip clippy on rln-wasm, since wasm target is managed by cargo make
# Currently not treating warnings as error, too noisy
# -- -D warnings
benchmark:
# run only in pull requests
if: github.event_name == 'pull_request'
strategy:
matrix:
# we run benchmark tests only on ubuntu
platform: [ubuntu-latest]
crate: [rln, utils]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: benchmark - ${{ matrix.platform }} - ${{ matrix.crate }}
steps:
- name: Checkout sources
uses: actions/checkout@v3
- uses: Swatinem/rust-cache@v2
- uses: boa-dev/criterion-compare-action@v3
with:
branchName: ${{ github.base_ref }}
cwd: ${{ matrix.crate }}
# -- -D warnings

View File

@@ -6,74 +6,61 @@ on:
jobs:
linux:
strategy:
matrix:
target:
- x86_64-unknown-linux-gnu
- aarch64-unknown-linux-gnu
- i686-unknown-linux-gnu
name: Linux build
runs-on: ubuntu-latest
steps:
- name: Checkout sources
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
target: ${{ matrix.target }}
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
run: make installdeps
- name: cross build
- name: Update git submodules
run: git submodule update --init --recursive
- name: cargo build
run: |
cross build --release --target ${{ matrix.target }} --workspace --exclude rln-wasm
cargo build --release --workspace --exclude rln-wasm
mkdir release
cp target/${{ matrix.target }}/release/librln* release/
tar -czvf ${{ matrix.target }}-rln.tar.gz release/
cp target/release/librln* release/
tar -czvf linux-rln.tar.gz release/
- name: Upload archive artifact
uses: actions/upload-artifact@v2
with:
name: ${{ matrix.target }}-archive
path: ${{ matrix.target }}-rln.tar.gz
name: linux-archive
path: linux-rln.tar.gz
retention-days: 2
macos:
name: MacOS build
runs-on: macos-latest
strategy:
matrix:
target:
- x86_64-apple-darwin
- aarch64-apple-darwin
steps:
- name: Checkout sources
uses: actions/checkout@v3
uses: actions/checkout@v2
with:
ref: master
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
target: ${{ matrix.target }}
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
run: make installdeps
- name: cross build
- name: Update git submodules
run: git submodule update --init --recursive
- name: cargo build
run: |
cross build --release --target ${{ matrix.target }} --workspace --exclude rln-wasm
cargo build --release --workspace --exclude rln-wasm
mkdir release
cp target/${{ matrix.target }}/release/librln* release/
tar -czvf ${{ matrix.target }}-rln.tar.gz release/
cp target/release/librln* release/
tar -czvf macos-rln.tar.gz release/
- name: Upload archive artifact
uses: actions/upload-artifact@v2
with:
name: ${{ matrix.target }}-archive
path: ${{ matrix.target }}-rln.tar.gz
name: macos-archive
path: macos-rln.tar.gz
retention-days: 2
browser-rln-wasm:
@@ -81,21 +68,23 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout sources
uses: actions/checkout@v3
uses: actions/checkout@v2
with:
ref: master
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
run: make installdeps
- run: git submodule update --init --recursive
- name: Install wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
- name: cross make build
- name: Install cargo-make
run: cargo install cargo-make
- name: cargo make build
run: |
cross make build
cargo make build
mkdir release
cp pkg/** release/
tar -czvf browser-rln-wasm.tar.gz release/
@@ -115,14 +104,14 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
uses: actions/checkout@v2
with:
ref: master
- name: Download artifacts
uses: actions/download-artifact@v2
- name: Delete tag
uses: dev-drprasad/delete-tag-and-release@v0.2.1
uses: dev-drprasad/delete-tag-and-release@v0.2.0
with:
delete_release: true
tag_name: nightly
@@ -131,13 +120,14 @@ jobs:
- name: Create prerelease
run: |
start_tag=$(gh release list -L 2 --exclude-drafts | grep -v nightly | cut -d$'\t' -f3 | sed -n '1p')
start_tag=$(gh release list -L 2 --exclude-drafts | grep -v nightly | cut -d$'\t' -f3)
gh release create nightly --prerelease --target master \
--title 'Nightly build ("master" branch)' \
--generate-notes \
--draft=false \
--notes-start-tag $start_tag \
*-archive/*.tar.gz \
linux-archive/linux-rln.tar.gz \
macos-archive/macos-rln.tar.gz \
browser-rln-wasm-archive/browser-rln-wasm.tar.gz
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -146,4 +136,6 @@ jobs:
with:
failOnError: false
name: |
*-archive
linux-archive
macos-archive
browser-rln-wasm-archive

View File

@@ -9,7 +9,7 @@ jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v2
- uses: micnncim/action-label-syncer@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

.gitignore (vendored, 5 lines changed)
View File

@@ -8,7 +8,10 @@ rln/pmtree_db
# will have compiled files and executables
debug/
target/
wabt/
# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
Cargo.lock
# These are backup files generated by rustfmt
**/*.rs.bk

View File

@@ -1,15 +1,18 @@
## 2023-02-28 v0.2
## Upcoming release
This release contains:
- Improved code quality
Release highlights:
- Allows consumers of zerokit RLN to set leaves to the Merkle Tree from an arbitrary index. Useful for batching updates to the Merkle Tree.
- Improved performance for proof generation and verification
- rln_wasm which allows for the consumption of RLN through a WebAssembly interface
- Refactored to generate Semaphore-compatible credentials
- Dual License under Apache 2.0 and MIT
- RLN compiles as a static library, which can be consumed through a C FFI
The full list of changes is below.
### Features
- Creation of `set_leaves_from`, which allows consumers to add leaves to a tree from a given starting index. `init_tree_with_leaves` internally uses `set_leaves_from`, with index 0.
### Changes
- Renaming of `set_leaves` to `init_tree_with_leaves`, which is a more accurate representation of the function's utility.
### Fixes
- None
## 2022-09-19 v0.1
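
As a rough illustration of the renamed tree APIs described above, here is a sketch assuming the `Read`-based signatures used by rln-cli later in this diff (`Cursor` stands in for the serialized leaves; error handling via color_eyre matches the CLI):

```rust
use color_eyre::Result;
use rln::public::RLN;
use std::io::Cursor;

fn update_tree(rln: &mut RLN, initial: &[u8], batch: &[u8]) -> Result<()> {
    // Rebuild the tree from leaf 0 (internally wraps set_leaves_from).
    rln.init_tree_with_leaves(Cursor::new(initial))?;
    // Batch-update leaves starting from an arbitrary index.
    rln.set_leaves_from(16, Cursor::new(batch))?;
    Ok(())
}
```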

Cargo.lock (generated, 4276 lines changed)

File diff suppressed because it is too large

View File

@@ -4,21 +4,6 @@ members = [
"private-settlement",
"semaphore",
"rln",
"rln-cli",
"rln-wasm",
"utils",
]
resolver = "2"
# Compilation profile for any non-workspace member.
# Dependencies are optimized, even in a dev build. This improves dev performance
# while having neglible impact on incremental build times.
[profile.dev.package."*"]
opt-level = 3
[profile.release.package."rln-wasm"]
# Tell `rustc` to optimize for small code size.
opt-level = "s"
[profile.release.package."semaphore"]
codegen-units = 1

View File

@@ -1,32 +0,0 @@
[target.x86_64-pc-windows-gnu]
image = "ghcr.io/cross-rs/x86_64-pc-windows-gnu:latest"
[target.aarch64-unknown-linux-gnu]
image = "ghcr.io/cross-rs/aarch64-unknown-linux-gnu:latest"
[target.x86_64-unknown-linux-gnu]
image = "ghcr.io/cross-rs/x86_64-unknown-linux-gnu:latest"
[target.arm-unknown-linux-gnueabi]
image = "ghcr.io/cross-rs/arm-unknown-linux-gnueabi:latest"
[target.i686-pc-windows-gnu]
image = "ghcr.io/cross-rs/i686-pc-windows-gnu:latest"
[target.i686-unknown-linux-gnu]
image = "ghcr.io/cross-rs/i686-unknown-linux-gnu:latest"
[target.arm-unknown-linux-gnueabihf]
image = "ghcr.io/cross-rs/arm-unknown-linux-gnueabihf:latest"
[target.mips-unknown-linux-gnu]
image = "ghcr.io/cross-rs/mips-unknown-linux-gnu:latest"
[target.mips64-unknown-linux-gnuabi64]
image = "ghcr.io/cross-rs/mips64-unknown-linux-gnuabi64:latest"
[target.mips64el-unknown-linux-gnuabi64]
image = "ghcr.io/cross-rs/mips64el-unknown-linux-gnuabi64:latest"
[target.mipsel-unknown-linux-gnu]
image = "ghcr.io/cross-rs/mipsel-unknown-linux-gnu:latest"

View File

@@ -1,203 +0,0 @@
Copyright (c) 2022 Vac Research
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@@ -1,25 +0,0 @@
Copyright (c) 2022 Vac Research
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

View File

@@ -1,31 +1,13 @@
.PHONY: all installdeps build test clean
.PHONY: all test clean
all: .pre-build build
.fetch-submodules:
@git submodule update --init --recursive
.pre-build: .fetch-submodules
@cargo install cargo-make
ifdef CI
@cargo install cross --git https://github.com/cross-rs/cross.git --rev 1511a28
endif
installdeps: .pre-build
ifeq ($(shell uname),Darwin)
# commented due to https://github.com/orgs/Homebrew/discussions/4612
# @brew update
@brew install cmake ninja
else ifeq ($(shell uname),Linux)
@sudo apt-get update
@sudo apt-get install -y cmake ninja-build
endif
@git clone --recursive https://github.com/WebAssembly/wabt.git
@cd wabt && mkdir build && cd build && cmake .. -GNinja && ninja && sudo ninja install
build: .pre-build
all: .pre-build
@cargo make build
.pre-build:
ifndef $(cargo make --help)
@cargo install --force cargo-make
endif
test: .pre-build
@cargo make test

View File

@@ -17,19 +17,3 @@ in Rust.
- [RLN library](https://github.com/kilic/rln) written in Rust based on Bellman.
- [semaphore-rs](https://github.com/worldcoin/semaphore-rs) written in Rust based on ark-circom.
## Build and Test
To install missing dependencies, run the following commands from the root folder
```bash
make installdeps
```
To build and test all crates, run the following commands from the root folder
```bash
make build
make test
```
## Release assets
We use [`cross-rs`](https://github.com/cross-rs/cross) to cross-compile and generate release assets for rln.

View File

@@ -1,8 +1,7 @@
[package]
name = "multiplier"
version = "0.3.0"
version = "0.1.0"
edition = "2018"
license = "MIT OR Apache-2.0"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -13,19 +12,23 @@ license = "MIT OR Apache-2.0"
# fnv = { version = "1.0.3", default-features = false }
# num = { version = "0.4.0" }
# num-traits = { version = "0.2.0", default-features = false }
num-bigint = { version = "0.4", default-features = false, features = ["rand"] }
# ZKP Generation
ark-ec = { version = "0.3.0", default-features = false, features = ["parallel"] }
# ark-ff = { version = "0.3.0", default-features = false, features = ["parallel", "asm"] }
ark-std = { version = "0.3.0", default-features = false, features = ["parallel"] }
ark-bn254 = { version = "0.3.0" }
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", features = ["parallel"] }
# ark-poly = { version = "^0.3.0", default-features = false, features = ["parallel"] }
ark-relations = { version = "0.3.0", default-features = false }
ark-serialize = { version = "0.3.0", default-features = false }
ark-circom = { git = "https://github.com/gakonst/ark-circom", features = ["circom-2"], rev = "35ce5a9" }
ark-circom = { git = "https://github.com/gakonst/ark-circom", features = ["circom-2"] }
# error handling
color-eyre = "0.6.1"
# thiserror = "1.0.26"
color-eyre = "0.5"
# decoding of data
# hex = "0.4.3"

View File

@@ -3,15 +3,7 @@
Example wrapper around a basic Circom circuit to test Circom 2 integration
through ark-circom and FFI.
## Build and Test
To build and test, run the following commands within the module folder
```bash
cargo make build
cargo make test
```
## FFI
# FFI
To generate C or Nim bindings from Rust FFI, use `cbindgen` or `nbindgen`:

View File

@@ -31,12 +31,12 @@ impl<'a> From<&Buffer> for &'a [u8] {
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn new_circuit(ctx: *mut *mut Multiplier) -> bool {
if let Ok(mul) = Multiplier::new() {
unsafe { *ctx = Box::into_raw(Box::new(mul)) };
true
} else {
false
}
println!("multiplier ffi: new");
let mul = Multiplier::new();
unsafe { *ctx = Box::into_raw(Box::new(mul)) };
true
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]

View File

@@ -1,6 +1,6 @@
use ark_circom::{CircomBuilder, CircomConfig};
use ark_std::rand::thread_rng;
use color_eyre::{Report, Result};
use color_eyre::Result;
use ark_bn254::Bn254;
use ark_groth16::{
@@ -25,18 +25,17 @@ fn groth16_proof_example() -> Result<()> {
let circom = builder.build()?;
let inputs = circom
.get_public_inputs()
.ok_or(Report::msg("no public inputs"))?;
let inputs = circom.get_public_inputs().unwrap();
let proof = prove(circom, &params, &mut rng)?;
let pvk = prepare_verifying_key(&params.vk);
match verify_proof(&pvk, &proof, &inputs) {
Ok(_) => Ok(()),
Err(_) => Err(Report::msg("not verified")),
}
let verified = verify_proof(&pvk, &proof, &inputs)?;
assert!(verified);
Ok(())
}
fn main() {

View File

@@ -7,8 +7,9 @@ use ark_groth16::{
Proof, ProvingKey,
};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use color_eyre::{Report, Result};
use std::io::{Read, Write};
// , SerializationError};
use std::io::{self, Read, Write};
pub struct Multiplier {
circom: CircomCircuit<Bn254>,
@@ -17,11 +18,12 @@ pub struct Multiplier {
impl Multiplier {
// TODO Break this apart here
pub fn new() -> Result<Multiplier> {
pub fn new() -> Multiplier {
let cfg = CircomConfig::<Bn254>::new(
"./resources/circom2_multiplier2.wasm",
"./resources/circom2_multiplier2.r1cs",
)?;
)
.unwrap();
let mut builder = CircomBuilder::new(cfg);
builder.push_input("a", 3);
@@ -32,41 +34,40 @@ impl Multiplier {
let mut rng = thread_rng();
let params = generate_random_parameters::<Bn254, _, _>(circom, &mut rng)?;
let params = generate_random_parameters::<Bn254, _, _>(circom, &mut rng).unwrap();
let circom = builder.build()?;
let circom = builder.build().unwrap();
Ok(Multiplier { circom, params })
//let inputs = circom.get_public_inputs().unwrap();
Multiplier { circom, params }
}
// TODO Input Read
pub fn prove<W: Write>(&self, result_data: W) -> Result<()> {
pub fn prove<W: Write>(&self, result_data: W) -> io::Result<()> {
let mut rng = thread_rng();
// XXX: There's probably a better way to do this
let circom = self.circom.clone();
let params = self.params.clone();
let proof = prove(circom, &params, &mut rng)?;
let proof = prove(circom, &params, &mut rng).unwrap();
// XXX: Unclear if this is different from other serialization(s)
proof.serialize(result_data)?;
let _ = proof.serialize(result_data).unwrap();
Ok(())
}
pub fn verify<R: Read>(&self, input_data: R) -> Result<bool> {
let proof = Proof::deserialize(input_data)?;
pub fn verify<R: Read>(&self, input_data: R) -> io::Result<bool> {
let proof = Proof::deserialize(input_data).unwrap();
let pvk = prepare_verifying_key(&self.params.vk);
// XXX Part of input data?
let inputs = self
.circom
.get_public_inputs()
.ok_or(Report::msg("no public inputs"))?;
let inputs = self.circom.get_public_inputs().unwrap();
let verified = verify_proof(&pvk, &proof, &inputs)?;
let verified = verify_proof(&pvk, &proof, &inputs).unwrap();
Ok(verified)
}
@@ -74,6 +75,6 @@ impl Multiplier {
impl Default for Multiplier {
fn default() -> Self {
Self::new().unwrap()
Self::new()
}
}

View File

@@ -4,7 +4,8 @@ mod tests {
#[test]
fn multiplier_proof() {
let mul = Multiplier::new().unwrap();
let mul = Multiplier::new();
//let inputs = mul.circom.get_public_inputs().unwrap();
let mut output_data: Vec<u8> = Vec::new();
let _ = mul.prove(&mut output_data);

View File

@@ -1,8 +1,7 @@
[package]
name = "private-settlement"
version = "0.3.0"
version = "0.1.0"
edition = "2021"
license = "MIT OR Apache-2.0"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

View File

@@ -2,10 +2,3 @@
This module provides APIs to manage, compute, and verify [Private Settlement](https://rfc.vac.dev/spec/44/) zkSNARK proofs and primitives.
## Build and Test
To build and test, run the following commands within the module folder
```bash
cargo make build
cargo make test
```

View File

@@ -1,13 +0,0 @@
[package]
name = "rln-cli"
version = "0.3.0"
edition = "2021"
[dependencies]
rln = { path = "../rln" }
clap = { version = "4.2.7", features = ["cargo", "derive", "env"]}
clap_derive = { version = "=4.2.0" }
color-eyre = "=0.6.2"
# serialization
serde_json = "1.0.48"
serde = { version = "1.0.130", features = ["derive"] }

View File

@@ -1,67 +0,0 @@
use std::path::PathBuf;
use clap::Subcommand;
#[derive(Subcommand)]
pub(crate) enum Commands {
New {
tree_height: usize,
/// Sets a custom config file
#[arg(short, long)]
config: PathBuf,
},
NewWithParams {
tree_height: usize,
/// Sets a custom config file
#[arg(short, long)]
config: PathBuf,
#[arg(short, long)]
tree_config_input: PathBuf,
},
SetTree {
tree_height: usize,
},
SetLeaf {
index: usize,
#[arg(short, long)]
file: PathBuf,
},
SetMultipleLeaves {
index: usize,
#[arg(short, long)]
file: PathBuf,
},
ResetMultipleLeaves {
#[arg(short, long)]
file: PathBuf,
},
SetNextLeaf {
#[arg(short, long)]
file: PathBuf,
},
DeleteLeaf {
index: usize,
},
GetRoot,
GetProof {
index: usize,
},
Prove {
#[arg(short, long)]
input: PathBuf,
},
Verify {
#[arg(short, long)]
file: PathBuf,
},
GenerateProof {
#[arg(short, long)]
input: PathBuf,
},
VerifyWithRoots {
#[arg(short, long)]
input: PathBuf,
#[arg(short, long)]
roots: PathBuf,
},
}

View File

@@ -1,29 +0,0 @@
use color_eyre::Result;
use serde::{Deserialize, Serialize};
use std::{fs::File, io::Read, path::PathBuf};
pub const RLN_STATE_PATH: &str = "RLN_STATE_PATH";
#[derive(Default, Serialize, Deserialize)]
pub(crate) struct Config {
pub inner: Option<InnerConfig>,
}
#[derive(Default, Serialize, Deserialize)]
pub(crate) struct InnerConfig {
pub file: PathBuf,
pub tree_height: usize,
}
impl Config {
pub(crate) fn load_config() -> Result<Config> {
let path = PathBuf::from(std::env::var(RLN_STATE_PATH)?);
let mut file = File::open(path)?;
let mut contents = String::new();
file.read_to_string(&mut contents)?;
let state: Config = serde_json::from_str(&contents)?;
Ok(state)
}
}

View File

@@ -1,157 +0,0 @@
use std::{fs::File, io::Read, path::Path};
use clap::Parser;
use color_eyre::{Report, Result};
use commands::Commands;
use rln::public::RLN;
use state::State;
mod commands;
mod config;
mod state;
#[derive(Parser)]
#[command(author, version, about, long_about = None)]
struct Cli {
#[command(subcommand)]
command: Option<Commands>,
}
fn main() -> Result<()> {
let cli = Cli::parse();
let mut state = State::load_state()?;
match &cli.command {
Some(Commands::New {
tree_height,
config,
}) => {
let resources = File::open(&config)?;
state.rln = Some(RLN::new(*tree_height, resources)?);
Ok(())
}
Some(Commands::NewWithParams {
tree_height,
config,
tree_config_input,
}) => {
let mut resources: Vec<Vec<u8>> = Vec::new();
for filename in ["rln.wasm", "rln_final.zkey", "verification_key.json"] {
let fullpath = config.join(Path::new(filename));
let mut file = File::open(&fullpath)?;
let metadata = std::fs::metadata(&fullpath)?;
let mut buffer = vec![0; metadata.len() as usize];
file.read_exact(&mut buffer)?;
resources.push(buffer);
}
let tree_config_input_file = File::open(&tree_config_input)?;
state.rln = Some(RLN::new_with_params(
*tree_height,
resources[0].clone(),
resources[1].clone(),
resources[2].clone(),
tree_config_input_file,
)?);
Ok(())
}
Some(Commands::SetTree { tree_height }) => {
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.set_tree(*tree_height)?;
Ok(())
}
Some(Commands::SetLeaf { index, file }) => {
let input_data = File::open(&file)?;
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.set_leaf(*index, input_data)?;
Ok(())
}
Some(Commands::SetMultipleLeaves { index, file }) => {
let input_data = File::open(&file)?;
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.set_leaves_from(*index, input_data)?;
Ok(())
}
Some(Commands::ResetMultipleLeaves { file }) => {
let input_data = File::open(&file)?;
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.init_tree_with_leaves(input_data)?;
Ok(())
}
Some(Commands::SetNextLeaf { file }) => {
let input_data = File::open(&file)?;
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.set_next_leaf(input_data)?;
Ok(())
}
Some(Commands::DeleteLeaf { index }) => {
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.delete_leaf(*index)?;
Ok(())
}
Some(Commands::GetRoot) => {
let writer = std::io::stdout();
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.get_root(writer)?;
Ok(())
}
Some(Commands::GetProof { index }) => {
let writer = std::io::stdout();
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.get_proof(*index, writer)?;
Ok(())
}
Some(Commands::Prove { input }) => {
let input_data = File::open(&input)?;
let writer = std::io::stdout();
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.prove(input_data, writer)?;
Ok(())
}
Some(Commands::Verify { file }) => {
let input_data = File::open(&file)?;
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.verify(input_data)?;
Ok(())
}
Some(Commands::GenerateProof { input }) => {
let input_data = File::open(&input)?;
let writer = std::io::stdout();
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.generate_rln_proof(input_data, writer)?;
Ok(())
}
Some(Commands::VerifyWithRoots { input, roots }) => {
let input_data = File::open(&input)?;
let roots_data = File::open(&roots)?;
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.verify_with_roots(input_data, roots_data)?;
Ok(())
}
None => Ok(()),
}
}

View File

@@ -1,23 +0,0 @@
use color_eyre::Result;
use rln::public::RLN;
use std::fs::File;
use crate::config::{Config, InnerConfig};
#[derive(Default)]
pub(crate) struct State<'a> {
pub rln: Option<RLN<'a>>,
}
impl<'a> State<'a> {
pub(crate) fn load_state() -> Result<State<'a>> {
let config = Config::load_config()?;
let rln = if let Some(InnerConfig { file, tree_height }) = config.inner {
let resources = File::open(&file)?;
Some(RLN::new(tree_height, resources)?)
} else {
None
};
Ok(State { rln })
}
}

rln-wasm/.cargo/config (new file, 5 lines)
View File

@@ -0,0 +1,5 @@
[target.wasm32-unknown-unknown]
rustflags = ["-C", "target-feature=+atomics,+bulk-memory,+mutable-globals"]
[unstable]
build-std = ["panic_abort", "std"]
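
This new config enables the target features that threaded wasm requires (atomics, bulk memory, mutable globals) and rebuilds std with them, which is also why the crate pins a nightly toolchain below. Once a pool is spawned via `init_thread_pool`, ordinary rayon code runs across Web Workers; a hypothetical illustration, not part of this diff:

```rust
// Hypothetical example: with the target features above and an initialized
// wasm-bindgen-rayon pool, plain rayon parallelism works in the browser.
use rayon::prelude::*;

pub fn sum_of_squares(xs: &[u64]) -> u64 {
    xs.par_iter().map(|x| x * x).sum()
}
```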

View File

@@ -1,6 +1,6 @@
[package]
name = "rln-wasm"
version = "0.0.9"
version = "0.0.7"
edition = "2021"
license = "MIT or Apache2"
@@ -11,12 +11,13 @@ crate-type = ["cdylib", "rlib"]
default = ["console_error_panic_hook"]
[dependencies]
rln = { path = "../rln", default-features = false, features = ["wasm"] }
rln = { path = "../rln", default-features = false, features = ["wasm", "parallel"] }
num-bigint = { version = "0.4", default-features = false, features = ["rand", "serde"] }
wasmer = { version = "2.3", default-features = false, features = ["js", "std"] }
wasmer = { version = "3.1.1", default-features = false, features = ["js", "std"] }
web-sys = {version = "0.3", features=["console"]}
getrandom = { version = "0.2.7", default-features = false, features = ["js"] }
wasm-bindgen = "0.2.63"
wasm-bindgen-rayon = "1.0.3"
serde-wasm-bindgen = "0.4"
js-sys = "0.3.59"
serde_json = "1.0.85"
@@ -31,3 +32,6 @@ console_error_panic_hook = { version = "0.1.7", optional = true }
wasm-bindgen-test = "0.3.13"
wasm-bindgen-futures = "0.4.33"
[profile.release]
# Tell `rustc` to optimize for small code size.
opt-level = "s"

View File

@@ -9,17 +9,12 @@ script = "sed -i.bak 's/rln-wasm/zerokit-rln-wasm/g' pkg/package.json && rm pkg/
clear = true
dependencies = [
"pack-build",
"pack-rename",
"post-build"
"pack-rename"
]
[tasks.post-build]
command = "wasm-strip"
args = ["./pkg/rln_wasm_bg.wasm"]
[tasks.test]
command = "wasm-pack"
args = ["test", "--release", "--node"]
args = ["test", "--release", "--firefox", "--headless"]
dependencies = ["build"]
[tasks.login]

View File

@@ -10,22 +10,11 @@ curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh
```
cargo install cargo-make
```
OR
```
make installdeps
```
3. Compile zerokit for `wasm32-unknown-unknown`:
```
cd rln-wasm
cargo make build
```
4. Compile a slimmer version of zerokit for `wasm32-unknown-unknown`:
```
cd rln-wasm
cargo make post-build
```
## Running tests
```

rln-wasm/rust-toolchain (new file, 1 line)
View File

@@ -0,0 +1 @@
nightly-2022-12-12

View File

@@ -3,13 +3,13 @@
extern crate wasm_bindgen;
extern crate web_sys;
use std::vec::Vec;
use js_sys::{BigInt as JsBigInt, Object, Uint8Array};
use num_bigint::BigInt;
use rln::public::{hash, poseidon_hash, RLN};
use rln::public::RLN;
use wasm_bindgen::prelude::*;
pub use wasm_bindgen_rayon::init_thread_pool;
#[wasm_bindgen]
pub fn init_panic_hook() {
console_error_panic_hook::set_once();
@@ -22,196 +22,34 @@ pub struct RLNWrapper {
instance: RLN<'static>,
}
// Macro to call methods with an arbitrary number of arguments,
// whose last argument is an output buffer pointer.
// The first argument to the macro is the context,
// the second is the method to call on `RLN`,
// the third is the error message used on failure,
// and the rest are the remaining arguments passed to the method.
macro_rules! call_with_output_and_error_msg {
// this variant is needed for the case when
// there are zero other arguments
($instance:expr, $method:ident, $error_msg:expr) => {
{
let mut output_data: Vec<u8> = Vec::new();
let new_instance = $instance.process();
if let Err(err) = new_instance.instance.$method(&mut output_data) {
std::mem::forget(output_data);
Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
} else {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
}
}
};
($instance:expr, $method:ident, $error_msg:expr, $( $arg:expr ),* ) => {
{
let mut output_data: Vec<u8> = Vec::new();
let new_instance = $instance.process();
if let Err(err) = new_instance.instance.$method($($arg.process()),*, &mut output_data) {
std::mem::forget(output_data);
Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
} else {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
}
}
};
}
// Macro to call methods with an arbitrary number of arguments,
// attaching an error message on failure.
// The first argument to the macro is the context,
// the second is the method to call on `RLNWrapper`,
// and the rest are the remaining arguments passed to the method.
macro_rules! call_with_error_msg {
($instance:expr, $method:ident, $error_msg:expr $(, $arg:expr)*) => {
{
let new_instance: &mut RLNWrapper = $instance.process();
if let Err(err) = new_instance.instance.$method($($arg.process()),*) {
Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
} else {
Ok(())
}
}
}
}
macro_rules! call {
($instance:expr, $method:ident $(, $arg:expr)*) => {
{
let new_instance: &mut RLNWrapper = $instance.process();
new_instance.instance.$method($($arg.process()),*)
}
}
}
macro_rules! call_bool_method_with_error_msg {
($instance:expr, $method:ident, $error_msg:expr $(, $arg:expr)*) => {
{
let new_instance: &RLNWrapper = $instance.process();
new_instance.instance.$method($($arg.process()),*).map_err(|err| format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
}
}
}
// Macro to execute a free function with an arbitrary number of arguments.
// The first argument is the function to execute,
// and the rest are the remaining arguments passed to the function.
macro_rules! fn_call_with_output_and_error_msg {
// this variant is needed for the case when
// there are zero other arguments
($func:ident, $error_msg:expr) => {
{
let mut output_data: Vec<u8> = Vec::new();
if let Err(err) = $func(&mut output_data) {
std::mem::forget(output_data);
Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
} else {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
}
}
};
($func:ident, $error_msg:expr, $( $arg:expr ),* ) => {
{
let mut output_data: Vec<u8> = Vec::new();
if let Err(err) = $func($($arg.process()),*, &mut output_data) {
std::mem::forget(output_data);
Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
} else {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
}
}
};
}
trait ProcessArg {
type ReturnType;
fn process(self) -> Self::ReturnType;
}
impl ProcessArg for usize {
type ReturnType = usize;
fn process(self) -> Self::ReturnType {
self
}
}
impl<T> ProcessArg for Vec<T> {
type ReturnType = Vec<T>;
fn process(self) -> Self::ReturnType {
self
}
}
impl<'a> ProcessArg for *const RLN<'a> {
type ReturnType = &'a RLN<'a>;
fn process(self) -> Self::ReturnType {
unsafe { &*self }
}
}
impl ProcessArg for *const RLNWrapper {
type ReturnType = &'static RLNWrapper;
fn process(self) -> Self::ReturnType {
unsafe { &*self }
}
}
impl ProcessArg for *mut RLNWrapper {
type ReturnType = &'static mut RLNWrapper;
fn process(self) -> Self::ReturnType {
unsafe { &mut *self }
}
}
impl<'a> ProcessArg for &'a [u8] {
type ReturnType = &'a [u8];
fn process(self) -> Self::ReturnType {
self
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = newRLN)]
pub fn wasm_new(
tree_height: usize,
zkey: Uint8Array,
vk: Uint8Array,
) -> Result<*mut RLNWrapper, String> {
let instance = RLN::new_with_params(tree_height, zkey.to_vec(), vk.to_vec())
.map_err(|err| format!("{:#?}", err))?;
pub fn wasm_new(tree_height: usize, zkey: Uint8Array, vk: Uint8Array) -> *mut RLNWrapper {
let instance = RLN::new_with_params(tree_height, zkey.to_vec(), vk.to_vec());
let wrapper = RLNWrapper { instance };
Ok(Box::into_raw(Box::new(wrapper)))
Box::into_raw(Box::new(wrapper))
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = getSerializedRLNWitness)]
pub fn wasm_get_serialized_rln_witness(
ctx: *mut RLNWrapper,
input: Uint8Array,
) -> Result<Uint8Array, String> {
let rln_witness = call!(ctx, get_serialized_rln_witness, &input.to_vec()[..])
.map_err(|err| format!("{:#?}", err))?;
Ok(Uint8Array::from(&rln_witness[..]))
pub fn wasm_get_serialized_rln_witness(ctx: *mut RLNWrapper, input: Uint8Array) -> Uint8Array {
let wrapper = unsafe { &mut *ctx };
let rln_witness = wrapper
.instance
.get_serialized_rln_witness(&input.to_vec()[..]);
Uint8Array::from(&rln_witness[..])
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = insertMember)]
pub fn wasm_set_next_leaf(ctx: *mut RLNWrapper, input: Uint8Array) -> Result<(), String> {
call_with_error_msg!(
ctx,
set_next_leaf,
"could not insert member into merkle tree".to_string(),
&input.to_vec()[..]
)
let wrapper = unsafe { &mut *ctx };
if wrapper.instance.set_next_leaf(&input.to_vec()[..]).is_ok() {
Ok(())
} else {
Err("could not insert member into merkle tree".into())
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
@@ -221,59 +59,45 @@ pub fn wasm_set_leaves_from(
index: usize,
input: Uint8Array,
) -> Result<(), String> {
call_with_error_msg!(
ctx,
set_leaves_from,
"could not set multiple leaves".to_string(),
index,
&*input.to_vec()
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = deleteLeaf)]
pub fn wasm_delete_leaf(ctx: *mut RLNWrapper, index: usize) -> Result<(), String> {
call_with_error_msg!(ctx, delete_leaf, "could not delete leaf".to_string(), index)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = setMetadata)]
pub fn wasm_set_metadata(ctx: *mut RLNWrapper, input: Uint8Array) -> Result<(), String> {
call_with_error_msg!(
ctx,
set_metadata,
"could not set metadata".to_string(),
&*input.to_vec()
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = getMetadata)]
pub fn wasm_get_metadata(ctx: *mut RLNWrapper) -> Result<Uint8Array, String> {
call_with_output_and_error_msg!(ctx, get_metadata, "could not get metadata".to_string())
let wrapper = unsafe { &mut *ctx };
if wrapper
.instance
.set_leaves_from(index as usize, &input.to_vec()[..])
.is_ok()
{
Ok(())
} else {
Err("could not set multiple leaves".into())
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = initTreeWithLeaves)]
pub fn wasm_init_tree_with_leaves(ctx: *mut RLNWrapper, input: Uint8Array) -> Result<(), String> {
call_with_error_msg!(
ctx,
init_tree_with_leaves,
"could not init merkle tree".to_string(),
&*input.to_vec()
)
let wrapper = unsafe { &mut *ctx };
if wrapper
.instance
.init_tree_with_leaves(&input.to_vec()[..])
.is_ok()
{
Ok(())
} else {
Err("could not init merkle tree".into())
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = RLNWitnessToJson)]
pub fn rln_witness_to_json(
ctx: *mut RLNWrapper,
serialized_witness: Uint8Array,
) -> Result<Object, String> {
let inputs = call!(ctx, get_rln_witness_json, &serialized_witness.to_vec()[..])
.map_err(|err| err.to_string())?;
let js_value = serde_wasm_bindgen::to_value(&inputs).map_err(|err| err.to_string())?;
Object::from_entries(&js_value).map_err(|err| format!("{:#?}", err))
pub fn rln_witness_to_json(ctx: *mut RLNWrapper, serialized_witness: Uint8Array) -> Object {
let wrapper = unsafe { &mut *ctx };
let inputs = wrapper
.instance
.get_rln_witness_json(&serialized_witness.to_vec()[..])
.unwrap();
let js_value = serde_wasm_bindgen::to_value(&inputs).unwrap();
let obj = Object::from_entries(&js_value);
obj.unwrap()
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
@@ -283,49 +107,83 @@ pub fn generate_rln_proof_with_witness(
calculated_witness: Vec<JsBigInt>,
serialized_witness: Uint8Array,
) -> Result<Uint8Array, String> {
let mut witness_vec: Vec<BigInt> = vec![];
let wrapper = unsafe { &mut *ctx };
for v in calculated_witness {
witness_vec.push(
let witness_vec: Vec<BigInt> = calculated_witness
.iter()
.map(|v| {
v.to_string(10)
.map_err(|err| format!("{:#?}", err))?
.unwrap()
.as_string()
.ok_or("not a string error")?
.unwrap()
.parse::<BigInt>()
.map_err(|err| format!("{:#?}", err))?,
);
}
.unwrap()
})
.collect();
call_with_output_and_error_msg!(
ctx,
generate_rln_proof_with_witness,
"could not generate proof",
witness_vec,
serialized_witness.to_vec()
)
let mut output_data: Vec<u8> = Vec::new();
if wrapper
.instance
.generate_rln_proof_with_witness(witness_vec, serialized_witness.to_vec(), &mut output_data)
.is_ok()
{
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
} else {
std::mem::forget(output_data);
Err("could not generate proof".into())
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateMembershipKey)]
pub fn wasm_key_gen(ctx: *const RLNWrapper) -> Result<Uint8Array, String> {
call_with_output_and_error_msg!(ctx, key_gen, "could not generate membership keys")
let wrapper = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
if wrapper.instance.key_gen(&mut output_data).is_ok() {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
} else {
std::mem::forget(output_data);
Err("could not generate membership keys".into())
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateExtendedMembershipKey)]
pub fn wasm_extended_key_gen(ctx: *const RLNWrapper) -> Result<Uint8Array, String> {
call_with_output_and_error_msg!(ctx, extended_key_gen, "could not generate membership keys")
let wrapper = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
if wrapper.instance.extended_key_gen(&mut output_data).is_ok() {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
} else {
std::mem::forget(output_data);
Err("could not generate membership keys".into())
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateSeededMembershipKey)]
pub fn wasm_seeded_key_gen(ctx: *const RLNWrapper, seed: Uint8Array) -> Result<Uint8Array, String> {
call_with_output_and_error_msg!(
ctx,
seeded_key_gen,
"could not generate membership key",
&seed.to_vec()[..]
)
let wrapper = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
if wrapper
.instance
.seeded_key_gen(&seed.to_vec()[..], &mut output_data)
.is_ok()
{
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
} else {
std::mem::forget(output_data);
Err("could not generate membership key".into())
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
@@ -334,12 +192,20 @@ pub fn wasm_seeded_extended_key_gen(
ctx: *const RLNWrapper,
seed: Uint8Array,
) -> Result<Uint8Array, String> {
call_with_output_and_error_msg!(
ctx,
seeded_extended_key_gen,
"could not generate membership key",
&seed.to_vec()[..]
)
let wrapper = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
if wrapper
.instance
.seeded_extended_key_gen(&seed.to_vec()[..], &mut output_data)
.is_ok()
{
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
} else {
std::mem::forget(output_data);
Err("could not generate membership key".into())
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
@@ -349,24 +215,38 @@ pub fn wasm_recover_id_secret(
input_proof_data_1: Uint8Array,
input_proof_data_2: Uint8Array,
) -> Result<Uint8Array, String> {
call_with_output_and_error_msg!(
ctx,
recover_id_secret,
"could not recover id secret",
&input_proof_data_1.to_vec()[..],
&input_proof_data_2.to_vec()[..]
)
let wrapper = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
if wrapper
.instance
.recover_id_secret(
&input_proof_data_1.to_vec()[..],
&input_proof_data_2.to_vec()[..],
&mut output_data,
)
.is_ok()
{
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
} else {
std::mem::forget(output_data);
Err("could not recover id secret".into())
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = verifyRLNProof)]
pub fn wasm_verify_rln_proof(ctx: *const RLNWrapper, proof: Uint8Array) -> Result<bool, String> {
call_bool_method_with_error_msg!(
ctx,
verify_rln_proof,
"error while verifying rln proof".to_string(),
&proof.to_vec()[..]
)
let wrapper = unsafe { &*ctx };
if match wrapper.instance.verify_rln_proof(&proof.to_vec()[..]) {
Ok(verified) => verified,
Err(_) => return Err("error while verifying rln proof".into()),
} {
return Ok(true);
}
Ok(false)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
@@ -376,31 +256,31 @@ pub fn wasm_verify_with_roots(
proof: Uint8Array,
roots: Uint8Array,
) -> Result<bool, String> {
call_bool_method_with_error_msg!(
ctx,
verify_with_roots,
"error while verifying proof with roots".to_string(),
&proof.to_vec()[..],
&roots.to_vec()[..]
)
let wrapper = unsafe { &*ctx };
if match wrapper
.instance
.verify_with_roots(&proof.to_vec()[..], &roots.to_vec()[..])
{
Ok(verified) => verified,
Err(_) => return Err("error while verifying proof with roots".into()),
} {
return Ok(true);
}
Ok(false)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = getRoot)]
pub fn wasm_get_root(ctx: *const RLNWrapper) -> Result<Uint8Array, String> {
call_with_output_and_error_msg!(ctx, get_root, "could not obtain root")
}
#[wasm_bindgen(js_name = hash)]
pub fn wasm_hash(input: Uint8Array) -> Result<Uint8Array, String> {
fn_call_with_output_and_error_msg!(hash, "could not generate hash", &input.to_vec()[..])
}
#[wasm_bindgen(js_name = poseidonHash)]
pub fn wasm_poseidon_hash(input: Uint8Array) -> Result<Uint8Array, String> {
fn_call_with_output_and_error_msg!(
poseidon_hash,
"could not generate poseidon hash",
&input.to_vec()[..]
)
let wrapper = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
if wrapper.instance.get_root(&mut output_data).is_ok() {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
} else {
std::mem::forget(output_data);
Err("could not obtain root".into())
}
}

View File

@@ -1,4 +1,5 @@
const fs = require("fs");
const rln_wasm = require("/pkg/rln_wasm.js");
// Utility functions for loading the circom witness calculator and reading files from tests
@@ -7,8 +8,12 @@ module.exports = {
return fs.readFileSync(path);
},
calculateWitness: async function(circom_path, inputs){
const wc = require("resources/witness_calculator.js");
initWasm: async function() {
await rln_wasm();
},
calculateWitness: async function(circom_path, inputs) {
const wc = require("/resources/witness_calculator.js");
const wasmFile = fs.readFileSync(circom_path);
const wasmFileBuffer = wasmFile.slice(wasmFile.byteOffset, wasmFile.byteOffset + wasmFile.byteLength);
const witnessCalculator = await wc(wasmFileBuffer);

View File

@@ -4,22 +4,31 @@
mod tests {
use js_sys::{BigInt as JsBigInt, Object, Uint8Array};
use rln::circuit::TEST_TREE_HEIGHT;
use rln::utils::normalize_usize;
use rln_wasm::*;
use wasm_bindgen::{prelude::*, JsValue};
use wasm_bindgen::prelude::*;
use wasm_bindgen::JsValue;
use wasm_bindgen_test::wasm_bindgen_test;
use wasm_bindgen_rayon::init_thread_pool;
#[wasm_bindgen(module = "src/utils.js")]
wasm_bindgen_test::wasm_bindgen_test_configure!(run_in_browser);
#[wasm_bindgen(module = "/src/utils.js")]
extern "C" {
#[wasm_bindgen(catch)]
fn read_file(path: &str) -> Result<Uint8Array, JsValue>;
#[wasm_bindgen(catch)]
async fn calculateWitness(circom_path: &str, input: Object) -> Result<JsValue, JsValue>;
async fn initWasm() -> Result<(), JsValue>;
#[wasm_bindgen(catch)]
async fn calculateWitness(circom_path: &str, inputs: Object) -> Result<JsValue, JsValue>;
}
#[wasm_bindgen_test]
pub async fn test_basic_flow() {
initWasm().await.unwrap();
wasm_bindgen_futures::JsFuture::from(init_thread_pool(4)).await.unwrap();
let tree_height = TEST_TREE_HEIGHT;
let circom_path = format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/rln.wasm");
let zkey_path = format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/rln_final.zkey");
@@ -29,7 +38,7 @@ mod tests {
let vk = read_file(&vk_path).unwrap();
// Creating an instance of RLN
let rln_instance = wasm_new(tree_height, zkey, vk).unwrap();
let rln_instance = wasm_new(tree_height, zkey, vk);
// Creating membership key
let mem_keys = wasm_key_gen(rln_instance).unwrap();
@@ -41,28 +50,28 @@ mod tests {
// Prepare the message
let signal = "Hello World".as_bytes();
let signal_len: u64 = signal.len() as u64;
// Setting up the epoch (With 0s for the test)
let epoch = Uint8Array::new_with_length(32);
epoch.fill(0, 0, 32);
let identity_index: usize = 0;
let identity_index: u64 = 0;
// Serializing the message
let mut serialized_vec: Vec<u8> = Vec::new();
serialized_vec.append(&mut idkey.to_vec());
serialized_vec.append(&mut normalize_usize(identity_index));
serialized_vec.append(&mut identity_index.to_le_bytes().to_vec());
serialized_vec.append(&mut epoch.to_vec());
serialized_vec.append(&mut normalize_usize(signal.len()));
serialized_vec.append(&mut signal_len.to_le_bytes().to_vec());
serialized_vec.append(&mut signal.to_vec());
let serialized_message = Uint8Array::from(&serialized_vec[..]);
let serialized_rln_witness =
wasm_get_serialized_rln_witness(rln_instance, serialized_message).unwrap();
wasm_get_serialized_rln_witness(rln_instance, serialized_message);
// Obtaining inputs that should be sent to circom witness calculator
let json_inputs =
rln_witness_to_json(rln_instance, serialized_rln_witness.clone()).unwrap();
let json_inputs = rln_witness_to_json(rln_instance, serialized_rln_witness.clone());
// Calculating witness with JS
// (Using a JSON since wasm_bindgen does not like Result<Vec<JsBigInt>,JsValue>)
@@ -88,7 +97,7 @@ mod tests {
// Add signal_len | signal
let mut proof_bytes = proof.to_vec();
proof_bytes.append(&mut normalize_usize(signal.len()));
proof_bytes.append(&mut signal_len.to_le_bytes().to_vec());
proof_bytes.append(&mut signal.to_vec());
let proof_with_signal = Uint8Array::from(&proof_bytes[..]);
@@ -108,28 +117,4 @@ mod tests {
let is_proof_valid = wasm_verify_with_roots(rln_instance, proof_with_signal, roots);
assert!(is_proof_valid.unwrap(), "verifying proof with roots failed");
}
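For reference, both sides of the hunk above assemble the same input layout for proof generation; with 32-byte BN254 field elements, and assuming normalize_usize and u64::to_le_bytes both produce an 8-byte little-endian word as the two sides of this hunk suggest, it is:

```rust
// Layout of `serialized_vec` in test_basic_flow:
// | identity_secret (32B LE) | identity_index (8B LE) |
// | epoch (32B)              | signal_len (8B LE)     | signal (raw bytes) |
```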
#[wasm_bindgen_test]
fn test_metadata() {
let tree_height = TEST_TREE_HEIGHT;
let circom_path = format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/rln.wasm");
let zkey_path = format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/rln_final.zkey");
let vk_path =
format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/verification_key.json");
let zkey = read_file(&zkey_path).unwrap();
let vk = read_file(&vk_path).unwrap();
// Creating an instance of RLN
let rln_instance = wasm_new(tree_height, zkey, vk).unwrap();
let test_metadata = Uint8Array::new(&JsValue::from_str("test"));
// Inserting random metadata
wasm_set_metadata(rln_instance, test_metadata.clone()).unwrap();
// Getting metadata
let metadata = wasm_get_metadata(rln_instance).unwrap();
assert_eq!(metadata.to_vec(), test_metadata.to_vec());
}
}


@@ -1,16 +1,10 @@
[package]
name = "rln"
version = "0.3.2"
version = "0.1.0"
edition = "2021"
license = "MIT OR Apache-2.0"
description = "APIs to manage, compute and verify zkSNARK proofs and RLN primitives"
documentation = "https://github.com/vacp2p/zerokit"
homepage = "https://vac.dev"
repository = "https://github.com/vacp2p/zerokit"
[lib]
crate-type = ["rlib", "staticlib"]
bench = false
crate-type = ["cdylib", "rlib", "staticlib"]
# This flag disables cargo doctests, i.e. testing example code snippets in documentation
doctest = false
@@ -18,51 +12,44 @@ doctest = false
[dependencies]
# ZKP Generation
ark-ec = { version = "=0.4.1", default-features = false }
ark-ff = { version = "=0.4.1", default-features = false, features = [ "asm"] }
ark-std = { version = "=0.4.0", default-features = false }
ark-bn254 = { version = "=0.4.0" }
ark-groth16 = { version = "=0.4.0", features = ["parallel"], default-features = false }
ark-relations = { version = "=0.4.0", default-features = false, features = [ "std" ] }
ark-serialize = { version = "=0.4.1", default-features = false }
ark-circom = { version = "=0.1.0", default-features = false, features = ["circom-2"] }
ark-ec = { version = "0.3.0", default-features = false }
ark-ff = { version = "0.3.0", default-features = false, features = [ "asm"] }
ark-std = { version = "0.3.0", default-features = false }
ark-bn254 = { version = "0.3.0" }
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", default-features = false }
ark-relations = { version = "0.3.0", default-features = false, features = [ "std" ] }
ark-serialize = { version = "0.3.0", default-features = false }
ark-circom = { git = "https://github.com/vacp2p/ark-circom", branch = "wasm", default-features = false, features = ["circom-2"] }
#ark-circom = { git = "https://github.com/vacp2p/ark-circom", branch = "no-ethers-core", features = ["circom-2"] }
# WASM
wasmer = { version = "=2.3.0", default-features = false }
wasmer = { version = "2.3.0", default-features = false }
# error handling
color-eyre = "=0.6.2"
thiserror = "=1.0.39"
color-eyre = "0.5.11"
thiserror = "1.0.0"
# utilities
cfg-if = "=1.0"
num-bigint = { version = "=0.4.3", default-features = false, features = ["rand"] }
num-traits = "=0.2.15"
once_cell = "=1.17.1"
rand = "=0.8.5"
rand_chacha = "=0.3.1"
tiny-keccak = { version = "=2.0.2", features = ["keccak"] }
utils = { package = "zerokit_utils", version = "=0.3.2", path = "../utils/", default-features = false }
cfg-if = "1.0"
num-bigint = { version = "0.4.3", default-features = false, features = ["rand"] }
num-traits = "0.2.11"
once_cell = "1.14.0"
rand = "0.8"
rand_chacha = "0.3.1"
tiny-keccak = { version = "2.0.2", features = ["keccak"] }
utils = { path = "../utils", default-features = false }
# serialization
serde_json = "=1.0.96"
serde = { version = "=1.0.163", features = ["derive"] }
include_dir = "=0.7.3"
serde_json = "1.0.48"
[dev-dependencies]
sled = "=0.34.7"
criterion = { version = "=0.4.0", features = ["html_reports"] }
pmtree = { git = "https://github.com/Rate-Limiting-Nullifier/pmtree" }
[features]
default = ["parallel", "wasmer/sys-default", "pmtree-ft"]
default = ["parallel", "wasmer/sys-default"]
parallel = ["ark-ec/parallel", "ark-ff/parallel", "ark-std/parallel", "ark-groth16/parallel", "utils/parallel"]
wasm = ["wasmer/js", "wasmer/std"]
fullmerkletree = ["default"]
# Note: pmtree feature is still experimental
pmtree-ft = ["utils/pmtree-ft"]
[[bench]]
name = "pmtree_benchmark"
harness = false
pmtree = ["default"]


@@ -5,7 +5,3 @@ args = ["build", "--release"]
[tasks.test]
command = "cargo"
args = ["test", "--release"]
[tasks.bench]
command = "cargo"
args = ["bench"]


@@ -3,21 +3,15 @@
This module provides APIs to manage, compute and verify [RLN](https://rfc.vac.dev/spec/32/) zkSNARK proofs and RLN primitives.
## Pre-requisites
### Install dependencies and clone repo
### Install
```sh
make installdeps
git clone https://github.com/vacp2p/zerokit.git
cd zerokit/rln
```
Implemented tests can be executed by running within the module folder
### Build and Test
To build and test, run the following commands within the module folder
```bash
cargo make build
cargo make test
```
`cargo test --release`
### Compile ZK circuits


@@ -1,44 +0,0 @@
use criterion::{criterion_group, criterion_main, Criterion};
use utils::ZerokitMerkleTree;
use rln::{circuit::Fr, pm_tree_adapter::PmTree};
pub fn pmtree_benchmark(c: &mut Criterion) {
let mut tree = PmTree::default(2).unwrap();
let leaves: Vec<Fr> = (0..4).map(|s| Fr::from(s)).collect();
c.bench_function("Pmtree::set", |b| {
b.iter(|| {
tree.set(0, leaves[0]).unwrap();
})
});
c.bench_function("Pmtree:delete", |b| {
b.iter(|| {
tree.delete(0).unwrap();
})
});
c.bench_function("Pmtree::override_range", |b| {
b.iter(|| {
tree.override_range(0, leaves.clone(), [0, 1, 2, 3])
.unwrap();
})
});
c.bench_function("Pmtree::compute_root", |b| {
b.iter(|| {
tree.compute_root().unwrap();
})
});
c.bench_function("Pmtree::get", |b| {
b.iter(|| {
tree.get(0).unwrap();
})
});
}
criterion_group!(benches, pmtree_benchmark);
criterion_main!(benches);
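This Criterion suite is deleted in this comparison together with its [[bench]] entry in rln/Cargo.toml and the [tasks.bench] target in Makefile.toml above; on the base branch it runs via cargo make bench, which wraps cargo bench.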


@@ -8,10 +8,11 @@ use ark_circom::read_zkey;
use ark_groth16::{ProvingKey, VerifyingKey};
use ark_relations::r1cs::ConstraintMatrices;
use cfg_if::cfg_if;
use color_eyre::{Report, Result};
use num_bigint::BigUint;
use serde_json::Value;
use std::io::Cursor;
use std::fs::File;
use std::io::{Cursor, Error, ErrorKind, Result};
use std::path::Path;
use std::str::FromStr;
cfg_if! {
@@ -20,13 +21,11 @@ cfg_if! {
use once_cell::sync::OnceCell;
use std::sync::Mutex;
use wasmer::{Module, Store};
use include_dir::{include_dir, Dir};
use std::path::Path;
}
}
const ZKEY_FILENAME: &str = "rln_final.zkey";
const VK_FILENAME: &str = "verification_key.json";
const VK_FILENAME: &str = "verifying_key.json";
const WASM_FILENAME: &str = "rln.wasm";
// These parameters are used for tests
@@ -34,11 +33,11 @@ const WASM_FILENAME: &str = "rln.wasm";
// Changing these parameters to values other than these defaults will cause zkSNARK proof verification to fail
pub const TEST_PARAMETERS_INDEX: usize = 2;
pub const TEST_TREE_HEIGHT: usize = [15, 19, 20][TEST_PARAMETERS_INDEX];
pub const TEST_RESOURCES_FOLDER: &str =
["tree_height_15", "tree_height_19", "tree_height_20"][TEST_PARAMETERS_INDEX];
#[cfg(not(target_arch = "wasm32"))]
static RESOURCES_DIR: Dir<'_> = include_dir!("$CARGO_MANIFEST_DIR/resources");
pub const TEST_RESOURCES_FOLDER: &str = [
"./resources/tree_height_15/",
"./resources/tree_height_19/",
"./resources/tree_height_20/",
][TEST_PARAMETERS_INDEX];
// The following types define the pairing-friendly elliptic curve and the underlying finite fields and groups used by default in this module
// Note that proofs are serialized assuming Fr to be 4x8 = 32 bytes in size. Hence, changing to a curve with a different encoding will make proof verification fail
@@ -58,22 +57,21 @@ pub fn zkey_from_raw(zkey_data: &Vec<u8>) -> Result<(ProvingKey<Curve>, Constrai
let proving_key_and_matrices = read_zkey(&mut c)?;
Ok(proving_key_and_matrices)
} else {
Err(Report::msg("No proving key found!"))
Err(Error::new(ErrorKind::NotFound, "No proving key found!"))
}
}
// Loads the proving key
#[cfg(not(target_arch = "wasm32"))]
pub fn zkey_from_folder(
resources_folder: &str,
) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
let zkey = RESOURCES_DIR.get_file(Path::new(resources_folder).join(ZKEY_FILENAME));
if let Some(zkey) = zkey {
let mut c = Cursor::new(zkey.contents());
let proving_key_and_matrices = read_zkey(&mut c)?;
let zkey_path = format!("{resources_folder}{ZKEY_FILENAME}");
if Path::new(&zkey_path).exists() {
let mut file = File::open(&zkey_path)?;
let proving_key_and_matrices = read_zkey(&mut file)?;
Ok(proving_key_and_matrices)
} else {
Err(Report::msg("No proving key found!"))
Err(Error::new(ErrorKind::NotFound, "No proving key found!"))
}
}
@@ -82,35 +80,39 @@ pub fn vk_from_raw(vk_data: &Vec<u8>, zkey_data: &Vec<u8>) -> Result<VerifyingKe
let verifying_key: VerifyingKey<Curve>;
if !vk_data.is_empty() {
verifying_key = vk_from_vector(vk_data)?;
verifying_key = vk_from_vector(vk_data);
Ok(verifying_key)
} else if !zkey_data.is_empty() {
let (proving_key, _matrices) = zkey_from_raw(zkey_data)?;
verifying_key = proving_key.vk;
Ok(verifying_key)
} else {
Err(Report::msg("No proving/verification key found!"))
Err(Error::new(
ErrorKind::NotFound,
"No proving/verification key found!",
))
}
}
// Loads the verification key
#[cfg(not(target_arch = "wasm32"))]
pub fn vk_from_folder(resources_folder: &str) -> Result<VerifyingKey<Curve>> {
let vk = RESOURCES_DIR.get_file(Path::new(resources_folder).join(VK_FILENAME));
let zkey = RESOURCES_DIR.get_file(Path::new(resources_folder).join(ZKEY_FILENAME));
let vk_path = format!("{resources_folder}{VK_FILENAME}");
let zkey_path = format!("{resources_folder}{ZKEY_FILENAME}");
let verifying_key: VerifyingKey<Curve>;
if let Some(vk) = vk {
verifying_key = vk_from_json(vk.contents_utf8().ok_or(Report::msg(
"Could not read verification key from JSON file!",
))?)?;
if Path::new(&vk_path).exists() {
verifying_key = vk_from_json(&vk_path);
Ok(verifying_key)
} else if let Some(_zkey) = zkey {
} else if Path::new(&zkey_path).exists() {
let (proving_key, _matrices) = zkey_from_folder(resources_folder)?;
verifying_key = proving_key.vk;
Ok(verifying_key)
} else {
Err(Report::msg("No proving/verification key found!"))
Err(Error::new(
ErrorKind::NotFound,
"No proving/verification key found!",
))
}
}
@@ -119,150 +121,129 @@ static WITNESS_CALCULATOR: OnceCell<Mutex<WitnessCalculator>> = OnceCell::new();
// Initializes the witness calculator using a byte vector
#[cfg(not(target_arch = "wasm32"))]
pub fn circom_from_raw(wasm_buffer: Vec<u8>) -> Result<&'static Mutex<WitnessCalculator>> {
WITNESS_CALCULATOR.get_or_try_init(|| {
pub fn circom_from_raw(wasm_buffer: Vec<u8>) -> &'static Mutex<WitnessCalculator> {
WITNESS_CALCULATOR.get_or_init(|| {
let store = Store::default();
let module = Module::new(&store, wasm_buffer)?;
let result = WitnessCalculator::from_module(module)?;
Ok::<Mutex<WitnessCalculator>, Report>(Mutex::new(result))
let module = Module::new(&store, wasm_buffer).unwrap();
let result =
WitnessCalculator::from_module(module).expect("Failed to create witness calculator");
Mutex::new(result)
})
}
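The change above swaps OnceCell::get_or_try_init for get_or_init: the fallible form propagates the closure's error and leaves the cell empty, while the infallible form must panic on failure. A minimal sketch of the fallible form, independent of the RLN types:

```rust
use once_cell::sync::OnceCell;

static CELL: OnceCell<u32> = OnceCell::new();

// get_or_try_init caches the first Ok value; an Err propagates to the
// caller and leaves the cell uninitialized, so a later call can retry.
fn init() -> Result<&'static u32, std::num::ParseIntError> {
    CELL.get_or_try_init(|| "42".parse::<u32>())
}
```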
// Initializes the witness calculator
#[cfg(not(target_arch = "wasm32"))]
pub fn circom_from_folder(resources_folder: &str) -> Result<&'static Mutex<WitnessCalculator>> {
pub fn circom_from_folder(resources_folder: &str) -> &'static Mutex<WitnessCalculator> {
// We read the wasm file
let wasm = RESOURCES_DIR.get_file(Path::new(resources_folder).join(WASM_FILENAME));
if let Some(wasm) = wasm {
let wasm_buffer = wasm.contents();
circom_from_raw(wasm_buffer.to_vec())
} else {
Err(Report::msg("No wasm file found!"))
}
let wasm_path = format!("{resources_folder}{WASM_FILENAME}");
let wasm_buffer = std::fs::read(&wasm_path).unwrap();
circom_from_raw(wasm_buffer)
}
// The following function implementations are taken/adapted from https://github.com/gakonst/ark-circom/blob/1732e15d6313fe176b0b1abb858ac9e095d0dbd7/src/zkey.rs
// Utilities to convert a JSON verification key into a groth16::VerifyingKey
fn fq_from_str(s: &str) -> Result<Fq> {
Ok(Fq::try_from(BigUint::from_str(s)?)?)
fn fq_from_str(s: &str) -> Fq {
Fq::try_from(BigUint::from_str(s).unwrap()).unwrap()
}
// Extracts the element in G1 corresponding to its JSON serialization
fn json_to_g1(json: &Value, key: &str) -> Result<G1Affine> {
fn json_to_g1(json: &Value, key: &str) -> G1Affine {
let els: Vec<String> = json
.get(key)
.ok_or(Report::msg("no json value"))?
.unwrap()
.as_array()
.ok_or(Report::msg("value not an array"))?
.unwrap()
.iter()
.map(|i| i.as_str().ok_or(Report::msg("element is not a string")))
.map(|x| x.map(|v| v.to_owned()))
.collect::<Result<Vec<String>>>()?;
Ok(G1Affine::from(G1Projective::new(
fq_from_str(&els[0])?,
fq_from_str(&els[1])?,
fq_from_str(&els[2])?,
)))
.map(|i| i.as_str().unwrap().to_string())
.collect();
G1Affine::from(G1Projective::new(
fq_from_str(&els[0]),
fq_from_str(&els[1]),
fq_from_str(&els[2]),
))
}
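Both sides parse the snarkjs-style verification-key layout, in which a G1 point is an array of three decimal strings (projective x, y, z coordinates). A usage sketch of the panicking variant, with a point chosen purely for illustration:

```rust
use serde_json::json;

fn demo() {
    // (1, 2) is the BN254 G1 generator, so (1, 2, 1) is a valid
    // projective triple; real keys come from verification_key.json.
    let vk = json!({ "vk_alpha_1": ["1", "2", "1"] });
    let _alpha = json_to_g1(&vk, "vk_alpha_1");
}
```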
// Extracts the vector of G1 elements corresponding to its JSON serialization
fn json_to_g1_vec(json: &Value, key: &str) -> Result<Vec<G1Affine>> {
fn json_to_g1_vec(json: &Value, key: &str) -> Vec<G1Affine> {
let els: Vec<Vec<String>> = json
.get(key)
.ok_or(Report::msg("no json value"))?
.unwrap()
.as_array()
.ok_or(Report::msg("value not an array"))?
.unwrap()
.iter()
.map(|i| {
i.as_array()
.ok_or(Report::msg("element is not an array"))
.and_then(|array| {
array
.iter()
.map(|x| x.as_str().ok_or(Report::msg("element is not a string")))
.map(|x| x.map(|v| v.to_owned()))
.collect::<Result<Vec<String>>>()
})
.unwrap()
.iter()
.map(|x| x.as_str().unwrap().to_string())
.collect::<Vec<String>>()
})
.collect::<Result<Vec<Vec<String>>>>()?;
.collect();
let mut res = vec![];
for coords in els {
res.push(G1Affine::from(G1Projective::new(
fq_from_str(&coords[0])?,
fq_from_str(&coords[1])?,
fq_from_str(&coords[2])?,
)))
}
Ok(res)
els.iter()
.map(|coords| {
G1Affine::from(G1Projective::new(
fq_from_str(&coords[0]),
fq_from_str(&coords[1]),
fq_from_str(&coords[2]),
))
})
.collect()
}
// Extracts the element in G2 corresponding to its JSON serialization
fn json_to_g2(json: &Value, key: &str) -> Result<G2Affine> {
fn json_to_g2(json: &Value, key: &str) -> G2Affine {
let els: Vec<Vec<String>> = json
.get(key)
.ok_or(Report::msg("no json value"))?
.unwrap()
.as_array()
.ok_or(Report::msg("value not an array"))?
.unwrap()
.iter()
.map(|i| {
i.as_array()
.ok_or(Report::msg("element is not an array"))
.and_then(|array| {
array
.iter()
.map(|x| x.as_str().ok_or(Report::msg("element is not a string")))
.map(|x| x.map(|v| v.to_owned()))
.collect::<Result<Vec<String>>>()
})
.unwrap()
.iter()
.map(|x| x.as_str().unwrap().to_string())
.collect::<Vec<String>>()
})
.collect::<Result<Vec<Vec<String>>>>()?;
.collect();
let x = Fq2::new(fq_from_str(&els[0][0])?, fq_from_str(&els[0][1])?);
let y = Fq2::new(fq_from_str(&els[1][0])?, fq_from_str(&els[1][1])?);
let z = Fq2::new(fq_from_str(&els[2][0])?, fq_from_str(&els[2][1])?);
Ok(G2Affine::from(G2Projective::new(x, y, z)))
let x = Fq2::new(fq_from_str(&els[0][0]), fq_from_str(&els[0][1]));
let y = Fq2::new(fq_from_str(&els[1][0]), fq_from_str(&els[1][1]));
let z = Fq2::new(fq_from_str(&els[2][0]), fq_from_str(&els[2][1]));
G2Affine::from(G2Projective::new(x, y, z))
}
// Converts JSON to a VerifyingKey
fn to_verifying_key(json: serde_json::Value) -> Result<VerifyingKey<Curve>> {
Ok(VerifyingKey {
alpha_g1: json_to_g1(&json, "vk_alpha_1")?,
beta_g2: json_to_g2(&json, "vk_beta_2")?,
gamma_g2: json_to_g2(&json, "vk_gamma_2")?,
delta_g2: json_to_g2(&json, "vk_delta_2")?,
gamma_abc_g1: json_to_g1_vec(&json, "IC")?,
})
fn to_verifying_key(json: serde_json::Value) -> VerifyingKey<Curve> {
VerifyingKey {
alpha_g1: json_to_g1(&json, "vk_alpha_1"),
beta_g2: json_to_g2(&json, "vk_beta_2"),
gamma_g2: json_to_g2(&json, "vk_gamma_2"),
delta_g2: json_to_g2(&json, "vk_delta_2"),
gamma_abc_g1: json_to_g1_vec(&json, "IC"),
}
}
// Computes the verification key from its JSON serialization
fn vk_from_json(vk: &str) -> Result<VerifyingKey<Curve>> {
let json: Value = serde_json::from_str(vk)?;
fn vk_from_json(vk_path: &str) -> VerifyingKey<Curve> {
let json = std::fs::read_to_string(vk_path).unwrap();
let json: Value = serde_json::from_str(&json).unwrap();
to_verifying_key(json)
}
// Computes the verification key from a byte vector containing its JSON serialization
fn vk_from_vector(vk: &[u8]) -> Result<VerifyingKey<Curve>> {
let json = String::from_utf8(vk.to_vec())?;
let json: Value = serde_json::from_str(&json)?;
fn vk_from_vector(vk: &[u8]) -> VerifyingKey<Curve> {
let json = String::from_utf8(vk.to_vec()).expect("Found invalid UTF-8");
let json: Value = serde_json::from_str(&json).unwrap();
to_verifying_key(json)
}
// Checks verification key to be correct with respect to proving key
#[cfg(not(target_arch = "wasm32"))]
pub fn check_vk_from_zkey(
resources_folder: &str,
verifying_key: VerifyingKey<Curve>,
) -> Result<()> {
let (proving_key, _matrices) = zkey_from_folder(resources_folder)?;
if proving_key.vk == verifying_key {
Ok(())
} else {
Err(Report::msg("verifying_keys are not equal"))
}
pub fn check_vk_from_zkey(resources_folder: &str, verifying_key: VerifyingKey<Curve>) {
let (proving_key, _matrices) = zkey_from_folder(resources_folder).unwrap();
assert_eq!(proving_key.vk, verifying_key);
}


@@ -2,7 +2,7 @@
use std::slice;
use crate::public::{hash as public_hash, poseidon_hash as public_poseidon_hash, RLN};
use crate::public::RLN;
// Macro to call methods with an arbitrary number of arguments,
// First argument to the macro is the context,
@@ -12,15 +12,7 @@ macro_rules! call {
($instance:expr, $method:ident $(, $arg:expr)*) => {
{
let new_instance: &mut RLN = $instance.process();
match new_instance.$method($($arg.process()),*) {
Ok(()) => {
true
}
Err(err) => {
eprintln!("execution error: {err}");
false
}
}
new_instance.$method($($arg.process()),*).is_ok()
}
}
}
@@ -38,17 +30,13 @@ macro_rules! call_with_output_arg {
{
let mut output_data: Vec<u8> = Vec::new();
let new_instance = $instance.process();
match new_instance.$method(&mut output_data) {
Ok(()) => {
unsafe { *$output_arg = Buffer::from(&output_data[..]) };
std::mem::forget(output_data);
true
}
Err(err) => {
std::mem::forget(output_data);
eprintln!("execution error: {err}");
false
}
if new_instance.$method(&mut output_data).is_ok() {
unsafe { *$output_arg = Buffer::from(&output_data[..]) };
std::mem::forget(output_data);
true
} else {
std::mem::forget(output_data);
false
}
}
};
@@ -56,43 +44,13 @@ macro_rules! call_with_output_arg {
{
let mut output_data: Vec<u8> = Vec::new();
let new_instance = $instance.process();
match new_instance.$method($($arg.process()),*, &mut output_data) {
Ok(()) => {
unsafe { *$output_arg = Buffer::from(&output_data[..]) };
std::mem::forget(output_data);
true
}
Err(err) => {
std::mem::forget(output_data);
eprintln!("execution error: {err}");
false
}
}
}
};
}
// Macro to call methods with an arbitrary number of arguments,
// which are not implemented in a ctx RLN object
// First argument is the method to call
// Second argument is the output buffer argument
// The remaining arguments are all other inputs to the method
macro_rules! no_ctx_call_with_output_arg {
($method:ident, $output_arg:expr, $( $arg:expr ),* ) => {
{
let mut output_data: Vec<u8> = Vec::new();
match $method($($arg.process()),*, &mut output_data) {
Ok(()) => {
unsafe { *$output_arg = Buffer::from(&output_data[..]) };
std::mem::forget(output_data);
true
}
Err(err) => {
std::mem::forget(output_data);
eprintln!("execution error: {err}");
false
}
if new_instance.$method($($arg.process()),*, &mut output_data).is_ok() {
unsafe { *$output_arg = Buffer::from(&output_data[..]) };
std::mem::forget(output_data);
true
} else {
std::mem::forget(output_data);
false
}
}
}
@@ -109,11 +67,8 @@ macro_rules! call_with_bool_arg {
{
let new_instance = $instance.process();
if match new_instance.$method($($arg.process()),*,) {
Ok(result) => result,
Err(err) => {
eprintln!("execution error: {err}");
return false
},
Ok(verified) => verified,
Err(_) => return false,
} {
unsafe { *$bool_arg = true };
} else {
@@ -194,16 +149,9 @@ impl<'a> From<&Buffer> for &'a [u8] {
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn new(tree_height: usize, input_buffer: *const Buffer, ctx: *mut *mut RLN) -> bool {
match RLN::new(tree_height, input_buffer.process()) {
Ok(rln) => {
unsafe { *ctx = Box::into_raw(Box::new(rln)) };
true
}
Err(err) => {
eprintln!("could not instantiate rln: {err}");
false
}
}
let rln = RLN::new(tree_height, input_buffer.process());
unsafe { *ctx = Box::into_raw(Box::new(rln)) };
true
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
@@ -213,25 +161,16 @@ pub extern "C" fn new_with_params(
circom_buffer: *const Buffer,
zkey_buffer: *const Buffer,
vk_buffer: *const Buffer,
tree_config: *const Buffer,
ctx: *mut *mut RLN,
) -> bool {
match RLN::new_with_params(
let rln = RLN::new_with_params(
tree_height,
circom_buffer.process().to_vec(),
zkey_buffer.process().to_vec(),
vk_buffer.process().to_vec(),
tree_config.process(),
) {
Ok(rln) => {
unsafe { *ctx = Box::into_raw(Box::new(rln)) };
true
}
Err(err) => {
eprintln!("could not instantiate rln: {err}");
false
}
}
);
unsafe { *ctx = Box::into_raw(Box::new(rln)) };
true
}
////////////////////////////////////////////////////////
@@ -255,12 +194,6 @@ pub extern "C" fn set_leaf(ctx: *mut RLN, index: usize, input_buffer: *const Buf
call!(ctx, set_leaf, index, input_buffer)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn get_leaf(ctx: *mut RLN, index: usize, output_buffer: *mut Buffer) -> bool {
call_with_output_arg!(ctx, get_leaf, output_buffer, index)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn set_next_leaf(ctx: *mut RLN, input_buffer: *const Buffer) -> bool {
@@ -283,33 +216,6 @@ pub extern "C" fn init_tree_with_leaves(ctx: *mut RLN, input_buffer: *const Buff
call!(ctx, init_tree_with_leaves, input_buffer)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn atomic_operation(
ctx: *mut RLN,
index: usize,
leaves_buffer: *const Buffer,
indices_buffer: *const Buffer,
) -> bool {
call!(ctx, atomic_operation, index, leaves_buffer, indices_buffer)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn seq_atomic_operation(
ctx: *mut RLN,
leaves_buffer: *const Buffer,
indices_buffer: *const Buffer,
) -> bool {
call!(
ctx,
atomic_operation,
ctx.process().leaves_set(),
leaves_buffer,
indices_buffer
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn get_root(ctx: *const RLN, output_buffer: *mut Buffer) -> bool {
@@ -434,36 +340,12 @@ pub extern "C" fn recover_id_secret(
)
}
////////////////////////////////////////////////////////
// Persistent metadata APIs
////////////////////////////////////////////////////////
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn set_metadata(ctx: *mut RLN, input_buffer: *const Buffer) -> bool {
call!(ctx, set_metadata, input_buffer)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn get_metadata(ctx: *const RLN, output_buffer: *mut Buffer) -> bool {
call_with_output_arg!(ctx, get_metadata, output_buffer)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn flush(ctx: *mut RLN) -> bool {
call!(ctx, flush)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn hash(input_buffer: *const Buffer, output_buffer: *mut Buffer) -> bool {
no_ctx_call_with_output_arg!(public_hash, output_buffer, input_buffer)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn poseidon_hash(input_buffer: *const Buffer, output_buffer: *mut Buffer) -> bool {
no_ctx_call_with_output_arg!(public_poseidon_hash, output_buffer, input_buffer)
pub extern "C" fn hash(
ctx: *mut RLN,
input_buffer: *const Buffer,
output_buffer: *mut Buffer,
) -> bool {
call_with_output_arg!(ctx, hash, output_buffer, input_buffer)
}


@@ -1,58 +0,0 @@
/// This crate instantiates the Poseidon hash algorithm.
use crate::{circuit::Fr, utils::bytes_le_to_fr};
use once_cell::sync::Lazy;
use tiny_keccak::{Hasher, Keccak};
use utils::poseidon::Poseidon;
/// These indexed constants hardcode the supported round parameters tuples (t, RF, RN, SKIP_MATRICES) for the Bn254 scalar field.
/// SKIP_MATRICES is the index of the randomly generated secure MDS matrix.
/// TODO: generate these parameters
pub const ROUND_PARAMS: [(usize, usize, usize, usize); 8] = [
(2, 8, 56, 0),
(3, 8, 57, 0),
(4, 8, 56, 0),
(5, 8, 60, 0),
(6, 8, 60, 0),
(7, 8, 63, 0),
(8, 8, 64, 0),
(9, 8, 63, 0),
];
/// Poseidon Hash wrapper over above implementation.
static POSEIDON: Lazy<Poseidon<Fr>> = Lazy::new(|| Poseidon::<Fr>::from(&ROUND_PARAMS));
pub fn poseidon_hash(input: &[Fr]) -> Fr {
POSEIDON
.hash(input.to_vec())
.expect("hash with fixed input size can't fail")
}
/// The zerokit RLN Merkle tree Hasher.
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct PoseidonHash;
/// The default Hasher trait used by Merkle tree implementation in utils.
impl utils::merkle_tree::Hasher for PoseidonHash {
type Fr = Fr;
fn default_leaf() -> Self::Fr {
Self::Fr::from(0)
}
fn hash(inputs: &[Self::Fr]) -> Self::Fr {
poseidon_hash(inputs)
}
}
/// Hashes arbitrary signal to the underlying prime field.
pub fn hash_to_field(signal: &[u8]) -> Fr {
// We hash the input signal using Keccak256
let mut hash = [0; 32];
let mut hasher = Keccak::v256();
hasher.update(signal);
hasher.finalize(&mut hash);
// We export the hash as a field element
let (el, _) = bytes_le_to_fr(hash.as_ref());
el
}
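A minimal usage sketch for the two hashers in this removed module, assuming the re-exports implied by its imports (rln::circuit::Fr, rln::hashers):

```rust
use rln::circuit::Fr;
use rln::hashers::{hash_to_field, poseidon_hash};

fn demo() -> Fr {
    // Keccak256-map an arbitrary signal into Fr, then Poseidon-hash it
    // together with a constant field element.
    let x = hash_to_field(b"Hello World");
    poseidon_hash(&[x, Fr::from(1u64)])
}
```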


@@ -1,9 +1,7 @@
#![allow(dead_code)]
pub mod circuit;
pub mod hashers;
#[cfg(feature = "pmtree-ft")]
pub mod pm_tree_adapter;
pub mod poseidon_hash;
pub mod poseidon_tree;
pub mod protocol;
pub mod public;


@@ -1,339 +0,0 @@
use std::fmt::Debug;
use std::path::PathBuf;
use std::str::FromStr;
use color_eyre::{Report, Result};
use serde_json::Value;
use utils::pmtree::{Database, Hasher};
use utils::*;
use crate::circuit::Fr;
use crate::hashers::{poseidon_hash, PoseidonHash};
use crate::utils::{bytes_le_to_fr, fr_to_bytes_le};
const METADATA_KEY: [u8; 8] = *b"metadata";
pub struct PmTree {
tree: pmtree::MerkleTree<SledDB, PoseidonHash>,
// metadata that an application may use to store additional information
metadata: Vec<u8>,
}
pub struct PmTreeProof {
proof: pmtree::tree::MerkleProof<PoseidonHash>,
}
pub type FrOf<H> = <H as Hasher>::Fr;
// The pmtree Hasher trait used by pmtree Merkle tree
impl Hasher for PoseidonHash {
type Fr = Fr;
fn serialize(value: Self::Fr) -> pmtree::Value {
fr_to_bytes_le(&value)
}
fn deserialize(value: pmtree::Value) -> Self::Fr {
let (fr, _) = bytes_le_to_fr(&value);
fr
}
fn default_leaf() -> Self::Fr {
Fr::from(0)
}
fn hash(inputs: &[Self::Fr]) -> Self::Fr {
poseidon_hash(inputs)
}
}
fn get_tmp_path() -> PathBuf {
std::env::temp_dir().join(format!("pmtree-{}", rand::random::<u64>()))
}
fn get_tmp() -> bool {
true
}
pub struct PmtreeConfig(Config);
impl FromStr for PmtreeConfig {
type Err = Report;
fn from_str(s: &str) -> Result<Self> {
let config: Value = serde_json::from_str(s)?;
let path = config["path"].as_str();
let path = path.map(PathBuf::from);
let temporary = config["temporary"].as_bool();
let cache_capacity = config["cache_capacity"].as_u64();
let flush_every_ms = config["flush_every_ms"].as_u64();
let mode = match config["mode"].as_str() {
Some("HighThroughput") => Mode::HighThroughput,
Some("LowSpace") => Mode::LowSpace,
_ => Mode::HighThroughput,
};
let use_compression = config["use_compression"].as_bool();
if temporary.is_some()
&& path.is_some()
&& temporary.unwrap()
&& path.as_ref().unwrap().exists()
{
return Err(Report::msg(format!(
"Path {:?} already exists, cannot use temporary",
path.unwrap()
)));
}
let config = Config::new()
.temporary(temporary.unwrap_or(get_tmp()))
.path(path.unwrap_or(get_tmp_path()))
.cache_capacity(cache_capacity.unwrap_or(1024 * 1024 * 1024))
.flush_every_ms(flush_every_ms)
.mode(mode)
.use_compression(use_compression.unwrap_or(false));
Ok(PmtreeConfig(config))
}
}
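PmtreeConfig is built from a JSON string in which every key is optional; missing keys fall back to the defaults chosen in from_str above. A sketch with hypothetical values:

```rust
use std::str::FromStr;

fn demo() -> PmtreeConfig {
    // All keys are optional; unrecognized "mode" values fall back to
    // HighThroughput, as in from_str above.
    PmtreeConfig::from_str(
        r#"{"temporary": true, "cache_capacity": 1048576, "mode": "HighThroughput"}"#,
    )
    .unwrap()
}
```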
impl Default for PmtreeConfig {
fn default() -> Self {
let tmp_path = get_tmp_path();
PmtreeConfig(
Config::new()
.temporary(true)
.path(tmp_path)
.cache_capacity(150_000)
.mode(Mode::HighThroughput)
.use_compression(false)
.flush_every_ms(Some(12_000)),
)
}
}
impl Debug for PmtreeConfig {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.0.fmt(f)
}
}
impl Clone for PmtreeConfig {
fn clone(&self) -> Self {
PmtreeConfig(self.0.clone())
}
}
impl ZerokitMerkleTree for PmTree {
type Proof = PmTreeProof;
type Hasher = PoseidonHash;
type Config = PmtreeConfig;
fn default(depth: usize) -> Result<Self> {
let default_config = PmtreeConfig::default();
PmTree::new(depth, Self::Hasher::default_leaf(), default_config)
}
fn new(depth: usize, _default_leaf: FrOf<Self::Hasher>, config: Self::Config) -> Result<Self> {
let tree_loaded = pmtree::MerkleTree::load(config.clone().0);
let tree = match tree_loaded {
Ok(tree) => tree,
Err(_) => pmtree::MerkleTree::new(depth, config.0)?,
};
Ok(PmTree {
tree,
metadata: Vec::new(),
})
}
fn depth(&self) -> usize {
self.tree.depth()
}
fn capacity(&self) -> usize {
self.tree.capacity()
}
fn leaves_set(&mut self) -> usize {
self.tree.leaves_set()
}
fn root(&self) -> FrOf<Self::Hasher> {
self.tree.root()
}
fn compute_root(&mut self) -> Result<FrOf<Self::Hasher>> {
Ok(self.tree.root())
}
fn set(&mut self, index: usize, leaf: FrOf<Self::Hasher>) -> Result<()> {
self.tree
.set(index, leaf)
.map_err(|e| Report::msg(e.to_string()))
}
fn set_range<I: IntoIterator<Item = FrOf<Self::Hasher>>>(
&mut self,
start: usize,
values: I,
) -> Result<()> {
self.tree
.set_range(start, values)
.map_err(|e| Report::msg(e.to_string()))
}
fn get(&self, index: usize) -> Result<FrOf<Self::Hasher>> {
self.tree.get(index).map_err(|e| Report::msg(e.to_string()))
}
fn override_range<I: IntoIterator<Item = FrOf<Self::Hasher>>, J: IntoIterator<Item = usize>>(
&mut self,
start: usize,
leaves: I,
indices: J,
) -> Result<()> {
let leaves = leaves.into_iter().collect::<Vec<_>>();
let mut indices = indices.into_iter().collect::<Vec<_>>();
indices.sort();
match (leaves.is_empty(), indices.is_empty()) {
(true, true) => Err(Report::msg("no leaves or indices to be removed")),
(false, true) => self.set_range_with_leaves(start, leaves),
(true, false) => self.remove_indices(indices),
(false, false) => self.remove_indices_and_set_leaves(start, leaves, indices),
}
}
fn update_next(&mut self, leaf: FrOf<Self::Hasher>) -> Result<()> {
self.tree
.update_next(leaf)
.map_err(|e| Report::msg(e.to_string()))
}
fn delete(&mut self, index: usize) -> Result<()> {
self.tree
.delete(index)
.map_err(|e| Report::msg(e.to_string()))
}
fn proof(&self, index: usize) -> Result<Self::Proof> {
let proof = self.tree.proof(index)?;
Ok(PmTreeProof { proof })
}
fn verify(&self, leaf: &FrOf<Self::Hasher>, witness: &Self::Proof) -> Result<bool> {
if self.tree.verify(leaf, &witness.proof) {
Ok(true)
} else {
Err(Report::msg("verify failed"))
}
}
fn set_metadata(&mut self, metadata: &[u8]) -> Result<()> {
self.tree.db.put(METADATA_KEY, metadata.to_vec())?;
self.metadata = metadata.to_vec();
Ok(())
}
fn metadata(&self) -> Result<Vec<u8>> {
if !self.metadata.is_empty() {
return Ok(self.metadata.clone());
}
// if empty, try searching the db
let data = self.tree.db.get(METADATA_KEY)?;
if data.is_none() {
return Err(Report::msg("metadata does not exist"));
}
Ok(data.unwrap())
}
fn close_db_connection(&mut self) -> Result<()> {
self.tree.db.close().map_err(|e| Report::msg(e.to_string()))
}
}
type PmTreeHasher = <PmTree as ZerokitMerkleTree>::Hasher;
type FrOfPmTreeHasher = FrOf<PmTreeHasher>;
impl PmTree {
fn set_range_with_leaves(&mut self, start: usize, leaves: Vec<FrOfPmTreeHasher>) -> Result<()> {
self.tree
.set_range(start, leaves)
.map_err(|e| Report::msg(e.to_string()))
}
fn remove_indices(&mut self, indices: Vec<usize>) -> Result<()> {
let start = indices[0];
let end = indices.last().unwrap() + 1;
let mut new_leaves: Vec<_> = (start..end)
.map(|i| self.tree.get(i))
.collect::<Result<_, _>>()?;
new_leaves
.iter_mut()
.take(indices.len())
.for_each(|leaf| *leaf = PmTreeHasher::default_leaf());
self.tree
.set_range(start, new_leaves)
.map_err(|e| Report::msg(e.to_string()))
}
fn remove_indices_and_set_leaves(
&mut self,
start: usize,
leaves: Vec<FrOfPmTreeHasher>,
indices: Vec<usize>,
) -> Result<()> {
let min_index = *indices.first().unwrap();
let max_index = start + leaves.len();
// Generate a placeholder of exactly the size needed,
// initialized with default values that are overridden throughout the method
let mut set_values = vec![PmTreeHasher::default_leaf(); max_index - min_index];
// If the index is not in the indices list, keep the original value
for i in min_index..start {
if !indices.contains(&i) {
let value = self.tree.get(i)?;
set_values[i - min_index] = value;
}
}
// Insert new leaves after 'start' position
for (i, &leaf) in leaves.iter().enumerate() {
set_values[start - min_index + i] = leaf;
}
self.tree
.set_range(min_index, set_values)
.map_err(|e| Report::msg(e.to_string()))
}
}
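A usage sketch of the override_range semantics implemented above, with hypothetical leaves; removed indices are cleared to the default leaf, and everything is flushed as a single set_range from the smallest touched index:

```rust
use rln::{circuit::Fr, pm_tree_adapter::PmTree};
use utils::ZerokitMerkleTree;

fn demo() {
    let mut tree = PmTree::default(4).unwrap();
    tree.set_range(0, (0u64..4).map(Fr::from)).unwrap();
    // Clear leaves 0 and 1 and write two new leaves starting at index 2;
    // internally this becomes one set_range(0, [default, default, 10, 11]).
    tree.override_range(2, [Fr::from(10u64), Fr::from(11u64)], [0, 1])
        .unwrap();
}
```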
impl ZerokitMerkleProof for PmTreeProof {
type Index = u8;
type Hasher = PoseidonHash;
fn length(&self) -> usize {
self.proof.length()
}
fn leaf_index(&self) -> usize {
self.proof.leaf_index()
}
fn get_path_elements(&self) -> Vec<FrOf<Self::Hasher>> {
self.proof.get_path_elements()
}
fn get_path_index(&self) -> Vec<Self::Index> {
self.proof.get_path_index()
}
fn compute_root_from(&self, leaf: &FrOf<Self::Hasher>) -> FrOf<Self::Hasher> {
self.proof.compute_root_from(leaf)
}
}

rln/src/poseidon_hash.rs (new file, 28 lines)

@@ -0,0 +1,28 @@
// This crate instantiates the Poseidon hash algorithm
use crate::circuit::Fr;
use once_cell::sync::Lazy;
use utils::poseidon::Poseidon;
// These indexed constants hardcode the supported round parameter tuples (t, RF, RN, SKIP_MATRICES) for the Bn254 scalar field
// SKIP_MATRICES is the index of the randomly generated secure MDS matrix. See security note in the zerokit_utils::poseidon::poseidon_constants crate on this.
// TODO: generate these parameters
pub const ROUND_PARAMS: [(usize, usize, usize, usize); 8] = [
(2, 8, 56, 0),
(3, 8, 57, 0),
(4, 8, 56, 0),
(5, 8, 60, 0),
(6, 8, 60, 0),
(7, 8, 63, 0),
(8, 8, 64, 0),
(9, 8, 63, 0),
];
// Poseidon Hash wrapper over above implementation. Adapted from semaphore-rs poseidon hash wrapper.
static POSEIDON: Lazy<Poseidon<Fr>> = Lazy::new(|| Poseidon::<Fr>::from(&ROUND_PARAMS));
pub fn poseidon_hash(input: &[Fr]) -> Fr {
POSEIDON
.hash(input.to_vec())
.expect("hash with fixed input size can't fail")
}


@@ -2,16 +2,10 @@
// Implementation inspired by https://github.com/worldcoin/semaphore-rs/blob/d462a4372f1fd9c27610f2acfe4841fab1d396aa/src/poseidon_tree.rs (no differences)
use crate::circuit::Fr;
use crate::poseidon_hash::poseidon_hash;
use cfg_if::cfg_if;
cfg_if! {
if #[cfg(feature = "pmtree-ft")] {
use crate::pm_tree_adapter::*;
} else {
use crate::hashers::{PoseidonHash};
use utils::merkle_tree::*;
}
}
use utils::merkle_tree::*;
// The zerokit RLN default Merkle tree implementation is the OptimalMerkleTree.
// To switch to the FullMerkleTree implementation, it is enough to enable the fullmerkletree feature
@@ -20,11 +14,25 @@ cfg_if! {
if #[cfg(feature = "fullmerkletree")] {
pub type PoseidonTree = FullMerkleTree<PoseidonHash>;
pub type MerkleProof = FullMerkleProof<PoseidonHash>;
} else if #[cfg(feature = "pmtree-ft")] {
pub type PoseidonTree = PmTree;
pub type MerkleProof = PmTreeProof;
} else {
pub type PoseidonTree = OptimalMerkleTree<PoseidonHash>;
pub type MerkleProof = OptimalMerkleProof<PoseidonHash>;
}
}
// The zerokit RLN Merkle tree Hasher
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct PoseidonHash;
// The default Hasher trait used by Merkle tree implementation in utils
impl utils::merkle_tree::Hasher for PoseidonHash {
type Fr = Fr;
fn default_leaf() -> Self::Fr {
Self::Fr::from(0)
}
fn hash(inputs: &[Self::Fr]) -> Self::Fr {
poseidon_hash(inputs)
}
}


@@ -1,11 +1,14 @@
// This crate collects all the underlying primitives used to implement RLN
use ark_circom::{CircomReduction, WitnessCalculator};
use ark_groth16::{prepare_verifying_key, Groth16, Proof as ArkProof, ProvingKey, VerifyingKey};
use ark_groth16::{
create_proof_with_reduction_and_matrices, prepare_verifying_key,
verify_proof as ark_verify_proof, Proof as ArkProof, ProvingKey, VerifyingKey,
};
use ark_relations::r1cs::ConstraintMatrices;
use ark_relations::r1cs::SynthesisError;
use ark_std::{rand::thread_rng, UniformRand};
use color_eyre::{Report, Result};
use color_eyre::Result;
use num_bigint::BigInt;
use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha20Rng;
@@ -17,13 +20,11 @@ use thiserror::Error;
use tiny_keccak::{Hasher as _, Keccak};
use crate::circuit::{Curve, Fr};
use crate::hashers::hash_to_field;
use crate::hashers::poseidon_hash;
use crate::poseidon_hash::poseidon_hash;
use crate::poseidon_tree::*;
use crate::public::RLN_IDENTIFIER;
use crate::utils::*;
use cfg_if::cfg_if;
use utils::{ZerokitMerkleProof, ZerokitMerkleTree};
///////////////////////////////////////////////////////
// RLN Witness data structure and utility functions
@@ -52,84 +53,82 @@ pub struct RLNProofValues {
}
pub fn serialize_field_element(element: Fr) -> Vec<u8> {
fr_to_bytes_le(&element)
return fr_to_bytes_le(&element);
}
pub fn deserialize_field_element(serialized: Vec<u8>) -> Fr {
let (element, _) = bytes_le_to_fr(&serialized);
element
return element;
}
pub fn deserialize_identity_pair(serialized: Vec<u8>) -> (Fr, Fr) {
let (identity_secret_hash, read) = bytes_le_to_fr(&serialized);
let (id_commitment, _) = bytes_le_to_fr(&serialized[read..]);
let (id_commitment, _) = bytes_le_to_fr(&serialized[read..].to_vec());
(identity_secret_hash, id_commitment)
return (identity_secret_hash, id_commitment);
}
pub fn deserialize_identity_tuple(serialized: Vec<u8>) -> (Fr, Fr, Fr, Fr) {
let mut all_read = 0;
let (identity_trapdoor, read) = bytes_le_to_fr(&serialized[all_read..]);
let (identity_trapdoor, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (identity_nullifier, read) = bytes_le_to_fr(&serialized[all_read..]);
let (identity_nullifier, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (identity_secret_hash, read) = bytes_le_to_fr(&serialized[all_read..]);
let (identity_secret_hash, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (identity_commitment, _) = bytes_le_to_fr(&serialized[all_read..]);
let (identity_commitment, _) = bytes_le_to_fr(&serialized[all_read..].to_vec());
(
return (
identity_trapdoor,
identity_nullifier,
identity_secret_hash,
identity_commitment,
)
);
}
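Since each component above is decoded with bytes_le_to_fr, the serialized identity tuple for BN254 is four consecutive 32-byte little-endian field elements:

```rust
// [  0.. 32) identity_trapdoor
// [ 32.. 64) identity_nullifier
// [ 64.. 96) identity_secret_hash
// [ 96..128) identity_commitment
```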
pub fn serialize_witness(rln_witness: &RLNWitnessInput) -> Result<Vec<u8>> {
pub fn serialize_witness(rln_witness: &RLNWitnessInput) -> Vec<u8> {
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&rln_witness.identity_secret));
serialized.append(&mut vec_fr_to_bytes_le(&rln_witness.path_elements)?);
serialized.append(&mut vec_u8_to_bytes_le(&rln_witness.identity_path_index)?);
serialized.append(&mut vec_fr_to_bytes_le(&rln_witness.path_elements));
serialized.append(&mut vec_u8_to_bytes_le(&rln_witness.identity_path_index));
serialized.append(&mut fr_to_bytes_le(&rln_witness.x));
serialized.append(&mut fr_to_bytes_le(&rln_witness.epoch));
serialized.append(&mut fr_to_bytes_le(&rln_witness.rln_identifier));
Ok(serialized)
serialized
}
pub fn deserialize_witness(serialized: &[u8]) -> Result<(RLNWitnessInput, usize)> {
pub fn deserialize_witness(serialized: &[u8]) -> (RLNWitnessInput, usize) {
let mut all_read: usize = 0;
let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..]);
let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (path_elements, read) = bytes_le_to_vec_fr(&serialized[all_read..])?;
let (path_elements, read) = bytes_le_to_vec_fr(&serialized[all_read..].to_vec());
all_read += read;
let (identity_path_index, read) = bytes_le_to_vec_u8(&serialized[all_read..])?;
let (identity_path_index, read) = bytes_le_to_vec_u8(&serialized[all_read..].to_vec());
all_read += read;
let (x, read) = bytes_le_to_fr(&serialized[all_read..]);
let (x, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..]);
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (rln_identifier, read) = bytes_le_to_fr(&serialized[all_read..]);
let (rln_identifier, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
// TODO: check rln_identifier against public::RLN_IDENTIFIER
if serialized.len() != all_read {
return Err(Report::msg("serialized length is not equal to all_read"));
}
assert_eq!(serialized.len(), all_read);
Ok((
(
RLNWitnessInput {
identity_secret,
path_elements,
@@ -139,7 +138,7 @@ pub fn deserialize_witness(serialized: &[u8]) -> Result<(RLNWitnessInput, usize)
rln_identifier,
},
all_read,
))
)
}
// This function deserializes input for kilic's rln generate_proof public API
@@ -149,28 +148,24 @@ pub fn deserialize_witness(serialized: &[u8]) -> Result<(RLNWitnessInput, usize)
pub fn proof_inputs_to_rln_witness(
tree: &mut PoseidonTree,
serialized: &[u8],
) -> Result<(RLNWitnessInput, usize)> {
) -> (RLNWitnessInput, usize) {
let mut all_read: usize = 0;
let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..]);
let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let id_index = usize::try_from(u64::from_le_bytes(
serialized[all_read..all_read + 8].try_into()?,
))?;
let id_index = u64::from_le_bytes(serialized[all_read..all_read + 8].try_into().unwrap());
all_read += 8;
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..]);
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let signal_len = usize::try_from(u64::from_le_bytes(
serialized[all_read..all_read + 8].try_into()?,
))?;
let signal_len = u64::from_le_bytes(serialized[all_read..all_read + 8].try_into().unwrap());
all_read += 8;
let signal: Vec<u8> = serialized[all_read..all_read + signal_len].to_vec();
let signal: Vec<u8> = serialized[all_read..all_read + (signal_len as usize)].to_vec();
let merkle_proof = tree.proof(id_index).expect("proof should exist");
let merkle_proof = tree.proof(id_index as usize).expect("proof should exist");
let path_elements = merkle_proof.get_path_elements();
let identity_path_index = merkle_proof.get_path_index();
@@ -178,7 +173,7 @@ pub fn proof_inputs_to_rln_witness(
let rln_identifier = hash_to_field(RLN_IDENTIFIER);
Ok((
(
RLNWitnessInput {
identity_secret,
path_elements,
@@ -188,48 +183,45 @@ pub fn proof_inputs_to_rln_witness(
rln_identifier,
},
all_read,
))
)
}
pub fn rln_witness_from_json(input_json_str: &str) -> Result<RLNWitnessInput> {
pub fn rln_witness_from_json(input_json_str: &str) -> RLNWitnessInput {
let input_json: serde_json::Value =
serde_json::from_str(input_json_str).expect("JSON was not well-formatted");
let identity_secret = str_to_fr(&input_json["identity_secret"].to_string(), 10)?;
let identity_secret = str_to_fr(&input_json["identity_secret"].to_string(), 10);
let path_elements = input_json["path_elements"]
.as_array()
.ok_or(Report::msg("not an array"))?
.unwrap()
.iter()
.map(|v| str_to_fr(&v.to_string(), 10))
.collect::<Result<_>>()?;
.collect();
let identity_path_index_array = input_json["identity_path_index"]
let identity_path_index = input_json["identity_path_index"]
.as_array()
.ok_or(Report::msg("not an arrray"))?;
.unwrap()
.iter()
.map(|v| v.as_u64().unwrap() as u8)
.collect();
let mut identity_path_index: Vec<u8> = vec![];
let x = str_to_fr(&input_json["x"].to_string(), 10);
for v in identity_path_index_array {
identity_path_index.push(v.as_u64().ok_or(Report::msg("not a u64 value"))? as u8);
}
let epoch = str_to_fr(&input_json["epoch"].to_string(), 16);
let x = str_to_fr(&input_json["x"].to_string(), 10)?;
let epoch = str_to_fr(&input_json["epoch"].to_string(), 16)?;
let rln_identifier = str_to_fr(&input_json["rln_identifier"].to_string(), 10)?;
let rln_identifier = str_to_fr(&input_json["rln_identifier"].to_string(), 10);
// TODO: check rln_identifier against public::RLN_IDENTIFIER
Ok(RLNWitnessInput {
RLNWitnessInput {
identity_secret,
path_elements,
identity_path_index,
x,
epoch,
rln_identifier,
})
}
}
pub fn rln_witness_from_values(
@@ -325,22 +317,22 @@ pub fn serialize_proof_values(rln_proof_values: &RLNProofValues) -> Vec<u8> {
pub fn deserialize_proof_values(serialized: &[u8]) -> (RLNProofValues, usize) {
let mut all_read: usize = 0;
let (root, read) = bytes_le_to_fr(&serialized[all_read..]);
let (root, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..]);
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (x, read) = bytes_le_to_fr(&serialized[all_read..]);
let (x, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (y, read) = bytes_le_to_fr(&serialized[all_read..]);
let (y, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (nullifier, read) = bytes_le_to_fr(&serialized[all_read..]);
let (nullifier, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (rln_identifier, read) = bytes_le_to_fr(&serialized[all_read..]);
let (rln_identifier, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
(
@@ -362,26 +354,29 @@ pub fn prepare_prove_input(
epoch: Fr,
signal: &[u8],
) -> Vec<u8> {
let signal_len = u64::try_from(signal.len()).unwrap();
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret));
serialized.append(&mut normalize_usize(id_index));
serialized.append(&mut id_index.to_le_bytes().to_vec());
serialized.append(&mut fr_to_bytes_le(&epoch));
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut signal_len.to_le_bytes().to_vec());
serialized.append(&mut signal.to_vec());
serialized
return serialized;
}
#[allow(clippy::redundant_clone)]
pub fn prepare_verify_input(proof_data: Vec<u8>, signal: &[u8]) -> Vec<u8> {
let signal_len = u64::try_from(signal.len()).unwrap();
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut proof_data.clone());
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut signal_len.to_le_bytes().to_vec());
serialized.append(&mut signal.to_vec());
serialized
return serialized;
}
///////////////////////////////////////////////////////
@@ -488,6 +483,20 @@ pub fn extended_seeded_keygen(signal: &[u8]) -> (Fr, Fr, Fr, Fr) {
)
}
// Hashes arbitrary signal to the underlying prime field
pub fn hash_to_field(signal: &[u8]) -> Fr {
// We hash the input signal using Keccak256
// (note that a bigger curve order might require a bigger hash blocksize)
let mut hash = [0; 32];
let mut hasher = Keccak::v256();
hasher.update(signal);
hasher.finalize(&mut hash);
// We export the hash as a field element
let (el, _) = bytes_le_to_fr(hash.as_ref());
el
}
pub fn compute_id_secret(
share1: (Fr, Fr),
share2: (Fr, Fr),
@@ -510,9 +519,9 @@ pub fn compute_id_secret(
if a_1 == computed_a_1 {
// We successfully recovered the identity secret
Ok(a_0)
return Ok(a_0);
} else {
Err("Cannot recover identity_secret_hash from provided shares".into())
return Err("Cannot recover identity_secret_hash from provided shares".into());
}
}
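The check above guards a two-point interpolation of a degree-1 Shamir share: given shares (x_1, y_1) and (x_2, y_2) on the line y = a_0 + a_1 * x, the slope is a_1 = (y_2 - y_1)/(x_2 - x_1) and the secret is a_0 = y_1 - a_1 * x_1; the function then compares the recomputed slope (computed_a_1) against the expected one before returning the recovered secret a_0.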
@@ -523,36 +532,33 @@ pub fn compute_id_secret(
#[derive(Error, Debug)]
pub enum ProofError {
#[error("Error reading circuit key: {0}")]
CircuitKeyError(#[from] Report),
CircuitKeyError(#[from] std::io::Error),
#[error("Error producing witness: {0}")]
WitnessError(Report),
WitnessError(color_eyre::Report),
#[error("Error producing proof: {0}")]
SynthesisError(#[from] SynthesisError),
}
fn calculate_witness_element<E: ark_ec::pairing::Pairing>(
witness: Vec<BigInt>,
) -> Result<Vec<E::ScalarField>> {
use ark_ff::PrimeField;
let modulus = <E::ScalarField as PrimeField>::MODULUS;
fn calculate_witness_element<E: ark_ec::PairingEngine>(witness: Vec<BigInt>) -> Result<Vec<E::Fr>> {
use ark_ff::{FpParameters, PrimeField};
let modulus = <<E::Fr as PrimeField>::Params as FpParameters>::MODULUS;
// convert it to field elements
use num_traits::Signed;
let mut witness_vec = vec![];
for w in witness.into_iter() {
let w = if w.sign() == num_bigint::Sign::Minus {
// Need to negate the witness element if negative
modulus.into()
- w.abs()
.to_biguint()
.ok_or(Report::msg("not a biguint value"))?
} else {
w.to_biguint().ok_or(Report::msg("not a biguint value"))?
};
witness_vec.push(E::ScalarField::from(w))
}
let witness = witness
.into_iter()
.map(|w| {
let w = if w.sign() == num_bigint::Sign::Minus {
// Need to negate the witness element if negative
modulus.into() - w.abs().to_biguint().unwrap()
} else {
w.to_biguint().unwrap()
};
E::Fr::from(w)
})
.collect::<Vec<_>>();
Ok(witness_vec)
Ok(witness)
}
pub fn generate_proof_with_witness(
@@ -563,8 +569,9 @@ pub fn generate_proof_with_witness(
#[cfg(debug_assertions)]
let now = Instant::now();
let full_assignment =
calculate_witness_element::<Curve>(witness).map_err(ProofError::WitnessError)?;
let full_assignment = calculate_witness_element::<Curve>(witness)
.map_err(ProofError::WitnessError)
.unwrap();
#[cfg(debug_assertions)]
println!("witness generation took: {:.2?}", now.elapsed());
@@ -578,7 +585,7 @@ pub fn generate_proof_with_witness(
#[cfg(debug_assertions)]
let now = Instant::now();
let proof = Groth16::<_, CircomReduction>::create_proof_with_reduction_and_matrices(
let proof = create_proof_with_reduction_and_matrices::<_, CircomReduction>(
&proving_key.0,
r,
s,
@@ -586,7 +593,8 @@ pub fn generate_proof_with_witness(
proving_key.1.num_instance_variables,
proving_key.1.num_constraints,
full_assignment.as_slice(),
)?;
)
.unwrap();
#[cfg(debug_assertions)]
println!("proof generation took: {:.2?}", now.elapsed());
@@ -594,16 +602,14 @@ pub fn generate_proof_with_witness(
Ok(proof)
}
pub fn inputs_for_witness_calculation(
rln_witness: &RLNWitnessInput,
) -> Result<[(&str, Vec<BigInt>); 6]> {
pub fn inputs_for_witness_calculation(rln_witness: &RLNWitnessInput) -> [(&str, Vec<BigInt>); 6] {
// We convert the path indexes to field elements
// TODO: check if necessary
let mut path_elements = Vec::new();
for v in rln_witness.path_elements.iter() {
path_elements.push(to_bigint(v)?);
}
rln_witness
.path_elements
.iter()
.for_each(|v| path_elements.push(to_bigint(v)));
let mut identity_path_index = Vec::new();
rln_witness
@@ -611,20 +617,20 @@ pub fn inputs_for_witness_calculation(
.iter()
.for_each(|v| identity_path_index.push(BigInt::from(*v)));
Ok([
[
(
"identity_secret",
vec![to_bigint(&rln_witness.identity_secret)?],
vec![to_bigint(&rln_witness.identity_secret)],
),
("path_elements", path_elements),
("identity_path_index", identity_path_index),
("x", vec![to_bigint(&rln_witness.x)?]),
("epoch", vec![to_bigint(&rln_witness.epoch)?]),
("x", vec![to_bigint(&rln_witness.x)]),
("epoch", vec![to_bigint(&rln_witness.epoch)]),
(
"rln_identifier",
vec![to_bigint(&rln_witness.rln_identifier)?],
vec![to_bigint(&rln_witness.rln_identifier)],
),
])
]
}
/// Generates a RLN proof
@@ -638,7 +644,7 @@ pub fn generate_proof(
proving_key: &(ProvingKey<Curve>, ConstraintMatrices<Fr>),
rln_witness: &RLNWitnessInput,
) -> Result<ArkProof<Curve>, ProofError> {
let inputs = inputs_for_witness_calculation(rln_witness)?
let inputs = inputs_for_witness_calculation(rln_witness)
.into_iter()
.map(|(name, values)| (name.to_string(), values));
@@ -672,7 +678,7 @@ pub fn generate_proof(
#[cfg(debug_assertions)]
let now = Instant::now();
let proof = Groth16::<_, CircomReduction>::create_proof_with_reduction_and_matrices(
let proof = create_proof_with_reduction_and_matrices::<_, CircomReduction>(
&proving_key.0,
r,
s,
@@ -717,7 +723,7 @@ pub fn verify_proof(
#[cfg(debug_assertions)]
let now = Instant::now();
let verified = Groth16::<_, CircomReduction>::verify_proof(&pvk, proof, &inputs)?;
let verified = ark_verify_proof(&pvk, proof, &inputs)?;
#[cfg(debug_assertions)]
println!("verify took: {:.2?}", now.elapsed());
@@ -729,12 +735,12 @@ pub fn verify_proof(
///
/// Returns a JSON object containing the inputs necessary to calculate
/// the witness with CIRCOM on javascript
pub fn get_json_inputs(rln_witness: &RLNWitnessInput) -> Result<serde_json::Value> {
pub fn get_json_inputs(rln_witness: &RLNWitnessInput) -> serde_json::Value {
let mut path_elements = Vec::new();
for v in rln_witness.path_elements.iter() {
path_elements.push(to_bigint(v)?.to_str_radix(10));
}
rln_witness
.path_elements
.iter()
.for_each(|v| path_elements.push(to_bigint(v).to_str_radix(10)));
let mut identity_path_index = Vec::new();
rln_witness
@@ -743,13 +749,13 @@ pub fn get_json_inputs(rln_witness: &RLNWitnessInput) -> Result<serde_json::Valu
.for_each(|v| identity_path_index.push(BigInt::from(*v).to_str_radix(10)));
let inputs = serde_json::json!({
"identity_secret": to_bigint(&rln_witness.identity_secret)?.to_str_radix(10),
"identity_secret": to_bigint(&rln_witness.identity_secret).to_str_radix(10),
"path_elements": path_elements,
"identity_path_index": identity_path_index,
"x": to_bigint(&rln_witness.x)?.to_str_radix(10),
"epoch": format!("0x{:064x}", to_bigint(&rln_witness.epoch)?),
"rln_identifier": to_bigint(&rln_witness.rln_identifier)?.to_str_radix(10),
"x": to_bigint(&rln_witness.x).to_str_radix(10),
"epoch": format!("0x{:064x}", to_bigint(&rln_witness.epoch)),
"rln_identifier": to_bigint(&rln_witness.rln_identifier).to_str_radix(10),
});
Ok(inputs)
inputs
}
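Since `get_json_inputs` now returns the value directly, callers no longer unwrap a `Result`. A minimal call-site sketch, assuming a populated `rln_witness` as in the tests later in this diff:

// Hedged sketch: obtain the witness inputs and serialize them for a
// CIRCOM witness calculator running in JavaScript.
let inputs: serde_json::Value = get_json_inputs(&rln_witness);
let json_string = serde_json::to_string(&inputs).expect("JSON serialization");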

File diff suppressed because it is too large

View File

@@ -2,25 +2,22 @@
use crate::circuit::Fr;
use ark_ff::PrimeField;
use color_eyre::{Report, Result};
use num_bigint::{BigInt, BigUint};
use num_traits::Num;
use std::iter::Extend;
pub fn to_bigint(el: &Fr) -> Result<BigInt> {
let res: BigUint = (*el).try_into()?;
Ok(res.into())
pub fn to_bigint(el: &Fr) -> BigInt {
let res: BigUint = (*el).try_into().unwrap();
res.try_into().unwrap()
}
pub fn fr_byte_size() -> usize {
let mbs = <Fr as PrimeField>::MODULUS_BIT_SIZE;
((mbs + 64 - (mbs % 64)) / 8) as usize
let mbs = <Fr as PrimeField>::size_in_bits();
(mbs + 64 - (mbs % 64)) / 8
}
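As a worked check of the rounding formula, assuming the BN254 scalar field used throughout this repo (254-bit modulus):

// The modulus bit size is padded up to the next multiple of 64 bits,
// then converted to bytes:
// (254 + 64 - 254 % 64) / 8 = (254 + 64 - 62) / 8 = 256 / 8 = 32.
let mbs: u32 = 254; // assumed Fr modulus bit size
assert_eq!((mbs + 64 - (mbs % 64)) / 8, 32); // each Fr serializes to 32 bytes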
pub fn str_to_fr(input: &str, radix: u32) -> Result<Fr> {
if !(radix == 10 || radix == 16) {
return Err(Report::msg("wrong radix"));
}
pub fn str_to_fr(input: &str, radix: u32) -> Fr {
assert!((radix == 10) || (radix == 16));
// We remove any quotes present and trim the input
let single_quote: char = '\"';
@@ -28,10 +25,16 @@ pub fn str_to_fr(input: &str, radix: u32) -> Result<Fr> {
input_clean = input_clean.trim().to_string();
if radix == 10 {
Ok(BigUint::from_str_radix(&input_clean, radix)?.try_into()?)
BigUint::from_str_radix(&input_clean, radix)
.unwrap()
.try_into()
.unwrap()
} else {
input_clean = input_clean.replace("0x", "");
Ok(BigUint::from_str_radix(&input_clean, radix)?.try_into()?)
BigUint::from_str_radix(&input_clean, radix)
.unwrap()
.try_into()
.unwrap()
}
}
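A usage sketch of the now-panicking `str_to_fr`; it asserts on unsupported radixes instead of returning a `Result`, and the values here are illustrative:

// Decimal and hex parses of the same value should agree; the "0x"
// prefix is stripped internally for radix 16.
let a = str_to_fr("42", 10);
let b = str_to_fr("0x2a", 16);
assert_eq!(a, b);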
@@ -72,111 +75,99 @@ pub fn fr_to_bytes_be(input: &Fr) -> Vec<u8> {
res
}
pub fn vec_fr_to_bytes_le(input: &[Fr]) -> Result<Vec<u8>> {
pub fn vec_fr_to_bytes_le(input: &[Fr]) -> Vec<u8> {
let mut bytes: Vec<u8> = Vec::new();
// We store the vector length
bytes.extend(u64::try_from(input.len())?.to_le_bytes().to_vec());
bytes.extend(u64::try_from(input.len()).unwrap().to_le_bytes().to_vec());
// We store each element
input.iter().for_each(|el| bytes.extend(fr_to_bytes_le(el)));
Ok(bytes)
bytes
}
pub fn vec_fr_to_bytes_be(input: &[Fr]) -> Result<Vec<u8>> {
pub fn vec_fr_to_bytes_be(input: &[Fr]) -> Vec<u8> {
let mut bytes: Vec<u8> = Vec::new();
// We store the vector length
bytes.extend(u64::try_from(input.len())?.to_be_bytes().to_vec());
bytes.extend(u64::try_from(input.len()).unwrap().to_be_bytes().to_vec());
// We store each element
input.iter().for_each(|el| bytes.extend(fr_to_bytes_be(el)));
Ok(bytes)
bytes
}
pub fn vec_u8_to_bytes_le(input: &[u8]) -> Result<Vec<u8>> {
pub fn vec_u8_to_bytes_le(input: &[u8]) -> Vec<u8> {
let mut bytes: Vec<u8> = Vec::new();
// We store the vector length
bytes.extend(u64::try_from(input.len())?.to_le_bytes().to_vec());
bytes.extend(u64::try_from(input.len()).unwrap().to_le_bytes().to_vec());
bytes.extend(input);
Ok(bytes)
bytes
}
pub fn vec_u8_to_bytes_be(input: Vec<u8>) -> Result<Vec<u8>> {
pub fn vec_u8_to_bytes_be(input: Vec<u8>) -> Vec<u8> {
let mut bytes: Vec<u8> = Vec::new();
// We store the vector length
bytes.extend(u64::try_from(input.len())?.to_be_bytes().to_vec());
bytes.extend(u64::try_from(input.len()).unwrap().to_be_bytes().to_vec());
bytes.extend(input);
Ok(bytes)
bytes
}
pub fn bytes_le_to_vec_u8(input: &[u8]) -> Result<(Vec<u8>, usize)> {
pub fn bytes_le_to_vec_u8(input: &[u8]) -> (Vec<u8>, usize) {
let mut read: usize = 0;
let len = usize::try_from(u64::from_le_bytes(input[0..8].try_into()?))?;
let len = u64::from_le_bytes(input[0..8].try_into().unwrap()) as usize;
read += 8;
let res = input[8..8 + len].to_vec();
read += res.len();
Ok((res, read))
(res, read)
}
pub fn bytes_be_to_vec_u8(input: &[u8]) -> Result<(Vec<u8>, usize)> {
pub fn bytes_be_to_vec_u8(input: &[u8]) -> (Vec<u8>, usize) {
let mut read: usize = 0;
let len = usize::try_from(u64::from_be_bytes(input[0..8].try_into()?))?;
let len = u64::from_be_bytes(input[0..8].try_into().unwrap()) as usize;
read += 8;
let res = input[8..8 + len].to_vec();
read += res.len();
Ok((res, read))
(res, read)
}
pub fn bytes_le_to_vec_fr(input: &[u8]) -> Result<(Vec<Fr>, usize)> {
pub fn bytes_le_to_vec_fr(input: &[u8]) -> (Vec<Fr>, usize) {
let mut read: usize = 0;
let mut res: Vec<Fr> = Vec::new();
let len = usize::try_from(u64::from_le_bytes(input[0..8].try_into()?))?;
let len = u64::from_le_bytes(input[0..8].try_into().unwrap()) as usize;
read += 8;
let el_size = fr_byte_size();
for i in 0..len {
let (curr_el, _) = bytes_le_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)]);
let (curr_el, _) = bytes_le_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)].to_vec());
res.push(curr_el);
read += el_size;
}
Ok((res, read))
(res, read)
}
pub fn bytes_be_to_vec_fr(input: &[u8]) -> Result<(Vec<Fr>, usize)> {
pub fn bytes_be_to_vec_fr(input: &[u8]) -> (Vec<Fr>, usize) {
let mut read: usize = 0;
let mut res: Vec<Fr> = Vec::new();
let len = usize::try_from(u64::from_be_bytes(input[0..8].try_into()?))?;
let len = u64::from_be_bytes(input[0..8].try_into().unwrap()) as usize;
read += 8;
let el_size = fr_byte_size();
for i in 0..len {
let (curr_el, _) = bytes_be_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)]);
let (curr_el, _) = bytes_be_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)].to_vec());
res.push(curr_el);
read += el_size;
}
Ok((res, read))
}
pub fn normalize_usize(input: usize) -> Vec<u8> {
let mut normalized_usize = input.to_le_bytes().to_vec();
normalized_usize.resize(8, 0);
normalized_usize
(res, read)
}
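A round-trip sketch for the little-endian helpers above (illustrative values; `Fr::from` is used the same way in the tests below):

// vec_fr_to_bytes_le writes an 8-byte little-endian length prefix
// followed by each serialized element; bytes_le_to_vec_fr reverses
// it and also reports how many bytes it consumed.
let elems = vec![Fr::from(1), Fr::from(2)];
let bytes = vec_fr_to_bytes_le(&elems);
let (decoded, read) = bytes_le_to_vec_fr(&bytes);
assert_eq!(decoded, elems);
assert_eq!(read, bytes.len()); // 8 + 2 * fr_byte_size()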
/* Old conversion utilities between different libraries' data types

View File

@@ -3,12 +3,11 @@ mod test {
use ark_std::{rand::thread_rng, UniformRand};
use rand::Rng;
use rln::circuit::*;
use rln::ffi::{hash as ffi_hash, poseidon_hash as ffi_poseidon_hash, *};
use rln::hashers::{hash_to_field, poseidon_hash as utils_poseidon_hash, ROUND_PARAMS};
use rln::ffi::*;
use rln::poseidon_hash::poseidon_hash;
use rln::protocol::*;
use rln::public::RLN;
use rln::utils::*;
use serde_json::json;
use std::fs::File;
use std::io::Read;
use std::mem::MaybeUninit;
@@ -29,8 +28,7 @@ mod test {
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_config = json!({ "resource_folder": TEST_RESOURCES_FOLDER }).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
@@ -80,7 +78,7 @@ mod test {
assert!(success, "set tree call failed");
// We add leaves in a batch into the tree
let leaves_ser = vec_fr_to_bytes_le(&leaves).unwrap();
let leaves_ser = vec_fr_to_bytes_le(&leaves);
let input_buffer = &Buffer::from(leaves_ser.as_ref());
let success = init_tree_with_leaves(rln_pointer, input_buffer);
assert!(success, "init tree with leaves call failed");
@@ -98,7 +96,8 @@ mod test {
// We now delete all leaves set and check if the root corresponds to the empty tree root
// delete calls over indexes higher than no_of_leaves are ignored and will not increase self.tree.next_index
for i in 0..no_of_leaves {
let delete_range = 2 * no_of_leaves;
for i in 0..delete_range {
let success = delete_leaf(rln_pointer, i);
assert!(success, "delete leaf call failed");
}
@@ -137,8 +136,7 @@ mod test {
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
@@ -155,7 +153,7 @@ mod test {
let set_index = rng.gen_range(0..no_of_leaves) as usize;
// We add leaves in a batch into the tree
let leaves_ser = vec_fr_to_bytes_le(&leaves).unwrap();
let leaves_ser = vec_fr_to_bytes_le(&leaves);
let input_buffer = &Buffer::from(leaves_ser.as_ref());
let success = init_tree_with_leaves(rln_pointer, input_buffer);
assert!(success, "init tree with leaves call failed");
@@ -172,13 +170,13 @@ mod test {
// `init_tree_with_leaves` resets the tree to the height it was initialized with, using `set_tree`
// We add leaves in a batch starting from index 0..set_index
let leaves_m = vec_fr_to_bytes_le(&leaves[0..set_index]).unwrap();
let leaves_m = vec_fr_to_bytes_le(&leaves[0..set_index]);
let buffer = &Buffer::from(leaves_m.as_ref());
let success = init_tree_with_leaves(rln_pointer, buffer);
assert!(success, "init tree with leaves call failed");
// We add the remaining n leaves in a batch starting from index set_index
let leaves_n = vec_fr_to_bytes_le(&leaves[set_index..]).unwrap();
let leaves_n = vec_fr_to_bytes_le(&leaves[set_index..]);
let buffer = &Buffer::from(leaves_n.as_ref());
let success = set_leaves_from(rln_pointer, set_index, buffer);
assert!(success, "set leaves from call failed");
@@ -218,71 +216,6 @@ mod test {
assert_eq!(root_batch_with_init, root_single_additions);
}
#[test]
// This test is similar to the one in public.rs but it uses the RLN object as a pointer
fn test_atomic_operation_ffi() {
let tree_height = TEST_TREE_HEIGHT;
let no_of_leaves = 256;
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
// We generate a vector of random leaves
let mut leaves: Vec<Fr> = Vec::new();
let mut rng = thread_rng();
for _ in 0..no_of_leaves {
leaves.push(Fr::rand(&mut rng));
}
// We add leaves in a batch into the tree
let leaves_ser = vec_fr_to_bytes_le(&leaves).unwrap();
let input_buffer = &Buffer::from(leaves_ser.as_ref());
let success = init_tree_with_leaves(rln_pointer, input_buffer);
assert!(success, "init tree with leaves call failed");
// We get the root of the tree obtained adding leaves in batch
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
assert!(success, "get root call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (root_after_insertion, _) = bytes_le_to_fr(&result_data);
let last_leaf = leaves.last().unwrap();
let last_leaf_index = no_of_leaves - 1;
let indices = vec![last_leaf_index as u8];
let last_leaf = vec![*last_leaf];
let indices = vec_u8_to_bytes_le(&indices).unwrap();
let indices_buffer = &Buffer::from(indices.as_ref());
let leaves = vec_fr_to_bytes_le(&last_leaf).unwrap();
let leaves_buffer = &Buffer::from(leaves.as_ref());
let success = atomic_operation(
rln_pointer,
last_leaf_index as usize,
leaves_buffer,
indices_buffer,
);
assert!(success, "atomic operation call failed");
// We get the root of the tree obtained after a no-op
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
assert!(success, "get root call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (root_after_noop, _) = bytes_le_to_fr(&result_data);
assert_eq!(root_after_insertion, root_after_noop);
}
#[test]
// This test is similar to the one in public.rs but it uses the RLN object as a pointer
fn test_set_leaves_bad_index_ffi() {
@@ -300,8 +233,7 @@ mod test {
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
@@ -316,7 +248,7 @@ mod test {
let (root_empty, _) = bytes_le_to_fr(&result_data);
// We add leaves in a batch into the tree
let leaves = vec_fr_to_bytes_le(&leaves).unwrap();
let leaves = vec_fr_to_bytes_le(&leaves);
let buffer = &Buffer::from(leaves.as_ref());
let success = set_leaves_from(rln_pointer, bad_index, buffer);
assert!(!success, "set leaves from call succeeded");
@@ -341,15 +273,14 @@ mod test {
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
// generate identity
let identity_secret_hash = hash_to_field(b"test-merkle-proof");
let id_commitment = utils_poseidon_hash(&vec![identity_secret_hash]);
let id_commitment = poseidon_hash(&vec![identity_secret_hash]);
// We prepare id_commitment and we set the leaf at provided index
let leaf_ser = fr_to_bytes_le(&id_commitment);
@@ -372,86 +303,71 @@ mod test {
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (path_elements, read) = bytes_le_to_vec_fr(&result_data).unwrap();
let (identity_path_index, _) = bytes_le_to_vec_u8(&result_data[read..].to_vec()).unwrap();
let (path_elements, read) = bytes_le_to_vec_fr(&result_data);
let (identity_path_index, _) = bytes_le_to_vec_u8(&result_data[read..].to_vec());
// We check correct computation of the path and indexes
let mut expected_path_elements = vec![
str_to_fr(
"0x0000000000000000000000000000000000000000000000000000000000000000",
16,
)
.unwrap(),
),
str_to_fr(
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
16,
)
.unwrap(),
),
str_to_fr(
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
16,
)
.unwrap(),
),
str_to_fr(
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
16,
)
.unwrap(),
),
str_to_fr(
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
16,
)
.unwrap(),
),
str_to_fr(
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
16,
)
.unwrap(),
),
str_to_fr(
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
16,
)
.unwrap(),
),
str_to_fr(
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
16,
)
.unwrap(),
),
str_to_fr(
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
16,
)
.unwrap(),
),
str_to_fr(
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
16,
)
.unwrap(),
),
str_to_fr(
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
16,
)
.unwrap(),
),
str_to_fr(
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
16,
)
.unwrap(),
),
str_to_fr(
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
16,
)
.unwrap(),
),
str_to_fr(
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
16,
)
.unwrap(),
),
str_to_fr(
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
16,
)
.unwrap(),
),
];
let mut expected_identity_path_index: Vec<u8> =
@@ -463,23 +379,19 @@ mod test {
str_to_fr(
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
16,
)
.unwrap(),
),
str_to_fr(
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
16,
)
.unwrap(),
),
str_to_fr(
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
16,
)
.unwrap(),
),
str_to_fr(
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
16,
)
.unwrap(),
),
]);
expected_identity_path_index.append(&mut vec![0, 0, 0, 0]);
}
@@ -488,8 +400,7 @@ mod test {
expected_path_elements.append(&mut vec![str_to_fr(
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
16,
)
.unwrap()]);
)]);
expected_identity_path_index.append(&mut vec![0]);
}
@@ -510,8 +421,7 @@ mod test {
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
@@ -529,7 +439,7 @@ mod test {
let proof_values = proof_values_from_witness(&rln_witness);
// We prepare id_commitment and we set the leaf at provided index
let rln_witness_ser = serialize_witness(&rln_witness).unwrap();
let rln_witness_ser = serialize_witness(&rln_witness);
let input_buffer = &Buffer::from(rln_witness_ser.as_ref());
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let now = Instant::now();
@@ -573,8 +483,7 @@ mod test {
// We create a RLN instance using a resource folder path
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
@@ -617,14 +526,11 @@ mod test {
// Creating a RLN instance passing the raw data
let mut rln_pointer_raw_bytes = MaybeUninit::<*mut RLN>::uninit();
let tree_config = "".to_string();
let tree_config_buffer = &Buffer::from(tree_config.as_bytes());
let success = new_with_params(
tree_height,
circom_data,
zkey_data,
vk_data,
tree_config_buffer,
rln_pointer_raw_bytes.as_mut_ptr(),
);
assert!(success, "RLN object creation failed");
@@ -657,14 +563,13 @@ mod test {
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
// We add leaves in a batch into the tree
let leaves_ser = vec_fr_to_bytes_le(&leaves).unwrap();
let leaves_ser = vec_fr_to_bytes_le(&leaves);
let input_buffer = &Buffer::from(leaves_ser.as_ref());
let success = init_tree_with_leaves(rln_pointer, input_buffer);
assert!(success, "init tree with leaves call failed");
@@ -684,11 +589,12 @@ mod test {
let success = set_next_leaf(rln_pointer, input_buffer);
assert!(success, "set next leaf call failed");
let identity_index: usize = no_of_leaves;
let identity_index: u64 = no_of_leaves;
// We generate a random signal
let mut rng = rand::thread_rng();
let signal: [u8; 32] = rng.gen();
let signal_len = u64::try_from(signal.len()).unwrap();
// We generate a random epoch
let epoch = hash_to_field(b"test-epoch");
@@ -697,9 +603,9 @@ mod test {
// input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret_hash));
serialized.append(&mut normalize_usize(identity_index));
serialized.append(&mut identity_index.to_le_bytes().to_vec());
serialized.append(&mut fr_to_bytes_le(&epoch));
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut signal_len.to_le_bytes().to_vec());
serialized.append(&mut signal.to_vec());
// We call generate_rln_proof
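For reference, `normalize_usize` is the helper removed from utils earlier in this diff; a quick sketch (reproducing it for comparison only) shows the replacement is byte-for-byte compatible on 64-bit targets:

// The removed helper, reproduced verbatim for comparison.
fn normalize_usize(input: usize) -> Vec<u8> {
    let mut normalized_usize = input.to_le_bytes().to_vec();
    normalized_usize.resize(8, 0);
    normalized_usize
}

// u64::to_le_bytes always yields exactly 8 bytes, matching the old
// resize-to-8 behavior when usize is 8 bytes wide.
let identity_index: u64 = 256;
assert_eq!(
    normalize_usize(identity_index as usize),
    identity_index.to_le_bytes().to_vec()
);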
@@ -714,7 +620,7 @@ mod test {
// We prepare input for verify_rln_proof API
// input_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
// that is [ proof_data | signal_len<8> | signal<var> ]
proof_data.append(&mut normalize_usize(signal.len()));
proof_data.append(&mut signal_len.to_le_bytes().to_vec());
proof_data.append(&mut signal.to_vec());
// We call verify_rln_proof
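As a byte-accounting check of the layout described in the comment above (a sketch assuming 32-byte field elements, per `fr_byte_size`):

// proof<128> plus six field elements at 32 bytes each gives a
// 320-byte fixed prefix, followed by signal_len<8> and the signal.
let signal_len = 32; // the tests use 32-byte random signals
assert_eq!(128 + 6 * 32 + 8 + signal_len, 360);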
@@ -742,14 +648,13 @@ mod test {
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
// We add leaves in a batch into the tree
let leaves_ser = vec_fr_to_bytes_le(&leaves).unwrap();
let leaves_ser = vec_fr_to_bytes_le(&leaves);
let input_buffer = &Buffer::from(leaves_ser.as_ref());
let success = init_tree_with_leaves(rln_pointer, input_buffer);
assert!(success, "set leaves call failed");
@@ -769,11 +674,12 @@ mod test {
let success = set_next_leaf(rln_pointer, input_buffer);
assert!(success, "set next leaf call failed");
let identity_index: usize = no_of_leaves;
let identity_index: u64 = no_of_leaves;
// We generate a random signal
let mut rng = rand::thread_rng();
let signal: [u8; 32] = rng.gen();
let signal_len = u64::try_from(signal.len()).unwrap();
// We generate a random epoch
let epoch = hash_to_field(b"test-epoch");
@@ -782,9 +688,9 @@ mod test {
// input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret_hash));
serialized.append(&mut normalize_usize(identity_index));
serialized.append(&mut identity_index.to_le_bytes().to_vec());
serialized.append(&mut fr_to_bytes_le(&epoch));
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut signal_len.to_le_bytes().to_vec());
serialized.append(&mut signal.to_vec());
// We call generate_rln_proof
@@ -799,7 +705,7 @@ mod test {
// We prepare input for verify_rln_proof API
// input_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
// that is [ proof_data | signal_len<8> | signal<var> ]
proof_data.append(&mut normalize_usize(signal.len()));
proof_data.append(&mut signal_len.to_le_bytes().to_vec());
proof_data.append(&mut signal.to_vec());
// We test verify_with_roots
@@ -861,8 +767,7 @@ mod test {
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
@@ -882,16 +787,18 @@ mod test {
let success = set_next_leaf(rln_pointer, input_buffer);
assert!(success, "set next leaf call failed");
let identity_index: usize = 0;
let identity_index: u64 = 0;
// We generate two proofs using the same epoch but different signals.
// We generate two random signals
let mut rng = rand::thread_rng();
let signal1: [u8; 32] = rng.gen();
let signal1_len = u64::try_from(signal1.len()).unwrap();
// We generate the second random signal
let signal2: [u8; 32] = rng.gen();
let signal2_len = u64::try_from(signal2.len()).unwrap();
// We generate a random epoch
let epoch = hash_to_field(b"test-epoch");
@@ -900,18 +807,18 @@ mod test {
// input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
let mut serialized1: Vec<u8> = Vec::new();
serialized1.append(&mut fr_to_bytes_le(&identity_secret_hash));
serialized1.append(&mut normalize_usize(identity_index));
serialized1.append(&mut identity_index.to_le_bytes().to_vec());
serialized1.append(&mut fr_to_bytes_le(&epoch));
// The first part is the same for both proof inputs, so we clone
let mut serialized2 = serialized1.clone();
// We attach the first signal to the first proof input
serialized1.append(&mut normalize_usize(signal1.len()));
serialized1.append(&mut signal1_len.to_le_bytes().to_vec());
serialized1.append(&mut signal1.to_vec());
// We attach the second signal to the second proof input
serialized2.append(&mut normalize_usize(signal2.len()));
serialized2.append(&mut signal2_len.to_le_bytes().to_vec());
serialized2.append(&mut signal2.to_vec());
// We call generate_rln_proof for first proof values
@@ -970,19 +877,20 @@ mod test {
let success = set_next_leaf(rln_pointer, input_buffer);
assert!(success, "set next leaf call failed");
let identity_index_new: usize = 1;
let identity_index_new: u64 = 1;
// We generate a random signal
let signal3: [u8; 32] = rng.gen();
let signal3_len = u64::try_from(signal3.len()).unwrap();
// We prepare input for generate_rln_proof API
// input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
// Note that epoch is the same as before
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret_hash_new));
serialized.append(&mut normalize_usize(identity_index_new));
serialized.append(&mut identity_index_new.to_le_bytes().to_vec());
serialized.append(&mut fr_to_bytes_le(&epoch));
serialized.append(&mut normalize_usize(signal3.len()));
serialized.append(&mut signal3_len.to_le_bytes().to_vec());
serialized.append(&mut signal3.to_vec());
// We call generate_rln_proof
@@ -1021,8 +929,7 @@ mod test {
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
@@ -1050,19 +957,19 @@ mod test {
assert_eq!(
identity_secret_hash,
expected_identity_secret_hash_seed_bytes.unwrap()
expected_identity_secret_hash_seed_bytes
);
assert_eq!(id_commitment, expected_id_commitment_seed_bytes.unwrap());
assert_eq!(id_commitment, expected_id_commitment_seed_bytes);
}
#[test]
// Tests seeded extended key generation using FFI APIs
fn test_seeded_extended_keygen_ffi() {
let tree_height = TEST_TREE_HEIGHT;
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
@@ -1097,31 +1004,34 @@ mod test {
16,
);
assert_eq!(
identity_trapdoor,
expected_identity_trapdoor_seed_bytes.unwrap()
);
assert_eq!(
identity_nullifier,
expected_identity_nullifier_seed_bytes.unwrap()
);
assert_eq!(identity_trapdoor, expected_identity_trapdoor_seed_bytes);
assert_eq!(identity_nullifier, expected_identity_nullifier_seed_bytes);
assert_eq!(
identity_secret_hash,
expected_identity_secret_hash_seed_bytes.unwrap()
expected_identity_secret_hash_seed_bytes
);
assert_eq!(id_commitment, expected_id_commitment_seed_bytes.unwrap());
assert_eq!(id_commitment, expected_id_commitment_seed_bytes);
}
#[test]
// Tests hash to field using FFI APIs
fn test_hash_to_field_ffi() {
let tree_height = TEST_TREE_HEIGHT;
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
let mut rng = rand::thread_rng();
let signal: [u8; 32] = rng.gen();
// We prepare the signal as the input buffer for the hash call
let input_buffer = &Buffer::from(signal.as_ref());
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = ffi_hash(input_buffer, output_buffer.as_mut_ptr());
let success = hash(rln_pointer, input_buffer, output_buffer.as_mut_ptr());
assert!(success, "hash call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
@@ -1133,103 +1043,4 @@ mod test {
assert_eq!(hash1, hash2);
}
#[test]
// Test Poseidon hash FFI
fn test_poseidon_hash_ffi() {
// generate random number between 1..ROUND_PARAMS.len()
let mut rng = thread_rng();
let number_of_inputs = rng.gen_range(1..ROUND_PARAMS.len());
let mut inputs = Vec::with_capacity(number_of_inputs);
for _ in 0..number_of_inputs {
inputs.push(Fr::rand(&mut rng));
}
let inputs_ser = vec_fr_to_bytes_le(&inputs).unwrap();
let input_buffer = &Buffer::from(inputs_ser.as_ref());
let expected_hash = utils_poseidon_hash(inputs.as_ref());
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = ffi_poseidon_hash(input_buffer, output_buffer.as_mut_ptr());
assert!(success, "poseidon hash call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (received_hash, _) = bytes_le_to_fr(&result_data);
assert_eq!(received_hash, expected_hash);
}
#[test]
fn test_get_leaf() {
// We create a RLN instance
let tree_height = TEST_TREE_HEIGHT;
let no_of_leaves = 1 << TEST_TREE_HEIGHT;
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
// We generate a new identity tuple from an input seed
let seed_bytes: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
let input_buffer = &Buffer::from(seed_bytes);
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success =
seeded_extended_key_gen(rln_pointer, input_buffer, output_buffer.as_mut_ptr());
assert!(success, "seeded key gen call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (_, _, _, id_commitment) = deserialize_identity_tuple(result_data);
// We insert the id_commitment into the tree at a random index
let mut rng = thread_rng();
let index = rng.gen_range(0..no_of_leaves) as usize;
let leaf = fr_to_bytes_le(&id_commitment);
let input_buffer = &Buffer::from(leaf.as_ref());
let success = set_leaf(rln_pointer, index, input_buffer);
assert!(success, "set leaf call failed");
// We get the leaf at the same index
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = get_leaf(rln_pointer, index, output_buffer.as_mut_ptr());
assert!(success, "get leaf call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (received_id_commitment, _) = bytes_le_to_fr(&result_data);
// We check that the received id_commitment is the same as the one we inserted
assert_eq!(received_id_commitment, id_commitment);
}
#[test]
fn test_metadata() {
// We create a RLN instance
let tree_height = TEST_TREE_HEIGHT;
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
let input_buffer = &Buffer::from(input_config.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
let seed_bytes: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
let input_buffer = &Buffer::from(seed_bytes);
let success = set_metadata(rln_pointer, input_buffer);
assert!(success, "set_metadata call failed");
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = get_metadata(rln_pointer, output_buffer.as_mut_ptr());
assert!(success, "get_metadata call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
assert_eq!(result_data, seed_bytes.to_vec());
}
}

View File

@@ -5,8 +5,8 @@
#[cfg(test)]
mod test {
use rln::circuit::*;
use rln::hashers::PoseidonHash;
use utils::{FullMerkleTree, OptimalMerkleTree, ZerokitMerkleProof, ZerokitMerkleTree};
use rln::poseidon_tree::*;
use utils::{FullMerkleTree, OptimalMerkleTree};
#[test]
/// A basic performance comparison between the two supported Merkle Tree implementations
@@ -25,16 +25,16 @@ mod test {
for _ in 0..sample_size.try_into().unwrap() {
let now = Instant::now();
FullMerkleTree::<PoseidonHash>::default(tree_height).unwrap();
FullMerkleTree::<PoseidonHash>::default(tree_height);
gen_time_full += now.elapsed().as_nanos();
let now = Instant::now();
OptimalMerkleTree::<PoseidonHash>::default(tree_height).unwrap();
OptimalMerkleTree::<PoseidonHash>::default(tree_height);
gen_time_opt += now.elapsed().as_nanos();
}
let mut tree_full = FullMerkleTree::<PoseidonHash>::default(tree_height).unwrap();
let mut tree_opt = OptimalMerkleTree::<PoseidonHash>::default(tree_height).unwrap();
let mut tree_full = FullMerkleTree::<PoseidonHash>::default(tree_height);
let mut tree_opt = OptimalMerkleTree::<PoseidonHash>::default(tree_height);
for i in 0..sample_size.try_into().unwrap() {
let now = Instant::now();
@@ -78,3 +78,495 @@ mod test {
);
}
}
// Test module for pmtree integration and features in zerokit
// enabled only if the pmtree feature is enabled
#[cfg(feature = "pmtree")]
#[cfg(test)]
mod pmtree_test {
use pmtree::*;
use rln::circuit::Fr;
use rln::poseidon_hash::poseidon_hash;
use rln::poseidon_tree::PoseidonHash;
use rln::protocol::hash_to_field;
use rln::utils::{bytes_le_to_fr, fr_to_bytes_le, str_to_fr};
use sled::Db as Sled;
use std::collections::HashMap;
use std::fs;
use std::path::Path;
use utils::{FullMerkleTree, OptimalMerkleTree};
// The pmtree Hasher trait implementation used by the pmtree Merkle tree
impl pmtree::Hasher for PoseidonHash {
type Fr = Fr;
fn default_leaf() -> Self::Fr {
Fr::from(0)
}
fn serialize(value: Self::Fr) -> Value {
fr_to_bytes_le(&value)
}
fn deserialize(value: Value) -> Self::Fr {
let (fr, _) = bytes_le_to_fr(&value);
fr
}
fn hash(inputs: &[Self::Fr]) -> Self::Fr {
poseidon_hash(inputs)
}
}
// pmtree supports in-memory and on-disk databases (Database trait) for storing the Merkle tree state
// We implement Database for a HashMap-backed in-memory database
struct MemoryDB(HashMap<DBKey, Value>);
impl Database for MemoryDB {
fn new(_dbpath: &str) -> Result<Self> {
Ok(MemoryDB(HashMap::new()))
}
fn load(_dbpath: &str) -> Result<Self> {
Err(Error("Cannot load in-memory DB".to_string()))
}
fn get(&self, key: DBKey) -> Result<Option<Value>> {
Ok(self.0.get(&key).cloned())
}
fn put(&mut self, key: DBKey, value: Value) -> Result<()> {
self.0.insert(key, value);
Ok(())
}
}
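To make the wiring concrete, a minimal sketch (mirroring the pmtree API exercised in the tests below) of how the Hasher and Database impls combine:

// pmtree drives the Database impl internally: `default` invokes
// MemoryDB::new and persists every tree node through put/get.
let mut tree = pmtree::MerkleTree::<MemoryDB, PoseidonHash>::default(20).unwrap();
tree.set(0, Fr::from(42)).unwrap();
let proof = tree.proof(0).expect("leaf 0 was just set");
assert!(tree.verify(&Fr::from(42), &proof));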
// We implement Database for sled DB, an on-disk database
struct SledDB(Sled);
impl Database for SledDB {
fn new(dbpath: &str) -> Result<Self> {
if Path::new(dbpath).exists() {
match fs::remove_dir_all(dbpath) {
Ok(x) => x,
Err(e) => return Err(Error(e.to_string())),
}
}
let db: Sled = match sled::open(dbpath) {
Ok(db) => db,
Err(e) => return Err(Error(e.to_string())),
};
Ok(SledDB(db))
}
fn load(dbpath: &str) -> Result<Self> {
let db: Sled = match sled::open(dbpath) {
Ok(db) => db,
Err(e) => return Err(Error(e.to_string())),
};
if !db.was_recovered() {
return Err(Error("Trying to load non-existing database!".to_string()));
}
Ok(SledDB(db))
}
fn get(&self, key: DBKey) -> Result<Option<Value>> {
match self.0.get(key) {
Ok(value) => Ok(value.map(|val| val.to_vec())),
Err(e) => Err(Error(e.to_string())),
}
}
fn put(&mut self, key: DBKey, value: Value) -> Result<()> {
match self.0.insert(key, value) {
Ok(_) => Ok(()),
Err(e) => Err(Error(e.to_string())),
}
}
}
#[test]
/// A basic performance comparison between the two supported Merkle Tree implementations and in-memory/on-disk pmtree implementations
fn test_zerokit_and_pmtree_merkle_implementations_performances() {
use std::time::{Duration, Instant};
let tree_height = 20;
let sample_size = 100;
let leaves: Vec<Fr> = (0..sample_size).map(|s| Fr::from(s)).collect();
let mut gen_time_full: u128 = 0;
let mut upd_time_full: u128 = 0;
let mut gen_time_opt: u128 = 0;
let mut upd_time_opt: u128 = 0;
let mut gen_time_pm_memory: u128 = 0;
let mut upd_time_pm_memory: u128 = 0;
let mut gen_time_pm_sled: u128 = 0;
let mut upd_time_pm_sled: u128 = 0;
for _ in 0..sample_size.try_into().unwrap() {
let now = Instant::now();
FullMerkleTree::<PoseidonHash>::default(tree_height);
gen_time_full += now.elapsed().as_nanos();
let now = Instant::now();
OptimalMerkleTree::<PoseidonHash>::default(tree_height);
gen_time_opt += now.elapsed().as_nanos();
let now = Instant::now();
pmtree::MerkleTree::<MemoryDB, PoseidonHash>::default(tree_height).unwrap();
gen_time_pm_memory += now.elapsed().as_nanos();
let now = Instant::now();
pmtree::MerkleTree::<SledDB, PoseidonHash>::default(tree_height).unwrap();
gen_time_pm_sled += now.elapsed().as_nanos();
}
let mut tree_full = FullMerkleTree::<PoseidonHash>::default(tree_height);
let mut tree_opt = OptimalMerkleTree::<PoseidonHash>::default(tree_height);
let mut tree_pm_memory =
pmtree::MerkleTree::<MemoryDB, PoseidonHash>::default(tree_height).unwrap();
let mut tree_pm_sled =
pmtree::MerkleTree::<SledDB, PoseidonHash>::default(tree_height).unwrap();
for i in 0..sample_size.try_into().unwrap() {
let now = Instant::now();
tree_full.set(i, leaves[i]).unwrap();
upd_time_full += now.elapsed().as_nanos();
let proof = tree_full.proof(i).expect("index should be set");
assert_eq!(proof.leaf_index(), i);
let now = Instant::now();
tree_opt.set(i, leaves[i]).unwrap();
upd_time_opt += now.elapsed().as_nanos();
let proof = tree_opt.proof(i).expect("index should be set");
assert_eq!(proof.leaf_index(), i);
let now = Instant::now();
tree_pm_memory.set(i, leaves[i]).unwrap();
upd_time_pm_memory += now.elapsed().as_nanos();
let proof = tree_pm_memory.proof(i).expect("index should be set");
assert_eq!(proof.leaf_index(), i);
let now = Instant::now();
tree_pm_sled.set(i, leaves[i]).unwrap();
upd_time_pm_sled += now.elapsed().as_nanos();
let proof = tree_pm_sled.proof(i).expect("index should be set");
assert_eq!(proof.leaf_index(), i);
}
// We check all roots are the same
let tree_full_root = tree_full.root();
let tree_opt_root = tree_opt.root();
let tree_pm_memory_root = tree_pm_memory.root();
let tree_pm_sled_root = tree_pm_sled.root();
assert_eq!(tree_full_root, tree_opt_root);
assert_eq!(tree_opt_root, tree_pm_memory_root);
assert_eq!(tree_pm_memory_root, tree_pm_sled_root);
println!(" Average tree generation time:");
println!(
" - Full Merkle Tree: {:?}",
Duration::from_nanos((gen_time_full / sample_size).try_into().unwrap())
);
println!(
" - Optimal Merkle Tree: {:?}",
Duration::from_nanos((gen_time_opt / sample_size).try_into().unwrap())
);
println!(
" - Pmtree-HashMap Merkle Tree: {:?}",
Duration::from_nanos((gen_time_pm_memory / sample_size).try_into().unwrap())
);
println!(
" - Pmtree-Sled Merkle Tree: {:?}",
Duration::from_nanos((gen_time_pm_sled / sample_size).try_into().unwrap())
);
println!(" Average update_next execution time:");
println!(
" - Full Merkle Tree: {:?}",
Duration::from_nanos((upd_time_full / sample_size).try_into().unwrap())
);
println!(
" - Optimal Merkle Tree: {:?}",
Duration::from_nanos((upd_time_opt / sample_size).try_into().unwrap())
);
println!(
" - Pmtree-HashMap Merkle Tree: {:?}",
Duration::from_nanos((upd_time_pm_memory / sample_size).try_into().unwrap())
);
println!(
" - Pmtree-Sled Merkle Tree: {:?}",
Duration::from_nanos((upd_time_pm_sled / sample_size).try_into().unwrap())
);
}
// The following two tests contain values that come from the public::test_merkle_proof test
// We check that pmtree and zerokit Merkle tree implementations match.
#[test]
fn test_pmtree_hashmap() -> Result<()> {
let tree_height = 20;
let mut tree = pmtree::MerkleTree::<MemoryDB, PoseidonHash>::default(tree_height).unwrap();
let leaf_index = 3;
let identity_secret = hash_to_field(b"test-merkle-proof");
let id_commitment = poseidon_hash(&[identity_secret]);
// let default_leaf = Fr::from(0);
tree.set(leaf_index, id_commitment).unwrap();
// We check correct computation of the root
let root = tree.root();
assert_eq!(
root,
str_to_fr(
"0x21947ffd0bce0c385f876e7c97d6a42eec5b1fe935aab2f01c1f8a8cbcc356d2",
16
)
);
let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
let path_elements = merkle_proof.get_path_elements();
let identity_path_index = merkle_proof.get_path_index();
// We check correct computation of the path and indexes
// These values refer to tree height = 20
let expected_path_elements = vec![
str_to_fr(
"0x0000000000000000000000000000000000000000000000000000000000000000",
16,
),
str_to_fr(
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
16,
),
str_to_fr(
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
16,
),
str_to_fr(
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
16,
),
str_to_fr(
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
16,
),
str_to_fr(
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
16,
),
str_to_fr(
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
16,
),
str_to_fr(
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
16,
),
str_to_fr(
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
16,
),
str_to_fr(
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
16,
),
str_to_fr(
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
16,
),
str_to_fr(
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
16,
),
str_to_fr(
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
16,
),
str_to_fr(
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
16,
),
str_to_fr(
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
16,
),
str_to_fr(
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
16,
),
str_to_fr(
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
16,
),
str_to_fr(
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
16,
),
str_to_fr(
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
16,
),
str_to_fr(
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
16,
),
];
let expected_identity_path_index: Vec<u8> =
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
assert_eq!(path_elements, expected_path_elements);
assert_eq!(identity_path_index, expected_identity_path_index);
// We check correct verification of the proof
assert!(tree.verify(&id_commitment, &merkle_proof));
Ok(())
}
#[test]
fn test_pmtree_sled() -> Result<()> {
let tree_height = 20;
let mut tree = pmtree::MerkleTree::<SledDB, PoseidonHash>::default(tree_height).unwrap();
let leaf_index = 3;
let identity_secret = hash_to_field(b"test-merkle-proof");
let id_commitment = poseidon_hash(&[identity_secret]);
// let default_leaf = Fr::from(0);
tree.set(leaf_index, id_commitment).unwrap();
// We check correct computation of the root
let root = tree.root();
assert_eq!(
root,
str_to_fr(
"0x21947ffd0bce0c385f876e7c97d6a42eec5b1fe935aab2f01c1f8a8cbcc356d2",
16
)
);
let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
let path_elements = merkle_proof.get_path_elements();
let identity_path_index = merkle_proof.get_path_index();
// We check correct computation of the path and indexes
// These values refer to tree height = 20
let expected_path_elements = vec![
str_to_fr(
"0x0000000000000000000000000000000000000000000000000000000000000000",
16,
),
str_to_fr(
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
16,
),
str_to_fr(
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
16,
),
str_to_fr(
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
16,
),
str_to_fr(
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
16,
),
str_to_fr(
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
16,
),
str_to_fr(
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
16,
),
str_to_fr(
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
16,
),
str_to_fr(
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
16,
),
str_to_fr(
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
16,
),
str_to_fr(
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
16,
),
str_to_fr(
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
16,
),
str_to_fr(
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
16,
),
str_to_fr(
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
16,
),
str_to_fr(
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
16,
),
str_to_fr(
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
16,
),
str_to_fr(
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
16,
),
str_to_fr(
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
16,
),
str_to_fr(
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
16,
),
str_to_fr(
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
16,
),
];
let expected_identity_path_index: Vec<u8> =
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
assert_eq!(path_elements, expected_path_elements);
assert_eq!(identity_path_index, expected_identity_path_index);
// We check correct verification of the proof
assert!(tree.verify(&id_commitment, &merkle_proof));
Ok(())
}
}

View File

@@ -4,13 +4,10 @@ mod test {
circom_from_folder, vk_from_folder, zkey_from_folder, Fr, TEST_RESOURCES_FOLDER,
TEST_TREE_HEIGHT,
};
use rln::hashers::{hash_to_field, poseidon_hash};
use rln::poseidon_hash::poseidon_hash;
use rln::poseidon_tree::PoseidonTree;
use rln::protocol::*;
use rln::utils::str_to_fr;
use utils::{ZerokitMerkleProof, ZerokitMerkleTree};
type ConfigOf<T> = <T as ZerokitMerkleTree>::Config;
// Input generated with https://github.com/oskarth/zk-kit/commit/b6a872f7160c7c14e10a0ea40acab99cbb23c9a8
const WITNESS_JSON_15: &str = r#"
@@ -174,12 +171,7 @@ mod test {
// generate merkle tree
let default_leaf = Fr::from(0);
let mut tree = PoseidonTree::new(
tree_height,
default_leaf,
ConfigOf::<PoseidonTree>::default(),
)
.unwrap();
let mut tree = PoseidonTree::new(tree_height, default_leaf);
tree.set(leaf_index, id_commitment.into()).unwrap();
// We check correct computation of the root
@@ -192,7 +184,6 @@ mod test {
"0x1984f2e01184aef5cb974640898a5f5c25556554e2b06d99d4841badb8b198cd",
16
)
.unwrap()
);
} else if TEST_TREE_HEIGHT == 19 {
assert_eq!(
@@ -201,7 +192,6 @@ mod test {
"0x219ceb53f2b1b7a6cf74e80d50d44d68ecb4a53c6cc65b25593c8d56343fb1fe",
16
)
.unwrap()
);
} else if TEST_TREE_HEIGHT == 20 {
assert_eq!(
@@ -210,7 +200,6 @@ mod test {
"0x21947ffd0bce0c385f876e7c97d6a42eec5b1fe935aab2f01c1f8a8cbcc356d2",
16
)
.unwrap()
);
}
@@ -224,78 +213,63 @@ mod test {
str_to_fr(
"0x0000000000000000000000000000000000000000000000000000000000000000",
16,
)
.unwrap(),
),
str_to_fr(
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
16,
)
.unwrap(),
),
str_to_fr(
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
16,
)
.unwrap(),
),
str_to_fr(
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
16,
)
.unwrap(),
),
str_to_fr(
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
16,
)
.unwrap(),
),
str_to_fr(
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
16,
)
.unwrap(),
),
str_to_fr(
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
16,
)
.unwrap(),
),
str_to_fr(
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
16,
)
.unwrap(),
),
str_to_fr(
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
16,
)
.unwrap(),
),
str_to_fr(
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
16,
)
.unwrap(),
),
str_to_fr(
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
16,
)
.unwrap(),
),
str_to_fr(
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
16,
)
.unwrap(),
),
str_to_fr(
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
16,
)
.unwrap(),
),
str_to_fr(
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
16,
)
.unwrap(),
),
str_to_fr(
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
16,
)
.unwrap(),
),
];
let mut expected_identity_path_index: Vec<u8> =
@@ -307,23 +281,19 @@ mod test {
str_to_fr(
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
16,
)
.unwrap(),
),
str_to_fr(
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
16,
)
.unwrap(),
),
str_to_fr(
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
16,
)
.unwrap(),
),
str_to_fr(
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
16,
)
.unwrap(),
),
]);
expected_identity_path_index.append(&mut vec![0, 0, 0, 0]);
}
@@ -332,8 +302,7 @@ mod test {
expected_path_elements.append(&mut vec![str_to_fr(
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
16,
)
.unwrap()]);
)]);
expected_identity_path_index.append(&mut vec![0]);
}
@@ -350,7 +319,7 @@ mod test {
// We generate all relevant keys
let proving_key = zkey_from_folder(TEST_RESOURCES_FOLDER).unwrap();
let verification_key = vk_from_folder(TEST_RESOURCES_FOLDER).unwrap();
let builder = circom_from_folder(TEST_RESOURCES_FOLDER).unwrap();
let builder = circom_from_folder(TEST_RESOURCES_FOLDER);
// We compute the witness from the JSON input example
let mut witness_json: &str = "";
@@ -365,12 +334,10 @@ mod test {
let rln_witness = rln_witness_from_json(witness_json);
let rln_witness_unwrapped = rln_witness.unwrap();
// Let's generate a zkSNARK proof
let proof = generate_proof(builder, &proving_key, &rln_witness_unwrapped).unwrap();
let proof = generate_proof(builder, &proving_key, &rln_witness).unwrap();
let proof_values = proof_values_from_witness(&rln_witness_unwrapped);
let proof_values = proof_values_from_witness(&rln_witness);
// Let's verify the proof
let verified = verify_proof(&verification_key, &proof, &proof_values);
@@ -389,12 +356,7 @@ mod test {
//// generate merkle tree
let default_leaf = Fr::from(0);
let mut tree = PoseidonTree::new(
tree_height,
default_leaf,
ConfigOf::<PoseidonTree>::default(),
)
.unwrap();
let mut tree = PoseidonTree::new(tree_height, default_leaf);
tree.set(leaf_index, id_commitment.into()).unwrap();
let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
@@ -416,7 +378,7 @@ mod test {
// We generate all relevant keys
let proving_key = zkey_from_folder(TEST_RESOURCES_FOLDER).unwrap();
let verification_key = vk_from_folder(TEST_RESOURCES_FOLDER).unwrap();
let builder = circom_from_folder(TEST_RESOURCES_FOLDER).unwrap();
let builder = circom_from_folder(TEST_RESOURCES_FOLDER);
// Let's generate a zkSNARK proof
let proof = generate_proof(builder, &proving_key, &rln_witness).unwrap();
@@ -442,10 +404,10 @@ mod test {
witness_json = WITNESS_JSON_20;
}
let rln_witness = rln_witness_from_json(witness_json).unwrap();
let rln_witness = rln_witness_from_json(witness_json);
let ser = serialize_witness(&rln_witness).unwrap();
let (deser, _) = deserialize_witness(&ser).unwrap();
let ser = serialize_witness(&rln_witness);
let (deser, _) = deserialize_witness(&ser);
assert_eq!(rln_witness, deser);
// We test Proof values serialization
@@ -467,13 +429,11 @@ mod test {
let expected_identity_secret_hash_seed_phrase = str_to_fr(
"0x20df38f3f00496f19fe7c6535492543b21798ed7cb91aebe4af8012db884eda3",
16,
)
.unwrap();
);
let expected_id_commitment_seed_phrase = str_to_fr(
"0x1223a78a5d66043a7f9863e14507dc80720a5602b2a894923e5b5147d5a9c325",
16,
)
.unwrap();
);
assert_eq!(
identity_secret_hash,
@@ -489,13 +449,11 @@ mod test {
let expected_identity_secret_hash_seed_bytes = str_to_fr(
"0x766ce6c7e7a01bdf5b3f257616f603918c30946fa23480f2859c597817e6716",
16,
)
.unwrap();
);
let expected_id_commitment_seed_bytes = str_to_fr(
"0xbf16d2b5c0d6f9d9d561e05bfca16a81b4b873bb063508fae360d8c74cef51f",
16,
)
.unwrap();
);
assert_eq!(
identity_secret_hash,

View File

@@ -1,13 +1,11 @@
#[cfg(test)]
mod test {
use ark_std::{rand::thread_rng, UniformRand};
use rand::Rng;
use rln::circuit::{Fr, TEST_RESOURCES_FOLDER, TEST_TREE_HEIGHT};
use rln::hashers::{hash_to_field, poseidon_hash as utils_poseidon_hash, ROUND_PARAMS};
use rln::protocol::{compute_tree_root, deserialize_identity_tuple};
use rln::public::{hash as public_hash, poseidon_hash as public_poseidon_hash, RLN};
use rln::circuit::{TEST_RESOURCES_FOLDER, TEST_TREE_HEIGHT};
use rln::poseidon_hash::poseidon_hash;
use rln::protocol::{compute_tree_root, deserialize_identity_tuple, hash_to_field};
use rln::public::RLN;
use rln::utils::*;
use serde_json::json;
use std::io::Cursor;
#[test]
@@ -16,13 +14,12 @@ mod test {
let tree_height = TEST_TREE_HEIGHT;
let leaf_index = 3;
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
let input_buffer = Cursor::new(TEST_RESOURCES_FOLDER);
let mut rln = RLN::new(tree_height, input_buffer);
// generate identity
let identity_secret_hash = hash_to_field(b"test-merkle-proof");
let id_commitment = utils_poseidon_hash(&vec![identity_secret_hash]);
let id_commitment = poseidon_hash(&vec![identity_secret_hash]);
// We pass id_commitment as Read buffer to RLN's set_leaf
let mut buffer = Cursor::new(fr_to_bytes_le(&id_commitment));
@@ -40,7 +37,6 @@ mod test {
"0x1984f2e01184aef5cb974640898a5f5c25556554e2b06d99d4841badb8b198cd",
16
)
.unwrap()
);
} else if TEST_TREE_HEIGHT == 19 {
assert_eq!(
@@ -49,7 +45,6 @@ mod test {
"0x219ceb53f2b1b7a6cf74e80d50d44d68ecb4a53c6cc65b25593c8d56343fb1fe",
16
)
.unwrap()
);
} else if TEST_TREE_HEIGHT == 20 {
assert_eq!(
@@ -58,7 +53,6 @@ mod test {
"0x21947ffd0bce0c385f876e7c97d6a42eec5b1fe935aab2f01c1f8a8cbcc356d2",
16
)
.unwrap()
);
}
@@ -67,86 +61,71 @@ mod test {
rln.get_proof(leaf_index, &mut buffer).unwrap();
let buffer_inner = buffer.into_inner();
let (path_elements, read) = bytes_le_to_vec_fr(&buffer_inner).unwrap();
let (identity_path_index, _) = bytes_le_to_vec_u8(&buffer_inner[read..].to_vec()).unwrap();
let (path_elements, read) = bytes_le_to_vec_fr(&buffer_inner);
let (identity_path_index, _) = bytes_le_to_vec_u8(&buffer_inner[read..].to_vec());
// We check correct computation of the path and indexes
let mut expected_path_elements = vec![
str_to_fr(
"0x0000000000000000000000000000000000000000000000000000000000000000",
16,
)
.unwrap(),
),
str_to_fr(
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
16,
)
.unwrap(),
),
str_to_fr(
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
16,
)
.unwrap(),
),
str_to_fr(
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
16,
)
.unwrap(),
),
str_to_fr(
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
16,
)
.unwrap(),
),
str_to_fr(
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
16,
)
.unwrap(),
),
str_to_fr(
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
16,
)
.unwrap(),
),
str_to_fr(
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
16,
)
.unwrap(),
),
str_to_fr(
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
16,
)
.unwrap(),
),
str_to_fr(
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
16,
)
.unwrap(),
),
str_to_fr(
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
16,
)
.unwrap(),
),
str_to_fr(
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
16,
)
.unwrap(),
),
str_to_fr(
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
16,
)
.unwrap(),
),
str_to_fr(
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
16,
)
.unwrap(),
),
str_to_fr(
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
16,
)
.unwrap(),
),
];
let mut expected_identity_path_index: Vec<u8> =
@@ -158,23 +137,19 @@ mod test {
str_to_fr(
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
16,
)
.unwrap(),
),
str_to_fr(
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
16,
)
.unwrap(),
),
str_to_fr(
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
16,
)
.unwrap(),
),
str_to_fr(
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
16,
)
.unwrap(),
),
]);
expected_identity_path_index.append(&mut vec![0, 0, 0, 0]);
}
@@ -183,8 +158,7 @@ mod test {
expected_path_elements.append(&mut vec![str_to_fr(
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
16,
)
.unwrap()]);
)]);
expected_identity_path_index.append(&mut vec![0]);
}
@@ -218,13 +192,11 @@ mod test {
let expected_identity_secret_hash_seed_bytes = str_to_fr(
"0x766ce6c7e7a01bdf5b3f257616f603918c30946fa23480f2859c597817e6716",
16,
)
.unwrap();
);
let expected_id_commitment_seed_bytes = str_to_fr(
"0xbf16d2b5c0d6f9d9d561e05bfca16a81b4b873bb063508fae360d8c74cef51f",
16,
)
.unwrap();
);
assert_eq!(
identity_secret_hash,
@@ -253,23 +225,19 @@ mod test {
let expected_identity_trapdoor_seed_bytes = str_to_fr(
"0x766ce6c7e7a01bdf5b3f257616f603918c30946fa23480f2859c597817e6716",
16,
)
.unwrap();
);
let expected_identity_nullifier_seed_bytes = str_to_fr(
"0x1f18714c7bc83b5bca9e89d404cf6f2f585bc4c0f7ed8b53742b7e2b298f50b4",
16,
)
.unwrap();
);
let expected_identity_secret_hash_seed_bytes = str_to_fr(
"0x2aca62aaa7abaf3686fff2caf00f55ab9462dc12db5b5d4bcf3994e671f8e521",
16,
)
.unwrap();
);
let expected_id_commitment_seed_bytes = str_to_fr(
"0x68b66aa0a8320d2e56842581553285393188714c48f9b17acd198b4f1734c5c",
16,
)
.unwrap();
);
assert_eq!(identity_trapdoor, expected_identity_trapdoor_seed_bytes);
assert_eq!(identity_nullifier, expected_identity_nullifier_seed_bytes);
@@ -282,13 +250,15 @@ mod test {
#[test]
fn test_hash_to_field() {
let mut rng = thread_rng();
let rln = RLN::default();
let mut rng = rand::thread_rng();
let signal: [u8; 32] = rng.gen();
let mut input_buffer = Cursor::new(&signal);
let mut output_buffer = Cursor::new(Vec::<u8>::new());
public_hash(&mut input_buffer, &mut output_buffer).unwrap();
rln.hash(&mut input_buffer, &mut output_buffer).unwrap();
let serialized_hash = output_buffer.into_inner();
let (hash1, _) = bytes_le_to_fr(&serialized_hash);
@@ -296,24 +266,4 @@ mod test {
assert_eq!(hash1, hash2);
}
#[test]
fn test_poseidon_hash() {
let mut rng = thread_rng();
let number_of_inputs = rng.gen_range(1..ROUND_PARAMS.len());
let mut inputs = Vec::with_capacity(number_of_inputs);
for _ in 0..number_of_inputs {
inputs.push(Fr::rand(&mut rng));
}
let expected_hash = utils_poseidon_hash(&inputs);
let mut input_buffer = Cursor::new(vec_fr_to_bytes_le(&inputs).unwrap());
let mut output_buffer = Cursor::new(Vec::<u8>::new());
public_poseidon_hash(&mut input_buffer, &mut output_buffer).unwrap();
let serialized_hash = output_buffer.into_inner();
let (hash, _) = bytes_le_to_fr(&serialized_hash);
assert_eq!(hash, expected_hash);
}
}
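These tests drive RLN entirely through `Read`/`Write` buffers, with field elements crossing the boundary as little-endian bytes via the `rln::utils` helpers used above. A minimal round-trip sketch of that convention (a sketch only: whether the helpers return plain values or `Result` differs between the two sides of this diff, and the plain form is assumed here, along with `read` reporting the bytes consumed, as the proof-parsing code above relies on):

```rust
use ark_bn254::Fr;
use ark_std::UniformRand;
use rln::utils::{bytes_le_to_fr, fr_to_bytes_le};

fn main() {
    let mut rng = rand::thread_rng();
    let x = Fr::rand(&mut rng);
    // Serialize to little-endian bytes, then parse back.
    let bytes = fr_to_bytes_le(&x);
    let (y, read) = bytes_le_to_fr(&bytes);
    assert_eq!(x, y);
    assert_eq!(read, bytes.len());
}
```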

View File

@@ -1,8 +1,7 @@
[package]
name = "semaphore-wrapper"
version = "0.3.0"
version = "0.1.0"
edition = "2021"
license = "MIT OR Apache-2.0"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -12,16 +11,17 @@ dylib = [ "wasmer/dylib", "wasmer-engine-dylib", "wasmer-compiler-cranelift" ]
[dependencies]
ark-bn254 = { version = "0.3.0" }
ark-circom = { git = "https://github.com/gakonst/ark-circom", features=["circom-2"], rev = "35ce5a9" }
ark-circom = { git = "https://github.com/gakonst/ark-circom", features=["circom-2"] }
ark-ec = { version = "0.3.0", default-features = false, features = ["parallel"] }
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", features = ["parallel"] }
ark-relations = { version = "0.3.0", default-features = false }
ark-std = { version = "0.3.0", default-features = false, features = ["parallel"] }
color-eyre = "0.6.1"
color-eyre = "0.5"
num-bigint = { version = "0.4", default-features = false, features = ["rand"] }
once_cell = "1.8"
rand = "0.8.4"
semaphore = { git = "https://github.com/worldcoin/semaphore-rs", rev = "ee658c2"}
ethers-core = { version = "2.0.8", default-features = false }
ethers-core = { git = "https://github.com/gakonst/ethers-rs", default-features = false }
ruint = { version = "1.2.0", features = [ "serde", "num-bigint", "ark-ff" ] }
serde = "1.0"
thiserror = "1.0.0"
@@ -32,10 +32,10 @@ rand_chacha = "0.3.1"
serde_json = "1.0.79"
[build-dependencies]
color-eyre = "0.6.1"
color-eyre = "0.5"
wasmer = { version = "2.0" }
wasmer-engine-dylib = { version = "2.2.1", optional = true }
wasmer-compiler-cranelift = { version = "3.1.1", optional = true }
wasmer-compiler-cranelift = { version = "2.2.1", optional = true }
[profile.release]
codegen-units = 1

View File

@@ -8,11 +8,3 @@ Goal is also to provide a basic FFI around protocol.rs, which is currently not
in scope for that project.
See that project for more information.
## Build and Test
To build and test, run the following commands within the module folder
```bash
cargo make build
cargo make test
```

View File

@@ -37,7 +37,7 @@ fn build_circuit() -> Result<()> {
.current_dir("./vendor/semaphore")
.status()?
.success()
.then_some(())
.then(|| ())
.ok_or(eyre!("procees returned failure"))?;
Ok(())
};
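This hunk swaps `bool::then(|| ())` for the newer `bool::then_some(())` (stable since Rust 1.62); both convert a `bool` into an `Option` that `ok_or` can turn into a `Result`. A standalone sketch of the pattern, with a hypothetical `run_step` standing in for the `Command::status()` call above:

```rust
use color_eyre::{eyre::eyre, Result};

// Hypothetical stand-in for a process invocation that reports success.
fn run_step(success: bool) -> Result<()> {
    success
        .then_some(()) // Some(()) on success, None on failure
        .ok_or(eyre!("process returned failure"))
}

fn main() -> Result<()> {
    run_step(true)?;
    assert!(run_step(false).is_err());
    Ok(())
}
```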

View File

@@ -50,7 +50,7 @@ fn from_dylib(path: &Path) -> Mutex<WitnessCalculator> {
#[must_use]
pub fn zkey() -> &'static (ProvingKey<Bn254>, ConstraintMatrices<Fr>) {
&ZKEY
&*ZKEY
}
#[cfg(feature = "dylib")]
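The `&ZKEY` vs `&*ZKEY` change above is about dereferencing a lazily initialized static: `&*STATIC` dereferences explicitly to the inner type, while `&STATIC` relies on deref coercion at the return site. A minimal sketch assuming a `once_cell::sync::Lazy` static (the actual `ZKEY` definition is not shown in this diff):

```rust
use once_cell::sync::Lazy;

static ZKEY_BYTES: Lazy<Vec<u8>> = Lazy::new(|| vec![0u8; 4]);

// Both forms yield &'static Vec<u8>: `&*ZKEY_BYTES` dereferences the Lazy
// explicitly, while `&ZKEY_BYTES` is coerced from &Lazy<Vec<u8>> via Deref.
fn zkey_bytes() -> &'static Vec<u8> {
    &ZKEY_BYTES
}

fn main() {
    assert_eq!(zkey_bytes().len(), 4);
}
```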

View File

@@ -12,7 +12,7 @@ use ark_groth16::{
};
use ark_relations::r1cs::SynthesisError;
use ark_std::UniformRand;
use color_eyre::{Report, Result};
use color_eyre::Result;
use ethers_core::types::U256;
use rand::{thread_rng, Rng};
use semaphore::{
@@ -89,7 +89,7 @@ pub enum ProofError {
#[error("Error reading circuit key: {0}")]
CircuitKeyError(#[from] std::io::Error),
#[error("Error producing witness: {0}")]
WitnessError(Report),
WitnessError(color_eyre::Report),
#[error("Error producing proof: {0}")]
SynthesisError(#[from] SynthesisError),
#[error("Error converting public input: {0}")]

View File

@@ -1,36 +1,18 @@
[package]
name = "zerokit_utils"
version = "0.3.2"
name = "utils"
version = "0.1.0"
edition = "2021"
license = "MIT OR Apache-2.0"
description = "Various utilities for Zerokit"
documentation = "https://github.com/vacp2p/zerokit"
homepage = "https://vac.dev"
repository = "https://github.com/vacp2p/zerokit"
[lib]
bench = false
[dependencies]
ark-ff = { version = "=0.4.1", default-features = false, features = ["asm"] }
num-bigint = { version = "=0.4.3", default-features = false, features = ["rand"] }
color-eyre = "=0.6.2"
pmtree = { package = "pmtree", version = "=2.0.0", optional = true}
sled = "=0.34.7"
serde = "=1.0.163"
ark-ff = { version = "0.3.0", default-features = false, features = ["asm"] }
num-bigint = { version = "0.4.3", default-features = false, features = ["rand"] }
[dev-dependencies]
ark-bn254 = "=0.4.0"
num-traits = "=0.2.15"
hex-literal = "=0.3.4"
tiny-keccak = { version = "=2.0.2", features = ["keccak"] }
criterion = { version = "=0.4.0", features = ["html_reports"] }
ark-bn254 = { version = "0.3.0" }
num-traits = "0.2.11"
hex-literal = "0.3.4"
tiny-keccak = { version = "2.0.2", features = ["keccak"] }
[features]
default = ["parallel"]
parallel = ["ark-ff/parallel"]
pmtree-ft = ["pmtree"]
[[bench]]
name = "merkle_tree_benchmark"
harness = false

View File

@@ -5,7 +5,3 @@ args = ["build", "--release"]
[tasks.test]
command = "cargo"
args = ["test", "--release"]
[tasks.bench]
command = "cargo"
args = ["bench"]

View File

@@ -1,15 +0,0 @@
# Utils crate
## Building
1. `cargo build`
## Testing
1. `cargo test`
## Benchmarking
1. `cargo bench`
To view the results of the benchmark, open the `target/criterion/report/index.html` file generated after the benchmark run

View File

@@ -1,119 +0,0 @@
use criterion::{criterion_group, criterion_main, Criterion};
use hex_literal::hex;
use tiny_keccak::{Hasher as _, Keccak};
use zerokit_utils::{
FullMerkleConfig, FullMerkleTree, Hasher, OptimalMerkleConfig, OptimalMerkleTree,
ZerokitMerkleTree,
};
#[derive(Clone, Copy, Eq, PartialEq)]
struct Keccak256;
impl Hasher for Keccak256 {
type Fr = [u8; 32];
fn default_leaf() -> Self::Fr {
[0; 32]
}
fn hash(inputs: &[Self::Fr]) -> Self::Fr {
let mut output = [0; 32];
let mut hasher = Keccak::v256();
for element in inputs {
hasher.update(element);
}
hasher.finalize(&mut output);
output
}
}
pub fn optimal_merkle_tree_benchmark(c: &mut Criterion) {
let mut tree =
OptimalMerkleTree::<Keccak256>::new(2, [0; 32], OptimalMerkleConfig::default()).unwrap();
let leaves = [
hex!("0000000000000000000000000000000000000000000000000000000000000001"),
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
];
c.bench_function("OptimalMerkleTree::set", |b| {
b.iter(|| {
tree.set(0, leaves[0]).unwrap();
})
});
c.bench_function("OptimalMerkleTree::delete", |b| {
b.iter(|| {
tree.delete(0).unwrap();
})
});
c.bench_function("OptimalMerkleTree::override_range", |b| {
b.iter(|| {
tree.override_range(0, leaves, [0, 1, 2, 3]).unwrap();
})
});
c.bench_function("OptimalMerkleTree::compute_root", |b| {
b.iter(|| {
tree.compute_root().unwrap();
})
});
c.bench_function("OptimalMerkleTree::get", |b| {
b.iter(|| {
tree.get(0).unwrap();
})
});
}
pub fn full_merkle_tree_benchmark(c: &mut Criterion) {
let mut tree =
FullMerkleTree::<Keccak256>::new(2, [0; 32], FullMerkleConfig::default()).unwrap();
let leaves = [
hex!("0000000000000000000000000000000000000000000000000000000000000001"),
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
];
c.bench_function("FullMerkleTree::set", |b| {
b.iter(|| {
tree.set(0, leaves[0]).unwrap();
})
});
c.bench_function("FullMerkleTree::delete", |b| {
b.iter(|| {
tree.delete(0).unwrap();
})
});
c.bench_function("FullMerkleTree::override_range", |b| {
b.iter(|| {
tree.override_range(0, leaves, [0, 1, 2, 3]).unwrap();
})
});
c.bench_function("FullMerkleTree::compute_root", |b| {
b.iter(|| {
tree.compute_root().unwrap();
})
});
c.bench_function("FullMerkleTree::get", |b| {
b.iter(|| {
tree.get(0).unwrap();
})
});
}
criterion_group!(
benches,
optimal_merkle_tree_benchmark,
full_merkle_tree_benchmark
);
criterion_main!(benches);

View File

@@ -3,8 +3,3 @@ pub use self::poseidon::*;
pub mod merkle_tree;
pub use self::merkle_tree::*;
#[cfg(feature = "pmtree-ft")]
pub mod pm_tree;
#[cfg(feature = "pmtree-ft")]
pub use self::pm_tree::*;

View File

@@ -1,373 +0,0 @@
use crate::merkle_tree::{FrOf, Hasher, ZerokitMerkleProof, ZerokitMerkleTree};
use color_eyre::{Report, Result};
use std::{
cmp::max,
fmt::Debug,
iter::{once, repeat, successors},
str::FromStr,
};
////////////////////////////////////////////////////////////
/// Full Merkle Tree Implementation
////////////////////////////////////////////////////////////
/// Merkle tree with all leaf and intermediate hashes stored
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct FullMerkleTree<H: Hasher> {
/// The depth of the tree, i.e. the number of levels from leaf to root
depth: usize,
/// The nodes cached from the empty part of the tree (where leaves are set to default).
/// Since the rightmost part of the tree is usually changed much later than its creation,
/// we can prove accumulation of elements in the leftmost part without initializing the full tree,
/// caching only a few intermediate nodes on the path to the root computed from default leaves
cached_nodes: Vec<H::Fr>,
/// The tree nodes
nodes: Vec<H::Fr>,
// The next available (i.e., never used) tree index. Equivalently, the number of leaves added to the tree
// (deletions leave next_index unchanged)
next_index: usize,
// metadata that an application may use to store additional information
metadata: Vec<u8>,
}
/// Element of a Merkle proof
#[derive(Clone, Copy, PartialEq, Eq)]
pub enum FullMerkleBranch<H: Hasher> {
/// Left branch taken, value is the right sibling hash.
Left(H::Fr),
/// Right branch taken, value is the left sibling hash.
Right(H::Fr),
}
/// Merkle proof path, bottom to top.
#[derive(Clone, PartialEq, Eq)]
pub struct FullMerkleProof<H: Hasher>(pub Vec<FullMerkleBranch<H>>);
#[derive(Default)]
pub struct FullMerkleConfig(());
impl FromStr for FullMerkleConfig {
type Err = Report;
fn from_str(_s: &str) -> Result<Self> {
Ok(FullMerkleConfig::default())
}
}
/// Implementations
impl<H: Hasher> ZerokitMerkleTree for FullMerkleTree<H>
where
H: Hasher,
{
type Proof = FullMerkleProof<H>;
type Hasher = H;
type Config = FullMerkleConfig;
fn default(depth: usize) -> Result<Self> {
FullMerkleTree::<H>::new(depth, Self::Hasher::default_leaf(), Self::Config::default())
}
/// Creates a new `MerkleTree`
/// depth - the height of the tree made only of hash nodes. 2^depth is the maximum number of leaf nodes
fn new(depth: usize, initial_leaf: FrOf<Self::Hasher>, _config: Self::Config) -> Result<Self> {
// Compute cache node values, leaf to root
let cached_nodes = successors(Some(initial_leaf), |prev| Some(H::hash(&[*prev, *prev])))
.take(depth + 1)
.collect::<Vec<_>>();
// Compute node values
let nodes = cached_nodes
.iter()
.rev()
.enumerate()
.flat_map(|(levels, hash)| repeat(hash).take(1 << levels))
.cloned()
.collect::<Vec<_>>();
debug_assert!(nodes.len() == (1 << (depth + 1)) - 1);
let next_index = 0;
Ok(Self {
depth,
cached_nodes,
nodes,
next_index,
metadata: Vec::new(),
})
}
fn close_db_connection(&mut self) -> Result<()> {
Ok(())
}
// Returns the depth of the tree
fn depth(&self) -> usize {
self.depth
}
// Returns the capacity of the tree, i.e. the maximum number of accumulatable leaves
fn capacity(&self) -> usize {
1 << self.depth
}
// Returns the total number of leaves set
fn leaves_set(&mut self) -> usize {
self.next_index
}
#[must_use]
// Returns the root of the tree
fn root(&self) -> FrOf<Self::Hasher> {
self.nodes[0]
}
// Sets a leaf at the specified tree index
fn set(&mut self, leaf: usize, hash: FrOf<Self::Hasher>) -> Result<()> {
self.set_range(leaf, once(hash))?;
self.next_index = max(self.next_index, leaf + 1);
Ok(())
}
// Get a leaf from the specified tree index
fn get(&self, leaf: usize) -> Result<FrOf<Self::Hasher>> {
if leaf >= self.capacity() {
return Err(Report::msg("leaf index out of bounds"));
}
Ok(self.nodes[self.capacity() + leaf - 1])
}
// Sets tree nodes, starting from start index
// Function proper of FullMerkleTree implementation
fn set_range<I: IntoIterator<Item = FrOf<Self::Hasher>>>(
&mut self,
start: usize,
hashes: I,
) -> Result<()> {
let index = self.capacity() + start - 1;
let mut count = 0;
// first count number of hashes, and check that they fit in the tree
// then insert into the tree
let hashes = hashes.into_iter().collect::<Vec<_>>();
if hashes.len() + start > self.capacity() {
return Err(Report::msg("provided hashes do not fit in the tree"));
}
hashes.into_iter().for_each(|hash| {
self.nodes[index + count] = hash;
count += 1;
});
if count != 0 {
self.update_nodes(index, index + (count - 1))?;
self.next_index = max(self.next_index, start + count);
}
Ok(())
}
fn override_range<I, J>(&mut self, start: usize, leaves: I, to_remove_indices: J) -> Result<()>
where
I: IntoIterator<Item = FrOf<Self::Hasher>>,
J: IntoIterator<Item = usize>,
{
let index = self.capacity() + start - 1;
let mut count = 0;
let leaves = leaves.into_iter().collect::<Vec<_>>();
let to_remove_indices = to_remove_indices.into_iter().collect::<Vec<_>>();
// first count number of hashes, and check that they fit in the tree
// then insert into the tree
if leaves.len() + start - to_remove_indices.len() > self.capacity() {
return Err(Report::msg("provided hashes do not fit in the tree"));
}
// remove leaves
for i in &to_remove_indices {
self.delete(*i)?;
}
// insert new leaves
for hash in leaves {
self.nodes[index + count] = hash;
count += 1;
}
if count != 0 {
self.update_nodes(index, index + (count - 1))?;
self.next_index = max(self.next_index, start + count - to_remove_indices.len());
}
Ok(())
}
// Sets a leaf at the next available index
fn update_next(&mut self, leaf: FrOf<Self::Hasher>) -> Result<()> {
self.set(self.next_index, leaf)?;
Ok(())
}
// Deletes a leaf at a certain index by setting it to its default value (next_index is not updated)
fn delete(&mut self, index: usize) -> Result<()> {
// We reset the leaf only if we previously set a leaf at that index
if index < self.next_index {
self.set(index, H::default_leaf())?;
}
Ok(())
}
// Computes a Merkle proof for the leaf at the specified index
fn proof(&self, leaf: usize) -> Result<FullMerkleProof<H>> {
if leaf >= self.capacity() {
return Err(Report::msg("index exceeds set size"));
}
let mut index = self.capacity() + leaf - 1;
let mut path = Vec::with_capacity(self.depth + 1);
while let Some(parent) = self.parent(index) {
// Add proof for node at index to parent
path.push(match index & 1 {
1 => FullMerkleBranch::Left(self.nodes[index + 1]),
0 => FullMerkleBranch::Right(self.nodes[index - 1]),
_ => unreachable!(),
});
index = parent;
}
Ok(FullMerkleProof(path))
}
// Verifies a Merkle proof with respect to the input leaf and the tree root
fn verify(&self, hash: &FrOf<Self::Hasher>, proof: &FullMerkleProof<H>) -> Result<bool> {
Ok(proof.compute_root_from(hash) == self.root())
}
fn compute_root(&mut self) -> Result<FrOf<Self::Hasher>> {
Ok(self.root())
}
fn set_metadata(&mut self, metadata: &[u8]) -> Result<()> {
self.metadata = metadata.to_vec();
Ok(())
}
fn metadata(&self) -> Result<Vec<u8>> {
Ok(self.metadata.to_vec())
}
}
impl<H: Hasher> FullMerkleTree<H>
where
H: Hasher,
{
// Utilities for updating the tree nodes
/// For a given node index, return the parent node index
/// Returns None if there is no parent (root node)
fn parent(&self, index: usize) -> Option<usize> {
if index == 0 {
None
} else {
Some(((index + 1) >> 1) - 1)
}
}
/// For a given node index, return index of the first (left) child.
fn first_child(&self, index: usize) -> usize {
(index << 1) + 1
}
fn levels(&self, index: usize) -> usize {
// `n.next_power_of_two()` will return `n` iff `n` is a power of two.
// The extra offset corrects this.
(index + 2).next_power_of_two().trailing_zeros() as usize - 1
}
fn update_nodes(&mut self, start: usize, end: usize) -> Result<()> {
if self.levels(start) != self.levels(end) {
return Err(Report::msg("self.levels(start) != self.levels(end)"));
}
if let (Some(start), Some(end)) = (self.parent(start), self.parent(end)) {
for parent in start..=end {
let child = self.first_child(parent);
self.nodes[parent] = H::hash(&[self.nodes[child], self.nodes[child + 1]]);
}
self.update_nodes(start, end)?;
}
Ok(())
}
}
impl<H: Hasher> ZerokitMerkleProof for FullMerkleProof<H> {
type Index = u8;
type Hasher = H;
#[must_use]
// Returns the length of a Merkle proof
fn length(&self) -> usize {
self.0.len()
}
/// Computes the leaf index corresponding to a Merkle proof
#[must_use]
fn leaf_index(&self) -> usize {
self.0.iter().rev().fold(0, |index, branch| match branch {
FullMerkleBranch::Left(_) => index << 1,
FullMerkleBranch::Right(_) => (index << 1) + 1,
})
}
#[must_use]
/// Returns the path elements forming a Merkle proof
fn get_path_elements(&self) -> Vec<FrOf<Self::Hasher>> {
self.0
.iter()
.map(|x| match x {
FullMerkleBranch::Left(value) | FullMerkleBranch::Right(value) => *value,
})
.collect()
}
/// Returns the path indexes forming a Merkle proof
#[must_use]
fn get_path_index(&self) -> Vec<Self::Index> {
self.0
.iter()
.map(|branch| match branch {
FullMerkleBranch::Left(_) => 0,
FullMerkleBranch::Right(_) => 1,
})
.collect()
}
/// Computes the Merkle root by iteratively hashing a Merkle proof with a given input leaf
#[must_use]
fn compute_root_from(&self, hash: &FrOf<Self::Hasher>) -> FrOf<Self::Hasher> {
self.0.iter().fold(*hash, |hash, branch| match branch {
FullMerkleBranch::Left(sibling) => H::hash(&[hash, *sibling]),
FullMerkleBranch::Right(sibling) => H::hash(&[*sibling, hash]),
})
}
}
// Debug formatting for printing a (Full) Merkle Proof Branch
impl<H> Debug for FullMerkleBranch<H>
where
H: Hasher,
H::Fr: Debug,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Left(arg0) => f.debug_tuple("Left").field(arg0).finish(),
Self::Right(arg0) => f.debug_tuple("Right").field(arg0).finish(),
}
}
}
// Debug formatting for printing a (Full) Merkle Proof
impl<H> Debug for FullMerkleProof<H>
where
H: Hasher,
H::Fr: Debug,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_tuple("Proof").field(&self.0).finish()
}
}
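`parent` and `first_child` above implement the usual flat-array embedding of a binary tree: root at index 0, children of node `i` at `2i + 1` and `2i + 2`, with the `2^depth` leaves occupying the last slots starting at `capacity - 1`. The index arithmetic in isolation:

```rust
// Flat-array binary tree indexing as used by FullMerkleTree above:
// root at 0, children of node i at 2i+1 and 2i+2.
fn parent(index: usize) -> Option<usize> {
    (index != 0).then(|| ((index + 1) >> 1) - 1)
}

fn first_child(index: usize) -> usize {
    (index << 1) + 1
}

fn main() {
    assert_eq!(first_child(0), 1);
    assert_eq!(parent(1), Some(0));
    assert_eq!(parent(2), Some(0));
    // For depth 2, the 4 leaves live at indices 3..=6 (offset capacity - 1 = 3).
    assert_eq!(parent(3), Some(1));
}
```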

View File

@@ -13,15 +13,21 @@
//! * Disk based storage backend (using mmapped files should be easy)
//! * Implement serialization for tree and Merkle proof
use std::str::FromStr;
#![allow(dead_code)]
use color_eyre::Result;
use std::collections::HashMap;
use std::io;
use std::{
cmp::max,
fmt::Debug,
iter::{once, repeat, successors},
};
/// In the Hasher trait we define the node type, the default leaf
/// and the hash function used to initialize a Merkle Tree implementation
pub trait Hasher {
/// Type of the leaf and tree node
type Fr: Clone + Copy + Eq;
type Fr: Copy + Clone + Eq;
/// Returns the default tree leaf
fn default_leaf() -> Self::Fr;
@@ -30,51 +36,539 @@ pub trait Hasher {
fn hash(input: &[Self::Fr]) -> Self::Fr;
}
pub type FrOf<H> = <H as Hasher>::Fr;
////////////////////////////////////////////////////////////
/// Optimal Merkle Tree Implementation
////////////////////////////////////////////////////////////
/// In the ZerokitMerkleTree trait we define the methods that a Merkle tree implementation is required to provide;
/// implementations include OptimalMerkleTree and FullMerkleTree
pub trait ZerokitMerkleTree {
type Proof: ZerokitMerkleProof;
type Hasher: Hasher;
type Config: Default + FromStr;
/// The Merkle tree structure
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct OptimalMerkleTree<H>
where
H: Hasher,
{
/// The depth of the tree, i.e. the number of levels from leaf to root
depth: usize,
fn default(depth: usize) -> Result<Self>
where
Self: Sized;
fn new(depth: usize, default_leaf: FrOf<Self::Hasher>, config: Self::Config) -> Result<Self>
where
Self: Sized;
fn depth(&self) -> usize;
fn capacity(&self) -> usize;
fn leaves_set(&mut self) -> usize;
fn root(&self) -> FrOf<Self::Hasher>;
fn compute_root(&mut self) -> Result<FrOf<Self::Hasher>>;
fn set(&mut self, index: usize, leaf: FrOf<Self::Hasher>) -> Result<()>;
fn set_range<I>(&mut self, start: usize, leaves: I) -> Result<()>
where
I: IntoIterator<Item = FrOf<Self::Hasher>>;
fn get(&self, index: usize) -> Result<FrOf<Self::Hasher>>;
fn override_range<I, J>(&mut self, start: usize, leaves: I, to_remove_indices: J) -> Result<()>
where
I: IntoIterator<Item = FrOf<Self::Hasher>>,
J: IntoIterator<Item = usize>;
fn update_next(&mut self, leaf: FrOf<Self::Hasher>) -> Result<()>;
fn delete(&mut self, index: usize) -> Result<()>;
fn proof(&self, index: usize) -> Result<Self::Proof>;
fn verify(&self, leaf: &FrOf<Self::Hasher>, witness: &Self::Proof) -> Result<bool>;
fn set_metadata(&mut self, metadata: &[u8]) -> Result<()>;
fn metadata(&self) -> Result<Vec<u8>>;
fn close_db_connection(&mut self) -> Result<()>;
/// The nodes cached from the empty part of the tree (where leaves are set to default).
/// Since the rightmost part of the tree is usually changed much later than its creation,
/// we can prove accumulation of elements in the leftmost part without initializing the full tree,
/// caching only a few intermediate nodes on the path to the root computed from default leaves
cached_nodes: Vec<H::Fr>,
/// The tree nodes
nodes: HashMap<(usize, usize), H::Fr>,
// The next available (i.e., never used) tree index. Equivalently, the number of leaves added to the tree
// (deletions leave next_index unchanged)
next_index: usize,
}
pub trait ZerokitMerkleProof {
type Index;
type Hasher: Hasher;
/// The Merkle proof
/// Contains a vector of (node, branch_index) that defines the proof path elements and branch direction (1 or 0)
#[derive(Clone, PartialEq, Eq)]
pub struct OptimalMerkleProof<H: Hasher>(pub Vec<(H::Fr, u8)>);
fn length(&self) -> usize;
fn leaf_index(&self) -> usize;
fn get_path_elements(&self) -> Vec<FrOf<Self::Hasher>>;
fn get_path_index(&self) -> Vec<Self::Index>;
fn compute_root_from(&self, leaf: &FrOf<Self::Hasher>) -> FrOf<Self::Hasher>;
/// Implementations
impl<H: Hasher> OptimalMerkleTree<H> {
pub fn default(depth: usize) -> Self {
OptimalMerkleTree::<H>::new(depth, H::default_leaf())
}
/// Creates a new `MerkleTree`
/// depth - the height of the tree made only of hash nodes. 2^depth is the maximum number of leaf nodes
pub fn new(depth: usize, default_leaf: H::Fr) -> Self {
let mut cached_nodes: Vec<H::Fr> = Vec::with_capacity(depth + 1);
cached_nodes.push(default_leaf);
for i in 0..depth {
cached_nodes.push(H::hash(&[cached_nodes[i]; 2]));
}
cached_nodes.reverse();
OptimalMerkleTree {
cached_nodes: cached_nodes.clone(),
depth: depth,
nodes: HashMap::new(),
next_index: 0,
}
}
// Returns the depth of the tree
pub fn depth(&self) -> usize {
self.depth
}
// Returns the capacity of the tree, i.e. the maximum number of accumulatable leaves
pub fn capacity(&self) -> usize {
1 << self.depth
}
// Returns the total number of leaves set
pub fn leaves_set(&mut self) -> usize {
self.next_index
}
#[must_use]
// Returns the root of the tree
pub fn root(&self) -> H::Fr {
self.get_node(0, 0)
}
// Sets a leaf at the specified tree index
pub fn set(&mut self, index: usize, leaf: H::Fr) -> io::Result<()> {
if index >= self.capacity() {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"index exceeds set size",
));
}
self.nodes.insert((self.depth, index), leaf);
self.recalculate_from(index);
self.next_index = max(self.next_index, index + 1);
Ok(())
}
// Sets multiple leaves from the specified tree index
pub fn set_range<I: IntoIterator<Item = H::Fr>>(
&mut self,
start: usize,
leaves: I,
) -> io::Result<()> {
let leaves = leaves.into_iter().collect::<Vec<_>>();
// check if the range is valid
if start + leaves.len() > self.capacity() {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"provided range exceeds set size",
));
}
for (i, leaf) in leaves.iter().enumerate() {
self.nodes.insert((self.depth, start + i), *leaf);
self.recalculate_from(start + i);
}
self.next_index = max(self.next_index, start + leaves.len());
Ok(())
}
// Sets a leaf at the next available index
pub fn update_next(&mut self, leaf: H::Fr) -> io::Result<()> {
self.set(self.next_index, leaf)?;
Ok(())
}
// Deletes a leaf at a certain index by setting it to its default value (next_index is not updated)
pub fn delete(&mut self, index: usize) -> io::Result<()> {
// We reset the leaf only if we previously set a leaf at that index
if index < self.next_index {
self.set(index, H::default_leaf())?;
}
Ok(())
}
// Computes a Merkle proof for the leaf at the specified index
pub fn proof(&self, index: usize) -> io::Result<OptimalMerkleProof<H>> {
if index >= self.capacity() {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"index exceeds set size",
));
}
let mut witness = Vec::<(H::Fr, u8)>::with_capacity(self.depth);
let mut i = index;
let mut depth = self.depth;
loop {
i ^= 1;
witness.push((self.get_node(depth, i), (1 - (i & 1)).try_into().unwrap()));
i >>= 1;
depth -= 1;
if depth == 0 {
break;
}
}
assert_eq!(i, 0);
Ok(OptimalMerkleProof(witness))
}
// Verifies a Merkle proof with respect to the input leaf and the tree root
pub fn verify(&self, leaf: &H::Fr, witness: &OptimalMerkleProof<H>) -> io::Result<bool> {
if witness.length() != self.depth {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"witness length doesn't match tree depth",
));
}
let expected_root = witness.compute_root_from(leaf);
Ok(expected_root.eq(&self.root()))
}
// Utilities for updating the tree nodes
fn get_node(&self, depth: usize, index: usize) -> H::Fr {
let node = *self
.nodes
.get(&(depth, index))
.unwrap_or_else(|| &self.cached_nodes[depth]);
node
}
fn get_leaf(&self, index: usize) -> H::Fr {
self.get_node(self.depth, index)
}
fn hash_couple(&mut self, depth: usize, index: usize) -> H::Fr {
let b = index & !1;
H::hash(&[self.get_node(depth, b), self.get_node(depth, b + 1)])
}
fn recalculate_from(&mut self, index: usize) {
let mut i = index;
let mut depth = self.depth;
loop {
let h = self.hash_couple(depth, i);
i >>= 1;
depth -= 1;
self.nodes.insert((depth, i), h);
if depth == 0 {
break;
}
}
assert_eq!(depth, 0);
assert_eq!(i, 0);
}
}
impl<H: Hasher> OptimalMerkleProof<H> {
#[must_use]
// Returns the length of a Merkle proof
pub fn length(&self) -> usize {
self.0.len()
}
/// Computes the leaf index corresponding to a Merkle proof
#[must_use]
pub fn leaf_index(&self) -> usize {
// In the current implementation, the path indexes in a proof correspond to the binary representation of the leaf index
let mut binary_repr = self.get_path_index();
binary_repr.reverse();
binary_repr
.into_iter()
.fold(0, |acc, digit| (acc << 1) + usize::from(digit))
}
#[must_use]
/// Returns the path elements forming a Merkle proof
pub fn get_path_elements(&self) -> Vec<H::Fr> {
self.0.iter().map(|x| x.0).collect()
}
/// Returns the path indexes forming a Merkle proof
#[must_use]
pub fn get_path_index(&self) -> Vec<u8> {
self.0.iter().map(|x| x.1).collect()
}
#[must_use]
/// Computes the Merkle root by iteratively hashing a Merkle proof with a given input leaf
pub fn compute_root_from(&self, leaf: &H::Fr) -> H::Fr {
let mut acc: H::Fr = *leaf;
for w in self.0.iter() {
if w.1 == 0 {
acc = H::hash(&[acc, w.0]);
} else {
acc = H::hash(&[w.0, acc]);
}
}
acc
}
}
// Debug formatting for printing a (Optimal) Merkle Proof
impl<H> Debug for OptimalMerkleProof<H>
where
H: Hasher,
H::Fr: Debug,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_tuple("Proof").field(&self.0).finish()
}
}
////////////////////////////////////////////////////////////
/// Full Merkle Tree Implementation
////////////////////////////////////////////////////////////
/// Merkle tree with all leaf and intermediate hashes stored
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct FullMerkleTree<H: Hasher> {
/// The depth of the tree, i.e. the number of levels from leaf to root
depth: usize,
/// The nodes cached from the empty part of the tree (where leaves are set to default).
/// Since the rightmost part of the tree is usually changed much later than its creation,
/// we can prove accumulation of elements in the leftmost part without initializing the full tree,
/// caching only a few intermediate nodes on the path to the root computed from default leaves
cached_nodes: Vec<H::Fr>,
/// The tree nodes
nodes: Vec<H::Fr>,
// The next available (i.e., never used) tree index. Equivalently, the number of leaves added to the tree
// (deletions leave next_index unchanged)
next_index: usize,
}
/// Element of a Merkle proof
#[derive(Clone, Copy, PartialEq, Eq)]
pub enum FullMerkleBranch<H: Hasher> {
/// Left branch taken, value is the right sibling hash.
Left(H::Fr),
/// Right branch taken, value is the left sibling hash.
Right(H::Fr),
}
/// Merkle proof path, bottom to top.
#[derive(Clone, PartialEq, Eq)]
pub struct FullMerkleProof<H: Hasher>(pub Vec<FullMerkleBranch<H>>);
/// Implementations
impl<H: Hasher> FullMerkleTree<H> {
pub fn default(depth: usize) -> Self {
FullMerkleTree::<H>::new(depth, H::default_leaf())
}
/// Creates a new `MerkleTree`
/// depth - the height of the tree made only of hash nodes. 2^depth is the maximum number of leaf nodes
pub fn new(depth: usize, initial_leaf: H::Fr) -> Self {
// Compute cache node values, leaf to root
let cached_nodes = successors(Some(initial_leaf), |prev| Some(H::hash(&[*prev, *prev])))
.take(depth + 1)
.collect::<Vec<_>>();
// Compute node values
let nodes = cached_nodes
.iter()
.rev()
.enumerate()
.flat_map(|(levels, hash)| repeat(hash).take(1 << levels))
.cloned()
.collect::<Vec<_>>();
debug_assert!(nodes.len() == (1 << (depth + 1)) - 1);
let next_index = 0;
Self {
depth,
cached_nodes,
nodes,
next_index,
}
}
// Returns the depth of the tree
pub fn depth(&self) -> usize {
self.depth
}
// Returns the capacity of the tree, i.e. the maximum number of accumulatable leaves
pub fn capacity(&self) -> usize {
1 << self.depth
}
// Returns the total number of leaves set
pub fn leaves_set(&mut self) -> usize {
self.next_index
}
#[must_use]
// Returns the root of the tree
pub fn root(&self) -> H::Fr {
self.nodes[0]
}
// Sets a leaf at the specified tree index
pub fn set(&mut self, leaf: usize, hash: H::Fr) -> io::Result<()> {
self.set_range(leaf, once(hash))?;
self.next_index = max(self.next_index, leaf + 1);
Ok(())
}
// Sets tree nodes, starting from start index
// Function proper of FullMerkleTree implementation
fn set_range<I: IntoIterator<Item = H::Fr>>(
&mut self,
start: usize,
hashes: I,
) -> io::Result<()> {
let index = self.capacity() + start - 1;
let mut count = 0;
// first count number of hashes, and check that they fit in the tree
// then insert into the tree
let hashes = hashes.into_iter().collect::<Vec<_>>();
if hashes.len() + start > self.capacity() {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"provided hashes do not fit in the tree",
));
}
hashes.into_iter().for_each(|hash| {
self.nodes[index + count] = hash;
count += 1;
});
if count != 0 {
self.update_nodes(index, index + (count - 1));
self.next_index = max(self.next_index, start + count);
}
Ok(())
}
// Sets a leaf at the next available index
pub fn update_next(&mut self, leaf: H::Fr) -> io::Result<()> {
self.set(self.next_index, leaf)?;
Ok(())
}
// Deletes a leaf at a certain index by setting it to its default value (next_index is not updated)
pub fn delete(&mut self, index: usize) -> io::Result<()> {
// We reset the leaf only if we previously set a leaf at that index
if index < self.next_index {
self.set(index, H::default_leaf())?;
}
Ok(())
}
// Computes a Merkle proof for the leaf at the specified index
pub fn proof(&self, leaf: usize) -> io::Result<FullMerkleProof<H>> {
if leaf >= self.capacity() {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"index exceeds set size",
));
}
let mut index = self.capacity() + leaf - 1;
let mut path = Vec::with_capacity(self.depth + 1);
while let Some(parent) = self.parent(index) {
// Add proof for node at index to parent
path.push(match index & 1 {
1 => FullMerkleBranch::Left(self.nodes[index + 1]),
0 => FullMerkleBranch::Right(self.nodes[index - 1]),
_ => unreachable!(),
});
index = parent;
}
Ok(FullMerkleProof(path))
}
// Verifies a Merkle proof with respect to the input leaf and the tree root
pub fn verify(&self, hash: &H::Fr, proof: &FullMerkleProof<H>) -> io::Result<bool> {
Ok(proof.compute_root_from(hash) == self.root())
}
// Utilities for updating the tree nodes
/// For a given node index, return the parent node index
/// Returns None if there is no parent (root node)
fn parent(&self, index: usize) -> Option<usize> {
if index == 0 {
None
} else {
Some(((index + 1) >> 1) - 1)
}
}
/// For a given node index, return index of the first (left) child.
fn first_child(&self, index: usize) -> usize {
(index << 1) + 1
}
fn levels(&self, index: usize) -> usize {
// `n.next_power_of_two()` will return `n` iff `n` is a power of two.
// The extra offset corrects this.
(index + 2).next_power_of_two().trailing_zeros() as usize - 1
}
fn update_nodes(&mut self, start: usize, end: usize) {
debug_assert_eq!(self.levels(start), self.levels(end));
if let (Some(start), Some(end)) = (self.parent(start), self.parent(end)) {
for parent in start..=end {
let child = self.first_child(parent);
self.nodes[parent] = H::hash(&[self.nodes[child], self.nodes[child + 1]]);
}
self.update_nodes(start, end);
}
}
}
impl<H: Hasher> FullMerkleProof<H> {
#[must_use]
// Returns the length of a Merkle proof
pub fn length(&self) -> usize {
self.0.len()
}
/// Computes the leaf index corresponding to a Merkle proof
#[must_use]
pub fn leaf_index(&self) -> usize {
self.0.iter().rev().fold(0, |index, branch| match branch {
FullMerkleBranch::Left(_) => index << 1,
FullMerkleBranch::Right(_) => (index << 1) + 1,
})
}
#[must_use]
/// Returns the path elements forming a Merkle proof
pub fn get_path_elements(&self) -> Vec<H::Fr> {
self.0
.iter()
.map(|x| match x {
FullMerkleBranch::Left(value) | FullMerkleBranch::Right(value) => *value,
})
.collect()
}
/// Returns the path indexes forming a Merkle proof
#[must_use]
pub fn get_path_index(&self) -> Vec<u8> {
self.0
.iter()
.map(|branch| match branch {
FullMerkleBranch::Left(_) => 0,
FullMerkleBranch::Right(_) => 1,
})
.collect()
}
/// Computes the Merkle root by iteratively hashing a Merkle proof with a given input leaf
#[must_use]
pub fn compute_root_from(&self, hash: &H::Fr) -> H::Fr {
self.0.iter().fold(*hash, |hash, branch| match branch {
FullMerkleBranch::Left(sibling) => H::hash(&[hash, *sibling]),
FullMerkleBranch::Right(sibling) => H::hash(&[*sibling, hash]),
})
}
}
// Debug formatting for printing a (Full) Merkle Proof Branch
impl<H> Debug for FullMerkleBranch<H>
where
H: Hasher,
H::Fr: Debug,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Left(arg0) => f.debug_tuple("Left").field(arg0).finish(),
Self::Right(arg0) => f.debug_tuple("Right").field(arg0).finish(),
}
}
}
// Debug formatting for printing a (Full) Merkle Proof
impl<H> Debug for FullMerkleProof<H>
where
H: Hasher,
H::Fr: Debug,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_tuple("Proof").field(&self.0).finish()
}
}
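Taken together, the trait-based API above is used by implementing `Hasher` and constructing a tree with a config value. A minimal end-to-end sketch against the `zerokit_utils` exports referenced elsewhere in this changeset; the XOR hasher is a toy stand-in for Keccak256 or Poseidon, illustrative only:

```rust
use zerokit_utils::{
    FullMerkleConfig, FullMerkleTree, Hasher, ZerokitMerkleProof, ZerokitMerkleTree,
};

#[derive(Clone, Copy, PartialEq, Eq)]
struct XorHasher;

impl Hasher for XorHasher {
    type Fr = [u8; 32];
    fn default_leaf() -> Self::Fr {
        [0; 32]
    }
    fn hash(inputs: &[Self::Fr]) -> Self::Fr {
        // Toy hash: XOR all inputs together.
        let mut out = [0; 32];
        for input in inputs {
            for (o, i) in out.iter_mut().zip(input) {
                *o ^= *i;
            }
        }
        out
    }
}

fn main() -> color_eyre::Result<()> {
    let mut tree = FullMerkleTree::<XorHasher>::new(2, [0; 32], FullMerkleConfig::default())?;
    tree.set(0, [1; 32])?;
    let proof = tree.proof(0)?;
    assert_eq!(proof.leaf_index(), 0);
    assert!(tree.verify(&[1; 32], &proof)?);
    Ok(())
}
```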

View File

@@ -1,7 +1,2 @@
pub mod full_merkle_tree;
#[allow(clippy::module_inception)]
pub mod merkle_tree;
pub mod optimal_merkle_tree;
pub use self::full_merkle_tree::*;
pub use self::merkle_tree::*;
pub use self::optimal_merkle_tree::*;

View File

@@ -1,343 +0,0 @@
use crate::merkle_tree::{Hasher, ZerokitMerkleProof, ZerokitMerkleTree};
use crate::FrOf;
use color_eyre::{Report, Result};
use std::collections::HashMap;
use std::str::FromStr;
use std::{cmp::max, fmt::Debug};
////////////////////////////////////////////////////////////
/// Optimal Merkle Tree Implementation
////////////////////////////////////////////////////////////
/// The Merkle tree structure
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct OptimalMerkleTree<H>
where
H: Hasher,
{
/// The depth of the tree, i.e. the number of levels from leaf to root
depth: usize,
/// The nodes cached from the empty part of the tree (where leaves are set to default).
/// Since the rightmost part of the tree is usually changed much later than its creation,
/// we can prove accumulation of elements in the leftmost part without initializing the full tree,
/// caching only a few intermediate nodes on the path to the root computed from default leaves
cached_nodes: Vec<H::Fr>,
/// The tree nodes
nodes: HashMap<(usize, usize), H::Fr>,
// The next available (i.e., never used) tree index. Equivalently, the number of leaves added to the tree
// (deletions leave next_index unchanged)
next_index: usize,
// metadata that an application may use to store additional information
metadata: Vec<u8>,
}
/// The Merkle proof
/// Contains a vector of (node, branch_index) that defines the proof path elements and branch direction (1 or 0)
#[derive(Clone, PartialEq, Eq)]
pub struct OptimalMerkleProof<H: Hasher>(pub Vec<(H::Fr, u8)>);
#[derive(Default)]
pub struct OptimalMerkleConfig(());
impl FromStr for OptimalMerkleConfig {
type Err = Report;
fn from_str(_s: &str) -> Result<Self> {
Ok(OptimalMerkleConfig::default())
}
}
/// Implementations
impl<H: Hasher> ZerokitMerkleTree for OptimalMerkleTree<H>
where
H: Hasher,
{
type Proof = OptimalMerkleProof<H>;
type Hasher = H;
type Config = OptimalMerkleConfig;
fn default(depth: usize) -> Result<Self> {
OptimalMerkleTree::<H>::new(depth, H::default_leaf(), Self::Config::default())
}
/// Creates a new `MerkleTree`
/// depth - the height of the tree made only of hash nodes. 2^depth is the maximum number of leaf nodes
fn new(depth: usize, default_leaf: H::Fr, _config: Self::Config) -> Result<Self> {
let mut cached_nodes: Vec<H::Fr> = Vec::with_capacity(depth + 1);
cached_nodes.push(default_leaf);
for i in 0..depth {
cached_nodes.push(H::hash(&[cached_nodes[i]; 2]));
}
cached_nodes.reverse();
Ok(OptimalMerkleTree {
cached_nodes: cached_nodes.clone(),
depth,
nodes: HashMap::new(),
next_index: 0,
metadata: Vec::new(),
})
}
fn close_db_connection(&mut self) -> Result<()> {
Ok(())
}
// Returns the depth of the tree
fn depth(&self) -> usize {
self.depth
}
// Returns the capacity of the tree, i.e. the maximum number of accumulatable leaves
fn capacity(&self) -> usize {
1 << self.depth
}
// Returns the total number of leaves set
fn leaves_set(&mut self) -> usize {
self.next_index
}
#[must_use]
// Returns the root of the tree
fn root(&self) -> H::Fr {
self.get_node(0, 0)
}
// Sets a leaf at the specified tree index
fn set(&mut self, index: usize, leaf: H::Fr) -> Result<()> {
if index >= self.capacity() {
return Err(Report::msg("index exceeds set size"));
}
self.nodes.insert((self.depth, index), leaf);
self.recalculate_from(index)?;
self.next_index = max(self.next_index, index + 1);
Ok(())
}
// Get a leaf from the specified tree index
fn get(&self, index: usize) -> Result<H::Fr> {
if index >= self.capacity() {
return Err(Report::msg("index exceeds set size"));
}
Ok(self.get_node(self.depth, index))
}
// Sets multiple leaves from the specified tree index
fn set_range<I: IntoIterator<Item = H::Fr>>(&mut self, start: usize, leaves: I) -> Result<()> {
let leaves = leaves.into_iter().collect::<Vec<_>>();
// check if the range is valid
if start + leaves.len() > self.capacity() {
return Err(Report::msg("provided range exceeds set size"));
}
for (i, leaf) in leaves.iter().enumerate() {
self.nodes.insert((self.depth, start + i), *leaf);
self.recalculate_from(start + i)?;
}
self.next_index = max(self.next_index, start + leaves.len());
Ok(())
}
fn override_range<I, J>(&mut self, start: usize, leaves: I, to_remove_indices: J) -> Result<()>
where
I: IntoIterator<Item = FrOf<Self::Hasher>>,
J: IntoIterator<Item = usize>,
{
let leaves = leaves.into_iter().collect::<Vec<_>>();
let to_remove_indices = to_remove_indices.into_iter().collect::<Vec<_>>();
// check if the range is valid
if leaves.len() + start - to_remove_indices.len() > self.capacity() {
return Err(Report::msg("provided range exceeds set size"));
}
// remove leaves
for i in &to_remove_indices {
self.delete(*i)?;
}
// add leaves
for (i, leaf) in leaves.iter().enumerate() {
self.nodes.insert((self.depth, start + i), *leaf);
self.recalculate_from(start + i)?;
}
self.next_index = max(
self.next_index,
start + leaves.len() - to_remove_indices.len(),
);
Ok(())
}
// Sets a leaf at the next available index
fn update_next(&mut self, leaf: H::Fr) -> Result<()> {
self.set(self.next_index, leaf)?;
Ok(())
}
// Deletes a leaf at a certain index by setting it to its default value (next_index is not updated)
fn delete(&mut self, index: usize) -> Result<()> {
// We reset the leaf only if we previously set a leaf at that index
if index < self.next_index {
self.set(index, H::default_leaf())?;
}
Ok(())
}
// Computes a Merkle proof for the leaf at the specified index
fn proof(&self, index: usize) -> Result<Self::Proof> {
if index >= self.capacity() {
return Err(Report::msg("index exceeds set size"));
}
let mut witness = Vec::<(H::Fr, u8)>::with_capacity(self.depth);
let mut i = index;
let mut depth = self.depth;
loop {
i ^= 1;
witness.push((self.get_node(depth, i), (1 - (i & 1)).try_into().unwrap()));
i >>= 1;
depth -= 1;
if depth == 0 {
break;
}
}
if i != 0 {
Err(Report::msg("i != 0"))
} else {
Ok(OptimalMerkleProof(witness))
}
}
// Verifies a Merkle proof with respect to the input leaf and the tree root
fn verify(&self, leaf: &H::Fr, witness: &Self::Proof) -> Result<bool> {
if witness.length() != self.depth {
return Err(Report::msg("witness length doesn't match tree depth"));
}
let expected_root = witness.compute_root_from(leaf);
Ok(expected_root.eq(&self.root()))
}
fn compute_root(&mut self) -> Result<FrOf<Self::Hasher>> {
self.recalculate_from(0)?;
Ok(self.root())
}
fn set_metadata(&mut self, metadata: &[u8]) -> Result<()> {
self.metadata = metadata.to_vec();
Ok(())
}
fn metadata(&self) -> Result<Vec<u8>> {
Ok(self.metadata.to_vec())
}
}
impl<H: Hasher> OptimalMerkleTree<H>
where
H: Hasher,
{
// Utilities for updating the tree nodes
fn get_node(&self, depth: usize, index: usize) -> H::Fr {
let node = *self
.nodes
.get(&(depth, index))
.unwrap_or_else(|| &self.cached_nodes[depth]);
node
}
pub fn get_leaf(&self, index: usize) -> H::Fr {
self.get_node(self.depth, index)
}
fn hash_couple(&mut self, depth: usize, index: usize) -> H::Fr {
let b = index & !1;
H::hash(&[self.get_node(depth, b), self.get_node(depth, b + 1)])
}
fn recalculate_from(&mut self, index: usize) -> Result<()> {
let mut i = index;
let mut depth = self.depth;
loop {
let h = self.hash_couple(depth, i);
i >>= 1;
depth -= 1;
self.nodes.insert((depth, i), h);
if depth == 0 {
break;
}
}
if depth != 0 {
return Err(Report::msg("did not reach the depth"));
}
if i != 0 {
return Err(Report::msg("did not go through all indexes"));
}
Ok(())
}
}
impl<H: Hasher> ZerokitMerkleProof for OptimalMerkleProof<H>
where
H: Hasher,
{
type Index = u8;
type Hasher = H;
#[must_use]
// Returns the length of a Merkle proof
fn length(&self) -> usize {
self.0.len()
}
/// Computes the leaf index corresponding to a Merkle proof
#[must_use]
fn leaf_index(&self) -> usize {
// In the current implementation, the path indexes in a proof correspond to the binary representation of the leaf index
let mut binary_repr = self.get_path_index();
binary_repr.reverse();
binary_repr
.into_iter()
.fold(0, |acc, digit| (acc << 1) + usize::from(digit))
}
#[must_use]
/// Returns the path elements forming a Merkle proof
fn get_path_elements(&self) -> Vec<H::Fr> {
self.0.iter().map(|x| x.0).collect()
}
/// Returns the path indexes forming a Merkle proof
#[must_use]
fn get_path_index(&self) -> Vec<u8> {
self.0.iter().map(|x| x.1).collect()
}
#[must_use]
/// Computes the Merkle root by iteratively hashing a Merkle proof with a given input leaf
fn compute_root_from(&self, leaf: &H::Fr) -> H::Fr {
let mut acc: H::Fr = *leaf;
for w in self.0.iter() {
if w.1 == 0 {
acc = H::hash(&[acc, w.0]);
} else {
acc = H::hash(&[w.0, acc]);
}
}
acc
}
}
// Debug formatting for printing a (Optimal) Merkle Proof
impl<H> Debug for OptimalMerkleProof<H>
where
H: Hasher,
H::Fr: Debug,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_tuple("Proof").field(&self.0).finish()
}
}
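The `leaf_index` computation above exploits the fact that the path indexes, stored bottom-to-top, spell out the leaf index in binary with the least significant bit first; reversing them and folding reconstructs the index. The fold in isolation:

```rust
// Path indexes are stored bottom-to-top; reversed, they are the binary
// representation of the leaf index, most significant bit first.
fn leaf_index(path_index: &[u8]) -> usize {
    path_index
        .iter()
        .rev()
        .fold(0, |acc, digit| (acc << 1) + usize::from(*digit))
}

fn main() {
    // Bottom-to-top [1, 1, 0] reverses to [0, 1, 1] = 0b011 = 3.
    assert_eq!(leaf_index(&[1, 1, 0]), 3);
}
```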

View File

@@ -1,4 +0,0 @@
pub mod sled_adapter;
pub use self::sled_adapter::*;
pub use pmtree;
pub use sled::*;

View File

@@ -1,105 +0,0 @@
use pmtree::*;
use sled::Db as Sled;
use std::collections::HashMap;
use std::thread;
use std::time::Duration;
pub struct SledDB(Sled);
impl SledDB {
fn new_with_tries(config: <SledDB as Database>::Config, tries: u32) -> PmtreeResult<Self> {
// If we've tried more than 10 times, we give up and return an error.
if tries >= 10 {
return Err(PmtreeErrorKind::DatabaseError(
DatabaseErrorKind::CustomError(format!(
"Cannot create database: exceeded maximum retry attempts. {config:#?}"
)),
));
}
match config.open() {
Ok(db) => Ok(SledDB(db)),
Err(e) if e.to_string().contains("WouldBlock") => {
// try till the fd is freed
// sleep for 10^tries milliseconds, then recursively try again
thread::sleep(Duration::from_millis(10u64.pow(tries)));
Self::new_with_tries(config, tries + 1)
}
Err(e) => {
// On any other error, we return immediately.
Err(PmtreeErrorKind::DatabaseError(
DatabaseErrorKind::CustomError(format!(
"Cannot create database: {e} {config:#?}"
)),
))
}
}
}
}
impl Database for SledDB {
type Config = sled::Config;
fn new(config: Self::Config) -> PmtreeResult<Self> {
let db = Self::new_with_tries(config, 0)?;
Ok(db)
}
fn load(config: Self::Config) -> PmtreeResult<Self> {
let db = match config.open() {
Ok(db) => db,
Err(e) => {
return Err(PmtreeErrorKind::DatabaseError(
DatabaseErrorKind::CustomError(format!("Cannot load database: {e}")),
))
}
};
if !db.was_recovered() {
return Err(PmtreeErrorKind::DatabaseError(
DatabaseErrorKind::CustomError(format!(
"Database was not recovered: {}",
config.path.display()
)),
));
}
Ok(SledDB(db))
}
fn close(&mut self) -> PmtreeResult<()> {
let _ = self.0.flush().map_err(|_| {
PmtreeErrorKind::DatabaseError(DatabaseErrorKind::CustomError(
"Cannot flush database".to_string(),
))
})?;
Ok(())
}
fn get(&self, key: DBKey) -> PmtreeResult<Option<Value>> {
match self.0.get(key) {
Ok(value) => Ok(value.map(|val| val.to_vec())),
Err(_e) => Err(PmtreeErrorKind::TreeError(TreeErrorKind::InvalidKey)),
}
}
fn put(&mut self, key: DBKey, value: Value) -> PmtreeResult<()> {
match self.0.insert(key, value) {
Ok(_) => Ok(()),
Err(_e) => Err(PmtreeErrorKind::TreeError(TreeErrorKind::InvalidKey)),
}
}
fn put_batch(&mut self, subtree: HashMap<DBKey, Value>) -> PmtreeResult<()> {
let mut batch = sled::Batch::default();
for (key, value) in subtree {
batch.insert(&key, value);
}
self.0
.apply_batch(batch)
.map_err(|_| PmtreeErrorKind::TreeError(TreeErrorKind::InvalidKey))?;
Ok(())
}
}
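`new_with_tries` above retries a failed `sled` open with exponential backoff, sleeping 10^tries milliseconds between attempts and giving up after 10 tries. The same pattern in isolation, with a hypothetical `try_open` standing in for `sled::Config::open`:

```rust
use std::{thread, time::Duration};

// Hypothetical fallible open: fails with WouldBlock for the first attempts.
fn try_open(attempt: u32) -> Result<&'static str, String> {
    if attempt < 3 {
        Err("WouldBlock".to_string())
    } else {
        Ok("db handle")
    }
}

fn open_with_backoff(max_tries: u32) -> Result<&'static str, String> {
    for tries in 0..max_tries {
        match try_open(tries) {
            Ok(db) => return Ok(db),
            Err(e) if e.contains("WouldBlock") => {
                // Back off 10^tries ms: 1 ms, 10 ms, 100 ms, ...
                thread::sleep(Duration::from_millis(10u64.pow(tries)));
            }
            Err(e) => return Err(e),
        }
    }
    Err("exceeded maximum retry attempts".to_string())
}

fn main() {
    assert!(open_with_backoff(10).is_ok());
}
```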

View File

@@ -11,7 +11,7 @@
#![allow(dead_code)]
use ark_ff::PrimeField;
use ark_ff::{FpParameters, PrimeField};
use num_bigint::BigUint;
pub struct PoseidonGrainLFSR {
@@ -36,12 +36,20 @@ impl PoseidonGrainLFSR {
assert!(is_field == 1);
// b0, b1 describes the field
state[1] = is_field == 1;
if is_field == 1 {
state[1] = true;
} else {
state[1] = false;
}
assert!(is_sbox_an_inverse == 0 || is_sbox_an_inverse == 1);
// b2, ..., b5 describes the S-BOX
state[5] = is_sbox_an_inverse == 1;
if is_sbox_an_inverse == 1 {
state[5] = true;
} else {
state[5] = false;
}
// b6, ..., b17 are the binary representation of n (prime_num_bits)
{
@@ -80,8 +88,8 @@ impl PoseidonGrainLFSR {
}
// b50, ..., b79 are set to 1
for item in state.iter_mut().skip(50) {
*item = true;
for i in 50..=79 {
state[i] = true;
}
let head = 0;
@@ -103,7 +111,7 @@ impl PoseidonGrainLFSR {
let mut new_bit = self.update();
// Loop until the first bit is true
while !new_bit {
while new_bit == false {
// Discard the second bit
let _ = self.update();
// Obtain another first bit
@@ -121,8 +129,8 @@ impl PoseidonGrainLFSR {
&mut self,
num_elems: usize,
) -> Vec<F> {
assert_eq!(F::MODULUS_BIT_SIZE as u64, self.prime_num_bits);
let modulus: BigUint = F::MODULUS.into();
assert_eq!(F::Params::MODULUS_BITS as u64, self.prime_num_bits);
let modulus: BigUint = F::Params::MODULUS.into();
let mut res = Vec::new();
for _ in 0..num_elems {
@@ -155,7 +163,7 @@ impl PoseidonGrainLFSR {
}
pub fn get_field_elements_mod_p<F: PrimeField>(&mut self, num_elems: usize) -> Vec<F> {
assert_eq!(F::MODULUS_BIT_SIZE as u64, self.prime_num_bits);
assert_eq!(F::Params::MODULUS_BITS as u64, self.prime_num_bits);
let mut res = Vec::new();
for _ in 0..num_elems {
@@ -255,8 +263,8 @@ pub fn find_poseidon_ark_and_mds<F: PrimeField>(
let ys = lfsr.get_field_elements_mod_p::<F>(rate);
for i in 0..(rate) {
for (j, ys_item) in ys.iter().enumerate().take(rate) {
mds[i][j] = (xs[i] + ys_item).inverse().unwrap();
for j in 0..(rate) {
mds[i][j] = (xs[i] + &ys[j]).inverse().unwrap();
}
}
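The nested loops above (shown in both old and new style) build the MDS matrix as a Cauchy matrix: entry (i, j) is the field inverse of x_i + y_j, which is well defined as long as every sum is nonzero. A compact sketch of the same construction:

```rust
use ark_bn254::Fr;
use ark_ff::Field;

// Cauchy matrix: m[i][j] = (x_i + y_j)^(-1). The unwrap assumes no sum
// is zero, which the Grain LFSR sampling is expected to ensure.
fn cauchy_mds(xs: &[Fr], ys: &[Fr]) -> Vec<Vec<Fr>> {
    xs.iter()
        .map(|x| ys.iter().map(|y| (*x + y).inverse().unwrap()).collect())
        .collect()
}
```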

View File

@@ -4,7 +4,7 @@
// and adapted to work over arkworks field traits and custom data structures
use crate::poseidon_constants::find_poseidon_ark_and_mds;
use ark_ff::PrimeField;
use ark_ff::{FpParameters, PrimeField};
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct RoundParamenters<F: PrimeField> {
@@ -28,21 +28,22 @@ impl<F: PrimeField> Poseidon<F> {
pub fn from(poseidon_params: &[(usize, usize, usize, usize)]) -> Self {
let mut read_params = Vec::<RoundParamenters<F>>::new();
for &(t, n_rounds_f, n_rounds_p, skip_matrices) in poseidon_params {
for i in 0..poseidon_params.len() {
let (t, n_rounds_f, n_rounds_p, skip_matrices) = poseidon_params[i];
let (ark, mds) = find_poseidon_ark_and_mds::<F>(
1, // is_field = 1
0, // is_sbox_inverse = 0
F::MODULUS_BIT_SIZE as u64,
F::Params::MODULUS_BITS as u64,
t,
n_rounds_f as u64,
n_rounds_p as u64,
skip_matrices,
);
let rp = RoundParamenters {
t,
n_rounds_p,
n_rounds_f,
skip_matrices,
t: t,
n_rounds_p: n_rounds_p,
n_rounds_f: n_rounds_f,
skip_matrices: skip_matrices,
c: ark,
m: mds,
};
@@ -66,11 +67,11 @@ impl<F: PrimeField> Poseidon<F> {
pub fn sbox(&self, n_rounds_f: usize, n_rounds_p: usize, state: &mut [F], i: usize) {
if (i < n_rounds_f / 2) || (i >= n_rounds_f / 2 + n_rounds_p) {
for current_state in &mut state.iter_mut() {
let aux = *current_state;
*current_state *= *current_state;
*current_state *= *current_state;
*current_state *= aux;
for j in 0..state.len() {
let aux = state[j];
state[j] *= state[j];
state[j] *= state[j];
state[j] *= aux;
}
} else {
let aux = state[0];
@@ -84,9 +85,9 @@ impl<F: PrimeField> Poseidon<F> {
let mut new_state: Vec<F> = Vec::new();
for i in 0..state.len() {
new_state.push(F::zero());
for (j, state_item) in state.iter().enumerate() {
for j in 0..state.len() {
let mut mij = m[i][j];
mij *= state_item;
mij *= state[j];
new_state[i] += mij;
}
}
@@ -115,7 +116,7 @@ impl<F: PrimeField> Poseidon<F> {
self.ark(
&mut state,
&self.round_params[param_index].c,
i * self.round_params[param_index].t,
(i as usize) * self.round_params[param_index].t,
);
self.sbox(
self.round_params[param_index].n_rounds_f,
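The S-box applied in both branches of `sbox` above is the quintic map x ↦ x^5, computed with three multiplications (square twice, then multiply back by the original value) instead of the naive four. For a single element:

```rust
use ark_bn254::Fr;

// Quintic S-box: x^5 in three multiplications.
fn sbox5(x: Fr) -> Fr {
    let aux = x;       // x
    let mut y = x * x; // x^2
    y *= y;            // x^4
    y * aux            // x^5
}
```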

View File

@@ -3,11 +3,8 @@
mod test {
use hex_literal::hex;
use tiny_keccak::{Hasher as _, Keccak};
use zerokit_utils::{
FullMerkleConfig, FullMerkleTree, Hasher, OptimalMerkleConfig, OptimalMerkleTree,
ZerokitMerkleProof, ZerokitMerkleTree,
};
#[derive(Clone, Copy, Eq, PartialEq)]
use utils::{FullMerkleTree, Hasher, OptimalMerkleTree};
struct Keccak256;
impl Hasher for Keccak256 {
@@ -47,17 +44,14 @@ mod test {
hex!("a9bb8c3f1f12e9aa903a50c47f314b57610a3ab32f2d463293f58836def38d36"),
];
let mut tree =
FullMerkleTree::<Keccak256>::new(2, [0; 32], FullMerkleConfig::default()).unwrap();
let mut tree = FullMerkleTree::<Keccak256>::new(2, [0; 32]);
assert_eq!(tree.root(), default_tree_root);
for i in 0..leaves.len() {
tree.set(i, leaves[i]).unwrap();
assert_eq!(tree.root(), roots[i]);
}
let mut tree =
OptimalMerkleTree::<Keccak256>::new(2, [0; 32], OptimalMerkleConfig::default())
.unwrap();
let mut tree = OptimalMerkleTree::<Keccak256>::new(2, [0; 32]);
assert_eq!(tree.root(), default_tree_root);
for i in 0..leaves.len() {
tree.set(i, leaves[i]).unwrap();
@@ -75,8 +69,7 @@ mod test {
];
// We test the FullMerkleTree implementation
let mut tree =
FullMerkleTree::<Keccak256>::new(2, [0; 32], FullMerkleConfig::default()).unwrap();
let mut tree = FullMerkleTree::<Keccak256>::new(2, [0; 32]);
for i in 0..leaves.len() {
// We set the leaves
tree.set(i, leaves[i]).unwrap();
@@ -100,9 +93,7 @@ mod test {
}
// We test the OptimalMerkleTree implementation
let mut tree =
OptimalMerkleTree::<Keccak256>::new(2, [0; 32], OptimalMerkleConfig::default())
.unwrap();
let mut tree = OptimalMerkleTree::<Keccak256>::new(2, [0; 32]);
for i in 0..leaves.len() {
// We set the leaves
tree.set(i, leaves[i]).unwrap();
@@ -125,41 +116,4 @@ mod test {
.unwrap());
}
}
#[test]
fn test_override_range() {
let initial_leaves = [
hex!("0000000000000000000000000000000000000000000000000000000000000001"),
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
];
let mut tree =
OptimalMerkleTree::<Keccak256>::new(2, [0; 32], OptimalMerkleConfig::default())
.unwrap();
// We set the leaves
tree.set_range(0, initial_leaves.iter().cloned()).unwrap();
let new_leaves = [
hex!("0000000000000000000000000000000000000000000000000000000000000005"),
hex!("0000000000000000000000000000000000000000000000000000000000000006"),
];
let to_delete_indices: [usize; 2] = [0, 1];
// We override the leaves
tree.override_range(
0, // start from the beginning, overriding the deleted leaves
new_leaves.iter().cloned(),
to_delete_indices.iter().cloned(),
)
.unwrap();
// ensure that the leaves are set correctly
for i in 0..new_leaves.len() {
assert_eq!(tree.get_leaf(i), new_leaves[i]);
}
}
}

View File

@@ -3,7 +3,7 @@ mod test {
use ark_bn254::Fr;
use num_bigint::BigUint;
use num_traits::Num;
use zerokit_utils::poseidon_hash::Poseidon;
use utils::poseidon_hash::Poseidon;
const ROUND_PARAMS: [(usize, usize, usize, usize); 8] = [
(2, 8, 56, 0),