Compare commits


1 Commit

Author | SHA1 | Message | Date
Rahul Ghangas | 4765711f8c | feat: implement Serialize trait for witness | 2023-02-02 01:10:50 +05:30
110 changed files with 5526 additions and 13016 deletions

View File

@@ -3,117 +3,173 @@ on:
branches:
- master
paths-ignore:
- "**.md"
- "!.github/workflows/*.yml"
- "!rln-wasm/**"
- "!rln/src/**"
- "!rln/resources/**"
- "!utils/src/**"
- '**.md'
- '!.github/workflows/*.yml'
- '!multiplier/src/**'
- '!private-settlement/src/**'
- '!rln-wasm/**'
- '!rln/src/**'
- '!rln/resources/**'
- '!semaphore/src/**'
- '!utils/src/**'
pull_request:
paths-ignore:
- "**.md"
- "!.github/workflows/*.yml"
- "!rln-wasm/**"
- "!rln/src/**"
- "!rln/resources/**"
- "!utils/src/**"
- '**.md'
- '!.github/workflows/*.yml'
- '!multiplier/src/**'
- '!private-settlement/src/**'
- '!rln-wasm/**'
- '!rln/src/**'
- '!rln/resources/**'
- '!semaphore/src/**'
- '!utils/src/**'
name: Tests
jobs:
utils-test:
multiplier:
strategy:
matrix:
platform: [ ubuntu-latest, macos-latest ]
crate: [ utils ]
platform: [ubuntu-latest, macos-latest]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: test - ${{ matrix.crate }} - ${{ matrix.platform }}
name: multiplier - ${{ matrix.platform }}
steps:
- name: Checkout sources
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
run: make installdeps
- name: Update git submodules
run: git submodule update --init --recursive
- name: Install cargo-make
run: cargo install cargo-make
- name: cargo-make test
run: |
cargo make test --release
working-directory: ${{ matrix.crate }}
rln-test:
cargo make test --release
working-directory: multiplier
semaphore:
strategy:
matrix:
platform: [ ubuntu-latest, macos-latest ]
crate: [ rln ]
feature: [ "default", "arkzkey", "stateless" ]
platform: [ubuntu-latest, macos-latest]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: test - ${{ matrix.crate }} - ${{ matrix.platform }} - ${{ matrix.feature }}
name: semaphore - ${{ matrix.platform }}
steps:
- name: Checkout sources
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
run: make installdeps
- name: Update git submodules
run: git submodule update --init --recursive
- name: Install cargo-make
run: cargo install cargo-make
- name: cargo-make test
run: |
cargo make test_${{ matrix.feature }} --release
working-directory: ${{ matrix.crate }}
cargo make test --release
working-directory: semaphore
rln-wasm:
strategy:
matrix:
platform: [ ubuntu-latest, macos-latest ]
platform: [ubuntu-latest, macos-latest]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: test - rln-wasm - ${{ matrix.platform }}
name: rln-wasm - ${{ matrix.platform }}
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v2
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
# TODO: Update to stable once wasmer supports it
toolchain: 1.82.0
toolchain: stable
override: true
- uses: Swatinem/rust-cache@v2
- name: Install Dependencies
run: make installdeps
- run: git submodule update --init --recursive
- name: Install wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
- run: cargo install cargo-make
- run: cargo make build
working-directory: rln-wasm
- run: cargo make test --release
- run: cargo-make test
working-directory: rln-wasm
rln:
strategy:
matrix:
platform: [ubuntu-latest, macos-latest]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: rln - ${{ matrix.platform }}
steps:
- name: Checkout sources
uses: actions/checkout@v2
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Update git submodules
run: git submodule update --init --recursive
- name: Install cargo-make
run: cargo install cargo-make
- name: cargo-make test
run: |
cargo make test --release
working-directory: rln
utils:
strategy:
matrix:
platform: [ubuntu-latest, macos-latest]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: utils - ${{ matrix.platform }}
steps:
- name: Checkout sources
uses: actions/checkout@v2
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- name: Update git submodules
run: git submodule update --init --recursive
- name: Install cargo-make
run: cargo install cargo-make
- name: cargo-make test
run: |
cargo make test --release
working-directory: utils
lint:
strategy:
matrix:
# we run lint tests only on ubuntu
platform: [ ubuntu-latest ]
crate: [ rln, utils ]
platform: [ubuntu-latest]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: lint - ${{ matrix.crate }} - ${{ matrix.platform }}
name: lint - ${{ matrix.platform }}
steps:
- name: Checkout sources
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
@@ -121,62 +177,31 @@ jobs:
toolchain: stable
override: true
components: rustfmt, clippy
- uses: Swatinem/rust-cache@v2
- name: Install Dependencies
run: make installdeps
- name: Update git submodules
run: git submodule update --init --recursive
- name: cargo fmt
if: success() || failure()
run: cargo fmt -- --check
working-directory: ${{ matrix.crate }}
- name: cargo clippy
run: cargo fmt --all -- --check
- name: multiplier - cargo clippy
if: success() || failure()
run: |
cargo clippy --release -- -D warnings
working-directory: ${{ matrix.crate }}
cargo clippy --release
working-directory: multiplier
- name: semaphore - cargo clippy
if: success() || failure()
run: |
cargo clippy --release
working-directory: semaphore
- name: rln - cargo clippy
if: success() || failure()
run: |
cargo clippy --release
working-directory: rln
- name: utils - cargo clippy
if: success() || failure()
run: |
cargo clippy --release
working-directory: utils
# We skip clippy on rln-wasm, since wasm target is managed by cargo make
# Currently not treating warnings as error, too noisy
# -- -D warnings
benchmark-utils:
# run only in pull requests
if: github.event_name == 'pull_request'
strategy:
matrix:
# we run benchmark tests only on ubuntu
platform: [ ubuntu-latest ]
crate: [ utils ]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: benchmark - ${{ matrix.platform }} - ${{ matrix.crate }}
steps:
- name: Checkout sources
uses: actions/checkout@v3
- uses: Swatinem/rust-cache@v2
- uses: boa-dev/criterion-compare-action@v3
with:
branchName: ${{ github.base_ref }}
cwd: ${{ matrix.crate }}
benchmark-rln:
# run only in pull requests
if: github.event_name == 'pull_request'
strategy:
matrix:
# we run benchmark tests only on ubuntu
platform: [ ubuntu-latest ]
crate: [ rln ]
feature: [ "default", "arkzkey" ]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: benchmark - ${{ matrix.platform }} - ${{ matrix.crate }} - ${{ matrix.feature }}
steps:
- name: Checkout sources
uses: actions/checkout@v3
- uses: Swatinem/rust-cache@v2
- uses: boa-dev/criterion-compare-action@v3
with:
branchName: ${{ github.base_ref }}
cwd: ${{ matrix.crate }}
features: ${{ matrix.feature }}
# -- -D warnings

View File

@@ -6,131 +6,112 @@ on:
jobs:
linux:
strategy:
matrix:
feature: [ "default", "arkzkey", "stateless" ]
target:
- x86_64-unknown-linux-gnu
- aarch64-unknown-linux-gnu
- i686-unknown-linux-gnu
include:
- feature: stateless
cargo_args: --exclude rln-cli
name: Linux build
runs-on: ubuntu-latest
steps:
- name: Checkout sources
uses: actions/checkout@v3
uses: actions/checkout@v2
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
target: ${{ matrix.target }}
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
run: make installdeps
- name: cross build
- name: Update git submodules
run: git submodule update --init --recursive
- name: cargo build
run: |
cross build --release --target ${{ matrix.target }} --features ${{ matrix.feature }} --workspace --exclude rln-wasm ${{ matrix.cargo_args }}
cargo build --release --workspace --exclude rln-wasm
mkdir release
cp target/${{ matrix.target }}/release/librln* release/
tar -czvf ${{ matrix.target }}-${{ matrix.feature }}-rln.tar.gz release/
cp target/release/librln* release/
tar -czvf linux-rln.tar.gz release/
- name: Upload archive artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v2
with:
name: ${{ matrix.target }}-${{ matrix.feature }}-archive
path: ${{ matrix.target }}-${{ matrix.feature }}-rln.tar.gz
name: linux-archive
path: linux-rln.tar.gz
retention-days: 2
macos:
name: MacOS build
runs-on: macos-latest
strategy:
matrix:
feature: [ "default", "arkzkey", "stateless" ]
target:
- x86_64-apple-darwin
- aarch64-apple-darwin
include:
- feature: stateless
cargo_args: --exclude rln-cli
steps:
- name: Checkout sources
uses: actions/checkout@v3
uses: actions/checkout@v2
with:
ref: master
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
target: ${{ matrix.target }}
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
run: make installdeps
- name: cross build
- name: Update git submodules
run: git submodule update --init --recursive
- name: cargo build
run: |
cross build --release --target ${{ matrix.target }} --features ${{ matrix.feature }} --workspace --exclude rln-wasm ${{ matrix.cargo_args }}
cargo build --release --workspace --exclude rln-wasm
mkdir release
cp target/${{ matrix.target }}/release/librln* release/
tar -czvf ${{ matrix.target }}-${{ matrix.feature }}-rln.tar.gz release/
cp target/release/librln* release/
tar -czvf macos-rln.tar.gz release/
- name: Upload archive artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v2
with:
name: ${{ matrix.target }}-${{ matrix.feature }}-archive
path: ${{ matrix.target }}-${{ matrix.feature }}-rln.tar.gz
name: macos-archive
path: macos-rln.tar.gz
retention-days: 2
browser-rln-wasm:
name: Browser build (RLN WASM)
runs-on: ubuntu-latest
steps:
- name: Checkout sources
uses: actions/checkout@v3
uses: actions/checkout@v2
with:
ref: master
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
# TODO: Update to stable once wasmer supports it
toolchain: 1.82.0
toolchain: stable
override: true
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
run: make installdeps
- run: git submodule update --init --recursive
- name: Install wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
- name: cross make build
- name: Install cargo-make
run: cargo install cargo-make
- name: cargo make build
run: |
cross make build
cargo make build
mkdir release
cp pkg/** release/
tar -czvf browser-rln-wasm.tar.gz release/
working-directory: rln-wasm
- name: Upload archive artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v2
with:
name: browser-rln-wasm-archive
path: rln-wasm/browser-rln-wasm.tar.gz
retention-days: 2
prepare-prerelease:
name: Prepare pre-release
needs: [ linux, macos, browser-rln-wasm ]
needs: [linux, macos, browser-rln-wasm]
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
uses: actions/checkout@v2
with:
ref: master
- name: Download artifacts
uses: actions/download-artifact@v4
uses: actions/download-artifact@v2
- name: Delete tag
uses: dev-drprasad/delete-tag-and-release@v0.2.1
uses: dev-drprasad/delete-tag-and-release@v0.2.0
with:
delete_release: true
tag_name: nightly
@@ -139,19 +120,22 @@ jobs:
- name: Create prerelease
run: |
start_tag=$(gh release list -L 2 --exclude-drafts | grep -v nightly | cut -d$'\t' -f3 | sed -n '1p')
start_tag=$(gh release list -L 2 --exclude-drafts | grep -v nightly | cut -d$'\t' -f3)
gh release create nightly --prerelease --target master \
--title 'Nightly build ("master" branch)' \
--generate-notes \
--draft=false \
--notes-start-tag $start_tag \
*-archive/*.tar.gz \
linux-archive/linux-rln.tar.gz \
macos-archive/macos-rln.tar.gz \
browser-rln-wasm-archive/browser-rln-wasm.tar.gz
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Delete artifacts
uses: geekyeggo/delete-artifact@v5
uses: geekyeggo/delete-artifact@v1
with:
failOnError: false
name: |
*-archive
linux-archive
macos-archive
browser-rln-wasm-archive

View File

@@ -9,7 +9,7 @@ jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v2
- uses: micnncim/action-label-syncer@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

.gitignore (vendored): 6 changed lines
View File

@@ -8,10 +8,10 @@ rln/pmtree_db
# will have compiled files and executables
debug/
target/
wabt/
# Generated by Nix
result/
# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
Cargo.lock
# These are backup files generated by rustfmt
**/*.rs.bk

.gitmodules (vendored, new file): 8 lines
View File

@@ -0,0 +1,8 @@
[submodule "rln/vendor/rln"]
path = rln/vendor/rln
ignore = dirty
url = https://github.com/Rate-Limiting-Nullifier/rln_circuits.git
[submodule "semaphore/vendor/semaphore"]
path = semaphore/vendor/semaphore
ignore = dirty
url = https://github.com/appliedzkp/semaphore.git

View File

@@ -1,15 +1,18 @@
## 2023-02-28 v0.2
## Upcoming release
This release contains:
- Improved code quality
Release highlights:
- Allows consumers of zerokit RLN to set leaves to the Merkle Tree from an arbitrary index. Useful for batching updates to the Merkle Tree.
- Improved performance for proof generation and verification
- rln_wasm which allows for the consumption of RLN through a WebAssembly interface
- Refactored to generate Semaphore-compatible credentials
- Dual License under Apache 2.0 and MIT
- RLN compiles as a static library, which can be consumed through a C FFI
The full list of changes is below.
### Features
- Creation of `set_leaves_from`, which allows consumers to add leaves to a tree from a given starting index. `init_tree_with_leaves` internally uses `set_leaves_from`, with index 0.
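A minimal sketch of the batching flow this enables, assuming the `RLN` signatures exercised by `rln-cli` elsewhere in this changeset (`set_leaves_from` takes a starting index plus a reader over serialized leaves); `append_batch` is a hypothetical helper, not part of the API:
```rust
use std::fs::File;
use color_eyre::Result;
use rln::public::RLN;

// Hypothetical helper: append a batch of serialized leaves starting at `index`.
// init_tree_with_leaves(reader) performs the same operation anchored at index 0.
fn append_batch(rln: &mut RLN, index: usize, leaves_path: &str) -> Result<()> {
    let reader = File::open(leaves_path)?;
    rln.set_leaves_from(index, reader)?;
    Ok(())
}
```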
### Changes
- Renaming of `set_leaves` to `init_tree_with_leaves`, which is a more accurate representation of the function's utility.
### Fixes
- None
## 2022-09-19 v0.1

Cargo.lock (generated): 3809 changed lines

File diff suppressed because it is too large.

View File

@@ -1,14 +1,9 @@
[workspace]
members = ["rln", "rln-cli", "rln-wasm", "utils"]
default-members = ["rln", "rln-cli", "utils"]
resolver = "2"
# Compilation profile for any non-workspace member.
# Dependencies are optimized, even in a dev build. This improves dev performance
# while having negligible impact on incremental build times.
[profile.dev.package."*"]
opt-level = 3
[profile.release.package."rln-wasm"]
# Tell `rustc` to optimize for small code size.
opt-level = "s"
members = [
"multiplier",
"private-settlement",
"semaphore",
"rln",
"rln-wasm",
"utils",
]

View File

@@ -1,35 +0,0 @@
[target.x86_64-pc-windows-gnu]
image = "ghcr.io/cross-rs/x86_64-pc-windows-gnu:latest"
[target.aarch64-unknown-linux-gnu]
image = "ghcr.io/cross-rs/aarch64-unknown-linux-gnu:latest"
[target.x86_64-unknown-linux-gnu]
image = "ghcr.io/cross-rs/x86_64-unknown-linux-gnu:latest"
[target.arm-unknown-linux-gnueabi]
image = "ghcr.io/cross-rs/arm-unknown-linux-gnueabi:latest"
[target.i686-pc-windows-gnu]
image = "ghcr.io/cross-rs/i686-pc-windows-gnu:latest"
[target.i686-unknown-linux-gnu]
image = "ghcr.io/cross-rs/i686-unknown-linux-gnu:latest"
[target.arm-unknown-linux-gnueabihf]
image = "ghcr.io/cross-rs/arm-unknown-linux-gnueabihf:latest"
[target.mips-unknown-linux-gnu]
image = "ghcr.io/cross-rs/mips-unknown-linux-gnu:latest"
[target.mips64-unknown-linux-gnuabi64]
image = "ghcr.io/cross-rs/mips64-unknown-linux-gnuabi64:latest"
[target.mips64el-unknown-linux-gnuabi64]
image = "ghcr.io/cross-rs/mips64el-unknown-linux-gnuabi64:latest"
[target.mipsel-unknown-linux-gnu]
image = "ghcr.io/cross-rs/mipsel-unknown-linux-gnu:latest"
[target.aarch64-linux-android]
image = "ghcr.io/cross-rs/aarch64-linux-android:edge"

View File

@@ -1,203 +0,0 @@
Copyright (c) 2022 Vac Research
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@@ -1,25 +0,0 @@
Copyright (c) 2022 Vac Research
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

View File

@@ -1,35 +1,13 @@
.PHONY: all installdeps build test clean
.PHONY: all test clean
all: .pre-build build
.fetch-submodules:
@git submodule update --init --recursive
.pre-build: .fetch-submodules
@cargo install cargo-make
ifdef CI
@cargo install cross --git https://github.com/cross-rs/cross.git --rev 1511a28
endif
installdeps: .pre-build
ifeq ($(shell uname),Darwin)
# commented due to https://github.com/orgs/Homebrew/discussions/4612
# @brew update
@brew install cmake ninja
else ifeq ($(shell uname),Linux)
@sudo apt-get update
@sudo apt-get install -y cmake ninja-build
endif
@git -C "wabt" pull || git clone --recursive https://github.com/WebAssembly/wabt.git "wabt"
@cd wabt && mkdir -p build && cd build && cmake .. -GNinja && ninja && sudo ninja install
@which wasm-pack || cargo install wasm-pack
# nvm already checks if it's installed, and no-ops if it is
@curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.7/install.sh | bash
@. ${HOME}/.nvm/nvm.sh && nvm install 18.20.2 && nvm use 18.20.2;
build: .pre-build
all: .pre-build
@cargo make build
.pre-build:
ifndef $(cargo make --help)
@cargo install --force cargo-make
endif
test: .pre-build
@cargo make test

View File

@@ -17,31 +17,3 @@ in Rust.
- [RLN library](https://github.com/kilic/rln) written in Rust based on Bellman.
- [semaphore-rs](https://github.com/worldcoin/semaphore-rs) written in Rust based on ark-circom.
- The circom witness calculation code of the rln crate is based on [circom-witnesscalc](https://github.com/iden3/circom-witnesscalc) by iden3. The execution graph file used by this code has been generated by means of the same iden3 software.
## Users
Zerokit is used by:
- [nwaku](https://github.com/waku-org/nwaku)
- [js-rln](https://github.com/waku-org/js-rln)
## Build and Test
To install missing dependencies, run the following command from the root folder:
```bash
make installdeps
```
To build and test all crates, run the following commands from the root folder
```bash
make build
make test
```
## Release assets
We use [`cross-rs`](https://github.com/cross-rs/cross) to cross-compile and generate release assets for rln.
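For example, the release workflow earlier in this diff invokes it per target roughly as follows (the target and feature shown here are illustrative):
```bash
cross build --release --target aarch64-unknown-linux-gnu \
  --features default --workspace --exclude rln-wasm
```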

flake.lock (generated): 27 changed lines
View File

@@ -1,27 +0,0 @@
{
"nodes": {
"nixpkgs": {
"locked": {
"lastModified": 1737299813,
"narHash": "sha256-Qw2PwmkXDK8sPQ5YQ/y/icbQ+TYgbxfjhgnkNJyT1X8=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "107d5ef05c0b1119749e381451389eded30fb0d5",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-24.11",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"nixpkgs": "nixpkgs"
}
}
},
"root": "root",
"version": 7
}

View File

@@ -1,45 +0,0 @@
{
description = "A flake for building zerokit";
inputs = {
nixpkgs.url = "github:NixOS/nixpkgs/nixos-24.11";
};
outputs = { self, nixpkgs }:
let
stableSystems = [
"x86_64-linux" "aarch64-linux"
"x86_64-darwin" "aarch64-darwin"
"x86_64-windows" "i686-linux"
"i686-windows"
];
forAllSystems = nixpkgs.lib.genAttrs stableSystems;
pkgsFor = forAllSystems (
system: import nixpkgs {
inherit system;
config = {
android_sdk.accept_license = true;
allowUnfree = true;
};
overlays = [
(final: prev: {
androidEnvCustom = prev.callPackage ./nix/pkgs/android-sdk { };
androidPkgs = final.androidEnvCustom.pkgs;
androidShell = final.androidEnvCustom.shell;
})
];
}
);
in
{
packages = forAllSystems (system: let
pkgs = pkgsFor.${system};
in rec {
zerokit-android-arm64 = pkgs.callPackage ./nix/default.nix { target-platform="aarch64-android-prebuilt"; rust-target= "aarch64-linux-android"; };
zerokit-android-amd64 = pkgs.callPackage ./nix/default.nix { target-platform="musl64"; rust-target= "x86_64-linux-android"; };
zerokit-android-x86 = pkgs.callPackage ./nix/default.nix { target-platform="musl32"; rust-target= "i686-linux-android"; };
zerokit-android-arm = pkgs.callPackage ./nix/default.nix { target-platform="armv7a-android-prebuilt"; rust-target= "armv7a-linux-androideabi"; };
default = zerokit-android-arm64;
});
};
}

multiplier/Cargo.toml (new file): 35 lines
View File

@@ -0,0 +1,35 @@
[package]
name = "multiplier"
version = "0.1.0"
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
# WASM operations
# wasmer = { version = "2.0" }
# fnv = { version = "1.0.3", default-features = false }
# num = { version = "0.4.0" }
# num-traits = { version = "0.2.0", default-features = false }
num-bigint = { version = "0.4", default-features = false, features = ["rand"] }
# ZKP Generation
ark-ec = { version = "0.3.0", default-features = false, features = ["parallel"] }
# ark-ff = { version = "0.3.0", default-features = false, features = ["parallel", "asm"] }
ark-std = { version = "0.3.0", default-features = false, features = ["parallel"] }
ark-bn254 = { version = "0.3.0" }
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", features = ["parallel"] }
# ark-poly = { version = "^0.3.0", default-features = false, features = ["parallel"] }
ark-relations = { version = "0.3.0", default-features = false }
ark-serialize = { version = "0.3.0", default-features = false }
ark-circom = { git = "https://github.com/gakonst/ark-circom", features = ["circom-2"] }
# error handling
# thiserror = "1.0.26"
color-eyre = "0.5"
# decoding of data
# hex = "0.4.3"
# byteorder = "1.4.3"

multiplier/Makefile.toml (new file): 7 lines
View File

@@ -0,0 +1,7 @@
[tasks.build]
command = "cargo"
args = ["build", "--release"]
[tasks.test]
command = "cargo"
args = ["test", "--release"]

multiplier/README.md (new file): 13 lines
View File

@@ -0,0 +1,13 @@
# Multiplier example
Example wrapper around a basic Circom circuit to test Circom 2 integration
through ark-circom and FFI.
## FFI
To generate C or Nim bindings from Rust FFI, use `cbindgen` or `nbindgen`:
```
cbindgen . -o target/multiplier.h
nbindgen . -o target/multiplier.nim
```

Binary file not shown.

Binary file not shown.

multiplier/src/ffi.rs (new file): 77 lines
View File

@@ -0,0 +1,77 @@
use crate::public::Multiplier;
use std::slice;
/// Buffer struct is taken from
/// https://github.com/celo-org/celo-threshold-bls-rs/blob/master/crates/threshold-bls-ffi/src/ffi.rs
///
/// Also heavily inspired by https://github.com/kilic/rln/blob/master/src/ffi.rs
#[repr(C)]
#[derive(Clone, Debug, PartialEq)]
pub struct Buffer {
pub ptr: *const u8,
pub len: usize,
}
impl From<&[u8]> for Buffer {
fn from(src: &[u8]) -> Self {
Self {
ptr: &src[0] as *const u8,
len: src.len(),
}
}
}
impl<'a> From<&Buffer> for &'a [u8] {
fn from(src: &Buffer) -> &'a [u8] {
unsafe { slice::from_raw_parts(src.ptr, src.len) }
}
}
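// For illustration (not part of this file): a byte slice round-trips through
// Buffer without copying, e.g.
// let bytes = vec![1u8, 2, 3];
// let buf = Buffer::from(&bytes[..]);
// let view: &[u8] = <&[u8]>::from(&buf);
// assert_eq!(view, &bytes[..]);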
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn new_circuit(ctx: *mut *mut Multiplier) -> bool {
println!("multiplier ffi: new");
let mul = Multiplier::new();
unsafe { *ctx = Box::into_raw(Box::new(mul)) };
true
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn prove(ctx: *const Multiplier, output_buffer: *mut Buffer) -> bool {
println!("multiplier ffi: prove");
let mul = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
match mul.prove(&mut output_data) {
Ok(proof_data) => proof_data,
Err(_) => return false,
};
unsafe { *output_buffer = Buffer::from(&output_data[..]) };
std::mem::forget(output_data);
true
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn verify(
ctx: *const Multiplier,
proof_buffer: *const Buffer,
result_ptr: *mut u32,
) -> bool {
println!("multiplier ffi: verify");
let mul = unsafe { &*ctx };
let proof_data = <&[u8]>::from(unsafe { &*proof_buffer });
if match mul.verify(proof_data) {
Ok(verified) => verified,
Err(_) => return false,
} {
unsafe { *result_ptr = 0 };
} else {
unsafe { *result_ptr = 1 };
};
true
}
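A minimal smoke test of these exports, written against the signatures above (illustrative, not part of this file; it assumes the circuit resources loaded by `Multiplier::new` exist on disk, and that `verify` writes `0` into `result_ptr` for a valid proof, as the code above does):
```rust
use multiplier::ffi::{new_circuit, prove, verify, Buffer};
use multiplier::public::Multiplier;

fn main() {
    // Create the circuit behind an opaque pointer, as an FFI consumer would.
    let mut ctx: *mut Multiplier = std::ptr::null_mut();
    assert!(new_circuit(&mut ctx));

    // prove() fills the Buffer with a serialized Groth16 proof.
    let mut proof = Buffer { ptr: std::ptr::null(), len: 0 };
    assert!(prove(ctx, &mut proof));

    // verify() writes 0 on success, 1 on an invalid proof.
    let mut result: u32 = 1;
    assert!(verify(ctx, &proof, &mut result));
    assert_eq!(result, 0);
}
```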

multiplier/src/lib.rs (new file): 2 lines
View File

@@ -0,0 +1,2 @@
pub mod ffi;
pub mod public;

multiplier/src/main.rs (new file): 48 lines
View File

@@ -0,0 +1,48 @@
use ark_circom::{CircomBuilder, CircomConfig};
use ark_std::rand::thread_rng;
use color_eyre::Result;
use ark_bn254::Bn254;
use ark_groth16::{
create_random_proof as prove, generate_random_parameters, prepare_verifying_key, verify_proof,
};
fn groth16_proof_example() -> Result<()> {
let cfg = CircomConfig::<Bn254>::new(
"./resources/circom2_multiplier2.wasm",
"./resources/circom2_multiplier2.r1cs",
)?;
let mut builder = CircomBuilder::new(cfg);
builder.push_input("a", 3);
builder.push_input("b", 11);
// create an empty instance for setting it up
let circom = builder.setup();
let mut rng = thread_rng();
let params = generate_random_parameters::<Bn254, _, _>(circom, &mut rng)?;
let circom = builder.build()?;
let inputs = circom.get_public_inputs().unwrap();
let proof = prove(circom, &params, &mut rng)?;
let pvk = prepare_verifying_key(&params.vk);
let verified = verify_proof(&pvk, &proof, &inputs)?;
assert!(verified);
Ok(())
}
fn main() {
println!("Hello, world!");
match groth16_proof_example() {
Ok(_) => println!("Success"),
Err(_) => println!("Error"),
}
}

multiplier/src/public.rs (new file): 80 lines
View File

@@ -0,0 +1,80 @@
use ark_circom::{CircomBuilder, CircomCircuit, CircomConfig};
use ark_std::rand::thread_rng;
use ark_bn254::Bn254;
use ark_groth16::{
create_random_proof as prove, generate_random_parameters, prepare_verifying_key, verify_proof,
Proof, ProvingKey,
};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
// , SerializationError};
use std::io::{self, Read, Write};
pub struct Multiplier {
circom: CircomCircuit<Bn254>,
params: ProvingKey<Bn254>,
}
impl Multiplier {
// TODO Break this apart here
pub fn new() -> Multiplier {
let cfg = CircomConfig::<Bn254>::new(
"./resources/circom2_multiplier2.wasm",
"./resources/circom2_multiplier2.r1cs",
)
.unwrap();
let mut builder = CircomBuilder::new(cfg);
builder.push_input("a", 3);
builder.push_input("b", 11);
// create an empty instance for setting it up
let circom = builder.setup();
let mut rng = thread_rng();
let params = generate_random_parameters::<Bn254, _, _>(circom, &mut rng).unwrap();
let circom = builder.build().unwrap();
//let inputs = circom.get_public_inputs().unwrap();
Multiplier { circom, params }
}
// TODO Input Read
pub fn prove<W: Write>(&self, result_data: W) -> io::Result<()> {
let mut rng = thread_rng();
// XXX: There's probably a better way to do this
let circom = self.circom.clone();
let params = self.params.clone();
let proof = prove(circom, &params, &mut rng).unwrap();
// XXX: Unclear if this is different from other serialization(s)
let _ = proof.serialize(result_data).unwrap();
Ok(())
}
pub fn verify<R: Read>(&self, input_data: R) -> io::Result<bool> {
let proof = Proof::deserialize(input_data).unwrap();
let pvk = prepare_verifying_key(&self.params.vk);
// XXX Part of input data?
let inputs = self.circom.get_public_inputs().unwrap();
let verified = verify_proof(&pvk, &proof, &inputs).unwrap();
Ok(verified)
}
}
impl Default for Multiplier {
fn default() -> Self {
Self::new()
}
}

View File

@@ -0,0 +1,22 @@
#[cfg(test)]
mod tests {
use multiplier::public::Multiplier;
#[test]
fn multiplier_proof() {
let mul = Multiplier::new();
//let inputs = mul.circom.get_public_inputs().unwrap();
let mut output_data: Vec<u8> = Vec::new();
let _ = mul.prove(&mut output_data);
let proof_data = &output_data[..];
// XXX Pass as arg?
//let pvk = prepare_verifying_key(&mul.params.vk);
let verified = mul.verify(proof_data).unwrap();
assert!(verified);
}
}

View File

@@ -1,110 +0,0 @@
{
pkgs,
target-platform ? "aarch64-android-prebuilt",
rust-target ? "aarch64-linux-android",
lib,
}:
let
rustVersion = "1.84.1";
# Define each Rust standard library archive separately
rustStdX86_64LinuxAndroid = pkgs.fetchurl {
url = "https://static.rust-lang.org/dist/2025-01-30/rust-std-1.84.1-x86_64-linux-android.tar.gz ";
sha256 = "sha256-Iu9hg4w/4uMfJCwPWw9SCKvPGZoyOeP4uW+ixAf63Is="; # Replace with actual SHA256 hash
};
rustStdAarch64LinuxAndroid = pkgs.fetchurl {
url = "https://static.rust-lang.org/dist/2025-01-30/rust-std-1.84.1-aarch64-linux-android.tar.gz";
sha256 = "sha256-NMmJW3A7JJeu+epf9R3pWKKr/dUQnDCo3QmFhkVll2o="; # Replace with actual SHA256 hash
};
rustStdX86_64UnknownLinuxGnu = pkgs.fetchurl {
url = "https://static.rust-lang.org/dist/rust-1.84.1-x86_64-unknown-linux-gnu.tar.xz";
sha256 = "sha256-5PMzF5TxoyxW+DcDCRLYC1o9lmn0tJfJFhHWW9atqXs="; # Replace with actual SHA256 hash
};
rustupInit = pkgs.fetchurl {
url = "https://static.rust-lang.org/rustup/dist/x86_64-unknown-linux-gnu/rustup-init";
sha256 = "sha256-au7OaZPpAnCJg7IJ0EwNHbsU67QF3bh971eNQfkg9W0="; # Replace with actual SHA256 hash for rustup-init
};
in
pkgs.rustPlatform.buildRustPackage {
pname = "zerokit";
version = "nightly";
src = ../.;
cargoLock = {
lockFile = ../Cargo.lock;
allowBuiltinFetchGit = true;
};
# Dependencies that should only exist in the build environment.
nativeBuildInputs = with pkgs; [
unzip
xz
clang
cmake
gcc
which
];
ANDROID_NDK_HOME="${pkgs.androidPkgs.ndk}";
CARGO_HOME = "/tmp";
configurePhase = ''
echo $USER
echo $UID
# Create directories for Rust installation
mkdir -p ./rust-install/rust-${rustVersion}-x86_64-unknown-linux-gnu
# Extract Rust standard libraries
tar -xvzf ${rustStdX86_64LinuxAndroid}
tar -xvzf ${rustStdAarch64LinuxAndroid}
tar -xvf ${rustStdX86_64UnknownLinuxGnu} -C ./rust-install/rust-${rustVersion}-x86_64-unknown-linux-gnu
patchShebangs .
# Install STD's
chmod +x ./rust-std-1.84.1-x86_64-linux-android/install.sh
chmod +x ./rust-std-1.84.1-aarch64-linux-android/install.sh
./rust-std-1.84.1-x86_64-linux-android/install.sh --prefix=./rust-install/rust-${rustVersion}-x86_64-unknown-linux-gnu --verbose
./rust-std-1.84.1-aarch64-linux-android/install.sh --prefix=./rust-install/rust-${rustVersion}-x86_64-unknown-linux-gnu --verbose
# Initialize rustup
${rustupInit} --default-toolchain none -y --verbose
# Link custom toolchain
. "./.cargo/env"
cargo --version
which cargo
rustup
rustup toolchain link rust-toolchain-${rustVersion} ./rust-install/rust-${rustVersion}-x86_64-unknown-linux-gnu
rustup default rust-toolchain-${rustVersion}
# Set environment variables for Android NDK
export CC=/android-ndk/toolchains/llvm/prebuilt/linux-x86_64/bin/x86_64-linux-android25-clang
export CXX=/android-ndk/toolchains/llvm/prebuilt/linux-x86_64/bin/x86_64-linux-android25-clang++
export CARGO_TARGET_X86_64_LINUX_ANDROID_LINKER=/android-ndk/toolchains/llvm/prebuilt/linux-x86_64/bin/x86_64-linux-android25-clang
'';
buildPhase = ''
pushd rln
cargo rustc --crate-type=cdylib --release --lib --target=x86_64-linux-android
popd
'';
installPhase = ''
mkdir -p $out/
cp ./target/${rust-target}/release/librln.so $out/
'';
meta = with pkgs.lib; {
description = "Zerokit";
license = licenses.mit;
};
}

View File

@@ -1,26 +0,0 @@
#
# This Nix expression centralizes the configuration
# for the Android development environment.
#
{ androidenv, lib, stdenv }:
assert lib.assertMsg (stdenv.system != "aarch64-darwin")
"aarch64-darwin not supported for Android SDK. Use: NIXPKGS_SYSTEM_OVERRIDE=x86_64-darwin";
# The "android-sdk-license" license is accepted
# by setting android_sdk.accept_license = true.
androidenv.composeAndroidPackages {
cmdLineToolsVersion = "9.0";
toolsVersion = "26.1.1";
platformToolsVersion = "33.0.3";
buildToolsVersions = [ "34.0.0" ];
platformVersions = [ "34" ];
cmakeVersions = [ "3.22.1" ];
ndkVersion = "25.2.9519653";
includeNDK = true;
includeExtras = [
"extras;android;m2repository"
"extras;google;m2repository"
];
}

View File

@@ -1,14 +0,0 @@
#
# This Nix expression centralizes the configuration
# for the Android development environment.
#
{ callPackage }:
let
compose = callPackage ./compose.nix { };
pkgs = callPackage ./pkgs.nix { inherit compose; };
shell = callPackage ./shell.nix { androidPkgs = pkgs; };
in {
inherit compose pkgs shell;
}

View File

@@ -1,17 +0,0 @@
{ stdenv, compose }:
#
# This derivation simply symlinks some stuff to get
# shorter paths as libexec/android-sdk is quite the mouthful.
# With this you can just do `androidPkgs.sdk` and `androidPkgs.ndk`.
#
stdenv.mkDerivation {
name = "${compose.androidsdk.name}-mod";
phases = [ "symlinkPhase" ];
outputs = [ "out" "sdk" "ndk" ];
symlinkPhase = ''
ln -s ${compose.androidsdk} $out
ln -s ${compose.androidsdk}/libexec/android-sdk $sdk
ln -s ${compose.androidsdk}/libexec/android-sdk/ndk-bundle $ndk
'';
}

View File

@@ -1,18 +0,0 @@
{ mkShell, openjdk, androidPkgs }:
mkShell {
name = "android-sdk-shell";
buildInputs = [ openjdk ];
shellHook = ''
export ANDROID_HOME="${androidPkgs.sdk}"
export ANDROID_NDK_ROOT="${androidPkgs.ndk}"
export ANDROID_SDK_ROOT="$ANDROID_HOME"
export PATH="$ANDROID_NDK_ROOT:$PATH"
export PATH="$ANDROID_SDK_ROOT/tools:$PATH"
export PATH="$ANDROID_SDK_ROOT/tools/bin:$PATH"
export PATH="$(echo $ANDROID_SDK_ROOT/cmdline-tools/*/bin):$PATH"
export PATH="$ANDROID_SDK_ROOT/platform-tools:$PATH"
'';
}

View File

@@ -0,0 +1,8 @@
[package]
name = "private-settlement"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]

View File

@@ -0,0 +1,7 @@
[tasks.build]
command = "cargo"
args = ["build", "--release"]
[tasks.test]
command = "cargo"
args = ["test", "--release"]

View File

@@ -0,0 +1,4 @@
# Private Settlement Module
This module will provide APIs to manage, compute, and verify [Private Settlement](https://rfc.vac.dev/spec/44/) zkSNARK proofs and primitives.

View File

@@ -0,0 +1 @@

View File

@@ -0,0 +1,11 @@
#[cfg(test)]
mod tests {
#[test]
fn it_works() {
let result = 2 + 2;
assert_eq!(result, 4);
}
}

View File

@@ -1,16 +0,0 @@
[package]
name = "rln-cli"
version = "0.3.0"
edition = "2021"
[dependencies]
rln = { path = "../rln", default-features = true, features = ["arkzkey"] }
clap = { version = "4.2.7", features = ["cargo", "derive", "env"]}
clap_derive = { version = "=4.2.0" }
color-eyre = "=0.6.2"
# serialization
serde_json = "1.0.48"
serde = { version = "1.0.130", features = ["derive"] }
[features]
arkzkey = []

View File

@@ -1,67 +0,0 @@
use std::path::PathBuf;
use clap::Subcommand;
#[derive(Subcommand)]
pub(crate) enum Commands {
New {
tree_height: usize,
/// Sets a custom config file
#[arg(short, long)]
config: PathBuf,
},
NewWithParams {
tree_height: usize,
/// Sets a custom config file
#[arg(short, long)]
config: PathBuf,
#[arg(short, long)]
tree_config_input: PathBuf,
},
SetTree {
tree_height: usize,
},
SetLeaf {
index: usize,
#[arg(short, long)]
file: PathBuf,
},
SetMultipleLeaves {
index: usize,
#[arg(short, long)]
file: PathBuf,
},
ResetMultipleLeaves {
#[arg(short, long)]
file: PathBuf,
},
SetNextLeaf {
#[arg(short, long)]
file: PathBuf,
},
DeleteLeaf {
index: usize,
},
GetRoot,
GetProof {
index: usize,
},
Prove {
#[arg(short, long)]
input: PathBuf,
},
Verify {
#[arg(short, long)]
file: PathBuf,
},
GenerateProof {
#[arg(short, long)]
input: PathBuf,
},
VerifyWithRoots {
#[arg(short, long)]
input: PathBuf,
#[arg(short, long)]
roots: PathBuf,
},
}

View File

@@ -1,29 +0,0 @@
use color_eyre::Result;
use serde::{Deserialize, Serialize};
use std::{fs::File, io::Read, path::PathBuf};
pub const RLN_STATE_PATH: &str = "RLN_STATE_PATH";
#[derive(Default, Serialize, Deserialize)]
pub(crate) struct Config {
pub inner: Option<InnerConfig>,
}
#[derive(Default, Serialize, Deserialize)]
pub(crate) struct InnerConfig {
pub file: PathBuf,
pub tree_height: usize,
}
impl Config {
pub(crate) fn load_config() -> Result<Config> {
let path = PathBuf::from(std::env::var(RLN_STATE_PATH)?);
let mut file = File::open(path)?;
let mut contents = String::new();
file.read_to_string(&mut contents)?;
let state: Config = serde_json::from_str(&contents)?;
Ok(state)
}
}

View File

@@ -1,160 +0,0 @@
use std::{fs::File, io::Read, path::Path};
use clap::Parser;
use color_eyre::{Report, Result};
use commands::Commands;
use rln::public::RLN;
use state::State;
mod commands;
mod config;
mod state;
#[derive(Parser)]
#[command(author, version, about, long_about = None)]
struct Cli {
#[command(subcommand)]
command: Option<Commands>,
}
fn main() -> Result<()> {
let cli = Cli::parse();
let mut state = State::load_state()?;
match &cli.command {
Some(Commands::New {
tree_height,
config,
}) => {
let resources = File::open(config)?;
state.rln = Some(RLN::new(*tree_height, resources)?);
Ok(())
}
Some(Commands::NewWithParams {
tree_height,
config,
tree_config_input,
}) => {
let mut resources: Vec<Vec<u8>> = Vec::new();
#[cfg(feature = "arkzkey")]
let filenames = ["rln_final.arkzkey", "verification_key.arkvkey"];
#[cfg(not(feature = "arkzkey"))]
let filenames = ["rln_final.zkey", "verification_key.arkvkey"];
for filename in filenames {
let fullpath = config.join(Path::new(filename));
let mut file = File::open(&fullpath)?;
let metadata = std::fs::metadata(&fullpath)?;
let mut buffer = vec![0; metadata.len() as usize];
file.read_exact(&mut buffer)?;
resources.push(buffer);
}
let tree_config_input_file = File::open(tree_config_input)?;
state.rln = Some(RLN::new_with_params(
*tree_height,
resources[0].clone(),
resources[1].clone(),
tree_config_input_file,
)?);
Ok(())
}
Some(Commands::SetTree { tree_height }) => {
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.set_tree(*tree_height)?;
Ok(())
}
Some(Commands::SetLeaf { index, file }) => {
let input_data = File::open(file)?;
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.set_leaf(*index, input_data)?;
Ok(())
}
Some(Commands::SetMultipleLeaves { index, file }) => {
let input_data = File::open(file)?;
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.set_leaves_from(*index, input_data)?;
Ok(())
}
Some(Commands::ResetMultipleLeaves { file }) => {
let input_data = File::open(file)?;
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.init_tree_with_leaves(input_data)?;
Ok(())
}
Some(Commands::SetNextLeaf { file }) => {
let input_data = File::open(file)?;
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.set_next_leaf(input_data)?;
Ok(())
}
Some(Commands::DeleteLeaf { index }) => {
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.delete_leaf(*index)?;
Ok(())
}
Some(Commands::GetRoot) => {
let writer = std::io::stdout();
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.get_root(writer)?;
Ok(())
}
Some(Commands::GetProof { index }) => {
let writer = std::io::stdout();
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.get_proof(*index, writer)?;
Ok(())
}
Some(Commands::Prove { input }) => {
let input_data = File::open(input)?;
let writer = std::io::stdout();
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.prove(input_data, writer)?;
Ok(())
}
Some(Commands::Verify { file }) => {
let input_data = File::open(file)?;
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.verify(input_data)?;
Ok(())
}
Some(Commands::GenerateProof { input }) => {
let input_data = File::open(input)?;
let writer = std::io::stdout();
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.generate_rln_proof(input_data, writer)?;
Ok(())
}
Some(Commands::VerifyWithRoots { input, roots }) => {
let input_data = File::open(input)?;
let roots_data = File::open(roots)?;
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.verify_with_roots(input_data, roots_data)?;
Ok(())
}
None => Ok(()),
}
}

View File

@@ -1,23 +0,0 @@
use color_eyre::Result;
use rln::public::RLN;
use std::fs::File;
use crate::config::{Config, InnerConfig};
#[derive(Default)]
pub(crate) struct State {
pub rln: Option<RLN>,
}
impl State {
pub(crate) fn load_state() -> Result<State> {
let config = Config::load_config()?;
let rln = if let Some(InnerConfig { file, tree_height }) = config.inner {
let resources = File::open(&file)?;
Some(RLN::new(tree_height, resources)?)
} else {
None
};
Ok(State { rln })
}
}

View File

@@ -1,6 +1,6 @@
[package]
name = "rln-wasm"
version = "0.0.13"
version = "0.0.7"
edition = "2021"
license = "MIT or Apache2"
@@ -11,16 +11,10 @@ crate-type = ["cdylib", "rlib"]
default = ["console_error_panic_hook"]
[dependencies]
rln = { path = "../rln", default-features = false, features = [
"wasm",
"stateless",
] }
num-bigint = { version = "0.4", default-features = false, features = [
"rand",
"serde",
] }
rln = { path = "../rln", default-features = false, features = ["wasm"] }
num-bigint = { version = "0.4", default-features = false, features = ["rand", "serde"] }
wasmer = { version = "2.3", default-features = false, features = ["js", "std"] }
web-sys = { version = "0.3", features = ["console"] }
web-sys = {version = "0.3", features=["console"]}
getrandom = { version = "0.2.7", default-features = false, features = ["js"] }
wasm-bindgen = "0.2.63"
serde-wasm-bindgen = "0.4"
@@ -32,8 +26,11 @@ serde_json = "1.0.85"
# all the `std::fmt` and `std::panicking` infrastructure, so isn't great for
# code size when deploying.
console_error_panic_hook = { version = "0.1.7", optional = true }
zerokit_utils = { version = "0.5.1", path = "../utils" }
[dev-dependencies]
wasm-bindgen-test = "0.3.13"
wasm-bindgen-futures = "0.4.33"
[profile.release]
# Tell `rustc` to optimize for small code size.
opt-level = "s"

View File

@@ -7,11 +7,10 @@ script = "sed -i.bak 's/rln-wasm/zerokit-rln-wasm/g' pkg/package.json && rm pkg/
[tasks.build]
clear = true
dependencies = ["pack-build", "pack-rename", "post-build"]
[tasks.post-build]
command = "wasm-strip"
args = ["./pkg/rln_wasm_bg.wasm"]
dependencies = [
"pack-build",
"pack-rename"
]
[tasks.test]
command = "wasm-pack"
@@ -25,7 +24,3 @@ args = ["login"]
[tasks.publish]
command = "wasm-pack"
args = ["publish", "--access", "public", "--target", "web"]
[tasks.bench]
command = "echo"
args = ["'No benchmarks available for this project'"]

View File

@@ -10,22 +10,11 @@ curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh
```
cargo install cargo-make
```
OR
```
make installdeps
```
3. Compile zerokit for `wasm32-unknown-unknown`:
```
cd rln-wasm
cargo make build
```
4. Compile a slimmer version of zerokit for `wasm32-unknown-unknown`:
```
cd rln-wasm
cargo make post-build
```
## Running tests
```

View File

@@ -3,11 +3,9 @@
extern crate wasm_bindgen;
extern crate web_sys;
use std::vec::Vec;
use js_sys::{BigInt as JsBigInt, Object, Uint8Array};
use num_bigint::BigInt;
use rln::public::{hash, poseidon_hash, RLN};
use rln::public::RLN;
use wasm_bindgen::prelude::*;
#[wasm_bindgen]
@@ -19,192 +17,85 @@ pub fn init_panic_hook() {
pub struct RLNWrapper {
// The purpose of this wrapper is to hold an RLN instance with the 'static lifetime
// because wasm_bindgen does not allow returning elements with lifetimes
instance: RLN,
}
// Macro to call methods with an arbitrary number of arguments
// whose last parameter is an output buffer pointer.
// The first argument to the macro is the context,
// the second is the actual method on `RLN`,
// the third is the error message attached on failure,
// and the rest are forwarded to the method.
macro_rules! call_with_output_and_error_msg {
// this variant is needed for the case when
// there are zero other arguments
($instance:expr, $method:ident, $error_msg:expr) => {
{
let mut output_data: Vec<u8> = Vec::new();
let new_instance = $instance.process();
if let Err(err) = new_instance.instance.$method(&mut output_data) {
std::mem::forget(output_data);
Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
} else {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
}
}
};
($instance:expr, $method:ident, $error_msg:expr, $( $arg:expr ),* ) => {
{
let mut output_data: Vec<u8> = Vec::new();
let new_instance = $instance.process();
if let Err(err) = new_instance.instance.$method($($arg.process()),*, &mut output_data) {
std::mem::forget(output_data);
Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
} else {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
}
}
};
}
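// Illustrative only (not part of this changeset): an exported binding built on
// the macro above might look like this, assuming `RLN::get_root(&self, writer)`
// as used by rln-cli elsewhere in this diff:
//
// #[wasm_bindgen(js_name = getRoot)]
// pub fn wasm_get_root(ctx: *const RLNWrapper) -> Result<Uint8Array, String> {
//     call_with_output_and_error_msg!(ctx, get_root, "could not obtain root")
// }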
// Macro to call methods (attaching an error message on failure) with an
// arbitrary number of arguments.
// The first argument to the macro is the context,
// the second is the actual method on the wrapped `RLN`,
// and the rest are forwarded to the method.
macro_rules! call_with_error_msg {
($instance:expr, $method:ident, $error_msg:expr $(, $arg:expr)*) => {
{
let new_instance: &mut RLNWrapper = $instance.process();
if let Err(err) = new_instance.instance.$method($($arg.process()),*) {
Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
} else {
Ok(())
}
}
}
}
macro_rules! call {
($instance:expr, $method:ident $(, $arg:expr)*) => {
{
let new_instance: &mut RLNWrapper = $instance.process();
new_instance.instance.$method($($arg.process()),*)
}
}
}
macro_rules! call_bool_method_with_error_msg {
($instance:expr, $method:ident, $error_msg:expr $(, $arg:expr)*) => {
{
let new_instance: &RLNWrapper = $instance.process();
new_instance.instance.$method($($arg.process()),*).map_err(|err| format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
}
}
}
// Macro to execute a free function with an arbitrary number of arguments,
// writing its result into an output buffer.
// The first argument is the function to execute, the second is the error
// message attached on failure, and the rest are forwarded to the function.
macro_rules! fn_call_with_output_and_error_msg {
// this variant is needed for the case when
// there are zero other arguments
($func:ident, $error_msg:expr) => {
{
let mut output_data: Vec<u8> = Vec::new();
if let Err(err) = $func(&mut output_data) {
std::mem::forget(output_data);
Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
} else {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
}
}
};
($func:ident, $error_msg:expr, $( $arg:expr ),* ) => {
{
let mut output_data: Vec<u8> = Vec::new();
if let Err(err) = $func($($arg.process()),*, &mut output_data) {
std::mem::forget(output_data);
Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
} else {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
}
}
};
}
trait ProcessArg {
type ReturnType;
fn process(self) -> Self::ReturnType;
}
impl ProcessArg for usize {
type ReturnType = usize;
fn process(self) -> Self::ReturnType {
self
}
}
impl<T> ProcessArg for Vec<T> {
type ReturnType = Vec<T>;
fn process(self) -> Self::ReturnType {
self
}
}
impl ProcessArg for *const RLN {
type ReturnType = &'static RLN;
fn process(self) -> Self::ReturnType {
unsafe { &*self }
}
}
impl ProcessArg for *const RLNWrapper {
type ReturnType = &'static RLNWrapper;
fn process(self) -> Self::ReturnType {
unsafe { &*self }
}
}
impl ProcessArg for *mut RLNWrapper {
type ReturnType = &'static mut RLNWrapper;
fn process(self) -> Self::ReturnType {
unsafe { &mut *self }
}
}
impl<'a> ProcessArg for &'a [u8] {
type ReturnType = &'a [u8];
fn process(self) -> Self::ReturnType {
self
}
instance: RLN<'static>,
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = newRLN)]
pub fn wasm_new(
zkey: Uint8Array,
vk: Uint8Array,
) -> Result<*mut RLNWrapper, String> {
let instance = RLN::new_with_params(zkey.to_vec(), vk.to_vec())
.map_err(|err| format!("{:#?}", err))?;
pub fn wasm_new(tree_height: usize, zkey: Uint8Array, vk: Uint8Array) -> *mut RLNWrapper {
let instance = RLN::new_with_params(tree_height, zkey.to_vec(), vk.to_vec());
let wrapper = RLNWrapper { instance };
Ok(Box::into_raw(Box::new(wrapper)))
Box::into_raw(Box::new(wrapper))
}
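// NOTE: not part of this diff — a hypothetical sketch of a matching destructor.
// `wasm_new` transfers ownership to JS via `Box::into_raw`, so the instance
// can only be reclaimed by reconstructing the Box with `Box::from_raw`:
//
// #[allow(clippy::not_unsafe_ptr_arg_deref)]
// #[wasm_bindgen(js_name = dropRLN)]
// pub fn wasm_drop(ctx: *mut RLNWrapper) {
//     if !ctx.is_null() {
//         // Rebuild the Box created in wasm_new and drop it, freeing the RLN instance
//         unsafe { drop(Box::from_raw(ctx)) };
//     }
// }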
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = getSerializedRLNWitness)]
pub fn wasm_get_serialized_rln_witness(ctx: *mut RLNWrapper, input: Uint8Array) -> Uint8Array {
let wrapper = unsafe { &mut *ctx };
let rln_witness = wrapper
.instance
.get_serialized_rln_witness(&input.to_vec()[..]);
Uint8Array::from(&rln_witness[..])
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = insertMember)]
pub fn wasm_set_next_leaf(ctx: *mut RLNWrapper, input: Uint8Array) -> Result<(), String> {
let wrapper = unsafe { &mut *ctx };
if wrapper.instance.set_next_leaf(&input.to_vec()[..]).is_ok() {
Ok(())
} else {
Err("could not insert member into merkle tree".into())
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = setLeavesFrom)]
pub fn wasm_set_leaves_from(
ctx: *mut RLNWrapper,
index: usize,
input: Uint8Array,
) -> Result<(), String> {
let wrapper = unsafe { &mut *ctx };
if wrapper
.instance
.set_leaves_from(index as usize, &input.to_vec()[..])
.is_ok()
{
Ok(())
} else {
Err("could not set multiple leaves".into())
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = initTreeWithLeaves)]
pub fn wasm_init_tree_with_leaves(ctx: *mut RLNWrapper, input: Uint8Array) -> Result<(), String> {
let wrapper = unsafe { &mut *ctx };
if wrapper
.instance
.init_tree_with_leaves(&input.to_vec()[..])
.is_ok()
{
Ok(())
} else {
Err("could not init merkle tree".into())
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = RLNWitnessToJson)]
pub fn rln_witness_to_json(
ctx: *mut RLNWrapper,
serialized_witness: Uint8Array,
) -> Result<Object, String> {
let inputs = call!(
ctx,
get_rln_witness_bigint_json,
&serialized_witness.to_vec()[..]
)
.map_err(|err| err.to_string())?;
let js_value = serde_wasm_bindgen::to_value(&inputs).map_err(|err| err.to_string())?;
Object::from_entries(&js_value).map_err(|err| format!("{:#?}", err))
pub fn rln_witness_to_json(ctx: *mut RLNWrapper, serialized_witness: Uint8Array) -> Object {
let wrapper = unsafe { &mut *ctx };
let inputs = wrapper
.instance
.get_rln_witness_json(&serialized_witness.to_vec()[..])
.unwrap();
let js_value = serde_wasm_bindgen::to_value(&inputs).unwrap();
let obj = Object::from_entries(&js_value);
obj.unwrap()
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
@@ -214,49 +105,83 @@ pub fn generate_rln_proof_with_witness(
calculated_witness: Vec<JsBigInt>,
serialized_witness: Uint8Array,
) -> Result<Uint8Array, String> {
let mut witness_vec: Vec<BigInt> = vec![];
let wrapper = unsafe { &mut *ctx };
for v in calculated_witness {
witness_vec.push(
let witness_vec: Vec<BigInt> = calculated_witness
.iter()
.map(|v| {
v.to_string(10)
.map_err(|err| format!("{:#?}", err))?
.unwrap()
.as_string()
.ok_or("not a string error")?
.unwrap()
.parse::<BigInt>()
.map_err(|err| format!("{:#?}", err))?,
);
}
.unwrap()
})
.collect();
call_with_output_and_error_msg!(
ctx,
generate_rln_proof_with_witness,
"could not generate proof",
witness_vec,
serialized_witness.to_vec()
)
let mut output_data: Vec<u8> = Vec::new();
if wrapper
.instance
.generate_rln_proof_with_witness(witness_vec, serialized_witness.to_vec(), &mut output_data)
.is_ok()
{
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
} else {
std::mem::forget(output_data);
Err("could not generate proof".into())
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateMembershipKey)]
pub fn wasm_key_gen(ctx: *const RLNWrapper) -> Result<Uint8Array, String> {
call_with_output_and_error_msg!(ctx, key_gen, "could not generate membership keys")
let wrapper = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
if wrapper.instance.key_gen(&mut output_data).is_ok() {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
} else {
std::mem::forget(output_data);
Err("could not generate membership keys".into())
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateExtendedMembershipKey)]
pub fn wasm_extended_key_gen(ctx: *const RLNWrapper) -> Result<Uint8Array, String> {
call_with_output_and_error_msg!(ctx, extended_key_gen, "could not generate membership keys")
let wrapper = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
if wrapper.instance.extended_key_gen(&mut output_data).is_ok() {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
} else {
std::mem::forget(output_data);
Err("could not generate membership keys".into())
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateSeededMembershipKey)]
pub fn wasm_seeded_key_gen(ctx: *const RLNWrapper, seed: Uint8Array) -> Result<Uint8Array, String> {
call_with_output_and_error_msg!(
ctx,
seeded_key_gen,
"could not generate membership key",
&seed.to_vec()[..]
)
let wrapper = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
if wrapper
.instance
.seeded_key_gen(&seed.to_vec()[..], &mut output_data)
.is_ok()
{
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
} else {
std::mem::forget(output_data);
Err("could not generate membership key".into())
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
@@ -265,12 +190,20 @@ pub fn wasm_seeded_extended_key_gen(
ctx: *const RLNWrapper,
seed: Uint8Array,
) -> Result<Uint8Array, String> {
call_with_output_and_error_msg!(
ctx,
seeded_extended_key_gen,
"could not generate membership key",
&seed.to_vec()[..]
)
let wrapper = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
if wrapper
.instance
.seeded_extended_key_gen(&seed.to_vec()[..], &mut output_data)
.is_ok()
{
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
} else {
std::mem::forget(output_data);
Err("could not generate membership key".into())
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
@@ -280,13 +213,38 @@ pub fn wasm_recover_id_secret(
input_proof_data_1: Uint8Array,
input_proof_data_2: Uint8Array,
) -> Result<Uint8Array, String> {
call_with_output_and_error_msg!(
ctx,
recover_id_secret,
"could not recover id secret",
&input_proof_data_1.to_vec()[..],
&input_proof_data_2.to_vec()[..]
)
let wrapper = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
if wrapper
.instance
.recover_id_secret(
&input_proof_data_1.to_vec()[..],
&input_proof_data_2.to_vec()[..],
&mut output_data,
)
.is_ok()
{
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
} else {
std::mem::forget(output_data);
Err("could not recover id secret".into())
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = verifyRLNProof)]
pub fn wasm_verify_rln_proof(ctx: *const RLNWrapper, proof: Uint8Array) -> Result<bool, String> {
let wrapper = unsafe { &*ctx };
if match wrapper.instance.verify_rln_proof(&proof.to_vec()[..]) {
Ok(verified) => verified,
Err(_) => return Err("error while verifying rln proof".into()),
} {
return Ok(true);
}
Ok(false)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
@@ -296,25 +254,31 @@ pub fn wasm_verify_with_roots(
proof: Uint8Array,
roots: Uint8Array,
) -> Result<bool, String> {
call_bool_method_with_error_msg!(
ctx,
verify_with_roots,
"error while verifying proof with roots".to_string(),
&proof.to_vec()[..],
&roots.to_vec()[..]
)
let wrapper = unsafe { &*ctx };
if match wrapper
.instance
.verify_with_roots(&proof.to_vec()[..], &roots.to_vec()[..])
{
Ok(verified) => verified,
Err(_) => return Err("error while verifying proof with roots".into()),
} {
return Ok(true);
}
Ok(false)
}
#[wasm_bindgen(js_name = hash)]
pub fn wasm_hash(input: Uint8Array) -> Result<Uint8Array, String> {
fn_call_with_output_and_error_msg!(hash, "could not generate hash", &input.to_vec()[..])
}
#[wasm_bindgen(js_name = poseidonHash)]
pub fn wasm_poseidon_hash(input: Uint8Array) -> Result<Uint8Array, String> {
fn_call_with_output_and_error_msg!(
poseidon_hash,
"could not generate poseidon hash",
&input.to_vec()[..]
)
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = getRoot)]
pub fn wasm_get_root(ctx: *const RLNWrapper) -> Result<Uint8Array, String> {
let wrapper = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
if wrapper.instance.get_root(&mut output_data).is_ok() {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
} else {
std::mem::forget(output_data);
Err("could not obtain root".into())
}
}


@@ -3,17 +3,11 @@
#[cfg(test)]
mod tests {
use js_sys::{BigInt as JsBigInt, Object, Uint8Array};
use rln::circuit::{Fr, TEST_TREE_HEIGHT};
use rln::hashers::{hash_to_field, poseidon_hash};
use rln::utils::{bytes_le_to_fr, fr_to_bytes_le, normalize_usize};
use rln::circuit::TEST_TREE_HEIGHT;
use rln_wasm::*;
use rln::poseidon_tree::PoseidonTree;
use rln::utils::vec_fr_to_bytes_le;
use wasm_bindgen::{prelude::*, JsValue};
use wasm_bindgen::prelude::*;
use wasm_bindgen::JsValue;
use wasm_bindgen_test::wasm_bindgen_test;
use zerokit_utils::merkle_tree::merkle_tree::ZerokitMerkleTree;
use zerokit_utils::ZerokitMerkleProof;
use rln::utils::vec_u8_to_bytes_le;
#[wasm_bindgen(module = "src/utils.js")]
extern "C" {
@@ -30,58 +24,45 @@ mod tests {
let circom_path = format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/rln.wasm");
let zkey_path = format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/rln_final.zkey");
let vk_path =
format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/verification_key.arkvkey");
format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/verification_key.json");
let zkey = read_file(&zkey_path).unwrap();
let vk = read_file(&vk_path).unwrap();
// Creating an instance of RLN
let rln_instance = wasm_new(zkey, vk).unwrap();
let mut tree = PoseidonTree::default(TEST_TREE_HEIGHT).unwrap();
let rln_instance = wasm_new(tree_height, zkey, vk);
// Creating membership key
let mem_keys = wasm_key_gen(rln_instance).unwrap();
let id_key = mem_keys.subarray(0, 32);
let id_commitment = mem_keys.subarray(32, 64);
let idkey = mem_keys.subarray(0, 32);
let idcommitment = mem_keys.subarray(32, 64);
// Insert PK
wasm_set_next_leaf(rln_instance, idcommitment).unwrap();
// Prepare the message
let signal = b"Hello World";
let signal = "Hello World".as_bytes();
let signal_len: u64 = signal.len() as u64;
let identity_index = tree.leaves_set();
// Setting up the epoch and rln_identifier
let epoch = hash_to_field(b"test-epoch");
let rln_identifier = hash_to_field(b"test-rln-identifier");
let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);
let external_nullifier = fr_to_bytes_le(&external_nullifier);
let user_message_limit = Fr::from(100);
let message_id = fr_to_bytes_le(&Fr::from(0));
let (id_commitment_fr, _) = bytes_le_to_fr(&id_commitment.to_vec()[..]);
let rate_commitment = poseidon_hash(&[id_commitment_fr, user_message_limit]);
tree.update_next(rate_commitment).unwrap();
let x = hash_to_field(signal);
let merkle_proof = tree.proof(identity_index).expect("proof should exist");
let path_elements = merkle_proof.get_path_elements();
let identity_path_index = merkle_proof.get_path_index();
// Setting up the epoch (With 0s for the test)
let epoch = Uint8Array::new_with_length(32);
epoch.fill(0, 0, 32);
let identity_index: u64 = 0;
// Serializing the message
let mut serialized_vec: Vec<u8> = Vec::new();
serialized_vec.append(&mut id_key.to_vec());
serialized_vec.append(&mut fr_to_bytes_le(&user_message_limit).to_vec());
serialized_vec.append(&mut message_id.to_vec());
serialized_vec.append(&mut vec_fr_to_bytes_le(&path_elements).unwrap());
serialized_vec.append(&mut vec_u8_to_bytes_le(&identity_path_index).unwrap());
serialized_vec.append(&mut fr_to_bytes_le(&x));
serialized_vec.append(&mut external_nullifier.to_vec());
serialized_vec.append(&mut idkey.to_vec());
serialized_vec.append(&mut identity_index.to_le_bytes().to_vec());
serialized_vec.append(&mut epoch.to_vec());
serialized_vec.append(&mut signal_len.to_le_bytes().to_vec());
serialized_vec.append(&mut signal.to_vec());
let serialized_message = Uint8Array::from(&serialized_vec[..]);
let serialized_rln_witness =
wasm_get_serialized_rln_witness(rln_instance, serialized_message);
// Obtaining inputs that should be sent to circom witness calculator
let json_inputs =
rln_witness_to_json(rln_instance, serialized_message.clone()).unwrap();
let json_inputs = rln_witness_to_json(rln_instance, serialized_rln_witness.clone());
// Calculating witness with JS
// (Using a JSON since wasm_bindgen does not like Result<Vec<JsBigInt>,JsValue>)
@@ -101,20 +82,27 @@ mod tests {
let proof = generate_rln_proof_with_witness(
rln_instance,
calculated_witness.into(),
serialized_message,
serialized_rln_witness,
)
.unwrap();
// Add signal_len | signal
let mut proof_bytes = proof.to_vec();
proof_bytes.append(&mut normalize_usize(signal.len()));
proof_bytes.append(&mut signal_len.to_le_bytes().to_vec());
proof_bytes.append(&mut signal.to_vec());
let proof_with_signal = Uint8Array::from(&proof_bytes[..]);
// Validate Proof
let is_proof_valid = wasm_verify_rln_proof(rln_instance, proof_with_signal);
assert!(
is_proof_valid.unwrap(),
"validating proof generated with wasm failed"
);
// Validating Proof with Roots
let root = tree.root();
let root_le = fr_to_bytes_le(&root);
let roots = Uint8Array::from(&root_le[..]);
let root = wasm_get_root(rln_instance).unwrap();
let roots = Uint8Array::from(&root.to_vec()[..]);
let proof_with_signal = Uint8Array::from(&proof_bytes[..]);
let is_proof_valid = wasm_verify_with_roots(rln_instance, proof_with_signal, roots);


@@ -1,16 +1,10 @@
[package]
name = "rln"
version = "0.5.1"
version = "0.1.0"
edition = "2021"
license = "MIT OR Apache-2.0"
description = "APIs to manage, compute and verify zkSNARK proofs and RLN primitives"
documentation = "https://github.com/vacp2p/zerokit"
homepage = "https://vac.dev"
repository = "https://github.com/vacp2p/zerokit"
[lib]
crate-type = ["rlib", "staticlib"]
bench = false
crate-type = ["cdylib", "rlib", "staticlib"]
# This flag disables cargo doctests, i.e. testing example code-snippets in documentation
doctest = false
@@ -18,93 +12,46 @@ doctest = false
[dependencies]
# ZKP Generation
ark-ec = { version = "=0.4.1", default-features = false }
ark-ff = { version = "=0.4.1", default-features = false, features = ["asm"] }
ark-std = { version = "=0.4.0", default-features = false }
ark-bn254 = { version = "=0.4.0" }
ark-groth16 = { version = "=0.4.0", features = [
"parallel",
], default-features = false }
ark-relations = { version = "=0.4.0", default-features = false, features = [
"std",
] }
ark-serialize = { version = "=0.4.1", default-features = false }
ark-circom = { version = "=0.1.0", default-features = false, features = [
"circom-2",
] }
ark-zkey = { version = "0.1.0", optional = true, default-features = false }
ark-ec = { version = "0.3.0", default-features = false }
ark-ff = { version = "0.3.0", default-features = false, features = [ "asm"] }
ark-std = { version = "0.3.0", default-features = false }
ark-bn254 = { version = "0.3.0" }
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", default-features = false }
ark-relations = { version = "0.3.0", default-features = false, features = [ "std" ] }
ark-serialize = { version = "0.3.0", default-features = false }
ark-circom = { git = "https://github.com/vacp2p/ark-circom", branch = "wasm", default-features = false, features = ["circom-2"] }
#ark-circom = { git = "https://github.com/vacp2p/ark-circom", branch = "no-ethers-core", features = ["circom-2"] }
# WASM
wasmer = { version = "=2.3.0", default-features = false }
wasmer = { version = "2.3.0", default-features = false }
# error handling
color-eyre = "=0.6.2"
thiserror = "=1.0.39"
color-eyre = "0.5.11"
thiserror = "1.0.0"
# utilities
byteorder = "1.4.3"
cfg-if = "=1.0"
num-bigint = { version = "=0.4.3", default-features = false, features = [
"rand",
] }
num-traits = "=0.2.15"
once_cell = "=1.17.1"
lazy_static = "=1.4.0"
rand = "=0.8.5"
rand_chacha = "=0.3.1"
ruint = { version = "1.10.0", features = ["rand", "serde", "ark-ff-04", "num-bigint"] }
tiny-keccak = { version = "=2.0.2", features = ["keccak"] }
utils = { package = "zerokit_utils", version = "=0.5.1", path = "../utils/", default-features = false }
cfg-if = "1.0"
num-bigint = { version = "0.4.3", default-features = false, features = ["rand", "serde"] }
num-traits = "0.2.11"
once_cell = "1.14.0"
rand = "0.8"
rand_chacha = "0.3.1"
tiny-keccak = { version = "2.0.2", features = ["keccak"] }
utils = { path = "../utils/", default-features = false }
# serialization
prost = "0.13.1"
serde_json = "=1.0.96"
serde = { version = "=1.0.163", features = ["derive"] }
document-features = { version = "=0.2.10", optional = true }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0.48"
[dev-dependencies]
sled = "=0.34.7"
criterion = { version = "=0.4.0", features = ["html_reports"] }
pmtree = { git = "https://github.com/Rate-Limiting-Nullifier/pmtree" }
sled = "0.34.7"
[features]
default = ["parallel", "wasmer/sys-default", "pmtree-ft"]
parallel = [
"ark-ec/parallel",
"ark-ff/parallel",
"ark-std/parallel",
"ark-groth16/parallel",
"utils/parallel",
]
default = ["parallel", "wasmer/sys-default"]
parallel = ["ark-ec/parallel", "ark-ff/parallel", "ark-std/parallel", "ark-groth16/parallel", "utils/parallel"]
wasm = ["wasmer/js", "wasmer/std"]
fullmerkletree = ["default"]
arkzkey = ["ark-zkey"]
stateless = []
# Note: pmtree feature is still experimental
pmtree-ft = ["utils/pmtree-ft"]
[[bench]]
name = "pmtree_benchmark"
harness = false
[[bench]]
name = "circuit_loading_benchmark"
harness = false
[[bench]]
name = "circuit_loading_arkzkey_benchmark"
harness = false
required-features = ["arkzkey"]
[[bench]]
name = "circuit_deser_benchmark"
harness = false
[[bench]]
name = "poseidon_tree_benchmark"
harness = false
[package.metadata.docs.rs]
all-features = true
pmtree = ["default"]


@@ -2,18 +2,6 @@
command = "cargo"
args = ["build", "--release"]
[tasks.test_default]
[tasks.test]
command = "cargo"
args = ["test", "--release", "--", "--nocapture"]
[tasks.test_stateless]
command = "cargo"
args = ["test", "--release", "--features", "stateless"]
[tasks.test_arkzkey]
command = "cargo"
args = ["test", "--release", "--features", "arkzkey"]
[tasks.bench]
command = "cargo"
args = ["bench"]
args = ["test", "--release"]


@@ -3,35 +3,23 @@
This module provides APIs to manage, compute and verify [RLN](https://rfc.vac.dev/spec/32/) zkSNARK proofs and RLN primitives.
## Pre-requisites
### Install dependencies and clone repo
### Install
```sh
make installdeps
git clone https://github.com/vacp2p/zerokit.git
cd zerokit/rln
```
Implemented tests can be executed by running the following within the module folder
### Build and Test
To build and test, run the following commands within the module folder
``` bash
cargo make build
cargo make test_{mode}
```
The {mode} placeholder should be replaced with
* **default** for the default tests;
* **arkzkey** for the tests with the arkzkey feature;
* **stateless** for the tests with the stateless feature.
`cargo test --release`
### Compile ZK circuits
The `rln` (https://github.com/rate-limiting-nullifier/circom-rln) repository, which contains the RLN circuit implementation, is a submodule of zerokit RLN.
The `rln` (https://github.com/privacy-scaling-explorations/rln) repository, which contains the RLN circuit implementation, is a submodule of zerokit RLN.
To compile the RLN circuit
```sh
``` sh
# Update submodules
git submodule update --init --recursive
@@ -58,10 +46,11 @@ include "./rln-base.circom";
component main {public [x, epoch, rln_identifier ]} = RLN(N);
```
However, if `N` is too big, this might require a bigger Powers of Tau ceremony than the one hardcoded in `./scripts/build-circuits.sh`, which is `2^14`.
In such a case, we refer to the official [Circom documentation](https://docs.circom.io/getting-started/proving-circuits/#powers-of-tau) for instructions on how to run an appropriate Powers of Tau ceremony and Phase 2 in order to compile the desired circuit.
Currently, the `rln` module comes with 2 [pre-compiled](https://github.com/vacp2p/zerokit/tree/master/rln/resources) RLN circuits with Merkle trees of height `20` and `32`, respectively.
Currently, the `rln` module comes with three [pre-compiled](https://github.com/vacp2p/zerokit/tree/master/rln/resources) RLN circuits with Merkle trees of height `15`, `19` and `20`, respectively.
## Getting started
@@ -78,7 +67,7 @@ rln = { git = "https://github.com/vacp2p/zerokit" }
First, we need to create a RLN object for a chosen input Merkle tree size.
Note that we need to pass to the RLN object constructor the path where the circuit (`rln.wasm`, built for the input tree size), the corresponding proving key (`rln_final.zkey` or `rln_final.arkzkey`) and the verification key (`verification_key.arkvkey`, optional) are found.
Note that we need to pass to the RLN object constructor the path where the circuit (`rln.wasm`, built for the input tree size), the corresponding proving key (`rln_final.zkey`) and the verification key (`verification_key.json`, optional) are found.
In the following we will use [cursors](https://doc.rust-lang.org/std/io/struct.Cursor.html) as readers/writers for interfacing with RLN public APIs.
@@ -87,14 +76,14 @@ use rln::protocol::*;
use rln::public::*;
use std::io::Cursor;
// We set the RLN parameters:
// - the tree height;
// - the tree config, if it is not defined, the default value will be set
// - the circuit resource folder (requires a trailing "/").
let tree_height = 20;
let input = Cursor::new(json!({}).to_string());
let resources = Cursor::new("../zerokit/rln/resources/tree_height_20/");
// We create a new RLN instance
let mut rln = RLN::new(tree_height, input);
let mut rln = RLN::new(tree_height, resources);
```
### Generate an identity keypair
@@ -107,62 +96,50 @@ let mut buffer = Cursor::new(Vec::<u8>::new());
rln.key_gen(&mut buffer).unwrap();
// We deserialize the keygen output to obtain
// the identity_secret and id_commitment
let (identity_secret_hash, id_commitment) = deserialize_identity_pair(buffer.into_inner());
```
### Add Rate commitment to the RLN Merkle tree
### Add ID commitment to the RLN Merkle tree
```rust
// We define the tree index where id_commitment will be added
let id_index = 10;
let user_message_limit = 10;
// We serialize id_commitment and pass it to set_leaf
let rate_commitment = poseidon_hash(&[id_commitment, user_message_limit]);
let mut buffer = Cursor::new(serialize_field_element(rate_commitment));
let mut buffer = Cursor::new(serialize_field_element(id_commitment));
rln.set_leaf(id_index, &mut buffer).unwrap();
```
Note that when tree leaves are not explicitly set by the user (in this example, all those with index other than `10`), their value is set to a hardcoded default (all-`0` bytes in the current implementation).
### Set external nullifier
### Set epoch
The `external nullifier` includes two parameters.
The first one is `epoch` and it's used to identify messages received in a certain time frame. It usually corresponds to the current UNIX time but can also be set to a random value or generated by a seed, provided that it corresponds to a field element.
The second one is `rln_identifier` and it's used to prevent a RLN ZK proof generated for one application to be re-used in another one.
The epoch, sometimes referred to as _external nullifier_, is used to identify messages received in a certain time frame. It usually corresponds to the current UNIX time but can also be set to a random value or generated by a seed, provided that it corresponds to a field element.
```rust
// We generate the epoch from a date seed and ensure it is
// mapped to a field element by hashing-to-field its content
let epoch = hash_to_field(b"Today at noon, this year");
// We generate the rln_identifier from a seed and ensure it is
// mapped to a field element by hashing-to-field its content
let rln_identifier = hash_to_field(b"test-rln-identifier");
let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);
```
### Set signal
The signal is the message for which we are computing a RLN proof.
```rust
// We set our signal
let signal = b"RLN is awesome";
```
### Generate a RLN proof
We prepare the input to the proof generation routine.
Input buffer is serialized as `[ identity_key | id_index | external_nullifier | user_message_limit | message_id | signal_len | signal ]`.
Input buffer is serialized as `[ identity_key | id_index | epoch | signal_len | signal ]`.
```rust
// We prepare input to the proof generation routine
let proof_input = prepare_prove_input(identity_secret_hash, id_index, external_nullifier, signal);
let proof_input = prepare_prove_input(identity_secret_hash, id_index, epoch, signal);
```
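For reference, below is a minimal sketch of assembling that buffer by hand; `serialize_prove_input` is a hypothetical helper (not part of the public API), and it assumes 32-byte little-endian field elements with 8-byte little-endian indices and lengths, matching `fr_to_bytes_le` and `normalize_usize` from `rln::utils`:

```rust
use rln::circuit::Fr;
use rln::utils::{fr_to_bytes_le, normalize_usize};

// Hypothetical helper mirroring the serialization layout above.
fn serialize_prove_input(
    identity_secret: Fr,
    id_index: usize,
    external_nullifier: Fr,
    user_message_limit: Fr,
    message_id: Fr,
    signal: &[u8],
) -> Vec<u8> {
    let mut buf = Vec::new();
    buf.extend_from_slice(&fr_to_bytes_le(&identity_secret)); // identity_key
    buf.extend_from_slice(&normalize_usize(id_index)); // id_index
    buf.extend_from_slice(&fr_to_bytes_le(&external_nullifier));
    buf.extend_from_slice(&fr_to_bytes_le(&user_message_limit));
    buf.extend_from_slice(&fr_to_bytes_le(&message_id));
    buf.extend_from_slice(&normalize_usize(signal.len())); // signal_len
    buf.extend_from_slice(signal);
    buf
}
```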
We are now ready to generate a RLN ZK proof along with the _public outputs_ of the ZK circuit evaluation.
@@ -179,11 +156,12 @@ rln.generate_rln_proof(&mut in_buffer, &mut out_buffer)
let proof_data = out_buffer.into_inner();
```
The byte vector `proof_data` is serialized as `[ zk-proof | tree_root | external_nullifier | share_x | share_y | nullifier ]`.
The byte vector `proof_data` is serialized as `[ zk-proof | tree_root | epoch | share_x | share_y | nullifier | rln_identifier ]`.
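A sketch of slicing `proof_data` (new layout) back into its components; the 128-byte proof size and 32-byte field-element size are assumptions here, consistent with the note in `circuit.rs` that `Fr` serializes to 32 bytes:

```rust
use rln::utils::bytes_le_to_fr;

// Offsets assume: serialized zk-proof = 128 bytes, field elements = 32 bytes each.
let zk_proof = &proof_data[0..128];
let (tree_root, _) = bytes_le_to_fr(&proof_data[128..160]);
let (external_nullifier, _) = bytes_le_to_fr(&proof_data[160..192]);
let (share_x, _) = bytes_le_to_fr(&proof_data[192..224]);
let (share_y, _) = bytes_le_to_fr(&proof_data[224..256]);
let (nullifier, _) = bytes_le_to_fr(&proof_data[256..288]);
```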
### Verify a RLN proof
We prepare the input to the proof verification routine.
Input buffer is serialized as `[proof_data | signal_len | signal ]`, where `proof_data` is (computed as) the output obtained by `generate_rln_proof`.
@@ -196,21 +174,17 @@ let mut in_buffer = Cursor::new(verify_data);
let verified = rln.verify(&mut in_buffer).unwrap();
```
We check if the proof verification was successful:
```rust
// We ensure the proof is valid
assert!(verified);
```
## Get involved!
Zerokit RLN public and FFI APIs allow interaction with many more features than what is briefly showcased above.
We invite you to check our API documentation by running
```sh
cargo doc --no-deps
```
and look at the unit tests for hints on how to interface with and use them.


@@ -1,22 +0,0 @@
use criterion::{criterion_group, criterion_main, Criterion};
use rln::circuit::{vk_from_ark_serialized, VK_BYTES};
// Here we benchmark how long deserialization of the
// verifying key takes, testing only the bytes => VerifyingKey
// conversion performed by vk_from_ark_serialized
pub fn vk_deserialize_benchmark(c: &mut Criterion) {
let vk = VK_BYTES;
c.bench_function("vk::vk_from_ark_serialized", |b| {
b.iter(|| {
let _ = vk_from_ark_serialized(vk);
})
});
}
criterion_group! {
name = benches;
config = Criterion::default().measurement_time(std::time::Duration::from_secs(10));
targets = vk_deserialize_benchmark
}
criterion_main!(benches);


@@ -1,43 +0,0 @@
use criterion::{criterion_group, criterion_main, Criterion};
use rln::circuit::{
read_arkzkey_from_bytes_compressed, read_arkzkey_from_bytes_uncompressed, ARKZKEY_BYTES,
ARKZKEY_BYTES_UNCOMPR,
};
pub fn uncompressed_bench(c: &mut Criterion) {
let arkzkey = ARKZKEY_BYTES_UNCOMPR.to_vec();
let size = arkzkey.len() as f32;
println!(
"Size of uncompressed arkzkey: {:.2?} MB",
size / 1024.0 / 1024.0
);
c.bench_function("arkzkey::arkzkey_from_raw_uncompressed", |b| {
b.iter(|| {
let r = read_arkzkey_from_bytes_uncompressed(&arkzkey);
assert_eq!(r.is_ok(), true);
})
});
}
pub fn compressed_bench(c: &mut Criterion) {
let arkzkey = ARKZKEY_BYTES.to_vec();
let size = arkzkey.len() as f32;
println!(
"Size of compressed arkzkey: {:.2?} MB",
size / 1024.0 / 1024.0
);
c.bench_function("arkzkey::arkzkey_from_raw_compressed", |b| {
b.iter(|| {
let r = read_arkzkey_from_bytes_compressed(&arkzkey);
assert_eq!(r.is_ok(), true);
})
});
}
criterion_group! {
name = benches;
config = Criterion::default().measurement_time(std::time::Duration::from_secs(250));
targets = uncompressed_bench, compressed_bench
}
criterion_main!(benches);


@@ -1,24 +0,0 @@
use ark_circom::read_zkey;
use criterion::{criterion_group, criterion_main, Criterion};
use std::io::Cursor;
pub fn zkey_load_benchmark(c: &mut Criterion) {
let zkey = rln::circuit::ZKEY_BYTES.to_vec();
let size = zkey.len() as f32;
println!("Size of zkey: {:.2?} MB", size / 1024.0 / 1024.0);
c.bench_function("zkey::zkey_from_raw", |b| {
b.iter(|| {
let mut reader = Cursor::new(zkey.clone());
let r = read_zkey(&mut reader);
assert_eq!(r.is_ok(), true);
})
});
}
criterion_group! {
name = benches;
config = Criterion::default().measurement_time(std::time::Duration::from_secs(250));
targets = zkey_load_benchmark
}
criterion_main!(benches);


@@ -1,56 +0,0 @@
use criterion::{criterion_group, criterion_main, Criterion};
use rln::{circuit::Fr, pm_tree_adapter::PmTree};
use utils::ZerokitMerkleTree;
pub fn pmtree_benchmark(c: &mut Criterion) {
let mut tree = PmTree::default(2).unwrap();
let leaves: Vec<Fr> = (0..4).map(|s| Fr::from(s)).collect();
c.bench_function("Pmtree::set", |b| {
b.iter(|| {
tree.set(0, leaves[0]).unwrap();
})
});
c.bench_function("Pmtree:delete", |b| {
b.iter(|| {
tree.delete(0).unwrap();
})
});
c.bench_function("Pmtree::override_range", |b| {
b.iter(|| {
tree.override_range(0, leaves.clone(), [0, 1, 2, 3])
.unwrap();
})
});
c.bench_function("Pmtree::compute_root", |b| {
b.iter(|| {
tree.compute_root().unwrap();
})
});
c.bench_function("Pmtree::get", |b| {
b.iter(|| {
tree.get(0).unwrap();
})
});
// Check the intermediate node getter, which requires additional computation of the subtree root index
c.bench_function("Pmtree::get_subtree_root", |b| {
b.iter(|| {
tree.get_subtree_root(1, 0).unwrap();
})
});
c.bench_function("Pmtree::get_empty_leaves_indices", |b| {
b.iter(|| {
tree.get_empty_leaves_indices();
})
});
}
criterion_group!(benches, pmtree_benchmark);
criterion_main!(benches);


@@ -1,79 +0,0 @@
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
use rln::{
circuit::{Fr, TEST_TREE_HEIGHT},
hashers::PoseidonHash,
};
use utils::{FullMerkleTree, OptimalMerkleTree, ZerokitMerkleTree};
pub fn get_leaves(n: u32) -> Vec<Fr> {
(0..n).map(|s| Fr::from(s)).collect()
}
pub fn optimal_merkle_tree_poseidon_benchmark(c: &mut Criterion) {
c.bench_function("OptimalMerkleTree::<Poseidon>::full_height_gen", |b| {
b.iter(|| {
OptimalMerkleTree::<PoseidonHash>::default(TEST_TREE_HEIGHT).unwrap();
})
});
let mut group = c.benchmark_group("Set");
for &n in [1u32, 10, 100].iter() {
let leaves = get_leaves(n);
let mut tree = OptimalMerkleTree::<PoseidonHash>::default(TEST_TREE_HEIGHT).unwrap();
group.bench_function(
BenchmarkId::new("OptimalMerkleTree::<Poseidon>::set", n),
|b| {
b.iter(|| {
for (i, l) in leaves.iter().enumerate() {
let _ = tree.set(i, *l);
}
})
},
);
group.bench_function(
BenchmarkId::new("OptimalMerkleTree::<Poseidon>::set_range", n),
|b| b.iter(|| tree.set_range(0, leaves.iter().cloned())),
);
}
group.finish();
}
pub fn full_merkle_tree_poseidon_benchmark(c: &mut Criterion) {
c.bench_function("FullMerkleTree::<Poseidon>::full_height_gen", |b| {
b.iter(|| {
FullMerkleTree::<PoseidonHash>::default(TEST_TREE_HEIGHT).unwrap();
})
});
let mut group = c.benchmark_group("Set");
for &n in [1u32, 10, 100].iter() {
let leaves = get_leaves(n);
let mut tree = FullMerkleTree::<PoseidonHash>::default(TEST_TREE_HEIGHT).unwrap();
group.bench_function(
BenchmarkId::new("FullMerkleTree::<Poseidon>::set", n),
|b| {
b.iter(|| {
for (i, l) in leaves.iter().enumerate() {
let _ = tree.set(i, *l);
}
})
},
);
group.bench_function(
BenchmarkId::new("FullMerkleTree::<Poseidon>::set_range", n),
|b| b.iter(|| tree.set_range(0, leaves.iter().cloned())),
);
}
group.finish();
}
criterion_group!(
benches,
optimal_merkle_tree_poseidon_benchmark,
full_merkle_tree_poseidon_benchmark
);
criterion_main!(benches);



@@ -0,0 +1,119 @@
{
"protocol": "groth16",
"curve": "bn128",
"nPublic": 6,
"vk_alpha_1": [
"20124996762962216725442980738609010303800849578410091356605067053491763969391",
"9118593021526896828671519912099489027245924097793322973632351264852174143923",
"1"
],
"vk_beta_2": [
[
"4693952934005375501364248788849686435240706020501681709396105298107971354382",
"14346958885444710485362620645446987998958218205939139994511461437152241966681"
],
[
"16851772916911573982706166384196538392731905827088356034885868448550849804972",
"823612331030938060799959717749043047845343400798220427319188951998582076532"
],
[
"1",
"0"
]
],
"vk_gamma_2": [
[
"10857046999023057135944570762232829481370756359578518086990519993285655852781",
"11559732032986387107991004021392285783925812861821192530917403151452391805634"
],
[
"8495653923123431417604973247489272438418190587263600148770280649306958101930",
"4082367875863433681332203403145435568316851327593401208105741076214120093531"
],
[
"1",
"0"
]
],
"vk_delta_2": [
[
"1361919643088555407518565462732544232965454074504004321739078395285189557133",
"20823246840633598579879223919854294301857184404415306521912631074982696570306"
],
[
"7088590198103342249937795923142619828109070290720888704402714617857746884833",
"8191367139632195506244169264298620546181137131063303219908889318280111188437"
],
[
"1",
"0"
]
],
"vk_alphabeta_12": [
[
[
"12608968655665301215455851857466367636344427685631271961542642719683786103711",
"9849575605876329747382930567422916152871921500826003490242628251047652318086"
],
[
"6322029441245076030714726551623552073612922718416871603535535085523083939021",
"8700115492541474338049149013125102281865518624059015445617546140629435818912"
],
[
"10674973475340072635573101639867487770811074181475255667220644196793546640210",
"2926286967251299230490668407790788696102889214647256022788211245826267484824"
]
],
[
[
"9660441540778523475944706619139394922744328902833875392144658911530830074820",
"19548113127774514328631808547691096362144426239827206966690021428110281506546"
],
[
"1870837942477655969123169532603615788122896469891695773961478956740992497097",
"12536105729661705698805725105036536744930776470051238187456307227425796690780"
],
[
"21811903352654147452884857281720047789720483752548991551595462057142824037334",
"19021616763967199151052893283384285352200445499680068407023236283004353578353"
]
]
],
"IC": [
[
"17643142412395322664866141827318671249236739056291610144830020671604112279111",
"13273439661778801509295280274403992505521239023074387826870538372514206268318",
"1"
],
[
"12325966053136615826793633393742326952102053533176311103856731330114882211366",
"6439956820140153832120005353467272867287237423425778281905068783317736451260",
"1"
],
[
"20405310272367450124741832665322768131899487413829191383721623069139009993137",
"21336772016824870564600007750206596010566056069977718959140462128560786193566",
"1"
],
[
"4007669092231576644992949839487535590075070172447826102934640178940614212519",
"7597503385395289202372182678960254605827199004598882158153019657732525465207",
"1"
],
[
"4545695279389338758267531646940033299700127241196839077811942492841603458462",
"6635771967009274882904456432128877995932122611166121203658485990305433499873",
"1"
],
[
"7876954805169515500747828488548350352651069599547377092970620945851311591012",
"7571431725691513008054581132582771105743462534789373657638701712901679323321",
"1"
],
[
"5563973122249220346301217166900152021860462617567141574881706390202619333219",
"5147729144109676590873823097632042430451708874867871369293332620382492068692",
"1"
]
]
}



@@ -0,0 +1,119 @@
{
"protocol": "groth16",
"curve": "bn128",
"nPublic": 6,
"vk_alpha_1": [
"20124996762962216725442980738609010303800849578410091356605067053491763969391",
"9118593021526896828671519912099489027245924097793322973632351264852174143923",
"1"
],
"vk_beta_2": [
[
"4693952934005375501364248788849686435240706020501681709396105298107971354382",
"14346958885444710485362620645446987998958218205939139994511461437152241966681"
],
[
"16851772916911573982706166384196538392731905827088356034885868448550849804972",
"823612331030938060799959717749043047845343400798220427319188951998582076532"
],
[
"1",
"0"
]
],
"vk_gamma_2": [
[
"10857046999023057135944570762232829481370756359578518086990519993285655852781",
"11559732032986387107991004021392285783925812861821192530917403151452391805634"
],
[
"8495653923123431417604973247489272438418190587263600148770280649306958101930",
"4082367875863433681332203403145435568316851327593401208105741076214120093531"
],
[
"1",
"0"
]
],
"vk_delta_2": [
[
"16125279975606773676640811113051624654121459921695914044301154938920321009721",
"14844345250267029614093295465313288254479124604567709177260777529651293576873"
],
[
"20349277326920398483890518242229158117668855310237215044647746783223259766294",
"19338776107510040969200058390413661029003750817172740054990168933780935479540"
],
[
"1",
"0"
]
],
"vk_alphabeta_12": [
[
[
"12608968655665301215455851857466367636344427685631271961542642719683786103711",
"9849575605876329747382930567422916152871921500826003490242628251047652318086"
],
[
"6322029441245076030714726551623552073612922718416871603535535085523083939021",
"8700115492541474338049149013125102281865518624059015445617546140629435818912"
],
[
"10674973475340072635573101639867487770811074181475255667220644196793546640210",
"2926286967251299230490668407790788696102889214647256022788211245826267484824"
]
],
[
[
"9660441540778523475944706619139394922744328902833875392144658911530830074820",
"19548113127774514328631808547691096362144426239827206966690021428110281506546"
],
[
"1870837942477655969123169532603615788122896469891695773961478956740992497097",
"12536105729661705698805725105036536744930776470051238187456307227425796690780"
],
[
"21811903352654147452884857281720047789720483752548991551595462057142824037334",
"19021616763967199151052893283384285352200445499680068407023236283004353578353"
]
]
],
"IC": [
[
"5645604624116784480262312750033349186912223090668673154853165165224747369512",
"5656337658385597582701340925622307146226708710361427687425735166776477641124",
"1"
],
[
"8216930132302312821663833393171053651364962198587857550991047765311607638330",
"19934865864074163318938688021560358348660709566570123384268356491416384822148",
"1"
],
[
"11046959016591768534564223076484566731774575511709349452804727872479525392631",
"9401797690410912638766111919371607085248054251975419812613989999345815833269",
"1"
],
[
"13216594148914395028254776738842380005944817065680915990743659996725367876414",
"11541283802841111343960351782994043892623551381569479006737253908665900144087",
"1"
],
[
"6957074593219251760608960101283708711892008557897337713430173510328411964571",
"21673833055087220750009279957462375662312260098732685145862504142183400549467",
"1"
],
[
"20795071270535109448604057031148356571036039566776607847840379441839742201050",
"21654952744643117202636583766828639581880877547772465264383291983528268115687",
"1"
],
[
"19143058772755719660075704757531991493801758701561469885274062297246796623789",
"3996020163280925980543600106196205910576345230982361007978823537163123181007",
"1"
]
]
}


@@ -0,0 +1,119 @@
{
"protocol": "groth16",
"curve": "bn128",
"nPublic": 6,
"vk_alpha_1": [
"20124996762962216725442980738609010303800849578410091356605067053491763969391",
"9118593021526896828671519912099489027245924097793322973632351264852174143923",
"1"
],
"vk_beta_2": [
[
"4693952934005375501364248788849686435240706020501681709396105298107971354382",
"14346958885444710485362620645446987998958218205939139994511461437152241966681"
],
[
"16851772916911573982706166384196538392731905827088356034885868448550849804972",
"823612331030938060799959717749043047845343400798220427319188951998582076532"
],
[
"1",
"0"
]
],
"vk_gamma_2": [
[
"10857046999023057135944570762232829481370756359578518086990519993285655852781",
"11559732032986387107991004021392285783925812861821192530917403151452391805634"
],
[
"8495653923123431417604973247489272438418190587263600148770280649306958101930",
"4082367875863433681332203403145435568316851327593401208105741076214120093531"
],
[
"1",
"0"
]
],
"vk_delta_2": [
[
"8353516066399360694538747105302262515182301251524941126222712285088022964076",
"9329524012539638256356482961742014315122377605267454801030953882967973561832"
],
[
"16805391589556134376869247619848130874761233086443465978238468412168162326401",
"10111259694977636294287802909665108497237922060047080343914303287629927847739"
],
[
"1",
"0"
]
],
"vk_alphabeta_12": [
[
[
"12608968655665301215455851857466367636344427685631271961542642719683786103711",
"9849575605876329747382930567422916152871921500826003490242628251047652318086"
],
[
"6322029441245076030714726551623552073612922718416871603535535085523083939021",
"8700115492541474338049149013125102281865518624059015445617546140629435818912"
],
[
"10674973475340072635573101639867487770811074181475255667220644196793546640210",
"2926286967251299230490668407790788696102889214647256022788211245826267484824"
]
],
[
[
"9660441540778523475944706619139394922744328902833875392144658911530830074820",
"19548113127774514328631808547691096362144426239827206966690021428110281506546"
],
[
"1870837942477655969123169532603615788122896469891695773961478956740992497097",
"12536105729661705698805725105036536744930776470051238187456307227425796690780"
],
[
"21811903352654147452884857281720047789720483752548991551595462057142824037334",
"19021616763967199151052893283384285352200445499680068407023236283004353578353"
]
]
],
"IC": [
[
"11992897507809711711025355300535923222599547639134311050809253678876341466909",
"17181525095924075896332561978747020491074338784673526378866503154966799128110",
"1"
],
[
"17018665030246167677911144513385572506766200776123272044534328594850561667818",
"18601114175490465275436712413925513066546725461375425769709566180981674884464",
"1"
],
[
"18799470100699658367834559797874857804183288553462108031963980039244731716542",
"13064227487174191981628537974951887429496059857753101852163607049188825592007",
"1"
],
[
"17432501889058124609368103715904104425610382063762621017593209214189134571156",
"13406815149699834788256141097399354592751313348962590382887503595131085938635",
"1"
],
[
"10320964835612716439094703312987075811498239445882526576970512041988148264481",
"9024164961646353611176283204118089412001502110138072989569118393359029324867",
"1"
],
[
"718355081067365548229685160476620267257521491773976402837645005858953849298",
"14635482993933988261008156660773180150752190597753512086153001683711587601974",
"1"
],
[
"11777720285956632126519898515392071627539405001940313098390150593689568177535",
"8483603647274280691250972408211651407952870456587066148445913156086740744515",
"1"
]
]
}


@@ -1,58 +1,43 @@
// This crate provides interfaces for the zero-knowledge circuit and keys
use crate::iden3calc::calc_witness;
use ark_bn254::{
Bn254, Fq as ArkFq, Fq2 as ArkFq2, Fr as ArkFr, G1Affine as ArkG1Affine,
G1Projective as ArkG1Projective, G2Affine as ArkG2Affine, G2Projective as ArkG2Projective,
};
use ark_circom::read_zkey;
use ark_groth16::{ProvingKey, VerifyingKey};
use ark_relations::r1cs::ConstraintMatrices;
use ark_serialize::CanonicalDeserialize;
use cfg_if::cfg_if;
use color_eyre::{Report, Result};
use num_bigint::BigInt;
use num_bigint::BigUint;
use serde_json::Value;
use std::fs::File;
use std::io::{Cursor, Error, ErrorKind, Result};
use std::path::Path;
use std::str::FromStr;
#[cfg(not(target_arch = "wasm32"))]
use ::lazy_static::lazy_static;
#[cfg(feature = "arkzkey")]
use {
ark_zkey::{read_arkzkey_from_bytes, SerializableConstraintMatrices, SerializableProvingKey},
color_eyre::eyre::WrapErr,
};
#[cfg(not(feature = "arkzkey"))]
use {ark_circom::read_zkey, std::io::Cursor};
#[cfg(feature = "arkzkey")]
pub const ARKZKEY_BYTES: &[u8] = include_bytes!("../resources/tree_height_20/rln_final.arkzkey");
#[cfg(feature = "arkzkey")]
pub const ARKZKEY_BYTES_UNCOMPR: &[u8] =
include_bytes!("../resources/tree_height_20/rln_final_uncompr.arkzkey");
pub const ZKEY_BYTES: &[u8] = include_bytes!("../resources/tree_height_20/rln_final.zkey");
pub const VK_BYTES: &[u8] = include_bytes!("../resources/tree_height_20/verification_key.arkvkey");
const GRAPH_BYTES: &[u8] = include_bytes!("../resources/tree_height_20/graph.bin");
#[cfg(not(target_arch = "wasm32"))]
lazy_static! {
#[cfg(not(target_arch = "wasm32"))]
static ref ZKEY: (ProvingKey<Curve>, ConstraintMatrices<Fr>) = {
cfg_if! {
if #[cfg(feature = "arkzkey")] {
read_arkzkey_from_bytes_uncompressed(ARKZKEY_BYTES_UNCOMPR).expect("Failed to read arkzkey")
} else {
let mut reader = Cursor::new(ZKEY_BYTES);
read_zkey(&mut reader).expect("Failed to read zkey")
}
}
};
#[cfg(not(target_arch = "wasm32"))]
static ref VK: VerifyingKey<Curve> = vk_from_ark_serialized(VK_BYTES).expect("Failed to read vk");
cfg_if! {
if #[cfg(not(target_arch = "wasm32"))] {
use ark_circom::{WitnessCalculator};
use once_cell::sync::OnceCell;
use std::sync::Mutex;
use wasmer::{Module, Store};
}
}
pub const TEST_TREE_HEIGHT: usize = 20;
const ZKEY_FILENAME: &str = "rln_final.zkey";
const VK_FILENAME: &str = "verifying_key.json";
const WASM_FILENAME: &str = "rln.wasm";
// These parameters are used for tests
// Note that the circuit and keys in TEST_RESOURCES_FOLDER are compiled for Merkle trees of height 15, 19 and 20
// Changing these parameters to values other than these defaults will cause zkSNARK proof verification to fail
pub const TEST_PARAMETERS_INDEX: usize = 2;
pub const TEST_TREE_HEIGHT: usize = [15, 19, 20][TEST_PARAMETERS_INDEX];
pub const TEST_RESOURCES_FOLDER: &str = [
"./resources/tree_height_15/",
"./resources/tree_height_19/",
"./resources/tree_height_20/",
][TEST_PARAMETERS_INDEX];
// The following types define the pairing-friendly elliptic curve and the underlying finite fields and groups used by default in this module
// Note that proofs are serialized assuming Fr to be 4x8 = 32 bytes in size. Hence, changing to a curve with a different encoding will make proof verification fail
@@ -66,142 +51,199 @@ pub type G2Affine = ArkG2Affine;
pub type G2Projective = ArkG2Projective;
// Loads the proving key using a bytes vector
pub fn zkey_from_raw(zkey_data: &[u8]) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
if zkey_data.is_empty() {
return Err(Report::msg("No proving key found!"));
pub fn zkey_from_raw(zkey_data: &Vec<u8>) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
if !zkey_data.is_empty() {
let mut c = Cursor::new(zkey_data);
let proving_key_and_matrices = read_zkey(&mut c)?;
Ok(proving_key_and_matrices)
} else {
Err(Error::new(ErrorKind::NotFound, "No proving key found!"))
}
let proving_key_and_matrices = match () {
#[cfg(feature = "arkzkey")]
() => read_arkzkey_from_bytes(zkey_data)?,
#[cfg(not(feature = "arkzkey"))]
() => {
let mut reader = Cursor::new(zkey_data);
read_zkey(&mut reader)?
}
};
Ok(proving_key_and_matrices)
}
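// For instance, with the `&[u8]` signature above, the embedded key bytes
// can be loaded directly (a usage sketch, assuming the ZKEY_BYTES constant
// declared earlier in this file):
//
// let (proving_key, matrices) =
//     zkey_from_raw(ZKEY_BYTES).expect("embedded proving key should deserialize");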
pub fn calculate_rln_witness<I: IntoIterator<Item = (String, Vec<BigInt>)>>(inputs: I) -> Vec<Fr> {
calc_witness(inputs, GRAPH_BYTES)
}
// Loads the proving key
#[cfg(not(target_arch = "wasm32"))]
pub fn zkey_from_folder() -> &'static (ProvingKey<Curve>, ConstraintMatrices<Fr>) {
&ZKEY
pub fn zkey_from_folder(
resources_folder: &str,
) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
let zkey_path = format!("{resources_folder}{ZKEY_FILENAME}");
if Path::new(&zkey_path).exists() {
let mut file = File::open(&zkey_path)?;
let proving_key_and_matrices = read_zkey(&mut file)?;
Ok(proving_key_and_matrices)
} else {
Err(Error::new(ErrorKind::NotFound, "No proving key found!"))
}
}
// Loads the verification key from a bytes vector
pub fn vk_from_raw(vk_data: &[u8], zkey_data: &[u8]) -> Result<VerifyingKey<Curve>> {
pub fn vk_from_raw(vk_data: &Vec<u8>, zkey_data: &Vec<u8>) -> Result<VerifyingKey<Curve>> {
let verifying_key: VerifyingKey<Curve>;
if !vk_data.is_empty() {
return vk_from_ark_serialized(vk_data);
}
if !zkey_data.is_empty() {
verifying_key = vk_from_vector(vk_data);
Ok(verifying_key)
} else if !zkey_data.is_empty() {
let (proving_key, _matrices) = zkey_from_raw(zkey_data)?;
return Ok(proving_key.vk);
verifying_key = proving_key.vk;
Ok(verifying_key)
} else {
Err(Error::new(
ErrorKind::NotFound,
"No proving/verification key found!",
))
}
Err(Report::msg("No proving/verification key found!"))
}
// Loads the verification key
#[cfg(not(target_arch = "wasm32"))]
pub fn vk_from_folder() -> &'static VerifyingKey<Curve> {
&VK
pub fn vk_from_folder(resources_folder: &str) -> Result<VerifyingKey<Curve>> {
let vk_path = format!("{resources_folder}{VK_FILENAME}");
let zkey_path = format!("{resources_folder}{ZKEY_FILENAME}");
let verifying_key: VerifyingKey<Curve>;
if Path::new(&vk_path).exists() {
verifying_key = vk_from_json(&vk_path);
Ok(verifying_key)
} else if Path::new(&zkey_path).exists() {
let (proving_key, _matrices) = zkey_from_folder(resources_folder)?;
verifying_key = proving_key.vk;
Ok(verifying_key)
} else {
Err(Error::new(
ErrorKind::NotFound,
"No proving/verification key found!",
))
}
}
// Computes the verification key from a bytes vector containing pre-processed ark-serialized verification key
// uncompressed, unchecked
pub fn vk_from_ark_serialized(data: &[u8]) -> Result<VerifyingKey<Curve>> {
let vk = VerifyingKey::<Curve>::deserialize_uncompressed_unchecked(data)?;
Ok(vk)
#[cfg(not(target_arch = "wasm32"))]
static WITNESS_CALCULATOR: OnceCell<Mutex<WitnessCalculator>> = OnceCell::new();
// Initializes the witness calculator using a bytes vector
#[cfg(not(target_arch = "wasm32"))]
pub fn circom_from_raw(wasm_buffer: Vec<u8>) -> &'static Mutex<WitnessCalculator> {
WITNESS_CALCULATOR.get_or_init(|| {
let store = Store::default();
let module = Module::new(&store, wasm_buffer).unwrap();
let result =
WitnessCalculator::from_module(module).expect("Failed to create witness calculator");
Mutex::new(result)
})
}
// Initializes the witness calculator
#[cfg(not(target_arch = "wasm32"))]
pub fn circom_from_folder(resources_folder: &str) -> &'static Mutex<WitnessCalculator> {
// We read the wasm file
let wasm_path = format!("{resources_folder}{WASM_FILENAME}");
let wasm_buffer = std::fs::read(&wasm_path).unwrap();
circom_from_raw(wasm_buffer)
}
// The following function implementations are taken/adapted from https://github.com/gakonst/ark-circom/blob/1732e15d6313fe176b0b1abb858ac9e095d0dbd7/src/zkey.rs
// Utilities to convert a JSON verification key into a groth16::VerifyingKey
fn fq_from_str(s: &str) -> Fq {
Fq::try_from(BigUint::from_str(s).unwrap()).unwrap()
}
// Extracts the element in G1 corresponding to its JSON serialization
fn json_to_g1(json: &Value, key: &str) -> G1Affine {
let els: Vec<String> = json
.get(key)
.unwrap()
.as_array()
.unwrap()
.iter()
.map(|i| i.as_str().unwrap().to_string())
.collect();
G1Affine::from(G1Projective::new(
fq_from_str(&els[0]),
fq_from_str(&els[1]),
fq_from_str(&els[2]),
))
}
// Extracts the vector of G1 elements corresponding to its JSON serialization
fn json_to_g1_vec(json: &Value, key: &str) -> Vec<G1Affine> {
let els: Vec<Vec<String>> = json
.get(key)
.unwrap()
.as_array()
.unwrap()
.iter()
.map(|i| {
i.as_array()
.unwrap()
.iter()
.map(|x| x.as_str().unwrap().to_string())
.collect::<Vec<String>>()
})
.collect();
els.iter()
.map(|coords| {
G1Affine::from(G1Projective::new(
fq_from_str(&coords[0]),
fq_from_str(&coords[1]),
fq_from_str(&coords[2]),
))
})
.collect()
}
// Extracts the element in G2 corresponding to its JSON serialization
fn json_to_g2(json: &Value, key: &str) -> G2Affine {
let els: Vec<Vec<String>> = json
.get(key)
.unwrap()
.as_array()
.unwrap()
.iter()
.map(|i| {
i.as_array()
.unwrap()
.iter()
.map(|x| x.as_str().unwrap().to_string())
.collect::<Vec<String>>()
})
.collect();
let x = Fq2::new(fq_from_str(&els[0][0]), fq_from_str(&els[0][1]));
let y = Fq2::new(fq_from_str(&els[1][0]), fq_from_str(&els[1][1]));
let z = Fq2::new(fq_from_str(&els[2][0]), fq_from_str(&els[2][1]));
G2Affine::from(G2Projective::new(x, y, z))
}
// Converts JSON to a VerifyingKey
fn to_verifying_key(json: serde_json::Value) -> VerifyingKey<Curve> {
VerifyingKey {
alpha_g1: json_to_g1(&json, "vk_alpha_1"),
beta_g2: json_to_g2(&json, "vk_beta_2"),
gamma_g2: json_to_g2(&json, "vk_gamma_2"),
delta_g2: json_to_g2(&json, "vk_delta_2"),
gamma_abc_g1: json_to_g1_vec(&json, "IC"),
}
}
// Computes the verification key from its JSON serialization
fn vk_from_json(vk_path: &str) -> VerifyingKey<Curve> {
let json = std::fs::read_to_string(vk_path).unwrap();
let json: Value = serde_json::from_str(&json).unwrap();
to_verifying_key(json)
}
// Computes the verification key from a bytes vector containing its JSON serialization
fn vk_from_vector(vk: &[u8]) -> VerifyingKey<Curve> {
let json = String::from_utf8(vk.to_vec()).expect("Found invalid UTF-8");
let json: Value = serde_json::from_str(&json).unwrap();
to_verifying_key(json)
}
// Checks verification key to be correct with respect to proving key
#[cfg(not(target_arch = "wasm32"))]
pub fn check_vk_from_zkey(verifying_key: VerifyingKey<Curve>) -> Result<()> {
let (proving_key, _matrices) = zkey_from_folder();
if proving_key.vk == verifying_key {
Ok(())
} else {
Err(Report::msg("verifying_keys are not equal"))
}
}
////////////////////////////////////////////////////////
// Functions from [arkz-key](https://github.com/zkmopro/ark-zkey/blob/main/src/lib.rs#L106)
// without print and allow to choose between compressed and uncompressed arkzkey
////////////////////////////////////////////////////////
#[cfg(feature = "arkzkey")]
pub fn read_arkzkey_from_bytes_uncompressed(
arkzkey_data: &[u8],
) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
if arkzkey_data.is_empty() {
return Err(Report::msg("No proving key found!"));
}
let mut cursor = std::io::Cursor::new(arkzkey_data);
let serialized_proving_key =
SerializableProvingKey::deserialize_uncompressed_unchecked(&mut cursor)
.wrap_err("Failed to deserialize proving key")?;
let serialized_constraint_matrices =
SerializableConstraintMatrices::deserialize_uncompressed_unchecked(&mut cursor)
.wrap_err("Failed to deserialize constraint matrices")?;
// Convert into the form expected by the API
let proving_key: ProvingKey<Bn254> = serialized_proving_key.0;
let constraint_matrices: ConstraintMatrices<ark_bn254::Fr> = ConstraintMatrices {
num_instance_variables: serialized_constraint_matrices.num_instance_variables,
num_witness_variables: serialized_constraint_matrices.num_witness_variables,
num_constraints: serialized_constraint_matrices.num_constraints,
a_num_non_zero: serialized_constraint_matrices.a_num_non_zero,
b_num_non_zero: serialized_constraint_matrices.b_num_non_zero,
c_num_non_zero: serialized_constraint_matrices.c_num_non_zero,
a: serialized_constraint_matrices.a.data,
b: serialized_constraint_matrices.b.data,
c: serialized_constraint_matrices.c.data,
};
Ok((proving_key, constraint_matrices))
}
#[cfg(feature = "arkzkey")]
pub fn read_arkzkey_from_bytes_compressed(
arkzkey_data: &[u8],
) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
if arkzkey_data.is_empty() {
return Err(Report::msg("No proving key found!"));
}
let mut cursor = std::io::Cursor::new(arkzkey_data);
let serialized_proving_key =
SerializableProvingKey::deserialize_compressed_unchecked(&mut cursor)
.wrap_err("Failed to deserialize proving key")?;
let serialized_constraint_matrices =
SerializableConstraintMatrices::deserialize_compressed_unchecked(&mut cursor)
.wrap_err("Failed to deserialize constraint matrices")?;
    // Convert into the form expected by the API
let proving_key: ProvingKey<Bn254> = serialized_proving_key.0;
let constraint_matrices: ConstraintMatrices<ark_bn254::Fr> = ConstraintMatrices {
num_instance_variables: serialized_constraint_matrices.num_instance_variables,
num_witness_variables: serialized_constraint_matrices.num_witness_variables,
num_constraints: serialized_constraint_matrices.num_constraints,
a_num_non_zero: serialized_constraint_matrices.a_num_non_zero,
b_num_non_zero: serialized_constraint_matrices.b_num_non_zero,
c_num_non_zero: serialized_constraint_matrices.c_num_non_zero,
a: serialized_constraint_matrices.a.data,
b: serialized_constraint_matrices.b.data,
c: serialized_constraint_matrices.c.data,
};
Ok((proving_key, constraint_matrices))
pub fn check_vk_from_zkey(resources_folder: &str, verifying_key: VerifyingKey<Curve>) {
let (proving_key, _matrices) = zkey_from_folder(resources_folder).unwrap();
assert_eq!(proving_key.vk, verifying_key);
}
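// Illustrative only (not part of the diff): a hedged sketch of how the two
// arkzkey readers above might be used. `arkzkey_bytes` is a hypothetical
// caller-provided buffer; errors propagate through the readers' Result type.
#[cfg(feature = "arkzkey")]
fn load_arkzkey_sketch(arkzkey_bytes: &[u8], compressed: bool) -> Result<()> {
    let (proving_key, matrices) = if compressed {
        read_arkzkey_from_bytes_compressed(arkzkey_bytes)?
    } else {
        read_arkzkey_from_bytes_uncompressed(arkzkey_bytes)?
    };
    // Both readers yield the proving key together with the constraint matrices
    println!("constraints: {}", matrices.num_constraints);
    let _ = proving_key;
    Ok(())
}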

View File

@@ -2,7 +2,7 @@
use std::slice;
use crate::public::{hash as public_hash, poseidon_hash as public_poseidon_hash, RLN};
use crate::public::RLN;
// Macro to call methods with an arbitrary number of arguments,
// The first argument to the macro is the context,
@@ -12,15 +12,7 @@ macro_rules! call {
($instance:expr, $method:ident $(, $arg:expr)*) => {
{
let new_instance: &mut RLN = $instance.process();
match new_instance.$method($($arg.process()),*) {
Ok(()) => {
true
}
Err(err) => {
eprintln!("execution error: {err}");
false
}
}
new_instance.$method($($arg.process()),*).is_ok()
}
}
}
@@ -38,17 +30,13 @@ macro_rules! call_with_output_arg {
{
let mut output_data: Vec<u8> = Vec::new();
let new_instance = $instance.process();
match new_instance.$method(&mut output_data) {
Ok(()) => {
unsafe { *$output_arg = Buffer::from(&output_data[..]) };
std::mem::forget(output_data);
true
}
Err(err) => {
std::mem::forget(output_data);
eprintln!("execution error: {err}");
false
}
if new_instance.$method(&mut output_data).is_ok() {
unsafe { *$output_arg = Buffer::from(&output_data[..]) };
std::mem::forget(output_data);
true
} else {
std::mem::forget(output_data);
false
}
}
};
@@ -56,43 +44,13 @@ macro_rules! call_with_output_arg {
{
let mut output_data: Vec<u8> = Vec::new();
let new_instance = $instance.process();
match new_instance.$method($($arg.process()),*, &mut output_data) {
Ok(()) => {
unsafe { *$output_arg = Buffer::from(&output_data[..]) };
std::mem::forget(output_data);
true
}
Err(err) => {
std::mem::forget(output_data);
eprintln!("execution error: {err}");
false
}
}
}
};
}
// Macro to call methods with an arbitrary number of arguments,
// which are not implemented on a ctx RLN object
// The first argument is the method to call
// The second argument is the output buffer argument
// The remaining arguments are all other inputs to the method
macro_rules! no_ctx_call_with_output_arg {
($method:ident, $output_arg:expr, $( $arg:expr ),* ) => {
{
let mut output_data: Vec<u8> = Vec::new();
match $method($($arg.process()),*, &mut output_data) {
Ok(()) => {
unsafe { *$output_arg = Buffer::from(&output_data[..]) };
std::mem::forget(output_data);
true
}
Err(err) => {
std::mem::forget(output_data);
eprintln!("execution error: {err}");
false
}
if new_instance.$method($($arg.process()),*, &mut output_data).is_ok() {
unsafe { *$output_arg = Buffer::from(&output_data[..]) };
std::mem::forget(output_data);
true
} else {
std::mem::forget(output_data);
false
}
}
}
@@ -109,11 +67,8 @@ macro_rules! call_with_bool_arg {
{
let new_instance = $instance.process();
if match new_instance.$method($($arg.process()),*,) {
Ok(result) => result,
Err(err) => {
eprintln!("execution error: {err}");
return false
},
Ok(verified) => verified,
Err(_) => return false,
} {
unsafe { *$bool_arg = true };
} else {
@@ -143,24 +98,24 @@ impl ProcessArg for *const Buffer {
}
}
impl ProcessArg for *const RLN {
type ReturnType = &'static RLN;
impl<'a> ProcessArg for *const RLN<'a> {
type ReturnType = &'a RLN<'a>;
fn process(self) -> Self::ReturnType {
unsafe { &*self }
}
}
impl ProcessArg for *mut RLN {
type ReturnType = &'static mut RLN;
impl<'a> ProcessArg for *mut RLN<'a> {
type ReturnType = &'a mut RLN<'a>;
fn process(self) -> Self::ReturnType {
unsafe { &mut *self }
}
}
///// Buffer struct is taken from
///// <https://github.com/celo-org/celo-threshold-bls-rs/blob/master/crates/threshold-bls-ffi/src/ffi.rs>
/////
///// Also heavily inspired by <https://github.com/kilic/rln/blob/master/src/ffi.rs>
/// Buffer struct is taken from
/// <https://github.com/celo-org/celo-threshold-bls-rs/blob/master/crates/threshold-bls-ffi/src/ffi.rs>
///
/// Also heavily inspired by <https://github.com/kilic/rln/blob/master/src/ffi.rs>
#[repr(C)]
#[derive(Clone, Debug, PartialEq)]
@@ -192,82 +147,30 @@ impl<'a> From<&Buffer> for &'a [u8] {
////////////////////////////////////////////////////////
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[cfg(not(feature = "stateless"))]
#[no_mangle]
pub extern "C" fn new(tree_height: usize, input_buffer: *const Buffer, ctx: *mut *mut RLN) -> bool {
match RLN::new(tree_height, input_buffer.process()) {
Ok(rln) => {
unsafe { *ctx = Box::into_raw(Box::new(rln)) };
true
}
Err(err) => {
eprintln!("could not instantiate rln: {err}");
false
}
}
let rln = RLN::new(tree_height, input_buffer.process());
unsafe { *ctx = Box::into_raw(Box::new(rln)) };
true
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[cfg(feature = "stateless")]
#[no_mangle]
pub extern "C" fn new(ctx: *mut *mut RLN) -> bool {
match RLN::new() {
Ok(rln) => {
unsafe { *ctx = Box::into_raw(Box::new(rln)) };
true
}
Err(err) => {
eprintln!("could not instantiate rln: {err}");
false
}
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[cfg(not(feature = "stateless"))]
#[no_mangle]
pub extern "C" fn new_with_params(
tree_height: usize,
circom_buffer: *const Buffer,
zkey_buffer: *const Buffer,
vk_buffer: *const Buffer,
tree_config: *const Buffer,
ctx: *mut *mut RLN,
) -> bool {
match RLN::new_with_params(
let rln = RLN::new_with_params(
tree_height,
circom_buffer.process().to_vec(),
zkey_buffer.process().to_vec(),
vk_buffer.process().to_vec(),
tree_config.process(),
) {
Ok(rln) => {
unsafe { *ctx = Box::into_raw(Box::new(rln)) };
true
}
Err(err) => {
eprintln!("could not instantiate rln: {err}");
false
}
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[cfg(feature = "stateless")]
#[no_mangle]
pub extern "C" fn new_with_params(
zkey_buffer: *const Buffer,
vk_buffer: *const Buffer,
ctx: *mut *mut RLN,
) -> bool {
match RLN::new_with_params(zkey_buffer.process().to_vec(), vk_buffer.process().to_vec()) {
Ok(rln) => {
unsafe { *ctx = Box::into_raw(Box::new(rln)) };
true
}
Err(err) => {
eprintln!("could not instantiate rln: {err}");
false
}
}
);
unsafe { *ctx = Box::into_raw(Box::new(rln)) };
true
}
////////////////////////////////////////////////////////
@@ -275,49 +178,30 @@ pub extern "C" fn new_with_params(
////////////////////////////////////////////////////////
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn set_tree(ctx: *mut RLN, tree_height: usize) -> bool {
call!(ctx, set_tree, tree_height)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn delete_leaf(ctx: *mut RLN, index: usize) -> bool {
call!(ctx, delete_leaf, index)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn set_leaf(ctx: *mut RLN, index: usize, input_buffer: *const Buffer) -> bool {
call!(ctx, set_leaf, index, input_buffer)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn get_leaf(ctx: *mut RLN, index: usize, output_buffer: *mut Buffer) -> bool {
call_with_output_arg!(ctx, get_leaf, output_buffer, index)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn leaves_set(ctx: *mut RLN) -> usize {
ctx.process().leaves_set()
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn set_next_leaf(ctx: *mut RLN, input_buffer: *const Buffer) -> bool {
call!(ctx, set_next_leaf, input_buffer)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn set_leaves_from(
ctx: *mut RLN,
index: usize,
@@ -328,50 +212,18 @@ pub extern "C" fn set_leaves_from(
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn init_tree_with_leaves(ctx: *mut RLN, input_buffer: *const Buffer) -> bool {
call!(ctx, init_tree_with_leaves, input_buffer)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn atomic_operation(
ctx: *mut RLN,
index: usize,
leaves_buffer: *const Buffer,
indices_buffer: *const Buffer,
) -> bool {
call!(ctx, atomic_operation, index, leaves_buffer, indices_buffer)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn seq_atomic_operation(
ctx: *mut RLN,
leaves_buffer: *const Buffer,
indices_buffer: *const Buffer,
) -> bool {
call!(
ctx,
atomic_operation,
ctx.process().leaves_set(),
leaves_buffer,
indices_buffer
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn get_root(ctx: *const RLN, output_buffer: *mut Buffer) -> bool {
call_with_output_arg!(ctx, get_root, output_buffer)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn get_proof(ctx: *const RLN, index: usize, output_buffer: *mut Buffer) -> bool {
call_with_output_arg!(ctx, get_proof, output_buffer, index)
}
@@ -401,7 +253,6 @@ pub extern "C" fn verify(
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn generate_rln_proof(
ctx: *mut RLN,
input_buffer: *const Buffer,
@@ -412,22 +263,6 @@ pub extern "C" fn generate_rln_proof(
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn generate_rln_proof_with_witness(
ctx: *mut RLN,
input_buffer: *const Buffer,
output_buffer: *mut Buffer,
) -> bool {
call_with_output_arg!(
ctx,
generate_rln_proof_with_witness,
output_buffer,
input_buffer
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn verify_rln_proof(
ctx: *const RLN,
proof_buffer: *const Buffer,
@@ -505,39 +340,12 @@ pub extern "C" fn recover_id_secret(
)
}
////////////////////////////////////////////////////////
// Persistent metadata APIs
////////////////////////////////////////////////////////
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn set_metadata(ctx: *mut RLN, input_buffer: *const Buffer) -> bool {
call!(ctx, set_metadata, input_buffer)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn get_metadata(ctx: *const RLN, output_buffer: *mut Buffer) -> bool {
call_with_output_arg!(ctx, get_metadata, output_buffer)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn flush(ctx: *mut RLN) -> bool {
call!(ctx, flush)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn hash(input_buffer: *const Buffer, output_buffer: *mut Buffer) -> bool {
no_ctx_call_with_output_arg!(public_hash, output_buffer, input_buffer)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn poseidon_hash(input_buffer: *const Buffer, output_buffer: *mut Buffer) -> bool {
no_ctx_call_with_output_arg!(public_poseidon_hash, output_buffer, input_buffer)
pub extern "C" fn hash(
ctx: *mut RLN,
input_buffer: *const Buffer,
output_buffer: *mut Buffer,
) -> bool {
call_with_output_arg!(ctx, hash, output_buffer, input_buffer)
}
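// Illustrative only (not part of the diff): a hedged Rust-side sketch of the
// FFI contract above. The caller passes an out-pointer that receives the boxed
// RLN context, and every call reports success via its bool return value.
// `config_buffer` is a hypothetical caller-built Buffer.
//
// let mut ctx: *mut RLN = std::ptr::null_mut();
// assert!(new(20, &config_buffer as *const Buffer, &mut ctx));
// let mut root = Buffer::from(&[][..]);
// assert!(get_root(ctx, &mut root as *mut Buffer));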

View File

@@ -1,58 +0,0 @@
/// This crate instantiates the Poseidon hash algorithm.
use crate::{circuit::Fr, utils::bytes_le_to_fr};
use once_cell::sync::Lazy;
use tiny_keccak::{Hasher, Keccak};
use utils::poseidon::Poseidon;
/// These indexed constants hardcode the supported round parameter tuples (t, RF, RN, SKIP_MATRICES) for the Bn254 scalar field.
/// SKIP_MATRICES is the index of the randomly generated secure MDS matrix.
/// TODO: generate these parameters
pub const ROUND_PARAMS: [(usize, usize, usize, usize); 8] = [
(2, 8, 56, 0),
(3, 8, 57, 0),
(4, 8, 56, 0),
(5, 8, 60, 0),
(6, 8, 60, 0),
(7, 8, 63, 0),
(8, 8, 64, 0),
(9, 8, 63, 0),
];
/// Poseidon hash wrapper over the above implementation.
static POSEIDON: Lazy<Poseidon<Fr>> = Lazy::new(|| Poseidon::<Fr>::from(&ROUND_PARAMS));
pub fn poseidon_hash(input: &[Fr]) -> Fr {
POSEIDON
.hash(input.to_vec())
.expect("hash with fixed input size can't fail")
}
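// Illustrative only (not part of the diff): hashing two field elements with
// the wrapper above. The input length selects the round parameter row from
// ROUND_PARAMS (t = number of inputs + 1).
// let h = poseidon_hash(&[Fr::from(1u64), Fr::from(2u64)]);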
/// The zerokit RLN Merkle tree Hasher.
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct PoseidonHash;
/// The default Hasher trait used by Merkle tree implementation in utils.
impl utils::merkle_tree::Hasher for PoseidonHash {
type Fr = Fr;
fn default_leaf() -> Self::Fr {
Self::Fr::from(0)
}
fn hash(inputs: &[Self::Fr]) -> Self::Fr {
poseidon_hash(inputs)
}
}
/// Hashes arbitrary signal to the underlying prime field.
pub fn hash_to_field(signal: &[u8]) -> Fr {
// We hash the input signal using Keccak256
let mut hash = [0; 32];
let mut hasher = Keccak::v256();
hasher.update(signal);
hasher.finalize(&mut hash);
// We export the hash as a field element
let (el, _) = bytes_le_to_fr(hash.as_ref());
el
}

View File

@@ -1,73 +0,0 @@
// This file is based on the code by iden3. Its preimage can be found here:
// https://github.com/iden3/circom-witnesscalc/blob/5cb365b6e4d9052ecc69d4567fcf5bc061c20e94/src/lib.rs
pub mod graph;
pub mod proto;
pub mod storage;
use ark_bn254::Fr;
use graph::Node;
use num_bigint::BigInt;
use ruint::aliases::U256;
use std::collections::HashMap;
use storage::deserialize_witnesscalc_graph;
pub type InputSignalsInfo = HashMap<String, (usize, usize)>;
pub fn calc_witness<I: IntoIterator<Item = (String, Vec<BigInt>)>>(
inputs: I,
graph_data: &[u8],
) -> Vec<Fr> {
let inputs: HashMap<String, Vec<U256>> = inputs
.into_iter()
.map(|(key, value)| (key, value.iter().map(|v| U256::from(v)).collect()))
.collect();
let (nodes, signals, input_mapping): (Vec<Node>, Vec<usize>, InputSignalsInfo) =
deserialize_witnesscalc_graph(std::io::Cursor::new(graph_data)).unwrap();
let mut inputs_buffer = get_inputs_buffer(get_inputs_size(&nodes));
populate_inputs(&inputs, &input_mapping, &mut inputs_buffer);
graph::evaluate(&nodes, inputs_buffer.as_slice(), &signals)
}
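// Illustrative only (not part of the diff): a hedged sketch of driving the
// calculator above. `graph_bytes` and the "identity" signal name are
// hypothetical; real signal names and shapes come from the circuit.
fn calc_witness_sketch(graph_bytes: &[u8]) -> Vec<Fr> {
    let inputs = vec![("identity".to_string(), vec![BigInt::from(42u64)])];
    calc_witness(inputs, graph_bytes)
}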
fn get_inputs_size(nodes: &[Node]) -> usize {
let mut start = false;
let mut max_index = 0usize;
for &node in nodes.iter() {
if let Node::Input(i) = node {
if i > max_index {
max_index = i;
}
start = true
} else if start {
break;
}
}
max_index + 1
}
fn populate_inputs(
input_list: &HashMap<String, Vec<U256>>,
inputs_info: &InputSignalsInfo,
input_buffer: &mut [U256],
) {
for (key, value) in input_list {
let (offset, len) = inputs_info[key];
if len != value.len() {
panic!("Invalid input length for {}", key);
}
for (i, v) in value.iter().enumerate() {
input_buffer[offset + i] = *v;
}
}
}
/// Allocates inputs vec with position 0 set to 1
fn get_inputs_buffer(size: usize) -> Vec<U256> {
let mut inputs = vec![U256::ZERO; size];
inputs[0] = U256::from(1);
inputs
}

View File

@@ -1,947 +0,0 @@
// This file is based on the code by iden3. Its preimage can be found here:
// https://github.com/iden3/circom-witnesscalc/blob/5cb365b6e4d9052ecc69d4567fcf5bc061c20e94/src/graph.rs
use crate::iden3calc::proto;
use ark_bn254::Fr;
use ark_ff::{BigInt, BigInteger, One, PrimeField, Zero};
use rand::Rng;
use ruint::aliases::U256;
use serde::{Deserialize, Serialize};
use std::cmp::Ordering;
use std::error::Error;
use std::ops::{BitOr, BitXor, Deref};
use std::{
collections::HashMap,
ops::{BitAnd, Shl, Shr},
};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress, Validate};
use ruint::uint;
pub const M: U256 =
uint!(21888242871839275222246405745257275088548364400416034343698204186575808495617_U256);
fn ark_se<S, A: CanonicalSerialize>(a: &A, s: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
let mut bytes = vec![];
a.serialize_with_mode(&mut bytes, Compress::Yes)
.map_err(serde::ser::Error::custom)?;
s.serialize_bytes(&bytes)
}
fn ark_de<'de, D, A: CanonicalDeserialize>(data: D) -> Result<A, D::Error>
where
D: serde::de::Deserializer<'de>,
{
let s: Vec<u8> = serde::de::Deserialize::deserialize(data)?;
let a = A::deserialize_with_mode(s.as_slice(), Compress::Yes, Validate::Yes);
a.map_err(serde::de::Error::custom)
}
#[derive(Hash, PartialEq, Eq, Debug, Clone, Copy, Serialize, Deserialize)]
pub enum Operation {
Mul,
Div,
Add,
Sub,
Pow,
Idiv,
Mod,
Eq,
Neq,
Lt,
Gt,
Leq,
Geq,
Land,
Lor,
Shl,
Shr,
Bor,
Band,
Bxor,
}
impl Operation {
// TODO: rewrite to &U256 type
pub fn eval(&self, a: U256, b: U256) -> U256 {
use Operation::*;
match self {
Mul => a.mul_mod(b, M),
Div => {
if b == U256::ZERO {
                    // as we are simulating a circuit execution with all
                    // signal values equal to 0, just return 0 here in case
                    // of division by zero
U256::ZERO
} else {
a.mul_mod(b.inv_mod(M).unwrap(), M)
}
}
Add => a.add_mod(b, M),
Sub => a.add_mod(M - b, M),
Pow => a.pow_mod(b, M),
Mod => a.div_rem(b).1,
Eq => U256::from(a == b),
Neq => U256::from(a != b),
Lt => u_lt(&a, &b),
Gt => u_gt(&a, &b),
Leq => u_lte(&a, &b),
Geq => u_gte(&a, &b),
Land => U256::from(a != U256::ZERO && b != U256::ZERO),
Lor => U256::from(a != U256::ZERO || b != U256::ZERO),
Shl => compute_shl_uint(a, b),
Shr => compute_shr_uint(a, b),
            // TODO: test the corner case where the result can be
            // bigger than the modulus
Bor => a.bitor(b),
Band => a.bitand(b),
            // TODO: test the corner case where the result can be
            // bigger than the modulus
Bxor => a.bitxor(b),
Idiv => a / b,
}
}
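    // Worked example (illustrative): Sub with a = 1, b = 2 evaluates
    // 1 + (M - 2) mod M = M - 1, i.e. the field representation of -1.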
pub fn eval_fr(&self, a: Fr, b: Fr) -> Fr {
use Operation::*;
match self {
Mul => a * b,
            // Circuit execution must always return a value, so in case of
            // division by 0 we return 0; the resulting proof will simply be
            // invalid in the end.
Div => {
if b.is_zero() {
Fr::zero()
} else {
a / b
}
}
Add => a + b,
Sub => a - b,
Idiv => {
if b.is_zero() {
Fr::zero()
} else {
Fr::new((Into::<U256>::into(a) / Into::<U256>::into(b)).into())
}
}
Mod => {
if b.is_zero() {
Fr::zero()
} else {
Fr::new((Into::<U256>::into(a) % Into::<U256>::into(b)).into())
}
}
Eq => match a.cmp(&b) {
Ordering::Equal => Fr::one(),
_ => Fr::zero(),
},
Neq => match a.cmp(&b) {
Ordering::Equal => Fr::zero(),
_ => Fr::one(),
},
Lt => Fr::new(u_lt(&a.into(), &b.into()).into()),
Gt => Fr::new(u_gt(&a.into(), &b.into()).into()),
Leq => Fr::new(u_lte(&a.into(), &b.into()).into()),
Geq => Fr::new(u_gte(&a.into(), &b.into()).into()),
Land => {
if a.is_zero() || b.is_zero() {
Fr::zero()
} else {
Fr::one()
}
}
Lor => {
if a.is_zero() && b.is_zero() {
Fr::zero()
} else {
Fr::one()
}
}
Shl => shl(a, b),
Shr => shr(a, b),
Bor => bit_or(a, b),
Band => bit_and(a, b),
Bxor => bit_xor(a, b),
// TODO implement other operators
_ => unimplemented!("operator {:?} not implemented for Montgomery", self),
}
}
}
impl From<&Operation> for proto::DuoOp {
fn from(v: &Operation) -> Self {
match v {
Operation::Mul => proto::DuoOp::Mul,
Operation::Div => proto::DuoOp::Div,
Operation::Add => proto::DuoOp::Add,
Operation::Sub => proto::DuoOp::Sub,
Operation::Pow => proto::DuoOp::Pow,
Operation::Idiv => proto::DuoOp::Idiv,
Operation::Mod => proto::DuoOp::Mod,
Operation::Eq => proto::DuoOp::Eq,
Operation::Neq => proto::DuoOp::Neq,
Operation::Lt => proto::DuoOp::Lt,
Operation::Gt => proto::DuoOp::Gt,
Operation::Leq => proto::DuoOp::Leq,
Operation::Geq => proto::DuoOp::Geq,
Operation::Land => proto::DuoOp::Land,
Operation::Lor => proto::DuoOp::Lor,
Operation::Shl => proto::DuoOp::Shl,
Operation::Shr => proto::DuoOp::Shr,
Operation::Bor => proto::DuoOp::Bor,
Operation::Band => proto::DuoOp::Band,
Operation::Bxor => proto::DuoOp::Bxor,
}
}
}
#[derive(Hash, PartialEq, Eq, Debug, Clone, Copy, Serialize, Deserialize)]
pub enum UnoOperation {
Neg,
Id, // identity - just return self
}
impl UnoOperation {
pub fn eval(&self, a: U256) -> U256 {
match self {
UnoOperation::Neg => {
if a == U256::ZERO {
U256::ZERO
} else {
M - a
}
}
UnoOperation::Id => a,
}
}
pub fn eval_fr(&self, a: Fr) -> Fr {
match self {
UnoOperation::Neg => {
if a.is_zero() {
Fr::zero()
} else {
let mut x = Fr::MODULUS;
x.sub_with_borrow(&a.into_bigint());
Fr::from_bigint(x).unwrap()
}
}
_ => unimplemented!("uno operator {:?} not implemented for Montgomery", self),
}
}
}
impl From<&UnoOperation> for proto::UnoOp {
fn from(v: &UnoOperation) -> Self {
match v {
UnoOperation::Neg => proto::UnoOp::Neg,
UnoOperation::Id => proto::UnoOp::Id,
}
}
}
#[derive(Hash, PartialEq, Eq, Debug, Clone, Copy, Serialize, Deserialize)]
pub enum TresOperation {
TernCond,
}
impl TresOperation {
pub fn eval(&self, a: U256, b: U256, c: U256) -> U256 {
match self {
TresOperation::TernCond => {
if a == U256::ZERO {
c
} else {
b
}
}
}
}
pub fn eval_fr(&self, a: Fr, b: Fr, c: Fr) -> Fr {
match self {
TresOperation::TernCond => {
if a.is_zero() {
c
} else {
b
}
}
}
}
}
impl From<&TresOperation> for proto::TresOp {
fn from(v: &TresOperation) -> Self {
match v {
TresOperation::TernCond => proto::TresOp::TernCond,
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum Node {
Input(usize),
Constant(U256),
#[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
MontConstant(Fr),
UnoOp(UnoOperation, usize),
Op(Operation, usize, usize),
TresOp(TresOperation, usize, usize, usize),
}
// TODO remove pub from Vec<Node>
#[derive(Default)]
pub struct Nodes(pub Vec<Node>);
impl Nodes {
pub fn new() -> Self {
Nodes(Vec::new())
}
pub fn to_const(&self, idx: NodeIdx) -> Result<U256, NodeConstErr> {
let me = self.0.get(idx.0).ok_or(NodeConstErr::EmptyNode(idx))?;
match me {
Node::Constant(v) => Ok(*v),
Node::UnoOp(op, a) => Ok(op.eval(self.to_const(NodeIdx(*a))?)),
Node::Op(op, a, b) => {
Ok(op.eval(self.to_const(NodeIdx(*a))?, self.to_const(NodeIdx(*b))?))
}
Node::TresOp(op, a, b, c) => Ok(op.eval(
self.to_const(NodeIdx(*a))?,
self.to_const(NodeIdx(*b))?,
self.to_const(NodeIdx(*c))?,
)),
Node::Input(_) => Err(NodeConstErr::InputSignal),
Node::MontConstant(_) => {
panic!("MontConstant should not be used here")
}
}
}
pub fn push(&mut self, n: Node) -> NodeIdx {
self.0.push(n);
NodeIdx(self.0.len() - 1)
}
pub fn get(&self, idx: NodeIdx) -> Option<&Node> {
self.0.get(idx.0)
}
}
impl Deref for Nodes {
type Target = Vec<Node>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[derive(Debug, Copy, Clone)]
pub struct NodeIdx(pub usize);
impl From<usize> for NodeIdx {
fn from(v: usize) -> Self {
NodeIdx(v)
}
}
#[derive(Debug)]
pub enum NodeConstErr {
EmptyNode(NodeIdx),
InputSignal,
}
impl std::fmt::Display for NodeConstErr {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
NodeConstErr::EmptyNode(idx) => {
write!(f, "empty node at index {}", idx.0)
}
NodeConstErr::InputSignal => {
write!(f, "input signal is not a constant")
}
}
}
}
impl Error for NodeConstErr {}
fn compute_shl_uint(a: U256, b: U256) -> U256 {
debug_assert!(b.lt(&U256::from(256)));
let ls_limb = b.as_limbs()[0];
a.shl(ls_limb as usize)
}
fn compute_shr_uint(a: U256, b: U256) -> U256 {
debug_assert!(b.lt(&U256::from(256)));
let ls_limb = b.as_limbs()[0];
a.shr(ls_limb as usize)
}
/// All references must be backwards.
fn assert_valid(nodes: &[Node]) {
for (i, &node) in nodes.iter().enumerate() {
if let Node::Op(_, a, b) = node {
assert!(a < i);
assert!(b < i);
} else if let Node::UnoOp(_, a) = node {
assert!(a < i);
} else if let Node::TresOp(_, a, b, c) = node {
assert!(a < i);
assert!(b < i);
assert!(c < i);
}
}
}
pub fn optimize(nodes: &mut Vec<Node>, outputs: &mut [usize]) {
tree_shake(nodes, outputs);
propagate(nodes);
value_numbering(nodes, outputs);
constants(nodes);
tree_shake(nodes, outputs);
montgomery_form(nodes);
}
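// Illustrative only (not part of the diff): a hedged sketch of the pipeline
// above on a tiny graph. Constant(2) * Constant(3) folds into a single
// Montgomery-form constant, and the output index is renumbered accordingly.
// let mut nodes = vec![
//     Node::Constant(uint!(2_U256)),
//     Node::Constant(uint!(3_U256)),
//     Node::Op(Operation::Mul, 0, 1),
// ];
// let mut outputs = [2usize];
// optimize(&mut nodes, &mut outputs);
// assert!(matches!(nodes[outputs[0]], Node::MontConstant(_)));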
pub fn evaluate(nodes: &[Node], inputs: &[U256], outputs: &[usize]) -> Vec<Fr> {
// assert_valid(nodes);
// Evaluate the graph.
let mut values = Vec::with_capacity(nodes.len());
for &node in nodes.iter() {
let value = match node {
Node::Constant(c) => Fr::new(c.into()),
Node::MontConstant(c) => c,
Node::Input(i) => Fr::new(inputs[i].into()),
Node::Op(op, a, b) => op.eval_fr(values[a], values[b]),
Node::UnoOp(op, a) => op.eval_fr(values[a]),
Node::TresOp(op, a, b, c) => op.eval_fr(values[a], values[b], values[c]),
};
values.push(value);
}
// Convert from Montgomery form and return the outputs.
let mut out = vec![Fr::from(0); outputs.len()];
for i in 0..outputs.len() {
out[i] = values[outputs[i]];
}
out
}
/// Constant propagation
pub fn propagate(nodes: &mut [Node]) {
assert_valid(nodes);
for i in 0..nodes.len() {
if let Node::Op(op, a, b) = nodes[i] {
if let (Node::Constant(va), Node::Constant(vb)) = (nodes[a], nodes[b]) {
nodes[i] = Node::Constant(op.eval(va, vb));
} else if a == b {
// Not constant but equal
use Operation::*;
if let Some(c) = match op {
Eq | Leq | Geq => Some(true),
Neq | Lt | Gt => Some(false),
_ => None,
} {
nodes[i] = Node::Constant(U256::from(c));
}
}
} else if let Node::UnoOp(op, a) = nodes[i] {
if let Node::Constant(va) = nodes[a] {
nodes[i] = Node::Constant(op.eval(va));
}
} else if let Node::TresOp(op, a, b, c) = nodes[i] {
if let (Node::Constant(va), Node::Constant(vb), Node::Constant(vc)) =
(nodes[a], nodes[b], nodes[c])
{
nodes[i] = Node::Constant(op.eval(va, vb, vc));
}
}
}
}
/// Remove unused nodes
pub fn tree_shake(nodes: &mut Vec<Node>, outputs: &mut [usize]) {
assert_valid(nodes);
// Mark all nodes that are used.
let mut used = vec![false; nodes.len()];
for &i in outputs.iter() {
used[i] = true;
}
// Work backwards from end as all references are backwards.
for i in (0..nodes.len()).rev() {
if used[i] {
if let Node::Op(_, a, b) = nodes[i] {
used[a] = true;
used[b] = true;
}
if let Node::UnoOp(_, a) = nodes[i] {
used[a] = true;
}
if let Node::TresOp(_, a, b, c) = nodes[i] {
used[a] = true;
used[b] = true;
used[c] = true;
}
}
}
// Remove unused nodes
let n = nodes.len();
let mut retain = used.iter();
nodes.retain(|_| *retain.next().unwrap());
// Renumber references.
let mut renumber = vec![None; n];
let mut index = 0;
for (i, &used) in used.iter().enumerate() {
if used {
renumber[i] = Some(index);
index += 1;
}
}
assert_eq!(index, nodes.len());
for (&used, renumber) in used.iter().zip(renumber.iter()) {
assert_eq!(used, renumber.is_some());
}
// Renumber references.
for node in nodes.iter_mut() {
if let Node::Op(_, a, b) = node {
*a = renumber[*a].unwrap();
*b = renumber[*b].unwrap();
}
if let Node::UnoOp(_, a) = node {
*a = renumber[*a].unwrap();
}
if let Node::TresOp(_, a, b, c) = node {
*a = renumber[*a].unwrap();
*b = renumber[*b].unwrap();
*c = renumber[*c].unwrap();
}
}
for output in outputs.iter_mut() {
*output = renumber[*output].unwrap();
}
}
/// Randomly evaluate the graph
fn random_eval(nodes: &mut [Node]) -> Vec<U256> {
let mut rng = rand::thread_rng();
let mut values = Vec::with_capacity(nodes.len());
let mut inputs = HashMap::new();
let mut prfs = HashMap::new();
let mut prfs_uno = HashMap::new();
let mut prfs_tres = HashMap::new();
for node in nodes.iter() {
use Operation::*;
let value = match node {
// Constants evaluate to themselves
Node::Constant(c) => *c,
Node::MontConstant(_) => unimplemented!("should not be used"),
// Algebraic Ops are evaluated directly
            // Since the field is large, by Schwartz-Zippel, if two values
            // are the same then they are likely algebraically equal.
Node::Op(op @ (Add | Sub | Mul), a, b) => op.eval(values[*a], values[*b]),
// Input and non-algebraic ops are random functions
// TODO: https://github.com/recmo/uint/issues/95 and use .gen_range(..M)
Node::Input(i) => *inputs.entry(*i).or_insert_with(|| rng.gen::<U256>() % M),
Node::Op(op, a, b) => *prfs
.entry((*op, values[*a], values[*b]))
.or_insert_with(|| rng.gen::<U256>() % M),
Node::UnoOp(op, a) => *prfs_uno
.entry((*op, values[*a]))
.or_insert_with(|| rng.gen::<U256>() % M),
Node::TresOp(op, a, b, c) => *prfs_tres
.entry((*op, values[*a], values[*b], values[*c]))
.or_insert_with(|| rng.gen::<U256>() % M),
};
values.push(value);
}
values
}
/// Value numbering
pub fn value_numbering(nodes: &mut [Node], outputs: &mut [usize]) {
assert_valid(nodes);
// Evaluate the graph in random field elements.
let values = random_eval(nodes);
// Find all nodes with the same value.
let mut value_map = HashMap::new();
for (i, &value) in values.iter().enumerate() {
value_map.entry(value).or_insert_with(Vec::new).push(i);
}
// For nodes that are the same, pick the first index.
let renumber: Vec<_> = values.into_iter().map(|v| value_map[&v][0]).collect();
// Renumber references.
for node in nodes.iter_mut() {
if let Node::Op(_, a, b) = node {
*a = renumber[*a];
*b = renumber[*b];
}
if let Node::UnoOp(_, a) = node {
*a = renumber[*a];
}
if let Node::TresOp(_, a, b, c) = node {
*a = renumber[*a];
*b = renumber[*b];
*c = renumber[*c];
}
}
for output in outputs.iter_mut() {
*output = renumber[*output];
}
}
/// Probabilistic constant determination
pub fn constants(nodes: &mut [Node]) {
assert_valid(nodes);
// Evaluate the graph in random field elements.
let values_a = random_eval(nodes);
let values_b = random_eval(nodes);
// Find all nodes with the same value.
for i in 0..nodes.len() {
if let Node::Constant(_) = nodes[i] {
continue;
}
if values_a[i] == values_b[i] {
nodes[i] = Node::Constant(values_a[i]);
}
}
}
/// Convert to Montgomery form
pub fn montgomery_form(nodes: &mut [Node]) {
for node in nodes.iter_mut() {
use Node::*;
use Operation::*;
match node {
Constant(c) => *node = MontConstant(Fr::new((*c).into())),
MontConstant(..) => (),
Input(..) => (),
Op(
Mul | Div | Add | Sub | Idiv | Mod | Eq | Neq | Lt | Gt | Leq | Geq | Land | Lor
| Shl | Shr | Bor | Band | Bxor,
..,
) => (),
Op(op @ Pow, ..) => unimplemented!("Operators Montgomery form: {:?}", op),
UnoOp(UnoOperation::Neg, ..) => (),
UnoOp(op, ..) => unimplemented!("Uno Operators Montgomery form: {:?}", op),
TresOp(TresOperation::TernCond, ..) => (),
}
}
}
fn shl(a: Fr, b: Fr) -> Fr {
if b.is_zero() {
return a;
}
if b.cmp(&Fr::from(Fr::MODULUS_BIT_SIZE)).is_ge() {
return Fr::zero();
}
let n = b.into_bigint().0[0] as u32;
let mut a = a.into_bigint();
a.muln(n);
Fr::from_bigint(a).unwrap()
}
fn shr(a: Fr, b: Fr) -> Fr {
if b.is_zero() {
return a;
}
match b.cmp(&Fr::from(254u64)) {
Ordering::Equal => return Fr::zero(),
Ordering::Greater => return Fr::zero(),
_ => (),
};
let mut n = b.into_bigint().to_bytes_le()[0];
let mut result = a.into_bigint();
let c = result.as_mut();
while n >= 64 {
for i in 0..3 {
c[i as usize] = c[(i + 1) as usize];
}
c[3] = 0;
n -= 64;
}
if n == 0 {
return Fr::from_bigint(result).unwrap();
}
let mask: u64 = (1 << n) - 1;
let mut carrier: u64 = c[3] & mask;
c[3] >>= n;
for i in (0..3).rev() {
let new_carrier = c[i] & mask;
c[i] = (c[i] >> n) | (carrier << (64 - n));
carrier = new_carrier;
}
Fr::from_bigint(result).unwrap()
}
fn bit_and(a: Fr, b: Fr) -> Fr {
let a = a.into_bigint();
let b = b.into_bigint();
let c: [u64; 4] = [
a.0[0] & b.0[0],
a.0[1] & b.0[1],
a.0[2] & b.0[2],
a.0[3] & b.0[3],
];
let mut d: BigInt<4> = BigInt::new(c);
if d > Fr::MODULUS {
d.sub_with_borrow(&Fr::MODULUS);
}
Fr::from_bigint(d).unwrap()
}
fn bit_or(a: Fr, b: Fr) -> Fr {
let a = a.into_bigint();
let b = b.into_bigint();
let c: [u64; 4] = [
a.0[0] | b.0[0],
a.0[1] | b.0[1],
a.0[2] | b.0[2],
a.0[3] | b.0[3],
];
let mut d: BigInt<4> = BigInt::new(c);
if d > Fr::MODULUS {
d.sub_with_borrow(&Fr::MODULUS);
}
Fr::from_bigint(d).unwrap()
}
fn bit_xor(a: Fr, b: Fr) -> Fr {
let a = a.into_bigint();
let b = b.into_bigint();
let c: [u64; 4] = [
a.0[0] ^ b.0[0],
a.0[1] ^ b.0[1],
a.0[2] ^ b.0[2],
a.0[3] ^ b.0[3],
];
let mut d: BigInt<4> = BigInt::new(c);
if d > Fr::MODULUS {
d.sub_with_borrow(&Fr::MODULUS);
}
Fr::from_bigint(d).unwrap()
}
// M / 2
const HALF_M: U256 =
uint!(10944121435919637611123202872628637544274182200208017171849102093287904247808_U256);
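// Values above HALF_M are treated as negative (x stands for x - M), matching
// circom's signed comparison semantics: the helpers below branch on the signs
// first, so e.g. M - 1 (i.e. -1) compares as less than 3.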
fn u_gte(a: &U256, b: &U256) -> U256 {
let a_neg = &HALF_M < a;
let b_neg = &HALF_M < b;
match (a_neg, b_neg) {
(false, false) => U256::from(a >= b),
(true, false) => uint!(0_U256),
(false, true) => uint!(1_U256),
(true, true) => U256::from(a >= b),
}
}
fn u_lte(a: &U256, b: &U256) -> U256 {
let a_neg = &HALF_M < a;
let b_neg = &HALF_M < b;
match (a_neg, b_neg) {
(false, false) => U256::from(a <= b),
(true, false) => uint!(1_U256),
(false, true) => uint!(0_U256),
(true, true) => U256::from(a <= b),
}
}
fn u_gt(a: &U256, b: &U256) -> U256 {
let a_neg = &HALF_M < a;
let b_neg = &HALF_M < b;
match (a_neg, b_neg) {
(false, false) => U256::from(a > b),
(true, false) => uint!(0_U256),
(false, true) => uint!(1_U256),
(true, true) => U256::from(a > b),
}
}
fn u_lt(a: &U256, b: &U256) -> U256 {
let a_neg = &HALF_M < a;
let b_neg = &HALF_M < b;
match (a_neg, b_neg) {
(false, false) => U256::from(a < b),
(true, false) => uint!(1_U256),
(false, true) => uint!(0_U256),
(true, true) => U256::from(a < b),
}
}
#[cfg(test)]
mod tests {
use super::*;
use ruint::uint;
use std::ops::Div;
use std::str::FromStr;
#[test]
fn test_ok() {
let a = Fr::from(4u64);
let b = Fr::from(2u64);
let c = shl(a, b);
assert_eq!(c.cmp(&Fr::from(16u64)), Ordering::Equal)
}
#[test]
fn test_div() {
assert_eq!(
Operation::Div.eval_fr(Fr::from(2u64), Fr::from(3u64)),
Fr::from_str(
"7296080957279758407415468581752425029516121466805344781232734728858602831873"
)
.unwrap()
);
assert_eq!(
Operation::Div.eval_fr(Fr::from(6u64), Fr::from(2u64)),
Fr::from_str("3").unwrap()
);
assert_eq!(
Operation::Div.eval_fr(Fr::from(7u64), Fr::from(2u64)),
Fr::from_str(
"10944121435919637611123202872628637544274182200208017171849102093287904247812"
)
.unwrap()
);
}
#[test]
fn test_idiv() {
assert_eq!(
Operation::Idiv.eval_fr(Fr::from(2u64), Fr::from(3u64)),
Fr::from_str("0").unwrap()
);
assert_eq!(
Operation::Idiv.eval_fr(Fr::from(6u64), Fr::from(2u64)),
Fr::from_str("3").unwrap()
);
assert_eq!(
Operation::Idiv.eval_fr(Fr::from(7u64), Fr::from(2u64)),
Fr::from_str("3").unwrap()
);
}
#[test]
fn test_fr_mod() {
assert_eq!(
Operation::Mod.eval_fr(Fr::from(7u64), Fr::from(2u64)),
Fr::from_str("1").unwrap()
);
assert_eq!(
Operation::Mod.eval_fr(Fr::from(7u64), Fr::from(9u64)),
Fr::from_str("7").unwrap()
);
}
#[test]
fn test_u_gte() {
let result = u_gte(&uint!(10_U256), &uint!(3_U256));
assert_eq!(result, uint!(1_U256));
let result = u_gte(&uint!(3_U256), &uint!(3_U256));
assert_eq!(result, uint!(1_U256));
let result = u_gte(&uint!(2_U256), &uint!(3_U256));
assert_eq!(result, uint!(0_U256));
// -1 >= 3 => 0
let result = u_gte(
&uint!(
21888242871839275222246405745257275088548364400416034343698204186575808495616_U256
),
&uint!(3_U256),
);
assert_eq!(result, uint!(0_U256));
// -1 >= -2 => 1
let result = u_gte(
&uint!(
21888242871839275222246405745257275088548364400416034343698204186575808495616_U256
),
&uint!(
21888242871839275222246405745257275088548364400416034343698204186575808495615_U256
),
);
assert_eq!(result, uint!(1_U256));
// -2 >= -1 => 0
let result = u_gte(
&uint!(
21888242871839275222246405745257275088548364400416034343698204186575808495615_U256
),
&uint!(
21888242871839275222246405745257275088548364400416034343698204186575808495616_U256
),
);
assert_eq!(result, uint!(0_U256));
// -2 == -2 => 1
let result = u_gte(
&uint!(
21888242871839275222246405745257275088548364400416034343698204186575808495615_U256
),
&uint!(
21888242871839275222246405745257275088548364400416034343698204186575808495615_U256
),
);
assert_eq!(result, uint!(1_U256));
}
#[test]
fn test_x() {
let x = M.div(uint!(2_U256));
println!("x: {:?}", x.as_limbs());
println!("x: {}", M);
}
#[test]
fn test_2() {
let nodes: Vec<Node> = vec![];
// let node = nodes[0];
let node = nodes.get(0);
println!("{:?}", node);
}
}

View File

@@ -1,117 +0,0 @@
// This file has been generated by prost-build during compilation of the code by iden3
// and modified manually. The *.proto file used to generate it can be found here:
// https://github.com/iden3/circom-witnesscalc/blob/5cb365b6e4d9052ecc69d4567fcf5bc061c20e94/protos/messages.proto
use std::collections::HashMap;
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct BigUInt {
#[prost(bytes = "vec", tag = "1")]
pub value_le: Vec<u8>,
}
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
pub struct InputNode {
#[prost(uint32, tag = "1")]
pub idx: u32,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ConstantNode {
#[prost(message, optional, tag = "1")]
pub value: Option<BigUInt>,
}
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
pub struct UnoOpNode {
#[prost(enumeration = "UnoOp", tag = "1")]
pub op: i32,
#[prost(uint32, tag = "2")]
pub a_idx: u32,
}
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
pub struct DuoOpNode {
#[prost(enumeration = "DuoOp", tag = "1")]
pub op: i32,
#[prost(uint32, tag = "2")]
pub a_idx: u32,
#[prost(uint32, tag = "3")]
pub b_idx: u32,
}
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
pub struct TresOpNode {
#[prost(enumeration = "TresOp", tag = "1")]
pub op: i32,
#[prost(uint32, tag = "2")]
pub a_idx: u32,
#[prost(uint32, tag = "3")]
pub b_idx: u32,
#[prost(uint32, tag = "4")]
pub c_idx: u32,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Node {
#[prost(oneof = "node::Node", tags = "1, 2, 3, 4, 5")]
pub node: Option<node::Node>,
}
/// Nested message and enum types in `Node`.
pub mod node {
#[derive(Clone, PartialEq, ::prost::Oneof)]
pub enum Node {
#[prost(message, tag = "1")]
Input(super::InputNode),
#[prost(message, tag = "2")]
Constant(super::ConstantNode),
#[prost(message, tag = "3")]
UnoOp(super::UnoOpNode),
#[prost(message, tag = "4")]
DuoOp(super::DuoOpNode),
#[prost(message, tag = "5")]
TresOp(super::TresOpNode),
}
}
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
pub struct SignalDescription {
#[prost(uint32, tag = "1")]
pub offset: u32,
#[prost(uint32, tag = "2")]
pub len: u32,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GraphMetadata {
#[prost(uint32, repeated, tag = "1")]
pub witness_signals: Vec<u32>,
#[prost(map = "string, message", tag = "2")]
pub inputs: HashMap<String, SignalDescription>,
}
#[derive(Clone, Copy, Debug, PartialEq, ::prost::Enumeration)]
pub enum DuoOp {
Mul = 0,
Div = 1,
Add = 2,
Sub = 3,
Pow = 4,
Idiv = 5,
Mod = 6,
Eq = 7,
Neq = 8,
Lt = 9,
Gt = 10,
Leq = 11,
Geq = 12,
Land = 13,
Lor = 14,
Shl = 15,
Shr = 16,
Bor = 17,
Band = 18,
Bxor = 19,
}
#[derive(Clone, Copy, Debug, PartialEq, ::prost::Enumeration)]
pub enum UnoOp {
Neg = 0,
Id = 1,
}
#[derive(Clone, Copy, Debug, PartialEq, ::prost::Enumeration)]
pub enum TresOp {
TernCond = 0,
}

View File

@@ -1,496 +0,0 @@
// This file is based on the code by iden3. Its preimage can be found here:
// https://github.com/iden3/circom-witnesscalc/blob/5cb365b6e4d9052ecc69d4567fcf5bc061c20e94/src/storage.rs
use crate::iden3calc::{
graph,
graph::{Operation, TresOperation, UnoOperation},
proto, InputSignalsInfo,
};
use ark_bn254::Fr;
use ark_ff::PrimeField;
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use prost::Message;
use std::io::{Read, Write};
// format of the wtns.graph file:
// + magic line: wtns.graph.001
// + 8 bytes unsigned LE 64-bit integer: number of nodes
// + series of protobuf serialized nodes. Each node prefixed by varint length
// + protobuf serialized GraphMetadata
// + 8 bytes unsigned LE 64-bit integer: offset of GraphMetadata message
const WITNESSCALC_GRAPH_MAGIC: &[u8] = b"wtns.graph.001";
const MAX_VARINT_LENGTH: usize = 10;
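// Illustrative layout (not part of the diff), restating the comment above for
// a graph with N nodes:
//   "wtns.graph.001" | u64 LE: N | N length-delimited Node messages |
//   length-delimited GraphMetadata | u64 LE: offset of GraphMetadata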
impl From<proto::Node> for graph::Node {
fn from(value: proto::Node) -> Self {
match value.node.unwrap() {
proto::node::Node::Input(input_node) => graph::Node::Input(input_node.idx as usize),
proto::node::Node::Constant(constant_node) => {
let i = constant_node.value.unwrap();
graph::Node::MontConstant(Fr::from_le_bytes_mod_order(i.value_le.as_slice()))
}
proto::node::Node::UnoOp(uno_op_node) => {
let op = proto::UnoOp::try_from(uno_op_node.op).unwrap();
graph::Node::UnoOp(op.into(), uno_op_node.a_idx as usize)
}
proto::node::Node::DuoOp(duo_op_node) => {
let op = proto::DuoOp::try_from(duo_op_node.op).unwrap();
graph::Node::Op(
op.into(),
duo_op_node.a_idx as usize,
duo_op_node.b_idx as usize,
)
}
proto::node::Node::TresOp(tres_op_node) => {
let op = proto::TresOp::try_from(tres_op_node.op).unwrap();
graph::Node::TresOp(
op.into(),
tres_op_node.a_idx as usize,
tres_op_node.b_idx as usize,
tres_op_node.c_idx as usize,
)
}
}
}
}
impl From<&graph::Node> for proto::node::Node {
fn from(node: &graph::Node) -> Self {
match node {
graph::Node::Input(i) => proto::node::Node::Input(proto::InputNode { idx: *i as u32 }),
graph::Node::Constant(_) => {
panic!("We are not supposed to write Constant to the witnesscalc graph. All Constant should be converted to MontConstant.");
}
graph::Node::UnoOp(op, a) => {
let op = proto::UnoOp::from(op);
proto::node::Node::UnoOp(proto::UnoOpNode {
op: op as i32,
a_idx: *a as u32,
})
}
graph::Node::Op(op, a, b) => proto::node::Node::DuoOp(proto::DuoOpNode {
op: proto::DuoOp::from(op) as i32,
a_idx: *a as u32,
b_idx: *b as u32,
}),
graph::Node::TresOp(op, a, b, c) => proto::node::Node::TresOp(proto::TresOpNode {
op: proto::TresOp::from(op) as i32,
a_idx: *a as u32,
b_idx: *b as u32,
c_idx: *c as u32,
}),
graph::Node::MontConstant(c) => {
let bi = Into::<num_bigint::BigUint>::into(*c);
let i = proto::BigUInt {
value_le: bi.to_bytes_le(),
};
proto::node::Node::Constant(proto::ConstantNode { value: Some(i) })
}
}
}
}
impl From<proto::UnoOp> for UnoOperation {
fn from(value: proto::UnoOp) -> Self {
match value {
proto::UnoOp::Neg => UnoOperation::Neg,
proto::UnoOp::Id => UnoOperation::Id,
}
}
}
impl From<proto::DuoOp> for Operation {
fn from(value: proto::DuoOp) -> Self {
match value {
proto::DuoOp::Mul => Operation::Mul,
proto::DuoOp::Div => Operation::Div,
proto::DuoOp::Add => Operation::Add,
proto::DuoOp::Sub => Operation::Sub,
proto::DuoOp::Pow => Operation::Pow,
proto::DuoOp::Idiv => Operation::Idiv,
proto::DuoOp::Mod => Operation::Mod,
proto::DuoOp::Eq => Operation::Eq,
proto::DuoOp::Neq => Operation::Neq,
proto::DuoOp::Lt => Operation::Lt,
proto::DuoOp::Gt => Operation::Gt,
proto::DuoOp::Leq => Operation::Leq,
proto::DuoOp::Geq => Operation::Geq,
proto::DuoOp::Land => Operation::Land,
proto::DuoOp::Lor => Operation::Lor,
proto::DuoOp::Shl => Operation::Shl,
proto::DuoOp::Shr => Operation::Shr,
proto::DuoOp::Bor => Operation::Bor,
proto::DuoOp::Band => Operation::Band,
proto::DuoOp::Bxor => Operation::Bxor,
}
}
}
impl From<proto::TresOp> for graph::TresOperation {
fn from(value: proto::TresOp) -> Self {
match value {
proto::TresOp::TernCond => TresOperation::TernCond,
}
}
}
pub fn serialize_witnesscalc_graph<T: Write>(
mut w: T,
nodes: &Vec<graph::Node>,
witness_signals: &[usize],
input_signals: &InputSignalsInfo,
) -> std::io::Result<()> {
let mut ptr = 0usize;
w.write_all(WITNESSCALC_GRAPH_MAGIC).unwrap();
ptr += WITNESSCALC_GRAPH_MAGIC.len();
w.write_u64::<LittleEndian>(nodes.len() as u64)?;
ptr += 8;
let metadata = proto::GraphMetadata {
witness_signals: witness_signals
.iter()
.map(|x| *x as u32)
.collect::<Vec<u32>>(),
inputs: input_signals
.iter()
.map(|(k, v)| {
let sig = proto::SignalDescription {
offset: v.0 as u32,
len: v.1 as u32,
};
(k.clone(), sig)
})
.collect(),
};
// capacity of buf should be enough to hold the largest message + 10 bytes
// of varint length
let mut buf = Vec::with_capacity(metadata.encoded_len() + MAX_VARINT_LENGTH);
for node in nodes {
let node_pb = proto::Node {
node: Some(proto::node::Node::from(node)),
};
assert_eq!(buf.len(), 0);
node_pb.encode_length_delimited(&mut buf)?;
ptr += buf.len();
w.write_all(&buf)?;
buf.clear();
}
metadata.encode_length_delimited(&mut buf)?;
w.write_all(&buf)?;
buf.clear();
w.write_u64::<LittleEndian>(ptr as u64)?;
Ok(())
}
fn read_message_length<R: Read>(rw: &mut WriteBackReader<R>) -> std::io::Result<usize> {
let mut buf = [0u8; MAX_VARINT_LENGTH];
let bytes_read = rw.read(&mut buf)?;
if bytes_read == 0 {
return Err(std::io::Error::new(
std::io::ErrorKind::UnexpectedEof,
"Unexpected EOF",
));
}
let len_delimiter = prost::decode_length_delimiter(buf.as_ref())?;
let lnln = prost::length_delimiter_len(len_delimiter);
if lnln < bytes_read {
rw.write_all(&buf[lnln..bytes_read])?;
}
Ok(len_delimiter)
}
fn read_message<R: Read, M: Message + std::default::Default>(
rw: &mut WriteBackReader<R>,
) -> std::io::Result<M> {
let ln = read_message_length(rw)?;
let mut buf = vec![0u8; ln];
let bytes_read = rw.read(&mut buf)?;
if bytes_read != ln {
return Err(std::io::Error::new(
std::io::ErrorKind::UnexpectedEof,
"Unexpected EOF",
));
}
let msg = prost::Message::decode(&buf[..])?;
Ok(msg)
}
pub fn deserialize_witnesscalc_graph(
r: impl Read,
) -> std::io::Result<(Vec<graph::Node>, Vec<usize>, InputSignalsInfo)> {
let mut br = WriteBackReader::new(r);
let mut magic = [0u8; WITNESSCALC_GRAPH_MAGIC.len()];
br.read_exact(&mut magic)?;
if !magic.eq(WITNESSCALC_GRAPH_MAGIC) {
return Err(std::io::Error::new(
std::io::ErrorKind::InvalidData,
"Invalid magic",
));
}
let nodes_num = br.read_u64::<LittleEndian>()?;
let mut nodes = Vec::with_capacity(nodes_num as usize);
for _ in 0..nodes_num {
let n: proto::Node = read_message(&mut br)?;
let n2: graph::Node = n.into();
nodes.push(n2);
}
let md: proto::GraphMetadata = read_message(&mut br)?;
let witness_signals = md
.witness_signals
.iter()
.map(|x| *x as usize)
.collect::<Vec<usize>>();
let input_signals = md
.inputs
.iter()
.map(|(k, v)| (k.clone(), (v.offset as usize, v.len as usize)))
.collect::<InputSignalsInfo>();
Ok((nodes, witness_signals, input_signals))
}
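// WriteBackReader lets the varint-length parser "un-read" bytes consumed past
// a message boundary: written bytes are stacked internally and served back in
// their original order before the underlying reader is read again.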
struct WriteBackReader<R: Read> {
reader: R,
buffer: Vec<u8>,
}
impl<R: Read> WriteBackReader<R> {
fn new(reader: R) -> Self {
WriteBackReader {
reader,
buffer: Vec::new(),
}
}
}
impl<R: Read> Read for WriteBackReader<R> {
fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
if buf.is_empty() {
return Ok(0);
}
let mut n = 0usize;
if !self.buffer.is_empty() {
n = std::cmp::min(buf.len(), self.buffer.len());
self.buffer[self.buffer.len() - n..]
.iter()
.rev()
.enumerate()
.for_each(|(i, x)| {
buf[i] = *x;
});
self.buffer.truncate(self.buffer.len() - n);
}
while n < buf.len() {
let m = self.reader.read(&mut buf[n..])?;
if m == 0 {
break;
}
n += m;
}
Ok(n)
}
}
impl<R: Read> Write for WriteBackReader<R> {
fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
self.buffer.reserve(buf.len());
self.buffer.extend(buf.iter().rev());
Ok(buf.len())
}
fn flush(&mut self) -> std::io::Result<()> {
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
use byteorder::ByteOrder;
use core::str::FromStr;
use graph::{Operation, TresOperation, UnoOperation};
use std::collections::HashMap;
#[test]
fn test_read_message() {
let mut buf = Vec::new();
let n1 = proto::Node {
node: Some(proto::node::Node::Input(proto::InputNode { idx: 1 })),
};
n1.encode_length_delimited(&mut buf).unwrap();
let n2 = proto::Node {
node: Some(proto::node::Node::Input(proto::InputNode { idx: 2 })),
};
n2.encode_length_delimited(&mut buf).unwrap();
let mut reader = std::io::Cursor::new(&buf);
let mut rw = WriteBackReader::new(&mut reader);
let got_n1: proto::Node = read_message(&mut rw).unwrap();
assert!(n1.eq(&got_n1));
let got_n2: proto::Node = read_message(&mut rw).unwrap();
assert!(n2.eq(&got_n2));
assert_eq!(reader.position(), buf.len() as u64);
}
#[test]
fn test_read_message_variant() {
let nodes = vec![
proto::Node {
node: Some(proto::node::Node::from(&graph::Node::Input(0))),
},
proto::Node {
node: Some(proto::node::Node::from(&graph::Node::MontConstant(
Fr::from_str("1").unwrap(),
))),
},
proto::Node {
node: Some(proto::node::Node::from(&graph::Node::UnoOp(
UnoOperation::Id,
4,
))),
},
proto::Node {
node: Some(proto::node::Node::from(&graph::Node::Op(
Operation::Mul,
5,
6,
))),
},
proto::Node {
node: Some(proto::node::Node::from(&graph::Node::TresOp(
TresOperation::TernCond,
7,
8,
9,
))),
},
];
let mut buf = Vec::new();
for n in &nodes {
n.encode_length_delimited(&mut buf).unwrap();
}
let mut nodes_got: Vec<proto::Node> = Vec::new();
let mut reader = std::io::Cursor::new(&buf);
let mut rw = WriteBackReader::new(&mut reader);
for _ in 0..nodes.len() {
nodes_got.push(read_message(&mut rw).unwrap());
}
assert_eq!(nodes, nodes_got);
}
#[test]
fn test_write_back_reader() {
let data = [1u8, 2, 3, 4, 5, 6];
let mut r = WriteBackReader::new(std::io::Cursor::new(&data));
let buf = &mut [0u8; 5];
r.read(buf).unwrap();
assert_eq!(buf, &[1, 2, 3, 4, 5]);
// return [4, 5] to reader
r.write(&buf[3..]).unwrap();
// return [2, 3] to reader
r.write(&buf[1..3]).unwrap();
buf.fill(0);
// read 3 bytes, expect [2, 3, 4] after returns
let mut n = r.read(&mut buf[..3]).unwrap();
assert_eq!(n, 3);
assert_eq!(buf, &[2, 3, 4, 0, 0]);
buf.fill(0);
// read everything left in reader
n = r.read(buf).unwrap();
assert_eq!(n, 2);
assert_eq!(buf, &[5, 6, 0, 0, 0]);
}
#[test]
fn test_deserialize_inputs() {
let nodes = vec![
graph::Node::Input(0),
graph::Node::MontConstant(Fr::from_str("1").unwrap()),
graph::Node::UnoOp(UnoOperation::Id, 4),
graph::Node::Op(Operation::Mul, 5, 6),
graph::Node::TresOp(TresOperation::TernCond, 7, 8, 9),
];
let witness_signals = vec![4, 1];
let mut input_signals: InputSignalsInfo = HashMap::new();
input_signals.insert("sig1".to_string(), (1, 3));
input_signals.insert("sig2".to_string(), (5, 1));
let mut tmp = Vec::new();
serialize_witnesscalc_graph(&mut tmp, &nodes, &witness_signals, &input_signals).unwrap();
let mut reader = std::io::Cursor::new(&tmp);
let (nodes_res, witness_signals_res, input_signals_res) =
deserialize_witnesscalc_graph(&mut reader).unwrap();
assert_eq!(nodes, nodes_res);
assert_eq!(input_signals, input_signals_res);
assert_eq!(witness_signals, witness_signals_res);
let metadata_start = LittleEndian::read_u64(&tmp[tmp.len() - 8..]);
let mt_reader = std::io::Cursor::new(&tmp[metadata_start as usize..]);
let mut rw = WriteBackReader::new(mt_reader);
let metadata: proto::GraphMetadata = read_message(&mut rw).unwrap();
let metadata_want = proto::GraphMetadata {
witness_signals: vec![4, 1],
inputs: input_signals
.iter()
.map(|(k, v)| {
(
k.clone(),
proto::SignalDescription {
offset: v.0 as u32,
len: v.1 as u32,
},
)
})
.collect(),
};
assert_eq!(metadata, metadata_want);
}
}

View File

@@ -1,15 +1,10 @@
#![allow(dead_code)]
pub mod circuit;
pub mod hashers;
pub mod iden3calc;
#[cfg(feature = "pmtree-ft")]
pub mod pm_tree_adapter;
pub mod poseidon_hash;
pub mod poseidon_tree;
pub mod protocol;
pub mod public;
#[cfg(test)]
pub mod public_api_tests;
pub mod utils;
#[cfg(not(target_arch = "wasm32"))]

View File

@@ -1,384 +0,0 @@
use std::fmt::Debug;
use std::path::PathBuf;
use std::str::FromStr;
use color_eyre::{Report, Result};
use serde_json::Value;
use utils::pmtree::tree::Key;
use utils::pmtree::{Database, Hasher};
use utils::*;
use crate::circuit::Fr;
use crate::hashers::{poseidon_hash, PoseidonHash};
use crate::utils::{bytes_le_to_fr, fr_to_bytes_le};
const METADATA_KEY: [u8; 8] = *b"metadata";
pub struct PmTree {
tree: pmtree::MerkleTree<SledDB, PoseidonHash>,
    /// Tracks, up to next_index, which leaves are set:
    /// an entry is 0 if the leaf is empty and 1 otherwise.
cached_leaves_indices: Vec<u8>,
// metadata that an application may use to store additional information
metadata: Vec<u8>,
}
pub struct PmTreeProof {
proof: pmtree::tree::MerkleProof<PoseidonHash>,
}
pub type FrOf<H> = <H as Hasher>::Fr;
// The pmtree Hasher trait implementation used by the pmtree Merkle tree
impl Hasher for PoseidonHash {
type Fr = Fr;
fn serialize(value: Self::Fr) -> pmtree::Value {
fr_to_bytes_le(&value)
}
fn deserialize(value: pmtree::Value) -> Self::Fr {
let (fr, _) = bytes_le_to_fr(&value);
fr
}
fn default_leaf() -> Self::Fr {
Fr::from(0)
}
fn hash(inputs: &[Self::Fr]) -> Self::Fr {
poseidon_hash(inputs)
}
}
fn get_tmp_path() -> PathBuf {
std::env::temp_dir().join(format!("pmtree-{}", rand::random::<u64>()))
}
fn get_tmp() -> bool {
true
}
pub struct PmtreeConfig(Config);
impl FromStr for PmtreeConfig {
type Err = Report;
fn from_str(s: &str) -> Result<Self> {
let config: Value = serde_json::from_str(s)?;
let path = config["path"].as_str();
let path = path.map(PathBuf::from);
let temporary = config["temporary"].as_bool();
let cache_capacity = config["cache_capacity"].as_u64();
let flush_every_ms = config["flush_every_ms"].as_u64();
let mode = match config["mode"].as_str() {
Some("HighThroughput") => Mode::HighThroughput,
Some("LowSpace") => Mode::LowSpace,
_ => Mode::HighThroughput,
};
let use_compression = config["use_compression"].as_bool();
if temporary.is_some()
&& path.is_some()
&& temporary.unwrap()
&& path.as_ref().unwrap().exists()
{
return Err(Report::msg(format!(
"Path {:?} already exists, cannot use temporary",
path.unwrap()
)));
}
let config = Config::new()
.temporary(temporary.unwrap_or(get_tmp()))
.path(path.unwrap_or(get_tmp_path()))
.cache_capacity(cache_capacity.unwrap_or(1024 * 1024 * 1024))
.flush_every_ms(flush_every_ms)
.mode(mode)
.use_compression(use_compression.unwrap_or(false));
Ok(PmtreeConfig(config))
}
}
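// Illustrative only (not part of the diff): a hedged sketch of building a
// config from the JSON form parsed above. Every key is optional; unrecognized
// modes fall back to HighThroughput. The path value is hypothetical.
// let config: PmtreeConfig = r#"{
//     "path": "/tmp/rln-tree",
//     "temporary": false,
//     "cache_capacity": 1048576,
//     "mode": "HighThroughput",
//     "use_compression": false
// }"#.parse().unwrap();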
impl Default for PmtreeConfig {
fn default() -> Self {
let tmp_path = get_tmp_path();
PmtreeConfig(
Config::new()
.temporary(true)
.path(tmp_path)
.cache_capacity(150_000)
.mode(Mode::HighThroughput)
.use_compression(false)
.flush_every_ms(Some(12_000)),
)
}
}
impl Debug for PmtreeConfig {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.0.fmt(f)
}
}
impl Clone for PmtreeConfig {
fn clone(&self) -> Self {
PmtreeConfig(self.0.clone())
}
}
impl ZerokitMerkleTree for PmTree {
type Proof = PmTreeProof;
type Hasher = PoseidonHash;
type Config = PmtreeConfig;
fn default(depth: usize) -> Result<Self> {
let default_config = PmtreeConfig::default();
PmTree::new(depth, Self::Hasher::default_leaf(), default_config)
}
fn new(depth: usize, _default_leaf: FrOf<Self::Hasher>, config: Self::Config) -> Result<Self> {
let tree_loaded = pmtree::MerkleTree::load(config.clone().0);
let tree = match tree_loaded {
Ok(tree) => tree,
Err(_) => pmtree::MerkleTree::new(depth, config.0)?,
};
Ok(PmTree {
tree,
cached_leaves_indices: vec![0; 1 << depth],
metadata: Vec::new(),
})
}
fn depth(&self) -> usize {
self.tree.depth()
}
fn capacity(&self) -> usize {
self.tree.capacity()
}
fn leaves_set(&self) -> usize {
self.tree.leaves_set()
}
fn root(&self) -> FrOf<Self::Hasher> {
self.tree.root()
}
fn compute_root(&mut self) -> Result<FrOf<Self::Hasher>> {
Ok(self.tree.root())
}
fn set(&mut self, index: usize, leaf: FrOf<Self::Hasher>) -> Result<()> {
self.tree
.set(index, leaf)
.map_err(|e| Report::msg(e.to_string()))?;
self.cached_leaves_indices[index] = 1;
Ok(())
}
fn set_range<I: IntoIterator<Item = FrOf<Self::Hasher>>>(
&mut self,
start: usize,
values: I,
) -> Result<()> {
let v = values.into_iter().collect::<Vec<_>>();
self.tree
.set_range(start, v.clone().into_iter())
.map_err(|e| Report::msg(e.to_string()))?;
for i in start..v.len() {
self.cached_leaves_indices[i] = 1
}
Ok(())
}
fn get(&self, index: usize) -> Result<FrOf<Self::Hasher>> {
self.tree.get(index).map_err(|e| Report::msg(e.to_string()))
}
fn get_subtree_root(&self, n: usize, index: usize) -> Result<FrOf<Self::Hasher>> {
if n > self.depth() {
return Err(Report::msg("level exceeds depth size"));
}
if index >= self.capacity() {
return Err(Report::msg("index exceeds set size"));
}
if n == 0 {
Ok(self.root())
} else if n == self.depth() {
self.get(index)
} else {
let node = self
.tree
.get_elem(Key::new(n, index >> (self.depth() - n)))
.unwrap();
Ok(node)
}
}
fn get_empty_leaves_indices(&self) -> Vec<usize> {
let next_idx = self.leaves_set();
self.cached_leaves_indices
.iter()
.take(next_idx)
.enumerate()
.filter(|&(_, &v)| v == 0u8)
.map(|(idx, _)| idx)
.collect()
}
fn override_range<I: IntoIterator<Item = FrOf<Self::Hasher>>, J: IntoIterator<Item = usize>>(
&mut self,
start: usize,
leaves: I,
indices: J,
) -> Result<()> {
let leaves = leaves.into_iter().collect::<Vec<_>>();
let mut indices = indices.into_iter().collect::<Vec<_>>();
indices.sort();
match (leaves.len(), indices.len()) {
(0, 0) => Err(Report::msg("no leaves or indices to be removed")),
(1, 0) => self.set(start, leaves[0]),
(0, 1) => self.delete(indices[0]),
(_, 0) => self.set_range(start, leaves),
(0, _) => self.remove_indices(&indices),
(_, _) => self.remove_indices_and_set_leaves(start, leaves, &indices),
}
}
fn update_next(&mut self, leaf: FrOf<Self::Hasher>) -> Result<()> {
self.tree
.update_next(leaf)
.map_err(|e| Report::msg(e.to_string()))
}
fn delete(&mut self, index: usize) -> Result<()> {
self.tree
.delete(index)
.map_err(|e| Report::msg(e.to_string()))?;
self.cached_leaves_indices[index] = 0;
Ok(())
}
fn proof(&self, index: usize) -> Result<Self::Proof> {
let proof = self.tree.proof(index)?;
Ok(PmTreeProof { proof })
}
fn verify(&self, leaf: &FrOf<Self::Hasher>, witness: &Self::Proof) -> Result<bool> {
if self.tree.verify(leaf, &witness.proof) {
Ok(true)
} else {
Err(Report::msg("verify failed"))
}
}
fn set_metadata(&mut self, metadata: &[u8]) -> Result<()> {
self.tree.db.put(METADATA_KEY, metadata.to_vec())?;
self.metadata = metadata.to_vec();
Ok(())
}
fn metadata(&self) -> Result<Vec<u8>> {
if !self.metadata.is_empty() {
return Ok(self.metadata.clone());
}
// if empty, try searching the db
let data = self.tree.db.get(METADATA_KEY)?;
if data.is_none() {
// return empty metadata
return Ok(Vec::new());
}
Ok(data.unwrap())
}
fn close_db_connection(&mut self) -> Result<()> {
self.tree.db.close().map_err(|e| Report::msg(e.to_string()))
}
}
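// A hedged usage sketch (not part of the diff) of the trait implementation above:
// build a default PmTree, insert one hashed leaf, then prove and verify it.
fn example_pmtree_usage() -> Result<()> {
    use utils::merkle_tree::Hasher; // assumed import path for the Hasher trait
    let mut tree = PmTree::default(20)?;
    let leaf = PoseidonHash::hash(&[Fr::from(42u64)]);
    tree.set(0, leaf)?;
    let proof = tree.proof(0)?;
    // verify returns Ok(true) on success and an error otherwise
    assert!(tree.verify(&leaf, &proof)?);
    Ok(())
}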
type PmTreeHasher = <PmTree as ZerokitMerkleTree>::Hasher;
type FrOfPmTreeHasher = FrOf<PmTreeHasher>;
impl PmTree {
fn remove_indices(&mut self, indices: &[usize]) -> Result<()> {
let start = indices[0];
let end = indices.last().unwrap() + 1;
let new_leaves = (start..end).map(|_| PmTreeHasher::default_leaf());
self.tree
.set_range(start, new_leaves)
.map_err(|e| Report::msg(e.to_string()))?;
for i in start..end {
self.cached_leaves_indices[i] = 0
}
Ok(())
}
fn remove_indices_and_set_leaves(
&mut self,
start: usize,
leaves: Vec<FrOfPmTreeHasher>,
indices: &[usize],
) -> Result<()> {
let min_index = *indices.first().unwrap();
let max_index = start + leaves.len();
let mut set_values = vec![PmTreeHasher::default_leaf(); max_index - min_index];
for i in min_index..start {
if !indices.contains(&i) {
let value = self.tree.get(i)?;
set_values[i - min_index] = value;
}
}
for (i, &leaf) in leaves.iter().enumerate() {
set_values[start - min_index + i] = leaf;
}
self.tree
.set_range(min_index, set_values)
.map_err(|e| Report::msg(e.to_string()))?;
for i in indices {
self.cached_leaves_indices[*i] = 0;
}
for i in start..max_index {
self.cached_leaves_indices[i] = 1;
}
Ok(())
}
}
impl ZerokitMerkleProof for PmTreeProof {
type Index = u8;
type Hasher = PoseidonHash;
fn length(&self) -> usize {
self.proof.length()
}
fn leaf_index(&self) -> usize {
self.proof.leaf_index()
}
fn get_path_elements(&self) -> Vec<FrOf<Self::Hasher>> {
self.proof.get_path_elements()
}
fn get_path_index(&self) -> Vec<Self::Index> {
self.proof.get_path_index()
}
fn compute_root_from(&self, leaf: &FrOf<Self::Hasher>) -> FrOf<Self::Hasher> {
self.proof.compute_root_from(leaf)
}
}

rln/src/poseidon_hash.rs Normal file

@@ -0,0 +1,28 @@
// This module instantiates the Poseidon hash algorithm
use crate::circuit::Fr;
use once_cell::sync::Lazy;
use utils::poseidon::Poseidon;
// These indexed constants hardcode the supported round-parameter tuples (t, RF, RN, SKIP_MATRICES) for the Bn254 scalar field
// SKIP_MATRICES is the index of the randomly generated secure MDS matrix. See the security note in the zerokit_utils::poseidon::poseidon_constants module on this.
// TODO: generate these parameters
pub const ROUND_PARAMS: [(usize, usize, usize, usize); 8] = [
(2, 8, 56, 0),
(3, 8, 57, 0),
(4, 8, 56, 0),
(5, 8, 60, 0),
(6, 8, 60, 0),
(7, 8, 63, 0),
(8, 8, 64, 0),
(9, 8, 63, 0),
];
// Poseidon Hash wrapper over above implementation. Adapted from semaphore-rs poseidon hash wrapper.
static POSEIDON: Lazy<Poseidon<Fr>> = Lazy::new(|| Poseidon::<Fr>::from(&ROUND_PARAMS));
pub fn poseidon_hash(input: &[Fr]) -> Fr {
POSEIDON
.hash(input.to_vec())
.expect("hash with fixed input size can't fail")
}
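// A hedged usage sketch (not part of the diff): hashing two field elements with
// the wrapper above; the lazily initialized POSEIDON instance is shared, so
// repeated calls on the same input are deterministic.
fn example_poseidon_hash() {
    let h = poseidon_hash(&[Fr::from(1u64), Fr::from(2u64)]);
    assert_eq!(h, poseidon_hash(&[Fr::from(1u64), Fr::from(2u64)]));
}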


@@ -2,16 +2,10 @@
// Implementation inspired by https://github.com/worldcoin/semaphore-rs/blob/d462a4372f1fd9c27610f2acfe4841fab1d396aa/src/poseidon_tree.rs (no differences)
use crate::circuit::Fr;
use crate::poseidon_hash::poseidon_hash;
use cfg_if::cfg_if;
cfg_if! {
if #[cfg(feature = "pmtree-ft")] {
use crate::pm_tree_adapter::*;
} else {
use crate::hashers::{PoseidonHash};
use utils::merkle_tree::*;
}
}
use utils::merkle_tree::*;
// The zerokit RLN default Merkle tree implementation is the OptimalMerkleTree.
// To switch to FullMerkleTree implementation, it is enough to enable the fullmerkletree feature
@@ -20,11 +14,25 @@ cfg_if! {
if #[cfg(feature = "fullmerkletree")] {
pub type PoseidonTree = FullMerkleTree<PoseidonHash>;
pub type MerkleProof = FullMerkleProof<PoseidonHash>;
} else if #[cfg(feature = "pmtree-ft")] {
pub type PoseidonTree = PmTree;
pub type MerkleProof = PmTreeProof;
} else {
pub type PoseidonTree = OptimalMerkleTree<PoseidonHash>;
pub type MerkleProof = OptimalMerkleProof<PoseidonHash>;
}
}
// The zerokit RLN Merkle tree Hasher
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct PoseidonHash;
// Implements the Hasher trait used by the Merkle tree implementations in utils
impl utils::merkle_tree::Hasher for PoseidonHash {
type Fr = Fr;
fn default_leaf() -> Self::Fr {
Self::Fr::from(0)
}
fn hash(inputs: &[Self::Fr]) -> Self::Fr {
poseidon_hash(inputs)
}
}
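// A hedged usage sketch (not part of the diff): whichever backend the feature
// flags select above, the PoseidonTree alias exposes the same surface.
// Constructor fallibility differs between the two sides of this diff; the
// unwrap below assumes the fallible variant.
fn example_poseidon_tree() {
    let mut tree = PoseidonTree::default(20).unwrap();
    tree.set(0, poseidon_hash(&[Fr::from(1u64)])).unwrap();
    let _root = tree.root();
}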


@@ -1,48 +1,82 @@
// This crate collects all the underlying primitives used to implement RLN
use ark_circom::CircomReduction;
use ark_groth16::{prepare_verifying_key, Groth16, Proof as ArkProof, ProvingKey, VerifyingKey};
use std::fmt;
use ark_circom::{CircomReduction, WitnessCalculator};
use ark_groth16::{
create_proof_with_reduction_and_matrices, prepare_verifying_key,
verify_proof as ark_verify_proof, Proof as ArkProof, ProvingKey, VerifyingKey,
};
use ark_relations::r1cs::ConstraintMatrices;
use ark_relations::r1cs::SynthesisError;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_std::{rand::thread_rng, UniformRand};
use color_eyre::{Report, Result};
use num_bigint::BigInt;
use color_eyre::Result;
use num_bigint::{BigInt, BigUint};
use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha20Rng;
use serde::{Deserialize, Serialize};
#[cfg(test)]
#[cfg(not(target_arch = "wasm32"))]
use std::sync::Mutex;
#[cfg(debug_assertions)]
use std::time::Instant;
use thiserror::Error;
use tiny_keccak::{Hasher as _, Keccak};
use serde::{Serialize, Serializer, ser::SerializeStruct, Deserialize, Deserializer};
use crate::circuit::{calculate_rln_witness, Curve, Fr};
use crate::hashers::hash_to_field;
use crate::hashers::poseidon_hash;
use crate::circuit::{Curve, Fr};
use crate::poseidon_hash::poseidon_hash;
use crate::poseidon_tree::*;
use crate::public::RLN_IDENTIFIER;
use crate::utils::*;
use utils::{ZerokitMerkleProof, ZerokitMerkleTree};
use cfg_if::cfg_if;
use serde::de::{SeqAccess, Visitor};
///////////////////////////////////////////////////////
// RLN Witness data structure and utility functions
///////////////////////////////////////////////////////
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[derive(Debug, PartialEq)]
pub struct RLNWitnessInput {
#[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
identity_secret: Fr,
#[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
user_message_limit: Fr,
#[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
message_id: Fr,
#[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
path_elements: Vec<Fr>,
identity_path_index: Vec<u8>,
#[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
x: Fr,
#[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
external_nullifier: Fr,
epoch: Fr,
rln_identifier: Fr,
}
impl Serialize for RLNWitnessInput {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut witness_ser = serializer.serialize_struct("RLNWitnessInput", 6)?;
let identity_secret: BigUint = self.identity_secret.into();
witness_ser.serialize_field("identity_secret", &identity_secret)?;
let path_el_as_biguints: Vec<BigUint> = self
.path_elements
.iter()
.map(|e| (*e).into())
.collect();
witness_ser.serialize_field("path_elements", &path_el_as_biguints)?;
witness_ser.serialize_field("identity_path_index", &self.identity_path_index)?;
let x: BigUint = self.x.into();
witness_ser.serialize_field("x", &x)?;
let epoch: BigUint = self.epoch.into();
witness_ser.serialize_field("epoch", &epoch)?;
let rln_identifier: BigUint = self.rln_identifier.into();
witness_ser.serialize_field("rln_identifier", &rln_identifier)?;
witness_ser.end()
}
}
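// A hedged sketch (not part of the diff): with the manual Serialize impl above,
// a witness renders to JSON with BigUint-encoded field elements, e.g.
//
//     let json = serde_json::to_string(&rln_witness)?;
//
// where the keys follow the serialize_field calls above: "identity_secret",
// "path_elements", "identity_path_index", "x", "epoch", "rln_identifier".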
#[derive(Debug, PartialEq)]
@@ -53,199 +87,237 @@ pub struct RLNProofValues {
pub root: Fr,
// Public Inputs:
pub x: Fr,
pub external_nullifier: Fr,
pub epoch: Fr,
pub rln_identifier: Fr,
}
pub fn serialize_field_element(element: Fr) -> Vec<u8> {
fr_to_bytes_le(&element)
return fr_to_bytes_le(&element);
}
pub fn deserialize_field_element(serialized: Vec<u8>) -> Fr {
let (element, _) = bytes_le_to_fr(&serialized);
element
return element;
}
pub fn deserialize_identity_pair(serialized: Vec<u8>) -> (Fr, Fr) {
let (identity_secret_hash, read) = bytes_le_to_fr(&serialized);
let (id_commitment, _) = bytes_le_to_fr(&serialized[read..]);
let (id_commitment, _) = bytes_le_to_fr(&serialized[read..].to_vec());
(identity_secret_hash, id_commitment)
return (identity_secret_hash, id_commitment);
}
pub fn deserialize_identity_tuple(serialized: Vec<u8>) -> (Fr, Fr, Fr, Fr) {
let mut all_read = 0;
let (identity_trapdoor, read) = bytes_le_to_fr(&serialized[all_read..]);
let (identity_trapdoor, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (identity_nullifier, read) = bytes_le_to_fr(&serialized[all_read..]);
let (identity_nullifier, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (identity_secret_hash, read) = bytes_le_to_fr(&serialized[all_read..]);
let (identity_secret_hash, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (identity_commitment, _) = bytes_le_to_fr(&serialized[all_read..]);
let (identity_commitment, _) = bytes_le_to_fr(&serialized[all_read..].to_vec());
(
return (
identity_trapdoor,
identity_nullifier,
identity_secret_hash,
identity_commitment,
)
);
}
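// A hedged sketch (not part of the diff): the inverse of the helper above,
// packing four field elements in the same little-endian
// [trapdoor | nullifier | secret_hash | commitment] layout that
// deserialize_identity_tuple consumes.
fn serialize_identity_tuple_example(tuple: (Fr, Fr, Fr, Fr)) -> Vec<u8> {
    let mut serialized = Vec::new();
    serialized.append(&mut fr_to_bytes_le(&tuple.0));
    serialized.append(&mut fr_to_bytes_le(&tuple.1));
    serialized.append(&mut fr_to_bytes_le(&tuple.2));
    serialized.append(&mut fr_to_bytes_le(&tuple.3));
    serialized
}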
/// Serializes witness
///
/// # Errors
///
/// Returns an error if `rln_witness.message_id` is not within `rln_witness.user_message_limit`.
pub fn serialize_witness(rln_witness: &RLNWitnessInput) -> Result<Vec<u8>> {
message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;
pub fn serialize_witness(rln_witness: &RLNWitnessInput) -> Vec<u8> {
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&rln_witness.identity_secret));
serialized.append(&mut fr_to_bytes_le(&rln_witness.user_message_limit));
serialized.append(&mut fr_to_bytes_le(&rln_witness.message_id));
serialized.append(&mut vec_fr_to_bytes_le(&rln_witness.path_elements)?);
serialized.append(&mut vec_u8_to_bytes_le(&rln_witness.identity_path_index)?);
serialized.append(&mut vec_fr_to_bytes_le(&rln_witness.path_elements));
serialized.append(&mut vec_u8_to_bytes_le(&rln_witness.identity_path_index));
serialized.append(&mut fr_to_bytes_le(&rln_witness.x));
serialized.append(&mut fr_to_bytes_le(&rln_witness.external_nullifier));
serialized.append(&mut fr_to_bytes_le(&rln_witness.epoch));
serialized.append(&mut fr_to_bytes_le(&rln_witness.rln_identifier));
Ok(serialized)
serialized
}
/// Deserializes witness
///
/// # Errors
///
/// Returns an error if `message_id` is not within `user_message_limit`.
pub fn deserialize_witness(serialized: &[u8]) -> Result<(RLNWitnessInput, usize)> {
let mut all_read: usize = 0;
impl<'de> Deserialize<'de> for RLNWitnessInput {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_seq(WitnessVisitor)
}
}
let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
struct WitnessVisitor;
let (user_message_limit, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
impl<'de> Visitor<'de> for WitnessVisitor {
type Value = RLNWitnessInput;
let (message_id, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
message_id_range_check(&message_id, &user_message_limit)?;
let (path_elements, read) = bytes_le_to_vec_fr(&serialized[all_read..])?;
all_read += read;
let (identity_path_index, read) = bytes_le_to_vec_u8(&serialized[all_read..])?;
all_read += read;
let (x, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (external_nullifier, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
if serialized.len() != all_read {
return Err(Report::msg("serialized length is not equal to all_read"));
fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
formatter.write_str("a sequence that deserializes to RLNWitnessInput")
}
Ok((
fn visit_seq<S>(self, mut seq: S) -> Result<Self::Value, S::Error>
where
S: SeqAccess<'de>,
{
let len = seq.size_hint().unwrap_or(0);
let mut data = Vec::with_capacity(len);
while let Some(value) = seq.next_element::<u32>()? {
data.push(value);
}
Ok(biguint_from_vec(data))
}
}
pub fn deserialize_witness(serialized: &[u8]) -> (RLNWitnessInput, usize) {
let mut all_read: usize = 0;
let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (path_elements, read) = bytes_le_to_vec_fr(&serialized[all_read..].to_vec());
all_read += read;
let (identity_path_index, read) = bytes_le_to_vec_u8(&serialized[all_read..].to_vec());
all_read += read;
let (x, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (rln_identifier, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
// TODO: check rln_identifier against public::RLN_IDENTIFIER
assert_eq!(serialized.len(), all_read);
(
RLNWitnessInput {
identity_secret,
path_elements,
identity_path_index,
x,
external_nullifier,
user_message_limit,
message_id,
epoch,
rln_identifier,
},
all_read,
))
)
}
// This function deserializes input for kilic's rln generate_proof public API
// https://github.com/kilic/rln/blob/7ac74183f8b69b399e3bc96c1ae8ab61c026dc43/src/public.rs#L148
// input_data is [ identity_secret<32> | id_index<8> | user_message_limit<32> | message_id<32> | external_nullifier<32> | signal_len<8> | signal<var> ]
// input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
// return value is a rln witness populated according to this information
pub fn proof_inputs_to_rln_witness(
tree: &mut PoseidonTree,
serialized: &[u8],
) -> Result<(RLNWitnessInput, usize)> {
) -> (RLNWitnessInput, usize) {
let mut all_read: usize = 0;
let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..]);
let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let id_index = usize::try_from(u64::from_le_bytes(
serialized[all_read..all_read + 8].try_into()?,
))?;
let id_index = u64::from_le_bytes(serialized[all_read..all_read + 8].try_into().unwrap());
all_read += 8;
let (user_message_limit, read) = bytes_le_to_fr(&serialized[all_read..]);
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (message_id, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let (external_nullifier, read) = bytes_le_to_fr(&serialized[all_read..]);
all_read += read;
let signal_len = usize::try_from(u64::from_le_bytes(
serialized[all_read..all_read + 8].try_into()?,
))?;
let signal_len = u64::from_le_bytes(serialized[all_read..all_read + 8].try_into().unwrap());
all_read += 8;
let signal: Vec<u8> = serialized[all_read..all_read + signal_len].to_vec();
let signal: Vec<u8> = serialized[all_read..all_read + (signal_len as usize)].to_vec();
let merkle_proof = tree.proof(id_index).expect("proof should exist");
let merkle_proof = tree.proof(id_index as usize).expect("proof should exist");
let path_elements = merkle_proof.get_path_elements();
let identity_path_index = merkle_proof.get_path_index();
let x = hash_to_field(&signal);
Ok((
let rln_identifier = hash_to_field(RLN_IDENTIFIER);
(
RLNWitnessInput {
identity_secret,
path_elements,
identity_path_index,
user_message_limit,
message_id,
x,
external_nullifier,
epoch,
rln_identifier,
},
all_read,
))
)
}
/// Creates `RLNWitnessInput` from its fields.
///
/// # Errors
///
/// Returns an error if `message_id` is not within `user_message_limit`.
pub fn rln_witness_from_values(
identity_secret: Fr,
merkle_proof: &MerkleProof,
x: Fr,
external_nullifier: Fr,
user_message_limit: Fr,
message_id: Fr,
) -> Result<RLNWitnessInput> {
message_id_range_check(&message_id, &user_message_limit)?;
pub fn rln_witness_from_json(input_json_str: &str) -> RLNWitnessInput {
let input_json: serde_json::Value =
serde_json::from_str(input_json_str).expect("JSON was not well-formatted");
let path_elements = merkle_proof.get_path_elements();
let identity_path_index = merkle_proof.get_path_index();
let identity_secret = str_to_fr(&input_json["identity_secret"].to_string(), 10);
Ok(RLNWitnessInput {
let path_elements = input_json["path_elements"]
.as_array()
.unwrap()
.iter()
.map(|v| str_to_fr(&v.to_string(), 10))
.collect();
let identity_path_index = input_json["identity_path_index"]
.as_array()
.unwrap()
.iter()
.map(|v| v.as_u64().unwrap() as u8)
.collect();
let x = str_to_fr(&input_json["x"].to_string(), 10);
let epoch = str_to_fr(&input_json["epoch"].to_string(), 16);
let rln_identifier = str_to_fr(&input_json["rln_identifier"].to_string(), 10);
// TODO: check rln_identifier against public::RLN_IDENTIFIER
RLNWitnessInput {
identity_secret,
path_elements,
identity_path_index,
x,
external_nullifier,
user_message_limit,
message_id,
})
epoch,
rln_identifier,
}
}
pub fn rln_witness_from_values(
identity_secret: Fr,
merkle_proof: &MerkleProof,
x: Fr,
epoch: Fr,
//rln_identifier: Fr,
) -> RLNWitnessInput {
let path_elements = merkle_proof.get_path_elements();
let identity_path_index = merkle_proof.get_path_index();
let rln_identifier = hash_to_field(RLN_IDENTIFIER);
RLNWitnessInput {
identity_secret,
path_elements,
identity_path_index,
x,
epoch,
rln_identifier,
}
}
pub fn random_rln_witness(tree_height: usize) -> RLNWitnessInput {
@@ -264,26 +336,21 @@ pub fn random_rln_witness(tree_height: usize) -> RLNWitnessInput {
identity_path_index.push(rng.gen_range(0..2) as u8);
}
let user_message_limit = Fr::from(100);
let message_id = Fr::from(1);
RLNWitnessInput {
identity_secret,
path_elements,
identity_path_index,
x,
external_nullifier: poseidon_hash(&[epoch, rln_identifier]),
user_message_limit,
message_id,
epoch,
rln_identifier,
}
}
pub fn proof_values_from_witness(rln_witness: &RLNWitnessInput) -> Result<RLNProofValues> {
message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;
pub fn proof_values_from_witness(rln_witness: &RLNWitnessInput) -> RLNProofValues {
// y share
let external_nullifier = poseidon_hash(&[rln_witness.epoch, rln_witness.rln_identifier]);
let a_0 = rln_witness.identity_secret;
let a_1 = poseidon_hash(&[a_0, rln_witness.external_nullifier, rln_witness.message_id]);
let a_1 = poseidon_hash(&[a_0, external_nullifier]);
let y = a_0 + rln_witness.x * a_1;
// Nullifier
@@ -292,28 +359,30 @@ pub fn proof_values_from_witness(rln_witness: &RLNWitnessInput) -> Result<RLNPro
// Merkle tree root computations
let root = compute_tree_root(
&rln_witness.identity_secret,
&rln_witness.user_message_limit,
&rln_witness.path_elements,
&rln_witness.identity_path_index,
true,
);
Ok(RLNProofValues {
RLNProofValues {
y,
nullifier,
root,
x: rln_witness.x,
external_nullifier: rln_witness.external_nullifier,
})
epoch: rln_witness.epoch,
rln_identifier: rln_witness.rln_identifier,
}
}
pub fn serialize_proof_values(rln_proof_values: &RLNProofValues) -> Vec<u8> {
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&rln_proof_values.root));
serialized.append(&mut fr_to_bytes_le(&rln_proof_values.external_nullifier));
serialized.append(&mut fr_to_bytes_le(&rln_proof_values.epoch));
serialized.append(&mut fr_to_bytes_le(&rln_proof_values.x));
serialized.append(&mut fr_to_bytes_le(&rln_proof_values.y));
serialized.append(&mut fr_to_bytes_le(&rln_proof_values.nullifier));
serialized.append(&mut fr_to_bytes_le(&rln_proof_values.rln_identifier));
serialized
}
@@ -323,19 +392,22 @@ pub fn serialize_proof_values(rln_proof_values: &RLNProofValues) -> Vec<u8> {
pub fn deserialize_proof_values(serialized: &[u8]) -> (RLNProofValues, usize) {
let mut all_read: usize = 0;
let (root, read) = bytes_le_to_fr(&serialized[all_read..]);
let (root, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (external_nullifier, read) = bytes_le_to_fr(&serialized[all_read..]);
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (x, read) = bytes_le_to_fr(&serialized[all_read..]);
let (x, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (y, read) = bytes_le_to_fr(&serialized[all_read..]);
let (y, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (nullifier, read) = bytes_le_to_fr(&serialized[all_read..]);
let (nullifier, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (rln_identifier, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
(
@@ -344,7 +416,8 @@ pub fn deserialize_proof_values(serialized: &[u8]) -> (RLNProofValues, usize) {
nullifier,
root,
x,
external_nullifier,
epoch,
rln_identifier,
},
all_read,
)
@@ -353,29 +426,32 @@ pub fn deserialize_proof_values(serialized: &[u8]) -> (RLNProofValues, usize) {
pub fn prepare_prove_input(
identity_secret: Fr,
id_index: usize,
external_nullifier: Fr,
epoch: Fr,
signal: &[u8],
) -> Vec<u8> {
let signal_len = u64::try_from(signal.len()).unwrap();
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret));
serialized.append(&mut normalize_usize(id_index));
serialized.append(&mut fr_to_bytes_le(&external_nullifier));
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut id_index.to_le_bytes().to_vec());
serialized.append(&mut fr_to_bytes_le(&epoch));
serialized.append(&mut signal_len.to_le_bytes().to_vec());
serialized.append(&mut signal.to_vec());
serialized
return serialized;
}
#[allow(clippy::redundant_clone)]
pub fn prepare_verify_input(proof_data: Vec<u8>, signal: &[u8]) -> Vec<u8> {
let signal_len = u64::try_from(signal.len()).unwrap();
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut proof_data.clone());
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut signal_len.to_le_bytes().to_vec());
serialized.append(&mut signal.to_vec());
serialized
return serialized;
}
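// A hedged round-trip sketch (not part of the diff): prepare_prove_input emits
// exactly the byte layout that proof_inputs_to_rln_witness parses (using the
// epoch-based signatures on this side of the diff):
//
//     let input = prepare_prove_input(identity_secret, id_index, epoch, signal);
//     let (rln_witness, _read) = proof_inputs_to_rln_witness(&mut tree, &input);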
///////////////////////////////////////////////////////
@@ -383,13 +459,15 @@ pub fn prepare_verify_input(proof_data: Vec<u8>, signal: &[u8]) -> Vec<u8> {
///////////////////////////////////////////////////////
pub fn compute_tree_root(
identity_secret: &Fr,
user_message_limit: &Fr,
leaf: &Fr,
path_elements: &[Fr],
identity_path_index: &[u8],
hash_leaf: bool,
) -> Fr {
let id_commitment = poseidon_hash(&[*identity_secret]);
let mut root = poseidon_hash(&[id_commitment, *user_message_limit]);
let mut root = *leaf;
if hash_leaf {
root = poseidon_hash(&[root]);
}
for i in 0..identity_path_index.len() {
if identity_path_index[i] == 0 {
@@ -480,7 +558,25 @@ pub fn extended_seeded_keygen(signal: &[u8]) -> (Fr, Fr, Fr, Fr) {
)
}
pub fn compute_id_secret(share1: (Fr, Fr), share2: (Fr, Fr)) -> Result<Fr, String> {
// Hashes arbitrary signal to the underlying prime field
pub fn hash_to_field(signal: &[u8]) -> Fr {
// We hash the input signal using Keccak256
// (note that a bigger curve order might require a bigger hash blocksize)
let mut hash = [0; 32];
let mut hasher = Keccak::v256();
hasher.update(signal);
hasher.finalize(&mut hash);
// We export the hash as a field element
let (el, _) = bytes_le_to_fr(hash.as_ref());
el
}
pub fn compute_id_secret(
share1: (Fr, Fr),
share2: (Fr, Fr),
external_nullifier: Fr,
) -> Result<Fr, String> {
// Assuming a0 is the identity secret and a1 = poseidonHash([a0, external_nullifier]),
// a (x,y) share satisfies the following relation
// y = a_0 + x * a_1
@@ -494,7 +590,14 @@ pub fn compute_id_secret(share1: (Fr, Fr), share2: (Fr, Fr)) -> Result<Fr, Strin
let a_0 = y1 - x1 * a_1;
// If shares come from the same polynomial, a0 is correctly recovered and a1 = poseidonHash([a0, external_nullifier])
Ok(a_0)
let computed_a_1 = poseidon_hash(&[a_0, external_nullifier]);
if a_1 == computed_a_1 {
// We successfully recovered the identity secret
return Ok(a_0);
} else {
return Err("Cannot recover identity_secret_hash from provided shares".into());
}
}
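// A hedged worked example (not part of the diff): two shares on the same line
// y = a_0 + x * a_1 recover the secret, matching the consistency check above.
fn example_recover_secret() {
    let a_0 = Fr::from(7u64);
    let external_nullifier = Fr::from(3u64);
    let a_1 = poseidon_hash(&[a_0, external_nullifier]);
    let (x1, x2) = (Fr::from(11u64), Fr::from(13u64));
    let share1 = (x1, a_0 + x1 * a_1);
    let share2 = (x2, a_0 + x2 * a_1);
    assert_eq!(
        compute_id_secret(share1, share2, external_nullifier),
        Ok(a_0)
    );
}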
///////////////////////////////////////////////////////
@@ -504,36 +607,33 @@ pub fn compute_id_secret(share1: (Fr, Fr), share2: (Fr, Fr)) -> Result<Fr, Strin
#[derive(Error, Debug)]
pub enum ProofError {
#[error("Error reading circuit key: {0}")]
CircuitKeyError(#[from] Report),
CircuitKeyError(#[from] std::io::Error),
#[error("Error producing witness: {0}")]
WitnessError(Report),
WitnessError(color_eyre::Report),
#[error("Error producing proof: {0}")]
SynthesisError(#[from] SynthesisError),
}
fn calculate_witness_element<E: ark_ec::pairing::Pairing>(
witness: Vec<BigInt>,
) -> Result<Vec<E::ScalarField>> {
use ark_ff::PrimeField;
let modulus = <E::ScalarField as PrimeField>::MODULUS;
fn calculate_witness_element<E: ark_ec::PairingEngine>(witness: Vec<BigInt>) -> Result<Vec<E::Fr>> {
use ark_ff::{FpParameters, PrimeField};
let modulus = <<E::Fr as PrimeField>::Params as FpParameters>::MODULUS;
// convert it to field elements
use num_traits::Signed;
let mut witness_vec = vec![];
for w in witness.into_iter() {
let w = if w.sign() == num_bigint::Sign::Minus {
// Need to negate the witness element if negative
modulus.into()
- w.abs()
.to_biguint()
.ok_or(Report::msg("not a biguint value"))?
} else {
w.to_biguint().ok_or(Report::msg("not a biguint value"))?
};
witness_vec.push(E::ScalarField::from(w))
}
let witness = witness
.into_iter()
.map(|w| {
let w = if w.sign() == num_bigint::Sign::Minus {
// Need to negate the witness element if negative
modulus.into() - w.abs().to_biguint().unwrap()
} else {
w.to_biguint().unwrap()
};
E::Fr::from(w)
})
.collect::<Vec<_>>();
Ok(witness_vec)
Ok(witness)
}
pub fn generate_proof_with_witness(
@@ -541,13 +641,14 @@ pub fn generate_proof_with_witness(
proving_key: &(ProvingKey<Curve>, ConstraintMatrices<Fr>),
) -> Result<ArkProof<Curve>, ProofError> {
// If in debug mode, we measure and later print time take to compute witness
#[cfg(test)]
#[cfg(debug_assertions)]
let now = Instant::now();
let full_assignment =
calculate_witness_element::<Curve>(witness).map_err(ProofError::WitnessError)?;
let full_assignment = calculate_witness_element::<Curve>(witness)
.map_err(ProofError::WitnessError)
.unwrap();
#[cfg(test)]
#[cfg(debug_assertions)]
println!("witness generation took: {:.2?}", now.elapsed());
// Random Values
@@ -556,10 +657,10 @@ pub fn generate_proof_with_witness(
let s = Fr::rand(&mut rng);
// If in debug mode, we measure and later print time take to compute proof
#[cfg(test)]
#[cfg(debug_assertions)]
let now = Instant::now();
let proof = Groth16::<_, CircomReduction>::create_proof_with_reduction_and_matrices(
let proof = create_proof_with_reduction_and_matrices::<_, CircomReduction>(
&proving_key.0,
r,
s,
@@ -567,31 +668,23 @@ pub fn generate_proof_with_witness(
proving_key.1.num_instance_variables,
proving_key.1.num_constraints,
full_assignment.as_slice(),
)?;
)
.unwrap();
#[cfg(test)]
#[cfg(debug_assertions)]
println!("proof generation took: {:.2?}", now.elapsed());
Ok(proof)
}
/// Formats inputs for witness calculation
///
/// # Errors
///
/// Returns an error if `rln_witness.message_id` is not within `rln_witness.user_message_limit`.
pub fn inputs_for_witness_calculation(
rln_witness: &RLNWitnessInput,
) -> Result<[(&str, Vec<BigInt>); 7]> {
message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;
// We convert the path indexes to field elements
pub fn inputs_for_witness_calculation(rln_witness: &RLNWitnessInput) -> [(&str, Vec<BigInt>); 6] {
// We convert the path indexes to field elements
// TODO: check if necessary
let mut path_elements = Vec::new();
for v in rln_witness.path_elements.iter() {
path_elements.push(to_bigint(v)?);
}
rln_witness
.path_elements
.iter()
.for_each(|v| path_elements.push(to_bigint(v)));
let mut identity_path_index = Vec::new();
rln_witness
@@ -599,24 +692,20 @@ pub fn inputs_for_witness_calculation(
.iter()
.for_each(|v| identity_path_index.push(BigInt::from(*v)));
Ok([
[
(
"identitySecret",
vec![to_bigint(&rln_witness.identity_secret)?],
"identity_secret",
vec![to_bigint(&rln_witness.identity_secret)],
),
("path_elements", path_elements),
("identity_path_index", identity_path_index),
("x", vec![to_bigint(&rln_witness.x)]),
("epoch", vec![to_bigint(&rln_witness.epoch)]),
(
"userMessageLimit",
vec![to_bigint(&rln_witness.user_message_limit)?],
"rln_identifier",
vec![to_bigint(&rln_witness.rln_identifier)],
),
("messageId", vec![to_bigint(&rln_witness.message_id)?]),
("pathElements", path_elements),
("identityPathIndex", identity_path_index),
("x", vec![to_bigint(&rln_witness.x)?]),
(
"externalNullifier",
vec![to_bigint(&rln_witness.external_nullifier)?],
),
])
]
}
/// Generates a RLN proof
@@ -625,19 +714,34 @@ pub fn inputs_for_witness_calculation(
///
/// Returns a [`ProofError`] if proving fails.
pub fn generate_proof(
#[cfg(not(target_arch = "wasm32"))] witness_calculator: &Mutex<WitnessCalculator>,
#[cfg(target_arch = "wasm32")] witness_calculator: &mut WitnessCalculator,
proving_key: &(ProvingKey<Curve>, ConstraintMatrices<Fr>),
rln_witness: &RLNWitnessInput,
) -> Result<ArkProof<Curve>, ProofError> {
let inputs = inputs_for_witness_calculation(rln_witness)?
let inputs = inputs_for_witness_calculation(rln_witness)
.into_iter()
.map(|(name, values)| (name.to_string(), values));
// If in debug mode, we measure and later print time take to compute witness
#[cfg(test)]
#[cfg(debug_assertions)]
let now = Instant::now();
let full_assignment = calculate_rln_witness(inputs);
#[cfg(test)]
cfg_if! {
if #[cfg(target_arch = "wasm32")] {
let full_assignment = witness_calculator
.calculate_witness_element::<Curve, _>(inputs, false)
.map_err(ProofError::WitnessError)?;
} else {
let full_assignment = witness_calculator
.lock()
.expect("witness_calculator mutex should not get poisoned")
.calculate_witness_element::<Curve, _>(inputs, false)
.map_err(ProofError::WitnessError)?;
}
}
#[cfg(debug_assertions)]
println!("witness generation took: {:.2?}", now.elapsed());
// Random Values
@@ -646,9 +750,10 @@ pub fn generate_proof(
let s = Fr::rand(&mut rng);
// If in debug mode, we measure and later print time take to compute proof
#[cfg(test)]
#[cfg(debug_assertions)]
let now = Instant::now();
let proof = Groth16::<_, CircomReduction>::create_proof_with_reduction_and_matrices(
let proof = create_proof_with_reduction_and_matrices::<_, CircomReduction>(
&proving_key.0,
r,
s,
@@ -658,7 +763,7 @@ pub fn generate_proof(
full_assignment.as_slice(),
)?;
#[cfg(test)]
#[cfg(debug_assertions)]
println!("proof generation took: {:.2?}", now.elapsed());
Ok(proof)
@@ -681,7 +786,8 @@ pub fn verify_proof(
proof_values.root,
proof_values.nullifier,
proof_values.x,
proof_values.external_nullifier,
proof_values.epoch,
proof_values.rln_identifier,
];
// Check that the proof is valid
@@ -689,76 +795,27 @@ pub fn verify_proof(
//let pr: ArkProof<Curve> = (*proof).into();
// If in debug mode, we measure and later print time take to verify proof
#[cfg(test)]
#[cfg(debug_assertions)]
let now = Instant::now();
let verified = Groth16::<_, CircomReduction>::verify_proof(&pvk, proof, &inputs)?;
let verified = ark_verify_proof(&pvk, proof, &inputs)?;
#[cfg(test)]
#[cfg(debug_assertions)]
println!("verify took: {:.2?}", now.elapsed());
Ok(verified)
}
// auxiliary function for serialising Fr to JSON using ark-serialize
fn ark_se<S, A: CanonicalSerialize>(a: &A, s: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
let mut bytes = vec![];
a.serialize_compressed(&mut bytes)
.map_err(serde::ser::Error::custom)?;
s.serialize_bytes(&bytes)
}
// auxiliary function for deserialising Fr from JSON using ark-serialize
fn ark_de<'de, D, A: CanonicalDeserialize>(data: D) -> Result<A, D::Error>
where
D: serde::de::Deserializer<'de>,
{
let s: Vec<u8> = serde::de::Deserialize::deserialize(data)?;
let a = A::deserialize_compressed_unchecked(s.as_slice());
a.map_err(serde::de::Error::custom)
}
/// Converts a JSON value into [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object.
/// Get CIRCOM JSON inputs
///
/// # Errors
///
/// Returns an error if `rln_witness.message_id` is not within `rln_witness.user_message_limit`.
pub fn rln_witness_from_json(input_json: serde_json::Value) -> Result<RLNWitnessInput> {
let rln_witness: RLNWitnessInput = serde_json::from_value(input_json).unwrap();
message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;
Ok(rln_witness)
}
/// Converts a [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object to the corresponding JSON serialization.
///
/// # Errors
///
/// Returns an error if `message_id` is not within `user_message_limit`.
pub fn rln_witness_to_json(rln_witness: &RLNWitnessInput) -> Result<serde_json::Value> {
message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;
let rln_witness_json = serde_json::to_value(rln_witness)?;
Ok(rln_witness_json)
}
/// Converts a [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object to the corresponding JSON serialization.
/// Before serialisation, the data is translated into big integers for further use in the witness calculator.
///
/// # Errors
///
/// Returns an error if `message_id` is not within `user_message_limit`.
pub fn rln_witness_to_bigint_json(rln_witness: &RLNWitnessInput) -> Result<serde_json::Value> {
message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;
/// Returns a JSON object containing the inputs necessary to calculate
/// the witness with CIRCOM in JavaScript
pub fn get_json_inputs(rln_witness: &RLNWitnessInput) -> serde_json::Value {
let mut path_elements = Vec::new();
for v in rln_witness.path_elements.iter() {
path_elements.push(to_bigint(v)?.to_str_radix(10));
}
rln_witness
.path_elements
.iter()
.for_each(|v| path_elements.push(to_bigint(v).to_str_radix(10)));
let mut identity_path_index = Vec::new();
rln_witness
@@ -767,23 +824,13 @@ pub fn rln_witness_to_bigint_json(rln_witness: &RLNWitnessInput) -> Result<serde
.for_each(|v| identity_path_index.push(BigInt::from(*v).to_str_radix(10)));
let inputs = serde_json::json!({
"identitySecret": to_bigint(&rln_witness.identity_secret)?.to_str_radix(10),
"userMessageLimit": to_bigint(&rln_witness.user_message_limit)?.to_str_radix(10),
"messageId": to_bigint(&rln_witness.message_id)?.to_str_radix(10),
"pathElements": path_elements,
"identityPathIndex": identity_path_index,
"x": to_bigint(&rln_witness.x)?.to_str_radix(10),
"externalNullifier": to_bigint(&rln_witness.external_nullifier)?.to_str_radix(10),
"identity_secret": to_bigint(&rln_witness.identity_secret).to_str_radix(10),
"path_elements": path_elements,
"identity_path_index": identity_path_index,
"x": to_bigint(&rln_witness.x).to_str_radix(10),
"epoch": format!("0x{:064x}", to_bigint(&rln_witness.epoch)),
"rln_identifier": to_bigint(&rln_witness.rln_identifier).to_str_radix(10),
});
Ok(inputs)
}
pub fn message_id_range_check(message_id: &Fr, user_message_limit: &Fr) -> Result<()> {
if message_id > user_message_limit {
return Err(color_eyre::Report::msg(
"message_id is not within user_message_limit",
));
}
Ok(())
inputs
}
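// A hedged sketch (not part of the diff) of the JSON shape produced above, with
// hypothetical small values:
//
//     {
//       "identity_secret": "123",
//       "path_elements": ["0", "0"],
//       "identity_path_index": ["0", "1"],
//       "x": "42",
//       "epoch": "0x00...01",
//       "rln_identifier": "1"
//     }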

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -2,27 +2,22 @@
use crate::circuit::Fr;
use ark_ff::PrimeField;
use color_eyre::{Report, Result};
use num_bigint::{BigInt, BigUint};
use num_traits::Num;
use serde_json::json;
use std::io::Cursor;
use std::iter::Extend;
pub fn to_bigint(el: &Fr) -> Result<BigInt> {
let res: BigUint = (*el).into();
Ok(res.into())
pub fn to_bigint(el: &Fr) -> BigInt {
let res: BigUint = (*el).try_into().unwrap();
res.try_into().unwrap()
}
pub fn fr_byte_size() -> usize {
let mbs = <Fr as PrimeField>::MODULUS_BIT_SIZE;
((mbs + 64 - (mbs % 64)) / 8) as usize
let mbs = <Fr as PrimeField>::size_in_bits();
(mbs + 64 - (mbs % 64)) / 8
}
pub fn str_to_fr(input: &str, radix: u32) -> Result<Fr> {
if !(radix == 10 || radix == 16) {
return Err(Report::msg("wrong radix"));
}
pub fn str_to_fr(input: &str, radix: u32) -> Fr {
assert!((radix == 10) || (radix == 16));
// We remove any quote present and we trim
let single_quote: char = '\"';
@@ -30,10 +25,16 @@ pub fn str_to_fr(input: &str, radix: u32) -> Result<Fr> {
input_clean = input_clean.trim().to_string();
if radix == 10 {
Ok(BigUint::from_str_radix(&input_clean, radix)?.into())
BigUint::from_str_radix(&input_clean, radix)
.unwrap()
.try_into()
.unwrap()
} else {
input_clean = input_clean.replace("0x", "");
Ok(BigUint::from_str_radix(&input_clean, radix)?.into())
BigUint::from_str_radix(&input_clean, radix)
.unwrap()
.try_into()
.unwrap()
}
}
@@ -74,129 +75,99 @@ pub fn fr_to_bytes_be(input: &Fr) -> Vec<u8> {
res
}
pub fn vec_fr_to_bytes_le(input: &[Fr]) -> Result<Vec<u8>> {
pub fn vec_fr_to_bytes_le(input: &[Fr]) -> Vec<u8> {
let mut bytes: Vec<u8> = Vec::new();
//We store the vector length
bytes.extend(u64::try_from(input.len())?.to_le_bytes().to_vec());
bytes.extend(u64::try_from(input.len()).unwrap().to_le_bytes().to_vec());
// We store each element
input.iter().for_each(|el| bytes.extend(fr_to_bytes_le(el)));
Ok(bytes)
bytes
}
pub fn vec_fr_to_bytes_be(input: &[Fr]) -> Result<Vec<u8>> {
pub fn vec_fr_to_bytes_be(input: &[Fr]) -> Vec<u8> {
let mut bytes: Vec<u8> = Vec::new();
//We store the vector length
bytes.extend(u64::try_from(input.len())?.to_be_bytes().to_vec());
bytes.extend(u64::try_from(input.len()).unwrap().to_be_bytes().to_vec());
// We store each element
input.iter().for_each(|el| bytes.extend(fr_to_bytes_be(el)));
Ok(bytes)
bytes
}
pub fn vec_u8_to_bytes_le(input: &[u8]) -> Result<Vec<u8>> {
pub fn vec_u8_to_bytes_le(input: &[u8]) -> Vec<u8> {
let mut bytes: Vec<u8> = Vec::new();
//We store the vector length
bytes.extend(u64::try_from(input.len())?.to_le_bytes().to_vec());
bytes.extend(u64::try_from(input.len()).unwrap().to_le_bytes().to_vec());
bytes.extend(input);
Ok(bytes)
bytes
}
pub fn vec_u8_to_bytes_be(input: Vec<u8>) -> Result<Vec<u8>> {
pub fn vec_u8_to_bytes_be(input: Vec<u8>) -> Vec<u8> {
let mut bytes: Vec<u8> = Vec::new();
//We store the vector length
bytes.extend(u64::try_from(input.len())?.to_be_bytes().to_vec());
bytes.extend(u64::try_from(input.len()).unwrap().to_be_bytes().to_vec());
bytes.extend(input);
Ok(bytes)
bytes
}
pub fn bytes_le_to_vec_u8(input: &[u8]) -> Result<(Vec<u8>, usize)> {
pub fn bytes_le_to_vec_u8(input: &[u8]) -> (Vec<u8>, usize) {
let mut read: usize = 0;
let len = usize::try_from(u64::from_le_bytes(input[0..8].try_into()?))?;
let len = u64::from_le_bytes(input[0..8].try_into().unwrap()) as usize;
read += 8;
let res = input[8..8 + len].to_vec();
read += res.len();
Ok((res, read))
(res, read)
}
pub fn bytes_be_to_vec_u8(input: &[u8]) -> Result<(Vec<u8>, usize)> {
pub fn bytes_be_to_vec_u8(input: &[u8]) -> (Vec<u8>, usize) {
let mut read: usize = 0;
let len = usize::try_from(u64::from_be_bytes(input[0..8].try_into()?))?;
let len = u64::from_be_bytes(input[0..8].try_into().unwrap()) as usize;
read += 8;
let res = input[8..8 + len].to_vec();
read += res.len();
Ok((res, read))
(res, read)
}
pub fn bytes_le_to_vec_fr(input: &[u8]) -> Result<(Vec<Fr>, usize)> {
pub fn bytes_le_to_vec_fr(input: &[u8]) -> (Vec<Fr>, usize) {
let mut read: usize = 0;
let mut res: Vec<Fr> = Vec::new();
let len = usize::try_from(u64::from_le_bytes(input[0..8].try_into()?))?;
let len = u64::from_le_bytes(input[0..8].try_into().unwrap()) as usize;
read += 8;
let el_size = fr_byte_size();
for i in 0..len {
let (curr_el, _) = bytes_le_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)]);
let (curr_el, _) = bytes_le_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)].to_vec());
res.push(curr_el);
read += el_size;
}
Ok((res, read))
(res, read)
}
pub fn bytes_be_to_vec_fr(input: &[u8]) -> Result<(Vec<Fr>, usize)> {
pub fn bytes_be_to_vec_fr(input: &[u8]) -> (Vec<Fr>, usize) {
let mut read: usize = 0;
let mut res: Vec<Fr> = Vec::new();
let len = usize::try_from(u64::from_be_bytes(input[0..8].try_into()?))?;
let len = u64::from_be_bytes(input[0..8].try_into().unwrap()) as usize;
read += 8;
let el_size = fr_byte_size();
for i in 0..len {
let (curr_el, _) = bytes_be_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)]);
let (curr_el, _) = bytes_be_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)].to_vec());
res.push(curr_el);
read += el_size;
}
Ok((res, read))
}
pub fn normalize_usize(input: usize) -> Vec<u8> {
let mut normalized_usize = input.to_le_bytes().to_vec();
normalized_usize.resize(8, 0);
normalized_usize
}
pub fn bytes_le_to_vec_usize(input: &[u8]) -> Result<Vec<usize>> {
let nof_elem = usize::try_from(u64::from_le_bytes(input[0..8].try_into()?))?;
if nof_elem == 0 {
Ok(vec![])
} else {
let elements: Vec<usize> = input[8..]
.chunks(8)
.map(|ch| usize::from_le_bytes(ch[0..8].try_into().unwrap()))
.collect();
Ok(elements)
}
}
// used for tests
pub fn generate_input_buffer() -> Cursor<String> {
Cursor::new(json!({}).to_string())
(res, read)
}
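// A hedged sketch (not part of the diff): the length-prefixed little-endian
// encoding round-trips through the helpers above (assuming the infallible
// signatures on this side of the diff).
fn example_vec_fr_roundtrip() {
    let frs = vec![Fr::from(1u64), Fr::from(2u64)];
    let bytes = vec_fr_to_bytes_le(&frs);
    let (decoded, read) = bytes_le_to_vec_fr(&bytes);
    assert_eq!(decoded, frs);
    assert_eq!(read, bytes.len());
}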
/* Old conversion utilities between different libraries data types

File diff suppressed because it is too large


@@ -4,25 +4,48 @@
#[cfg(test)]
mod test {
use rln::hashers::{poseidon_hash, PoseidonHash};
use rln::{circuit::*, poseidon_tree::PoseidonTree};
use utils::{FullMerkleTree, OptimalMerkleTree, ZerokitMerkleProof, ZerokitMerkleTree};
use rln::circuit::*;
use rln::poseidon_tree::*;
use utils::{FullMerkleTree, OptimalMerkleTree};
#[test]
// This test checks correctness of `FullMerkleTree` and `OptimalMerkleTree` with the Poseidon hash
fn test_zerokit_merkle_implementations() {
/// A basic performance comparison between the two supported Merkle Tree implementations
fn test_zerokit_merkle_implementations_performances() {
use std::time::{Duration, Instant};
let tree_height = 20;
let sample_size = 100;
let leaves: Vec<Fr> = (0..sample_size).map(|s| Fr::from(s)).collect();
let mut tree_full = FullMerkleTree::<PoseidonHash>::default(TEST_TREE_HEIGHT).unwrap();
let mut tree_opt = OptimalMerkleTree::<PoseidonHash>::default(TEST_TREE_HEIGHT).unwrap();
let mut gen_time_full: u128 = 0;
let mut upd_time_full: u128 = 0;
let mut gen_time_opt: u128 = 0;
let mut upd_time_opt: u128 = 0;
for _ in 0..sample_size.try_into().unwrap() {
let now = Instant::now();
FullMerkleTree::<PoseidonHash>::default(tree_height);
gen_time_full += now.elapsed().as_nanos();
let now = Instant::now();
OptimalMerkleTree::<PoseidonHash>::default(tree_height);
gen_time_opt += now.elapsed().as_nanos();
}
let mut tree_full = FullMerkleTree::<PoseidonHash>::default(tree_height);
let mut tree_opt = OptimalMerkleTree::<PoseidonHash>::default(tree_height);
for i in 0..sample_size.try_into().unwrap() {
let now = Instant::now();
tree_full.set(i, leaves[i]).unwrap();
upd_time_full += now.elapsed().as_nanos();
let proof = tree_full.proof(i).expect("index should be set");
assert_eq!(proof.leaf_index(), i);
let now = Instant::now();
tree_opt.set(i, leaves[i]).unwrap();
upd_time_opt += now.elapsed().as_nanos();
let proof = tree_opt.proof(i).expect("index should be set");
assert_eq!(proof.leaf_index(), i);
}
@@ -32,108 +55,518 @@ mod test {
let tree_opt_root = tree_opt.root();
assert_eq!(tree_full_root, tree_opt_root);
println!(" Average tree generation time:");
println!(
" - Full Merkle Tree: {:?}",
Duration::from_nanos((gen_time_full / sample_size).try_into().unwrap())
);
println!(
" - Optimal Merkle Tree: {:?}",
Duration::from_nanos((gen_time_opt / sample_size).try_into().unwrap())
);
println!(" Average update_next execution time:");
println!(
" - Full Merkle Tree: {:?}",
Duration::from_nanos((upd_time_full / sample_size).try_into().unwrap())
);
println!(
" - Optimal Merkle Tree: {:?}",
Duration::from_nanos((upd_time_opt / sample_size).try_into().unwrap())
);
}
}
#[test]
fn test_subtree_root() {
const DEPTH: usize = 3;
const LEAVES_LEN: usize = 6;
// Test module for pmtree integration and features in zerokit,
// compiled only when the pmtree feature is enabled
let mut tree = PoseidonTree::default(DEPTH).unwrap();
let leaves: Vec<Fr> = (0..LEAVES_LEN).map(|s| Fr::from(s as i32)).collect();
let _ = tree.set_range(0, leaves);
#[cfg(feature = "pmtree")]
#[cfg(test)]
mod pmtree_test {
for i in 0..LEAVES_LEN {
// check leaves
assert_eq!(
tree.get(i).unwrap(),
tree.get_subtree_root(DEPTH, i).unwrap()
);
// check root
assert_eq!(tree.root(), tree.get_subtree_root(0, i).unwrap());
use pmtree::*;
use rln::circuit::Fr;
use rln::poseidon_hash::poseidon_hash;
use rln::poseidon_tree::PoseidonHash;
use rln::protocol::hash_to_field;
use rln::utils::{bytes_le_to_fr, fr_to_bytes_le, str_to_fr};
use sled::Db as Sled;
use std::collections::HashMap;
use std::fs;
use std::path::Path;
use utils::{FullMerkleTree, OptimalMerkleTree};
// The pmtree Hasher trait implementation used by the pmtree Merkle tree
impl pmtree::Hasher for PoseidonHash {
type Fr = Fr;
fn default_leaf() -> Self::Fr {
Fr::from(0)
}
// check intermediate nodes
for n in (1..=DEPTH).rev() {
for i in (0..(1 << n)).step_by(2) {
let idx_l = i * (1 << (DEPTH - n));
let idx_r = (i + 1) * (1 << (DEPTH - n));
let idx_sr = idx_l;
fn serialize(value: Self::Fr) -> Value {
fr_to_bytes_le(&value)
}
let prev_l = tree.get_subtree_root(n, idx_l).unwrap();
let prev_r = tree.get_subtree_root(n, idx_r).unwrap();
let subroot = tree.get_subtree_root(n - 1, idx_sr).unwrap();
fn deserialize(value: Value) -> Self::Fr {
let (fr, _) = bytes_le_to_fr(&value);
fr
}
assert_eq!(poseidon_hash(&[prev_l, prev_r]), subroot);
fn hash(inputs: &[Self::Fr]) -> Self::Fr {
poseidon_hash(inputs)
}
}
// pmtree supports in-memory and on-disk databases (Database trait) for storing the Merkle tree state
// We implement Database for hashmaps, an in-memory database
struct MemoryDB(HashMap<DBKey, Value>);
impl Database for MemoryDB {
fn new(_dbpath: &str) -> Result<Self> {
Ok(MemoryDB(HashMap::new()))
}
fn load(_dbpath: &str) -> Result<Self> {
Err(Error("Cannot load in-memory DB".to_string()))
}
fn get(&self, key: DBKey) -> Result<Option<Value>> {
Ok(self.0.get(&key).cloned())
}
fn put(&mut self, key: DBKey, value: Value) -> Result<()> {
self.0.insert(key, value);
Ok(())
}
}
// We implement Database for sled DB, an on-disk database
struct SledDB(Sled);
impl Database for SledDB {
fn new(dbpath: &str) -> Result<Self> {
if Path::new(dbpath).exists() {
match fs::remove_dir_all(dbpath) {
Ok(x) => x,
Err(e) => return Err(Error(e.to_string())),
}
}
let db: Sled = match sled::open(dbpath) {
Ok(db) => db,
Err(e) => return Err(Error(e.to_string())),
};
Ok(SledDB(db))
}
fn load(dbpath: &str) -> Result<Self> {
let db: Sled = match sled::open(dbpath) {
Ok(db) => db,
Err(e) => return Err(Error(e.to_string())),
};
if !db.was_recovered() {
return Err(Error("Trying to load non-existing database!".to_string()));
}
Ok(SledDB(db))
}
fn get(&self, key: DBKey) -> Result<Option<Value>> {
match self.0.get(key) {
Ok(value) => Ok(value.map(|val| val.to_vec())),
Err(e) => Err(Error(e.to_string())),
}
}
fn put(&mut self, key: DBKey, value: Value) -> Result<()> {
match self.0.insert(key, value) {
Ok(_) => Ok(()),
Err(e) => Err(Error(e.to_string())),
}
}
}
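// A hedged sketch (not part of the diff): both Database impls above plug into
// the same pmtree type, so swapping storage backends is only a type-parameter
// change:
//
//     let mem_tree = pmtree::MerkleTree::<MemoryDB, PoseidonHash>::default(20)?;
//     let disk_tree = pmtree::MerkleTree::<SledDB, PoseidonHash>::default(20)?;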
#[test]
fn test_get_empty_leaves_indices() {
let depth = 4;
let nof_leaves: usize = 1 << (depth - 1);
/// A basic performance comparison between the two supported Merkle Tree implementations and in-memory/on-disk pmtree implementations
fn test_zerokit_and_pmtree_merkle_implementations_performances() {
use std::time::{Duration, Instant};
let mut tree = PoseidonTree::default(depth).unwrap();
let leaves: Vec<Fr> = (0..nof_leaves).map(|s| Fr::from(s as i32)).collect();
let tree_height = 20;
let sample_size = 100;
// check set_range
let _ = tree.set_range(0, leaves.clone());
assert!(tree.get_empty_leaves_indices().is_empty());
let leaves: Vec<Fr> = (0..sample_size).map(|s| Fr::from(s)).collect();
let mut vec_idxs = Vec::new();
// check delete function
for i in 0..nof_leaves {
vec_idxs.push(i);
let _ = tree.delete(i);
assert_eq!(tree.get_empty_leaves_indices(), vec_idxs);
}
// check set function
for i in (0..nof_leaves).rev() {
vec_idxs.pop();
let _ = tree.set(i, leaves[i]);
assert_eq!(tree.get_empty_leaves_indices(), vec_idxs);
let mut gen_time_full: u128 = 0;
let mut upd_time_full: u128 = 0;
let mut gen_time_opt: u128 = 0;
let mut upd_time_opt: u128 = 0;
let mut gen_time_pm_memory: u128 = 0;
let mut upd_time_pm_memory: u128 = 0;
let mut gen_time_pm_sled: u128 = 0;
let mut upd_time_pm_sled: u128 = 0;
for _ in 0..sample_size.try_into().unwrap() {
let now = Instant::now();
FullMerkleTree::<PoseidonHash>::default(tree_height);
gen_time_full += now.elapsed().as_nanos();
let now = Instant::now();
OptimalMerkleTree::<PoseidonHash>::default(tree_height);
gen_time_opt += now.elapsed().as_nanos();
let now = Instant::now();
pmtree::MerkleTree::<MemoryDB, PoseidonHash>::default(tree_height).unwrap();
gen_time_pm_memory += now.elapsed().as_nanos();
let now = Instant::now();
pmtree::MerkleTree::<SledDB, PoseidonHash>::default(tree_height).unwrap();
gen_time_pm_sled += now.elapsed().as_nanos();
}
// check remove_indices_and_set_leaves inside override_range function
assert!(tree.get_empty_leaves_indices().is_empty());
let leaves_2: Vec<Fr> = (0..2).map(|s| Fr::from(s as i32)).collect();
tree.override_range(0, leaves_2.clone(), [0, 1, 2, 3])
.unwrap();
assert_eq!(tree.get_empty_leaves_indices(), vec![2, 3]);
let mut tree_full = FullMerkleTree::<PoseidonHash>::default(tree_height);
let mut tree_opt = OptimalMerkleTree::<PoseidonHash>::default(tree_height);
let mut tree_pm_memory =
pmtree::MerkleTree::<MemoryDB, PoseidonHash>::default(tree_height).unwrap();
let mut tree_pm_sled =
pmtree::MerkleTree::<SledDB, PoseidonHash>::default(tree_height).unwrap();
// check remove_indices inside override_range function
tree.override_range(0, [], [0, 1]).unwrap();
assert_eq!(tree.get_empty_leaves_indices(), vec![0, 1, 2, 3]);
for i in 0..sample_size.try_into().unwrap() {
let now = Instant::now();
tree_full.set(i, leaves[i]).unwrap();
upd_time_full += now.elapsed().as_nanos();
let proof = tree_full.proof(i).expect("index should be set");
assert_eq!(proof.leaf_index(), i);
// check set_range inside override_range function
tree.override_range(0, leaves_2.clone(), []).unwrap();
assert_eq!(tree.get_empty_leaves_indices(), vec![2, 3]);
let now = Instant::now();
tree_opt.set(i, leaves[i]).unwrap();
upd_time_opt += now.elapsed().as_nanos();
let proof = tree_opt.proof(i).expect("index should be set");
assert_eq!(proof.leaf_index(), i);
let leaves_4: Vec<Fr> = (0..4).map(|s| Fr::from(s as i32)).collect();
// check if the indexes for write and delete are the same
tree.override_range(0, leaves_4.clone(), [0, 1, 2, 3])
.unwrap();
assert!(tree.get_empty_leaves_indices().is_empty());
let now = Instant::now();
tree_pm_memory.set(i, leaves[i]).unwrap();
upd_time_pm_memory += now.elapsed().as_nanos();
let proof = tree_pm_memory.proof(i).expect("index should be set");
assert_eq!(proof.leaf_index(), i);
// check if indexes for deletion are before indexes for overwriting
tree.override_range(4, leaves_4.clone(), [0, 1, 2, 3])
.unwrap();
// The result looks like this because pmtree's set_range increases next_index
// by the size of the union of the deletion and insertion indices,
// not by the number of elements inserted.
assert_eq!(
tree.get_empty_leaves_indices(),
vec![0, 1, 2, 3, 8, 9, 10, 11]
let now = Instant::now();
tree_pm_sled.set(i, leaves[i]).unwrap();
upd_time_pm_sled += now.elapsed().as_nanos();
let proof = tree_pm_sled.proof(i).expect("index should be set");
assert_eq!(proof.leaf_index(), i);
}
// We check all roots are the same
let tree_full_root = tree_full.root();
let tree_opt_root = tree_opt.root();
let tree_pm_memory_root = tree_pm_memory.root();
let tree_pm_sled_root = tree_pm_sled.root();
assert_eq!(tree_full_root, tree_opt_root);
assert_eq!(tree_opt_root, tree_pm_memory_root);
assert_eq!(tree_pm_memory_root, tree_pm_sled_root);
println!(" Average tree generation time:");
println!(
" - Full Merkle Tree: {:?}",
Duration::from_nanos((gen_time_full / sample_size).try_into().unwrap())
);
println!(
" - Optimal Merkle Tree: {:?}",
Duration::from_nanos((gen_time_opt / sample_size).try_into().unwrap())
);
// check if the indices for write and delete do not overlap completely
tree.override_range(2, leaves_4.clone(), [0, 1, 2, 3])
.unwrap();
// The result looks like this because pmtree's set_range increases next_index
// by the size of the union of the deletion and insertion indices,
// not by the number of elements inserted;
// in addition, indices 6 and 7 were already set in the previous test step.
assert_eq!(tree.get_empty_leaves_indices(), vec![0, 1, 8, 9, 10, 11]);
println!(
" - Pmtree-HashMap Merkle Tree: {:?}",
Duration::from_nanos((gen_time_pm_memory / sample_size).try_into().unwrap())
);
println!(
" - Pmtree-Sled Merkle Tree: {:?}",
Duration::from_nanos((gen_time_pm_sled / sample_size).try_into().unwrap())
);
println!(" Average update_next execution time:");
println!(
" - Full Merkle Tree: {:?}",
Duration::from_nanos((upd_time_full / sample_size).try_into().unwrap())
);
println!(
" - Optimal Merkle Tree: {:?}",
Duration::from_nanos((upd_time_opt / sample_size).try_into().unwrap())
);
println!(
" - Pmtree-HashMap Merkle Tree: {:?}",
Duration::from_nanos((upd_time_pm_memory / sample_size).try_into().unwrap())
);
println!(
" - Pmtree-Sled Merkle Tree: {:?}",
Duration::from_nanos((upd_time_pm_sled / sample_size).try_into().unwrap())
);
}
// The following two tests contain values that come from public::test_merkle_proof test
// We check that pmtree and zerokit Merkle tree implementations match.
#[test]
fn test_pmtree_hashmap() -> Result<()> {
let tree_height = 20;
let mut tree = pmtree::MerkleTree::<MemoryDB, PoseidonHash>::default(tree_height).unwrap();
let leaf_index = 3;
let identity_secret = hash_to_field(b"test-merkle-proof");
let id_commitment = poseidon_hash(&[identity_secret]);
// let default_leaf = Fr::from(0);
tree.set(leaf_index, id_commitment).unwrap();
// We check correct computation of the root
let root = tree.root();
assert_eq!(
root,
str_to_fr(
"0x21947ffd0bce0c385f876e7c97d6a42eec5b1fe935aab2f01c1f8a8cbcc356d2",
16
)
);
let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
let path_elements = merkle_proof.get_path_elements();
let identity_path_index = merkle_proof.get_path_index();
// We check correct computation of the path and indexes
// These values refer to tree height = 20
let expected_path_elements = vec![
str_to_fr(
"0x0000000000000000000000000000000000000000000000000000000000000000",
16,
),
str_to_fr(
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
16,
),
str_to_fr(
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
16,
),
str_to_fr(
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
16,
),
str_to_fr(
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
16,
),
str_to_fr(
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
16,
),
str_to_fr(
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
16,
),
str_to_fr(
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
16,
),
str_to_fr(
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
16,
),
str_to_fr(
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
16,
),
str_to_fr(
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
16,
),
str_to_fr(
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
16,
),
str_to_fr(
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
16,
),
str_to_fr(
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
16,
),
str_to_fr(
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
16,
),
str_to_fr(
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
16,
),
str_to_fr(
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
16,
),
str_to_fr(
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
16,
),
str_to_fr(
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
16,
),
str_to_fr(
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
16,
),
];
let expected_identity_path_index: Vec<u8> =
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
assert_eq!(path_elements, expected_path_elements);
assert_eq!(identity_path_index, expected_identity_path_index);
// We check correct verification of the proof
assert!(tree.verify(&id_commitment, &merkle_proof));
Ok(())
}
#[test]
fn test_pmtree_sled() -> Result<()> {
let tree_height = 20;
let mut tree = pmtree::MerkleTree::<SledDB, PoseidonHash>::default(tree_height).unwrap();
let leaf_index = 3;
let identity_secret = hash_to_field(b"test-merkle-proof");
let id_commitment = poseidon_hash(&[identity_secret]);
// let default_leaf = Fr::from(0);
tree.set(leaf_index, id_commitment).unwrap();
// We check correct computation of the root
let root = tree.root();
assert_eq!(
root,
str_to_fr(
"0x21947ffd0bce0c385f876e7c97d6a42eec5b1fe935aab2f01c1f8a8cbcc356d2",
16
)
);
let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
let path_elements = merkle_proof.get_path_elements();
let identity_path_index = merkle_proof.get_path_index();
// We check correct computation of the path and indexes
// These values refer to tree height = 20
let expected_path_elements = vec![
str_to_fr(
"0x0000000000000000000000000000000000000000000000000000000000000000",
16,
),
str_to_fr(
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
16,
),
str_to_fr(
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
16,
),
str_to_fr(
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
16,
),
str_to_fr(
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
16,
),
str_to_fr(
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
16,
),
str_to_fr(
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
16,
),
str_to_fr(
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
16,
),
str_to_fr(
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
16,
),
str_to_fr(
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
16,
),
str_to_fr(
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
16,
),
str_to_fr(
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
16,
),
str_to_fr(
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
16,
),
str_to_fr(
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
16,
),
str_to_fr(
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
16,
),
str_to_fr(
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
16,
),
str_to_fr(
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
16,
),
str_to_fr(
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
16,
),
str_to_fr(
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
16,
),
str_to_fr(
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
16,
),
];
let expected_identity_path_index: Vec<u8> =
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
assert_eq!(path_elements, expected_path_elements);
assert_eq!(identity_path_index, expected_identity_path_index);
// We check correct verification of the proof
assert!(tree.verify(&id_commitment, &merkle_proof));
Ok(())
}
}

rln/tests/protocol.rs

@@ -1,144 +1,343 @@
#[cfg(test)]
mod test {
use ark_ff::BigInt;
use rln::circuit::zkey_from_folder;
use rln::circuit::{vk_from_folder, Fr, TEST_TREE_HEIGHT};
use rln::hashers::{hash_to_field, poseidon_hash};
use rln::circuit::{
circom_from_folder, vk_from_folder, zkey_from_folder, Fr, TEST_RESOURCES_FOLDER,
TEST_TREE_HEIGHT,
};
use rln::poseidon_hash::poseidon_hash;
use rln::poseidon_tree::PoseidonTree;
use rln::protocol::*;
use rln::utils::str_to_fr;
use utils::{ZerokitMerkleProof, ZerokitMerkleTree};
type ConfigOf<T> = <T as ZerokitMerkleTree>::Config;
// Input generated with https://github.com/oskarth/zk-kit/commit/b6a872f7160c7c14e10a0ea40acab99cbb23c9a8
const WITNESS_JSON_15: &str = r#"
{
"identity_secret": "12825549237505733615964533204745049909430608936689388901883576945030025938736",
"path_elements": [
"18622655742232062119094611065896226799484910997537830749762961454045300666333",
"20590447254980891299813706518821659736846425329007960381537122689749540452732",
"7423237065226347324353380772367382631490014989348495481811164164159255474657",
"11286972368698509976183087595462810875513684078608517520839298933882497716792",
"3607627140608796879659380071776844901612302623152076817094415224584923813162",
"19712377064642672829441595136074946683621277828620209496774504837737984048981",
"20775607673010627194014556968476266066927294572720319469184847051418138353016",
"3396914609616007258851405644437304192397291162432396347162513310381425243293",
"21551820661461729022865262380882070649935529853313286572328683688269863701601",
"6573136701248752079028194407151022595060682063033565181951145966236778420039",
"12413880268183407374852357075976609371175688755676981206018884971008854919922",
"14271763308400718165336499097156975241954733520325982997864342600795471836726",
"20066985985293572387227381049700832219069292839614107140851619262827735677018",
"9394776414966240069580838672673694685292165040808226440647796406499139370960",
"11331146992410411304059858900317123658895005918277453009197229807340014528524"
],
"identity_path_index": [
1,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
],
"x": "8143228284048792769012135629627737459844825626241842423967352803501040982",
"epoch": "0x0000005b612540fc986b42322f8cb91c2273afad58ed006fdba0c97b4b16b12f",
"rln_identifier": "11412926387081627876309792396682864042420635853496105400039841573530884328439"
}
"#;
// Input generated with protocol::random_rln_witness
const WITNESS_JSON_19: &str = r#"
{
"identity_secret": "922538810348594125658702672067738675294669207539999802857585668079702330450",
"path_elements": [
"16059714054680148404543504061485737353203416489071538960876865983954285286166",
"3041470753871943901334053763207316028823782848445723460227667780327106380356",
"2557297527793326315072058421057853700096944625924483912548759909801348042183",
"6677578602456189582427063963562590713054668181987223110955234085327917303436",
"2250827150965576973906150764756422151438812678308727218463995574869267980301",
"1895457427602709606993445561553433669787657053834360973759981803464906070980",
"11033689991077061346803816826729204895841441316315304395980565540264104346466",
"18588752216879570844240300406954267039026327526134910835334500497981810174976",
"19346480964028499661277403659363466542857230928032088490855656809181891953123",
"21460193770370072688835316363068413651465631481105148051902686770759127189327",
"20906347653364838502964722817589315918082261023317339146393355650507243340078",
"13466599592974387800162739317046838825289754472645703919149409009404541432954",
"9617165663598957201253074168824246164494443748556931540348223968573884172285",
"6936463137584425684797785981770877165377386163416057257854261010817156666898",
"369902028235468424790098825415813437044876310542601948037281422841675126849",
"13510969869821080499683463562609720931680005714401083864659516045615497273644",
"2567921390740781421487331055530491683313154421589525170472201828596388395736",
"14360870889466292805403568662660511177232987619663547772298178013674025998478",
"4735344599616284973799984501493858013178071155960162022656706545116168334293"
],
"identity_path_index": [
1,
0,
1,
0,
1,
1,
0,
0,
1,
1,
1,
0,
0,
0,
1,
0,
1,
1,
0
],
"x": "6427050788896290028100534859169645070970780055911091444144195464808120686416",
"epoch": "0x2bd155d9f85c741044da6909d144f9cc5ce8e0d545a9ed4921b156e8b8569bab",
"rln_identifier": "2193983000213424579594329476781986065965849144986973472766961413131458022566"
}
"#;
const WITNESS_JSON_20: &str = r#"
{
"identity_secret": "13732353453861280511150022598793312186188599006979552959297495195757997428306",
"path_elements": [
"20463525608687844300981085488128968694844212760055234622292326942405619575964",
"8040856403709217901175408904825741112286158901303127670929462145501210871313",
"3776499751255585163563840252112871568402966629435152937692711318702338789837",
"19415813252626942110541463414404411443562242499365750694284604341271149125679",
"19414720788761208006634240390286942738242262010168559813148115573784354129237",
"17680594732844291740094158892269696200077963275550625226493856898849422516043",
"16009199741350632715210088346611798597033333293348807000623441780059543674510",
"18743496911007535170857676824393811326863602477260615792503039058813338644738",
"1029572792321380246989475723806770724699749375691788486434716005338938722216",
"21713138150151063186050010182615713685603650963220209951496401043119768920892",
"6713732504049401389983008178456811894856018247924860823028704114266363984580",
"2746686888799473963221285145390361693256731812094259845879519459924507786594",
"18620748467731297359505500266677881218553438497271819903304075323783392031715",
"2446201221122671119406471414204229600430018713181038717206670749886932158104",
"12037171942017611311954851302868199608036334625783560875426350283156617524597",
"21798743392351780927808323348278035105395367759688979232116905142049921734349",
"17450230289417496971557215666910229260621413088991137405744457922069827319039",
"20936854099128086256353520300046664152516566958630447858438908748907198510485",
"13513344965831154386658059617477268600255664386844920822248038939666265737046",
"15546319496880899251450021422131511560001766832580480193115646510655765306630"
],
"identity_path_index": [
0,
1,
0,
0,
1,
1,
0,
0,
1,
1,
0,
0,
0,
1,
0,
1,
1,
0,
0,
0
],
"x": "18073935665561339809445069958310044423750771681863480888589546877024349720547",
"epoch": "0x147e4c23a43a1ddca78d94bcd28147f62ca74b3dc7e56bb0a314a954b9f0e567",
"rln_identifier": "2193983000213424579594329476781986065965849144986973472766961413131458022566"
}
"#;
#[test]
// We test Merkle tree generation, proofs and verification
fn test_merkle_proof() {
let tree_height = TEST_TREE_HEIGHT;
let leaf_index = 3;
// generate identity
let identity_secret_hash = hash_to_field(b"test-merkle-proof");
let id_commitment = poseidon_hash(&[identity_secret_hash]);
let rate_commitment = poseidon_hash(&[id_commitment, 100.into()]);
let id_commitment = poseidon_hash(&vec![identity_secret_hash]);
// generate merkle tree
let default_leaf = Fr::from(0);
let mut tree = PoseidonTree::new(
TEST_TREE_HEIGHT,
default_leaf,
ConfigOf::<PoseidonTree>::default(),
)
.unwrap();
tree.set(leaf_index, rate_commitment.into()).unwrap();
let mut tree = PoseidonTree::new(tree_height, default_leaf);
tree.set(leaf_index, id_commitment.into()).unwrap();
// We check correct computation of the root
let root = tree.root();
assert_eq!(
root,
BigInt([
4939322235247991215,
5110804094006647505,
4427606543677101242,
910933464535675827
])
.into()
);
if TEST_TREE_HEIGHT == 15 {
assert_eq!(
root,
str_to_fr(
"0x1984f2e01184aef5cb974640898a5f5c25556554e2b06d99d4841badb8b198cd",
16
)
);
} else if TEST_TREE_HEIGHT == 19 {
assert_eq!(
root,
str_to_fr(
"0x219ceb53f2b1b7a6cf74e80d50d44d68ecb4a53c6cc65b25593c8d56343fb1fe",
16
)
);
} else if TEST_TREE_HEIGHT == 20 {
assert_eq!(
root,
str_to_fr(
"0x21947ffd0bce0c385f876e7c97d6a42eec5b1fe935aab2f01c1f8a8cbcc356d2",
16
)
);
}
let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
let path_elements = merkle_proof.get_path_elements();
let identity_path_index = merkle_proof.get_path_index();
// We check correct computation of the path and indexes
let expected_path_elements: Vec<Fr> = [
"0x0000000000000000000000000000000000000000000000000000000000000000",
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
]
.map(|e| str_to_fr(e, 16).unwrap())
.to_vec();
// These values refer to TEST_TREE_HEIGHT == 15
let mut expected_path_elements = vec![
str_to_fr(
"0x0000000000000000000000000000000000000000000000000000000000000000",
16,
),
str_to_fr(
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
16,
),
str_to_fr(
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
16,
),
str_to_fr(
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
16,
),
str_to_fr(
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
16,
),
str_to_fr(
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
16,
),
str_to_fr(
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
16,
),
str_to_fr(
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
16,
),
str_to_fr(
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
16,
),
str_to_fr(
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
16,
),
str_to_fr(
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
16,
),
str_to_fr(
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
16,
),
str_to_fr(
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
16,
),
str_to_fr(
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
16,
),
str_to_fr(
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
16,
),
];
let expected_identity_path_index: Vec<u8> =
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
let mut expected_identity_path_index: Vec<u8> =
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
// We add the remaining elements for the cases TEST_TREE_HEIGHT == 19 and 20
if TEST_TREE_HEIGHT == 19 || TEST_TREE_HEIGHT == 20 {
expected_path_elements.append(&mut vec![
str_to_fr(
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
16,
),
str_to_fr(
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
16,
),
str_to_fr(
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
16,
),
str_to_fr(
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
16,
),
]);
expected_identity_path_index.append(&mut vec![0, 0, 0, 0]);
}
if TEST_TREE_HEIGHT == 20 {
expected_path_elements.append(&mut vec![str_to_fr(
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
16,
)]);
expected_identity_path_index.append(&mut vec![0]);
}
assert_eq!(path_elements, expected_path_elements);
assert_eq!(identity_path_index, expected_identity_path_index);
// We check correct verification of the proof
assert!(tree.verify(&rate_commitment, &merkle_proof).unwrap());
}
fn get_test_witness() -> RLNWitnessInput {
let leaf_index = 3;
// Generate identity pair
let (identity_secret_hash, id_commitment) = keygen();
let user_message_limit = Fr::from(100);
let rate_commitment = poseidon_hash(&[id_commitment, user_message_limit]);
//// generate merkle tree
let default_leaf = Fr::from(0);
let mut tree = PoseidonTree::new(
TEST_TREE_HEIGHT,
default_leaf,
ConfigOf::<PoseidonTree>::default(),
)
.unwrap();
tree.set(leaf_index, rate_commitment.into()).unwrap();
let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
let signal = b"hey hey";
let x = hash_to_field(signal);
// We set the remaining values to random ones
let epoch = hash_to_field(b"test-epoch");
let rln_identifier = hash_to_field(b"test-rln-identifier");
let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);
rln_witness_from_values(
identity_secret_hash,
&merkle_proof,
x,
external_nullifier,
user_message_limit,
Fr::from(1),
)
.unwrap()
assert!(tree.verify(&id_commitment, &merkle_proof).unwrap());
}
#[test]
// We test RLN proof generation and verification
fn test_witness_from_json() {
// We generate all relevant keys
let proving_key = zkey_from_folder();
let verification_key = vk_from_folder();
let proving_key = zkey_from_folder(TEST_RESOURCES_FOLDER).unwrap();
let verification_key = vk_from_folder(TEST_RESOURCES_FOLDER).unwrap();
let builder = circom_from_folder(TEST_RESOURCES_FOLDER);
// We compute witness from the json input
let rln_witness = get_test_witness();
let rln_witness_json = rln_witness_to_json(&rln_witness).unwrap();
let rln_witness_deser = rln_witness_from_json(rln_witness_json).unwrap();
assert_eq!(rln_witness_deser, rln_witness);
// We compute witness from the json input example
let mut witness_json: &str = "";
if TEST_TREE_HEIGHT == 15 {
witness_json = WITNESS_JSON_15;
} else if TEST_TREE_HEIGHT == 19 {
witness_json = WITNESS_JSON_19;
} else if TEST_TREE_HEIGHT == 20 {
witness_json = WITNESS_JSON_20;
}
let rln_witness = rln_witness_from_json(witness_json);
// Let's generate a zkSNARK proof
let proof = generate_proof(&proving_key, &rln_witness_deser).unwrap();
let proof_values = proof_values_from_witness(&rln_witness_deser).unwrap();
let proof = generate_proof(builder, &proving_key, &rln_witness).unwrap();
let proof_values = proof_values_from_witness(&rln_witness);
// Let's verify the proof
let verified = verify_proof(&verification_key, &proof, &proof_values);
@@ -149,19 +348,42 @@ mod test {
#[test]
// We test RLN proof generation and verification
fn test_end_to_end() {
let rln_witness = get_test_witness();
let rln_witness_json = rln_witness_to_json(&rln_witness).unwrap();
let rln_witness_deser = rln_witness_from_json(rln_witness_json).unwrap();
assert_eq!(rln_witness_deser, rln_witness);
let tree_height = TEST_TREE_HEIGHT;
let leaf_index = 3;
// Generate identity pair
let (identity_secret_hash, id_commitment) = keygen();
//// generate merkle tree
let default_leaf = Fr::from(0);
let mut tree = PoseidonTree::new(tree_height, default_leaf);
tree.set(leaf_index, id_commitment.into()).unwrap();
let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
let signal = b"hey hey";
let x = hash_to_field(signal);
// We set the remaining values to random ones
let epoch = hash_to_field(b"test-epoch");
//let rln_identifier = hash_to_field(b"test-rln-identifier");
let rln_witness: RLNWitnessInput = rln_witness_from_values(
identity_secret_hash,
&merkle_proof,
x,
epoch, /*, rln_identifier*/
);
// We generate all relevant keys
let proving_key = zkey_from_folder();
let verification_key = vk_from_folder();
let proving_key = zkey_from_folder(TEST_RESOURCES_FOLDER).unwrap();
let verification_key = vk_from_folder(TEST_RESOURCES_FOLDER).unwrap();
let builder = circom_from_folder(TEST_RESOURCES_FOLDER);
// Let's generate a zkSNARK proof
let proof = generate_proof(&proving_key, &rln_witness_deser).unwrap();
let proof = generate_proof(builder, &proving_key, &rln_witness).unwrap();
let proof_values = proof_values_from_witness(&rln_witness_deser).unwrap();
let proof_values = proof_values_from_witness(&rln_witness);
// Let's verify the proof
let success = verify_proof(&verification_key, &proof, &proof_values).unwrap();
@@ -171,19 +393,25 @@ mod test {
#[test]
fn test_witness_serialization() {
// We test witness JSON serialization
let rln_witness = get_test_witness();
let rln_witness_json = rln_witness_to_json(&rln_witness).unwrap();
let rln_witness_deser = rln_witness_from_json(rln_witness_json).unwrap();
assert_eq!(rln_witness_deser, rln_witness);
// We test witness serialization
let ser = serialize_witness(&rln_witness).unwrap();
let (deser, _) = deserialize_witness(&ser).unwrap();
let mut witness_json: &str = "";
if TEST_TREE_HEIGHT == 15 {
witness_json = WITNESS_JSON_15;
} else if TEST_TREE_HEIGHT == 19 {
witness_json = WITNESS_JSON_19;
} else if TEST_TREE_HEIGHT == 20 {
witness_json = WITNESS_JSON_20;
}
let rln_witness = rln_witness_from_json(witness_json);
let ser = serialize_witness(&rln_witness);
let (deser, _) = deserialize_witness(&ser);
assert_eq!(rln_witness, deser);
// We test Proof values serialization
let proof_values = proof_values_from_witness(&rln_witness).unwrap();
let proof_values = proof_values_from_witness(&rln_witness);
let ser = serialize_proof_values(&proof_values);
let (deser, _) = deserialize_proof_values(&ser);
assert_eq!(proof_values, deser);
@@ -201,13 +429,11 @@ mod test {
let expected_identity_secret_hash_seed_phrase = str_to_fr(
"0x20df38f3f00496f19fe7c6535492543b21798ed7cb91aebe4af8012db884eda3",
16,
)
.unwrap();
);
let expected_id_commitment_seed_phrase = str_to_fr(
"0x1223a78a5d66043a7f9863e14507dc80720a5602b2a894923e5b5147d5a9c325",
16,
)
.unwrap();
);
assert_eq!(
identity_secret_hash,
@@ -223,13 +449,11 @@ mod test {
let expected_identity_secret_hash_seed_bytes = str_to_fr(
"0x766ce6c7e7a01bdf5b3f257616f603918c30946fa23480f2859c597817e6716",
16,
)
.unwrap();
);
let expected_id_commitment_seed_bytes = str_to_fr(
"0xbf16d2b5c0d6f9d9d561e05bfca16a81b4b873bb063508fae360d8c74cef51f",
16,
)
.unwrap();
);
assert_eq!(
identity_secret_hash,

rln/tests/public.rs

@@ -1,130 +1,173 @@
#[cfg(test)]
mod test {
use ark_ff::BigInt;
use ark_std::{rand::thread_rng, UniformRand};
use rand::Rng;
use rln::circuit::{Fr, TEST_TREE_HEIGHT};
use rln::hashers::{hash_to_field, poseidon_hash as utils_poseidon_hash, ROUND_PARAMS};
use rln::protocol::{compute_tree_root, deserialize_identity_tuple};
use rln::public::{hash as public_hash, poseidon_hash as public_poseidon_hash, RLN};
use rln::circuit::{TEST_RESOURCES_FOLDER, TEST_TREE_HEIGHT};
use rln::poseidon_hash::poseidon_hash;
use rln::protocol::{compute_tree_root, deserialize_identity_tuple, hash_to_field};
use rln::public::RLN;
use rln::utils::*;
use std::io::Cursor;
#[test]
// This test is similar to the one in lib, but uses only the public API
#[cfg(not(feature = "stateless"))]
fn test_merkle_proof() {
let tree_height = TEST_TREE_HEIGHT;
let leaf_index = 3;
let user_message_limit = 1;
let mut rln = RLN::new(TEST_TREE_HEIGHT, generate_input_buffer()).unwrap();
let input_buffer = Cursor::new(TEST_RESOURCES_FOLDER);
let mut rln = RLN::new(tree_height, input_buffer);
// generate identity
let identity_secret_hash = hash_to_field(b"test-merkle-proof");
let id_commitment = utils_poseidon_hash(&vec![identity_secret_hash]);
let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit.into()]);
let id_commitment = poseidon_hash(&vec![identity_secret_hash]);
// check that the list of empty leaf indices is empty
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_empty_leaves_indices(&mut buffer).unwrap();
let idxs = bytes_le_to_vec_usize(&buffer.into_inner()).unwrap();
assert!(idxs.is_empty());
// We pass rate_commitment as Read buffer to RLN's set_leaf
let mut buffer = Cursor::new(fr_to_bytes_le(&rate_commitment));
// We pass id_commitment as Read buffer to RLN's set_leaf
let mut buffer = Cursor::new(fr_to_bytes_le(&id_commitment));
rln.set_leaf(leaf_index, &mut buffer).unwrap();
// check that leaves before leaf_index are set to zero
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_empty_leaves_indices(&mut buffer).unwrap();
let idxs = bytes_le_to_vec_usize(&buffer.into_inner()).unwrap();
assert_eq!(idxs, [0, 1, 2]);
// We check correct computation of the root
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_root(&mut buffer).unwrap();
let (root, _) = bytes_le_to_fr(&buffer.into_inner());
assert_eq!(
root,
Fr::from(BigInt([
17110646155607829651,
5040045984242729823,
6965416728592533086,
2328960363755461975
]))
);
if TEST_TREE_HEIGHT == 15 {
assert_eq!(
root,
str_to_fr(
"0x1984f2e01184aef5cb974640898a5f5c25556554e2b06d99d4841badb8b198cd",
16
)
);
} else if TEST_TREE_HEIGHT == 19 {
assert_eq!(
root,
str_to_fr(
"0x219ceb53f2b1b7a6cf74e80d50d44d68ecb4a53c6cc65b25593c8d56343fb1fe",
16
)
);
} else if TEST_TREE_HEIGHT == 20 {
assert_eq!(
root,
str_to_fr(
"0x21947ffd0bce0c385f876e7c97d6a42eec5b1fe935aab2f01c1f8a8cbcc356d2",
16
)
);
}
// We check correct computation of merkle proof
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_proof(leaf_index, &mut buffer).unwrap();
let buffer_inner = buffer.into_inner();
let (path_elements, read) = bytes_le_to_vec_fr(&buffer_inner).unwrap();
let (identity_path_index, _) = bytes_le_to_vec_u8(&buffer_inner[read..].to_vec()).unwrap();
let (path_elements, read) = bytes_le_to_vec_fr(&buffer_inner);
let (identity_path_index, _) = bytes_le_to_vec_u8(&buffer_inner[read..].to_vec());
// We check correct computation of the path and indexes
let expected_path_elements: Vec<Fr> = [
"0x0000000000000000000000000000000000000000000000000000000000000000",
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
]
.map(|e| str_to_fr(e, 16).unwrap())
.to_vec();
let mut expected_path_elements = vec![
str_to_fr(
"0x0000000000000000000000000000000000000000000000000000000000000000",
16,
),
str_to_fr(
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
16,
),
str_to_fr(
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
16,
),
str_to_fr(
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
16,
),
str_to_fr(
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
16,
),
str_to_fr(
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
16,
),
str_to_fr(
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
16,
),
str_to_fr(
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
16,
),
str_to_fr(
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
16,
),
str_to_fr(
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
16,
),
str_to_fr(
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
16,
),
str_to_fr(
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
16,
),
str_to_fr(
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
16,
),
str_to_fr(
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
16,
),
str_to_fr(
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
16,
),
];
let expected_identity_path_index: Vec<u8> =
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
let mut expected_identity_path_index: Vec<u8> =
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
// We add the remaining elements for the cases TEST_TREE_HEIGHT == 19 and 20
if TEST_TREE_HEIGHT == 19 || TEST_TREE_HEIGHT == 20 {
expected_path_elements.append(&mut vec![
str_to_fr(
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
16,
),
str_to_fr(
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
16,
),
str_to_fr(
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
16,
),
str_to_fr(
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
16,
),
]);
expected_identity_path_index.append(&mut vec![0, 0, 0, 0]);
}
if TEST_TREE_HEIGHT == 20 {
expected_path_elements.append(&mut vec![str_to_fr(
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
16,
)]);
expected_identity_path_index.append(&mut vec![0]);
}
assert_eq!(path_elements, expected_path_elements);
assert_eq!(identity_path_index, expected_identity_path_index);
// check subtree root computation for leaf 0 at every corresponding node up to the root
let l_idx = 0;
for n in (1..=TEST_TREE_HEIGHT).rev() {
let idx_l = l_idx * (1 << (TEST_TREE_HEIGHT - n));
let idx_r = (l_idx + 1) * (1 << (TEST_TREE_HEIGHT - n));
let idx_sr = idx_l;
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_subtree_root(n, idx_l, &mut buffer).unwrap();
let (prev_l, _) = bytes_le_to_fr(&buffer.into_inner());
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_subtree_root(n, idx_r, &mut buffer).unwrap();
let (prev_r, _) = bytes_le_to_fr(&buffer.into_inner());
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_subtree_root(n - 1, idx_sr, &mut buffer).unwrap();
let (subroot, _) = bytes_le_to_fr(&buffer.into_inner());
let res = utils_poseidon_hash(&[prev_l, prev_r]);
assert_eq!(res, subroot);
}
// We double check that the proof computed from the public API is correct
let root_from_proof = compute_tree_root(
&identity_secret_hash,
&user_message_limit.into(),
&path_elements,
&identity_path_index,
);
let root_from_proof =
compute_tree_root(&id_commitment, &path_elements, &identity_path_index, false);
assert_eq!(root, root_from_proof);
}
@@ -149,13 +192,11 @@ mod test {
let expected_identity_secret_hash_seed_bytes = str_to_fr(
"0x766ce6c7e7a01bdf5b3f257616f603918c30946fa23480f2859c597817e6716",
16,
)
.unwrap();
);
let expected_id_commitment_seed_bytes = str_to_fr(
"0xbf16d2b5c0d6f9d9d561e05bfca16a81b4b873bb063508fae360d8c74cef51f",
16,
)
.unwrap();
);
assert_eq!(
identity_secret_hash,
@@ -184,23 +225,19 @@ mod test {
let expected_identity_trapdoor_seed_bytes = str_to_fr(
"0x766ce6c7e7a01bdf5b3f257616f603918c30946fa23480f2859c597817e6716",
16,
)
.unwrap();
);
let expected_identity_nullifier_seed_bytes = str_to_fr(
"0x1f18714c7bc83b5bca9e89d404cf6f2f585bc4c0f7ed8b53742b7e2b298f50b4",
16,
)
.unwrap();
);
let expected_identity_secret_hash_seed_bytes = str_to_fr(
"0x2aca62aaa7abaf3686fff2caf00f55ab9462dc12db5b5d4bcf3994e671f8e521",
16,
)
.unwrap();
);
let expected_id_commitment_seed_bytes = str_to_fr(
"0x68b66aa0a8320d2e56842581553285393188714c48f9b17acd198b4f1734c5c",
16,
)
.unwrap();
);
assert_eq!(identity_trapdoor, expected_identity_trapdoor_seed_bytes);
assert_eq!(identity_nullifier, expected_identity_nullifier_seed_bytes);
@@ -213,13 +250,15 @@ mod test {
#[test]
fn test_hash_to_field() {
let mut rng = thread_rng();
let rln = RLN::default();
let mut rng = rand::thread_rng();
let signal: [u8; 32] = rng.gen();
let mut input_buffer = Cursor::new(&signal);
let mut output_buffer = Cursor::new(Vec::<u8>::new());
public_hash(&mut input_buffer, &mut output_buffer).unwrap();
rln.hash(&mut input_buffer, &mut output_buffer).unwrap();
let serialized_hash = output_buffer.into_inner();
let (hash1, _) = bytes_le_to_fr(&serialized_hash);
@@ -227,24 +266,4 @@ mod test {
assert_eq!(hash1, hash2);
}
#[test]
fn test_poseidon_hash() {
let mut rng = thread_rng();
let number_of_inputs = rng.gen_range(1..ROUND_PARAMS.len());
let mut inputs = Vec::with_capacity(number_of_inputs);
for _ in 0..number_of_inputs {
inputs.push(Fr::rand(&mut rng));
}
let expected_hash = utils_poseidon_hash(&inputs);
let mut input_buffer = Cursor::new(vec_fr_to_bytes_le(&inputs).unwrap());
let mut output_buffer = Cursor::new(Vec::<u8>::new());
public_poseidon_hash(&mut input_buffer, &mut output_buffer).unwrap();
let serialized_hash = output_buffer.into_inner();
let (hash, _) = bytes_le_to_fr(&serialized_hash);
assert_eq!(hash, expected_hash);
}
}

rln/vendor/rln vendored Submodule

Submodule rln/vendor/rln added at fc86ad156a

semaphore/Cargo.toml Normal file

@@ -0,0 +1,50 @@
[package]
name = "semaphore-wrapper"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[features]
default = []
dylib = [ "wasmer/dylib", "wasmer-engine-dylib", "wasmer-compiler-cranelift" ]
[dependencies]
ark-bn254 = { version = "0.3.0" }
ark-circom = { git = "https://github.com/gakonst/ark-circom", features=["circom-2"] }
ark-ec = { version = "0.3.0", default-features = false, features = ["parallel"] }
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", features = ["parallel"] }
ark-relations = { version = "0.3.0", default-features = false }
ark-std = { version = "0.3.0", default-features = false, features = ["parallel"] }
color-eyre = "0.5"
num-bigint = { version = "0.4", default-features = false, features = ["rand"] }
once_cell = "1.8"
rand = "0.8.4"
semaphore = { git = "https://github.com/worldcoin/semaphore-rs", rev = "ee658c2"}
ethers-core = { git = "https://github.com/gakonst/ethers-rs", default-features = false }
ruint = { version = "1.2.0", features = [ "serde", "num-bigint", "ark-ff" ] }
serde = "1.0"
thiserror = "1.0.0"
wasmer = { version = "2.0" }
[dev-dependencies]
rand_chacha = "0.3.1"
serde_json = "1.0.79"
[build-dependencies]
color-eyre = "0.5"
wasmer = { version = "2.0" }
wasmer-engine-dylib = { version = "2.2.1", optional = true }
wasmer-compiler-cranelift = { version = "2.2.1", optional = true }
[profile.release]
codegen-units = 1
lto = true
panic = "abort"
opt-level = 3
# Compilation profile for any non-workspace member.
# Dependencies are optimized, even in a dev build. This improves dev performance
# while having negligible impact on incremental build times.
[profile.dev.package."*"]
opt-level = 3

semaphore/Makefile.toml Normal file

@@ -0,0 +1,7 @@
[tasks.build]
command = "cargo"
args = ["build", "--release"]
[tasks.test]
command = "cargo"
args = ["test", "--release"]

semaphore/README.md Normal file

@@ -0,0 +1,10 @@
# Semaphore example package
This is basically a wrapper around (and partial copy of)
https://github.com/worldcoin/semaphore-rs to illustrate how e.g. the RLN
package can be structured.
The goal is also to provide a basic FFI around protocol.rs, which is currently
not in scope for that project.
See that project for more information.
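A minimal sketch of the intended API, mirroring the test in
semaphore/tests/protocol.rs (the seed, signal, and app-id bytes below are
arbitrary placeholders):

use semaphore::{hash_to_field, identity::Identity, poseidon_tree::PoseidonTree, Field};
use semaphore_wrapper::protocol::{generate_nullifier_hash, generate_proof, verify_proof};

fn main() {
    // Build a depth-21 Poseidon tree and insert an identity commitment at leaf 0.
    let id = Identity::from_seed(b"secret");
    let mut tree = PoseidonTree::new(21, Field::from(0));
    tree.set(0, id.commitment());
    let merkle_proof = tree.proof(0).expect("proof should exist");
    let root = tree.root().into();

    // Hash the signal and external nullifier, then prove and verify membership.
    let signal_hash = hash_to_field(b"xxx");
    let external_nullifier_hash = hash_to_field(b"appId");
    let nullifier_hash = generate_nullifier_hash(&id, external_nullifier_hash);
    let proof = generate_proof(&id, &merkle_proof, external_nullifier_hash, signal_hash).unwrap();
    assert!(verify_proof(root, nullifier_hash, signal_hash, external_nullifier_hash, &proof).unwrap());
}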

semaphore/build.rs Normal file

@@ -0,0 +1,111 @@
// Adapted from semaphore-rs/build.rs
use color_eyre::eyre::{eyre, Result};
use std::{
path::{Component, Path, PathBuf},
process::Command,
};
const ZKEY_FILE: &str = "./vendor/semaphore/build/snark/semaphore_final.zkey";
const WASM_FILE: &str = "./vendor/semaphore/build/snark/semaphore.wasm";
// See <https://internals.rust-lang.org/t/path-to-lexical-absolute/14940>
fn absolute(path: &str) -> Result<PathBuf> {
let path = Path::new(path);
let mut absolute = if path.is_absolute() {
PathBuf::new()
} else {
std::env::current_dir()?
};
for component in path.components() {
match component {
Component::CurDir => {}
Component::ParentDir => {
absolute.pop();
}
component => absolute.push(component.as_os_str()),
}
}
Ok(absolute)
}
fn build_circuit() -> Result<()> {
println!("cargo:rerun-if-changed=./vendor/semaphore");
let run = |cmd: &[&str]| -> Result<()> {
// TODO: Use ExitCode::exit_ok() when stable.
Command::new(cmd[0])
.args(cmd[1..].iter())
.current_dir("./vendor/semaphore")
.status()?
.success()
.then(|| ())
.ok_or(eyre!("process returned failure"))?;
Ok(())
};
// Compute absolute paths
let zkey_file = absolute(ZKEY_FILE)?;
let wasm_file = absolute(WASM_FILE)?;
// Build circuits if not exists
// TODO: This does not rebuild if the semaphore submodule is changed.
// NOTE: This requires npm / nodejs to be installed.
if !(zkey_file.exists() && wasm_file.exists()) {
run(&["npm", "install"])?;
run(&["npm", "exec", "ts-node", "./scripts/compile-circuits.ts"])?;
}
assert!(zkey_file.exists());
assert!(wasm_file.exists());
// Export generated paths
println!("cargo:rustc-env=BUILD_RS_ZKEY_FILE={}", zkey_file.display());
println!("cargo:rustc-env=BUILD_RS_WASM_FILE={}", wasm_file.display());
Ok(())
}
#[cfg(feature = "dylib")]
fn build_dylib() -> Result<()> {
use enumset::enum_set;
use std::{env, str::FromStr};
use wasmer::{Module, Store, Target, Triple};
use wasmer_compiler_cranelift::Cranelift;
use wasmer_engine_dylib::Dylib;
let wasm_file = absolute(WASM_FILE)?;
assert!(wasm_file.exists());
let out_dir = env::var("OUT_DIR")?;
let out_dir = Path::new(&out_dir).to_path_buf();
let dylib_file = out_dir.join("semaphore.dylib");
println!(
"cargo:rustc-env=CIRCUIT_WASM_DYLIB={}",
dylib_file.display()
);
if dylib_file.exists() {
return Ok(());
}
// Create a WASM engine for the target that can compile
let triple = Triple::from_str(&env::var("TARGET")?).map_err(|e| eyre!(e))?;
let cpu_features = enum_set!();
let target = Target::new(triple, cpu_features);
let compiler_config = Cranelift::default();
let engine = Dylib::new(compiler_config).target(target).engine();
// Compile the WASM module
let store = Store::new(&engine);
let module = Module::from_file(&store, &wasm_file)?;
module.serialize_to_file(&dylib_file)?;
assert!(dylib_file.exists());
println!("cargo:warning=Circuit dylib is in {}", dylib_file.display());
Ok(())
}
fn main() -> Result<()> {
build_circuit()?;
#[cfg(feature = "dylib")]
build_dylib()?;
Ok(())
}

semaphore/src/circuit.rs Normal file

@@ -0,0 +1,79 @@
// Adapted from semaphore-rs/src/circuit.rs
use ark_bn254::{Bn254, Fr};
use ark_circom::{read_zkey, WitnessCalculator};
use ark_groth16::ProvingKey;
use ark_relations::r1cs::ConstraintMatrices;
use core::include_bytes;
use once_cell::sync::{Lazy, OnceCell};
use std::{io::Cursor, sync::Mutex};
use wasmer::{Module, Store};
#[cfg(feature = "dylib")]
use std::{env, path::Path};
#[cfg(feature = "dylib")]
use wasmer::Dylib;
const ZKEY_BYTES: &[u8] = include_bytes!(env!("BUILD_RS_ZKEY_FILE"));
#[cfg(not(feature = "dylib"))]
const WASM: &[u8] = include_bytes!(env!("BUILD_RS_WASM_FILE"));
static ZKEY: Lazy<(ProvingKey<Bn254>, ConstraintMatrices<Fr>)> = Lazy::new(|| {
let mut reader = Cursor::new(ZKEY_BYTES);
read_zkey(&mut reader).expect("zkey should be valid")
});
static WITNESS_CALCULATOR: OnceCell<Mutex<WitnessCalculator>> = OnceCell::new();
/// Initialize the library.
#[cfg(feature = "dylib")]
pub fn initialize(dylib_path: &Path) {
WITNESS_CALCULATOR
.set(from_dylib(dylib_path))
.expect("Failed to initialize witness calculator");
// Force init of ZKEY
Lazy::force(&ZKEY);
}
#[cfg(feature = "dylib")]
fn from_dylib(path: &Path) -> Mutex<WitnessCalculator> {
let store = Store::new(&Dylib::headless().engine());
// The module must be exported using [`Module::serialize`].
let module = unsafe {
Module::deserialize_from_file(&store, path).expect("Failed to load wasm dylib module")
};
let result =
WitnessCalculator::from_module(module).expect("Failed to create witness calculator");
Mutex::new(result)
}
#[must_use]
pub fn zkey() -> &'static (ProvingKey<Bn254>, ConstraintMatrices<Fr>) {
&*ZKEY
}
#[cfg(feature = "dylib")]
#[must_use]
pub fn witness_calculator() -> &'static Mutex<WitnessCalculator> {
WITNESS_CALCULATOR.get_or_init(|| {
let path = env::var("CIRCUIT_WASM_DYLIB").expect(
"Semaphore-rs is not initialized. The library needs to be initialized before use when \
built with the `dylib` feature. You can initialize by calling `initialize` or \
setting the `CIRCUIT_WASM_DYLIB` environment variable.",
);
from_dylib(Path::new(&path))
})
}
#[cfg(not(feature = "dylib"))]
#[must_use]
pub fn witness_calculator() -> &'static Mutex<WitnessCalculator> {
WITNESS_CALCULATOR.get_or_init(|| {
let store = Store::default();
let module = Module::from_binary(&store, WASM).expect("wasm should be valid");
let result =
WitnessCalculator::from_module(module).expect("Failed to create witness calculator");
Mutex::new(result)
})
}
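A minimal sketch of how these lazily initialized singletons are consumed
(this mirrors their use in protocol.rs; the dylib path below is a hypothetical
example):

// Embedded-WASM build: both getters initialize on first use.
let zkey = zkey();
let pvk = ark_groth16::prepare_verifying_key(&zkey.0.vk);
let mut calculator = witness_calculator()
    .lock()
    .expect("witness_calculator mutex should not get poisoned");

// With the `dylib` feature enabled, initialize once with a precompiled module instead:
// initialize(std::path::Path::new("/path/to/semaphore.dylib"));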

semaphore/src/lib.rs Normal file

@@ -0,0 +1,7 @@
#![allow(clippy::multiple_crate_versions)]
pub mod circuit;
pub mod protocol;
#[cfg(feature = "dylib")]
pub use circuit::initialize;

semaphore/src/protocol.rs Normal file

@@ -0,0 +1,215 @@
// Adapted from semaphore-rs/src/protocol.rs
// For illustration purposes only as an example protocol
// Private module
use crate::circuit::{witness_calculator, zkey};
use ark_bn254::{Bn254, Parameters};
use ark_circom::CircomReduction;
use ark_ec::bn::Bn;
use ark_groth16::{
create_proof_with_reduction_and_matrices, prepare_verifying_key, Proof as ArkProof,
};
use ark_relations::r1cs::SynthesisError;
use ark_std::UniformRand;
use color_eyre::Result;
use ethers_core::types::U256;
use rand::{thread_rng, Rng};
use semaphore::{
identity::Identity,
merkle_tree::{self, Branch},
poseidon,
poseidon_tree::PoseidonHash,
Field,
};
use serde::{Deserialize, Serialize};
use std::time::Instant;
use thiserror::Error;
// Matches the private G1Tup type in ark-circom.
pub type G1 = (U256, U256);
// Matches the private G2Tup type in ark-circom.
pub type G2 = ([U256; 2], [U256; 2]);
/// Wrap a proof object so we have serde support
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Proof(G1, G2, G1);
impl From<ArkProof<Bn<Parameters>>> for Proof {
fn from(proof: ArkProof<Bn<Parameters>>) -> Self {
let proof = ark_circom::ethereum::Proof::from(proof);
let (a, b, c) = proof.as_tuple();
Self(a, b, c)
}
}
impl From<Proof> for ArkProof<Bn<Parameters>> {
fn from(proof: Proof) -> Self {
let eth_proof = ark_circom::ethereum::Proof {
a: ark_circom::ethereum::G1 {
x: proof.0 .0,
y: proof.0 .1,
},
#[rustfmt::skip] // Rustfmt inserts some confusing spaces
b: ark_circom::ethereum::G2 {
// The order of coefficients is flipped.
x: [proof.1.0[1], proof.1.0[0]],
y: [proof.1.1[1], proof.1.1[0]],
},
c: ark_circom::ethereum::G1 {
x: proof.2 .0,
y: proof.2 .1,
},
};
eth_proof.into()
}
}
/// Helper to convert a merkle proof into a bigint vector
/// TODO: we should create a From trait for this
fn merkle_proof_to_vec(proof: &merkle_tree::Proof<PoseidonHash>) -> Vec<Field> {
proof
.0
.iter()
.map(|x| match x {
Branch::Left(value) | Branch::Right(value) => *value,
})
.collect()
}
/// Generates the nullifier hash
#[must_use]
pub fn generate_nullifier_hash(identity: &Identity, external_nullifier: Field) -> Field {
poseidon::hash2(external_nullifier, identity.nullifier)
}
#[derive(Error, Debug)]
pub enum ProofError {
#[error("Error reading circuit key: {0}")]
CircuitKeyError(#[from] std::io::Error),
#[error("Error producing witness: {0}")]
WitnessError(color_eyre::Report),
#[error("Error producing proof: {0}")]
SynthesisError(#[from] SynthesisError),
#[error("Error converting public input: {0}")]
ToFieldError(#[from] ruint::ToFieldError),
}
/// Generates a semaphore proof
///
/// # Errors
///
/// Returns a [`ProofError`] if proving fails.
pub fn generate_proof(
identity: &Identity,
merkle_proof: &merkle_tree::Proof<PoseidonHash>,
external_nullifier_hash: Field,
signal_hash: Field,
) -> Result<Proof, ProofError> {
generate_proof_rng(
identity,
merkle_proof,
external_nullifier_hash,
signal_hash,
&mut thread_rng(),
)
}
/// Generates a semaphore proof from entropy
///
/// # Errors
///
/// Returns a [`ProofError`] if proving fails.
pub fn generate_proof_rng(
identity: &Identity,
merkle_proof: &merkle_tree::Proof<PoseidonHash>,
external_nullifier_hash: Field,
signal_hash: Field,
rng: &mut impl Rng,
) -> Result<Proof, ProofError> {
generate_proof_rs(
identity,
merkle_proof,
external_nullifier_hash,
signal_hash,
ark_bn254::Fr::rand(rng),
ark_bn254::Fr::rand(rng),
)
}
fn generate_proof_rs(
identity: &Identity,
merkle_proof: &merkle_tree::Proof<PoseidonHash>,
external_nullifier_hash: Field,
signal_hash: Field,
r: ark_bn254::Fr,
s: ark_bn254::Fr,
) -> Result<Proof, ProofError> {
let inputs = [
("identityNullifier", vec![identity.nullifier]),
("identityTrapdoor", vec![identity.trapdoor]),
("treePathIndices", merkle_proof.path_index()),
("treeSiblings", merkle_proof_to_vec(merkle_proof)),
("externalNullifier", vec![external_nullifier_hash]),
("signalHash", vec![signal_hash]),
];
let inputs = inputs.into_iter().map(|(name, values)| {
(
name.to_string(),
values.iter().copied().map(Into::into).collect::<Vec<_>>(),
)
});
let now = Instant::now();
let full_assignment = witness_calculator()
.lock()
.expect("witness_calculator mutex should not get poisoned")
.calculate_witness_element::<Bn254, _>(inputs, false)
.map_err(ProofError::WitnessError)?;
println!("witness generation took: {:.2?}", now.elapsed());
let now = Instant::now();
let zkey = zkey();
let ark_proof = create_proof_with_reduction_and_matrices::<_, CircomReduction>(
&zkey.0,
r,
s,
&zkey.1,
zkey.1.num_instance_variables,
zkey.1.num_constraints,
full_assignment.as_slice(),
)?;
let proof = ark_proof.into();
println!("proof generation took: {:.2?}", now.elapsed());
Ok(proof)
}
/// Verifies a given semaphore proof
///
/// # Errors
///
/// Returns a [`ProofError`] if verifying fails. Verification failure does not
/// necessarily mean the proof is incorrect.
pub fn verify_proof(
root: Field,
nullifier_hash: Field,
signal_hash: Field,
external_nullifier_hash: Field,
proof: &Proof,
) -> Result<bool, ProofError> {
let zkey = zkey();
let pvk = prepare_verifying_key(&zkey.0.vk);
let public_inputs = [root, nullifier_hash, signal_hash, external_nullifier_hash]
.iter()
.map(ark_bn254::Fr::try_from)
.collect::<Result<Vec<_>, _>>()?;
let ark_proof = (*proof).into();
let result = ark_groth16::verify_proof(&pvk, &ark_proof, &public_inputs[..])?;
Ok(result)
}

semaphore/tests/protocol.rs Normal file

@@ -0,0 +1,115 @@
#[cfg(test)]
mod tests {
use ark_bn254::Parameters;
use ark_ec::bn::Bn;
use ark_groth16::Proof as ArkProof;
use rand::{Rng, SeedableRng as _};
use rand_chacha::ChaChaRng;
use semaphore::{hash_to_field, identity::Identity, poseidon_tree::PoseidonTree, Field};
use semaphore_wrapper::protocol::{
generate_nullifier_hash, generate_proof, generate_proof_rng, verify_proof, Proof,
};
use serde_json::json;
#[test]
fn test_semaphore() {
// generate identity
let id = Identity::from_seed(b"secret");
// generate merkle tree
let leaf = Field::from(0);
let mut tree = PoseidonTree::new(21, leaf);
tree.set(0, id.commitment());
let merkle_proof = tree.proof(0).expect("proof should exist");
let root = tree.root().into();
// change signal and external_nullifier here
let signal_hash = hash_to_field(b"xxx");
let external_nullifier_hash = hash_to_field(b"appId");
let nullifier_hash = generate_nullifier_hash(&id, external_nullifier_hash);
let proof =
generate_proof(&id, &merkle_proof, external_nullifier_hash, signal_hash).unwrap();
let success = verify_proof(
root,
nullifier_hash,
signal_hash,
external_nullifier_hash,
&proof,
)
.unwrap();
assert!(success);
}
fn arb_proof(seed: u64) -> Proof {
// Deterministic randomness for testing
let mut rng = ChaChaRng::seed_from_u64(seed);
// generate identity
let seed: [u8; 16] = rng.gen();
let id = Identity::from_seed(&seed);
// generate merkle tree
let leaf = Field::from(0);
let mut tree = PoseidonTree::new(21, leaf);
tree.set(0, id.commitment());
let merkle_proof = tree.proof(0).expect("proof should exist");
let external_nullifier: [u8; 16] = rng.gen();
let external_nullifier_hash = hash_to_field(&external_nullifier);
let signal: [u8; 16] = rng.gen();
let signal_hash = hash_to_field(&signal);
generate_proof_rng(
&id,
&merkle_proof,
external_nullifier_hash,
signal_hash,
&mut rng,
)
.unwrap()
}
#[test]
fn test_proof_cast_roundtrip() {
let proof = arb_proof(123);
let ark_proof: ArkProof<Bn<Parameters>> = proof.into();
let result: Proof = ark_proof.into();
assert_eq!(proof, result);
}
#[test]
fn test_proof_serialize() {
let proof = arb_proof(456);
let json = serde_json::to_value(&proof).unwrap();
assert_eq!(
json,
json!([
[
"0x249ae469686987ee9368da60dd177a8c42891c02f5760e955e590c79d55cfab2",
"0xf22e25870f49388459d388afb24dcf6ec11bb2d4def1e2ec26d6e42f373aad8"
],
[
[
"0x17bd25dbd7436c30ea5b8a3a47aadf11ed646c4b25cc14a84ff8cbe0252ff1f8",
"0x1c140668c56688367416534d57b4a14e5a825efdd5e121a6a2099f6dc4cd277b"
],
[
"0x26a8524759d969ea0682a092cf7a551697d81962d6c998f543f81e52d83e05e1",
"0x273eb3f796fd1807b9df9c6d769d983e3dabdc61677b75d48bb7691303b2c8dd"
]
],
[
"0x62715c53a0eb4c46dbb5f73f1fd7449b9c63d37c1ece65debc39b472065a90f",
"0x114f7becc66f1cd7a8b01c89db8233622372fc0b6fc037c4313bca41e2377fd9"
]
])
);
}
}

semaphore/vendor/semaphore vendored Submodule

utils/Cargo.toml

@@ -1,40 +1,18 @@
[package]
name = "zerokit_utils"
version = "0.5.1"
name = "utils"
version = "0.1.0"
edition = "2021"
license = "MIT OR Apache-2.0"
description = "Various utilities for Zerokit"
documentation = "https://github.com/vacp2p/zerokit"
homepage = "https://vac.dev"
repository = "https://github.com/vacp2p/zerokit"
[lib]
bench = false
[dependencies]
ark-ff = { version = "=0.4.1", default-features = false, features = ["asm"] }
num-bigint = { version = "=0.4.3", default-features = false, features = [
"rand",
] }
color-eyre = "=0.6.2"
pmtree = { package = "vacp2p_pmtree", version = "=2.0.2", optional = true }
sled = "=0.34.7"
serde = "=1.0.163"
lazy_static = "1.4.0"
hex = "0.4"
ark-ff = { version = "0.3.0", default-features = false, features = ["asm"] }
num-bigint = { version = "0.4.3", default-features = false, features = ["rand"] }
[dev-dependencies]
ark-bn254 = "=0.4.0"
num-traits = "=0.2.15"
hex-literal = "=0.3.4"
tiny-keccak = { version = "=2.0.2", features = ["keccak"] }
criterion = { version = "=0.4.0", features = ["html_reports"] }
ark-bn254 = { version = "0.3.0" }
num-traits = "0.2.11"
hex-literal = "0.3.4"
tiny-keccak = { version = "2.0.2", features = ["keccak"] }
[features]
default = ["parallel"]
parallel = ["ark-ff/parallel"]
pmtree-ft = ["pmtree"]
[[bench]]
name = "merkle_tree_benchmark"
harness = false

utils/Makefile.toml

@@ -5,7 +5,3 @@ args = ["build", "--release"]
[tasks.test]
command = "cargo"
args = ["test", "--release"]
[tasks.bench]
command = "cargo"
args = ["bench"]

utils/README.md

@@ -1,15 +0,0 @@
# Utils crate
## Building
1. `cargo build`
## Testing
1. `cargo test`
## Benchmarking
1. `cargo bench`
To view the results of the benchmark, open the `target/criterion/report/index.html` file generated after the bench

utils/benches/merkle_tree_benchmark.rs

@@ -1,161 +0,0 @@
use criterion::{criterion_group, criterion_main, Criterion};
use hex_literal::hex;
use lazy_static::lazy_static;
use std::{fmt::Display, str::FromStr};
use tiny_keccak::{Hasher as _, Keccak};
use zerokit_utils::{
FullMerkleConfig, FullMerkleTree, Hasher, OptimalMerkleConfig, OptimalMerkleTree,
ZerokitMerkleTree,
};
#[derive(Clone, Copy, Eq, PartialEq)]
struct Keccak256;
#[derive(Clone, Copy, Eq, PartialEq, Debug, Default)]
struct TestFr([u8; 32]);
impl Hasher for Keccak256 {
type Fr = TestFr;
fn default_leaf() -> Self::Fr {
TestFr([0; 32])
}
fn hash(inputs: &[Self::Fr]) -> Self::Fr {
let mut output = [0; 32];
let mut hasher = Keccak::v256();
for element in inputs {
hasher.update(element.0.as_slice());
}
hasher.finalize(&mut output);
TestFr(output)
}
}
impl Display for TestFr {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", String::from_utf8_lossy(self.0.as_slice()))
}
}
impl FromStr for TestFr {
type Err = std::string::FromUtf8Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(TestFr(s.as_bytes().try_into().unwrap()))
}
}
lazy_static! {
static ref LEAVES: [TestFr; 4] = [
hex!("0000000000000000000000000000000000000000000000000000000000000001"),
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
]
.map(TestFr);
}
pub fn optimal_merkle_tree_benchmark(c: &mut Criterion) {
let mut tree =
OptimalMerkleTree::<Keccak256>::new(2, TestFr([0; 32]), OptimalMerkleConfig::default())
.unwrap();
c.bench_function("OptimalMerkleTree::set", |b| {
b.iter(|| {
tree.set(0, LEAVES[0]).unwrap();
})
});
c.bench_function("OptimalMerkleTree::delete", |b| {
b.iter(|| {
tree.delete(0).unwrap();
})
});
c.bench_function("OptimalMerkleTree::override_range", |b| {
b.iter(|| {
tree.override_range(0, *LEAVES, [0, 1, 2, 3]).unwrap();
})
});
c.bench_function("OptimalMerkleTree::compute_root", |b| {
b.iter(|| {
tree.compute_root().unwrap();
})
});
c.bench_function("OptimalMerkleTree::get", |b| {
b.iter(|| {
tree.get(0).unwrap();
})
});
// check the intermediate node getter, which requires additional computation of the subtree root index
c.bench_function("OptimalMerkleTree::get_subtree_root", |b| {
b.iter(|| {
tree.get_subtree_root(1, 0).unwrap();
})
});
c.bench_function("OptimalMerkleTree::get_empty_leaves_indices", |b| {
b.iter(|| {
tree.get_empty_leaves_indices();
})
});
}
pub fn full_merkle_tree_benchmark(c: &mut Criterion) {
let mut tree =
FullMerkleTree::<Keccak256>::new(2, TestFr([0; 32]), FullMerkleConfig::default()).unwrap();
c.bench_function("FullMerkleTree::set", |b| {
b.iter(|| {
tree.set(0, LEAVES[0]).unwrap();
})
});
c.bench_function("FullMerkleTree::delete", |b| {
b.iter(|| {
tree.delete(0).unwrap();
})
});
c.bench_function("FullMerkleTree::override_range", |b| {
b.iter(|| {
tree.override_range(0, *LEAVES, [0, 1, 2, 3]).unwrap();
})
});
c.bench_function("FullMerkleTree::compute_root", |b| {
b.iter(|| {
tree.compute_root().unwrap();
})
});
c.bench_function("FullMerkleTree::get", |b| {
b.iter(|| {
tree.get(0).unwrap();
})
});
// check the intermediate node getter, which requires additional computation of the subtree root index
c.bench_function("FullMerkleTree::get_subtree_root", |b| {
b.iter(|| {
tree.get_subtree_root(1, 0).unwrap();
})
});
c.bench_function("FullMerkleTree::get_empty_leaves_indices", |b| {
b.iter(|| {
tree.get_empty_leaves_indices();
})
});
}
criterion_group!(
benches,
optimal_merkle_tree_benchmark,
full_merkle_tree_benchmark
);
criterion_main!(benches);

utils/src/lib.rs

@@ -3,8 +3,3 @@ pub use self::poseidon::*;
pub mod merkle_tree;
pub use self::merkle_tree::*;
#[cfg(feature = "pmtree-ft")]
pub mod pm_tree;
#[cfg(feature = "pmtree-ft")]
pub use self::pm_tree::*;

Some files were not shown because too many files have changed in this diff