Mirror of https://github.com/vacp2p/zerokit.git (synced 2026-01-09 13:47:58 -05:00)

Comparing secret-has… to benchmark-… (13 commits)
Commits compared:

- 82f95b63aa
- dc6fcd9f04
- 735a9d2071
- 3df9eb4a4e
- d18024beac
- a8f50a4232
- bf1e184da9
- 4473688efa
- c80569d518
- fd99b6af74
- 65f53e3da3
- 042f8a9739
- baf474e747
.github/workflows/ci.yml (vendored, 151 lines changed)

@@ -29,20 +29,16 @@ jobs:
     runs-on: ${{ matrix.platform }}
     timeout-minutes: 60

-    name: test - ${{ matrix.crate }} - ${{ matrix.platform }}
+    name: Test - ${{ matrix.crate }} - ${{ matrix.platform }}
     steps:
       - name: Checkout sources
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: Install stable toolchain
-        uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: stable
-          override: true
+        uses: dtolnay/rust-toolchain@stable
       - uses: Swatinem/rust-cache@v2
       - name: Install dependencies
        run: make installdeps
-      - name: cargo-make test
+      - name: Test utils
        run: |
          cargo make test --release
        working-directory: ${{ matrix.crate }}
@@ -52,24 +48,20 @@ jobs:
     matrix:
       platform: [ubuntu-latest, macos-latest]
       crate: [rln]
-      feature: ["default", "arkzkey", "stateless"]
+      feature: ["default", "stateless"]
     runs-on: ${{ matrix.platform }}
     timeout-minutes: 60

-    name: test - ${{ matrix.crate }} - ${{ matrix.platform }} - ${{ matrix.feature }}
+    name: Test - ${{ matrix.crate }} - ${{ matrix.platform }} - ${{ matrix.feature }}
     steps:
       - name: Checkout sources
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: Install stable toolchain
-        uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: stable
-          override: true
+        uses: dtolnay/rust-toolchain@stable
       - uses: Swatinem/rust-cache@v2
       - name: Install dependencies
        run: make installdeps
-      - name: cargo-make test
+      - name: Test rln
        run: |
          if [ ${{ matrix.feature }} == default ]; then
            cargo make test --release
@@ -83,86 +75,54 @@ jobs:
     matrix:
       platform: [ubuntu-latest, macos-latest]
       crate: [rln-wasm]
-      feature: ["default", "arkzkey"]
+      feature: ["default"]
     runs-on: ${{ matrix.platform }}
     timeout-minutes: 60

-    name: test - ${{ matrix.crate }} - ${{ matrix.platform }} - ${{ matrix.feature }}
+    name: Test - ${{ matrix.crate }} - ${{ matrix.platform }} - ${{ matrix.feature }}
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - name: Install stable toolchain
-        uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: stable
-          override: true
+        uses: dtolnay/rust-toolchain@stable
       - uses: Swatinem/rust-cache@v2
-      - name: Install Dependencies
+      - name: Install dependencies
        run: make installdeps
-      - name: cargo-make build
-        run: |
-          if [ ${{ matrix.feature }} == default ]; then
-            cargo make build
-          else
-            cargo make build_${{ matrix.feature }}
-          fi
+      - name: Build rln-wasm
+        run: cargo make build
        working-directory: ${{ matrix.crate }}
-      - name: cargo-make test
-        run: |
-          if [ ${{ matrix.feature }} == default ]; then
-            cargo make test --release
-          else
-            cargo make test_${{ matrix.feature }} --release
-          fi
+      - name: Test rln-wasm on node
+        run: cargo make test --release
        working-directory: ${{ matrix.crate }}
-      - name: cargo-make test browser
-        run: |
-          if [ ${{ matrix.feature }} == default ]; then
-            cargo make test_browser --release
-          else
-            cargo make test_browser_${{ matrix.feature }} --release
-          fi
+      - name: Test rln-wasm on browser
+        run: cargo make test_browser --release
        working-directory: ${{ matrix.crate }}

-  # rln-wasm-multihread-test:
-  #   strategy:
-  #     matrix:
-  #       platform: [ubuntu-latest, macos-latest]
-  #       crate: [rln-wasm]
-  #       feature: ["multithread", "multithread_arkzkey"]
-  #   runs-on: ${{ matrix.platform }}
-  #   timeout-minutes: 60
-  #   name: test - ${{ matrix.crate }} - ${{ matrix.platform }} - ${{ matrix.feature }}
-  #   steps:
-  #     - uses: actions/checkout@v3
-  #     - name: Install nightly toolchain
-  #       uses: actions-rs/toolchain@v1
-  #       with:
-  #         profile: minimal
-  #         toolchain: nightly
-  #         override: true
-  #         components: rust-src
-  #         target: wasm32-unknown-unknown
-  #     - uses: Swatinem/rust-cache@v2
-  #     - name: Install Dependencies
-  #       run: make installdeps
-  #     - name: cargo-make build
-  #       run: |
-  #         if [ ${{ matrix.feature }} == default ]; then
-  #           cargo make build
-  #         else
-  #           cargo make build_${{ matrix.feature }}
-  #         fi
-  #       working-directory: ${{ matrix.crate }}
-  #     - name: cargo-make test
-  #       run: |
-  #         if [ ${{ matrix.feature }} == default ]; then
-  #           cargo make test --release
-  #         else
-  #           cargo make test_${{ matrix.feature }} --release
-  #         fi
-  #       working-directory: ${{ matrix.crate }}
+  rln-wasm-parallel-test:
+    strategy:
+      matrix:
+        platform: [ubuntu-latest, macos-latest]
+        crate: [rln-wasm]
+        feature: ["parallel"]
+    runs-on: ${{ matrix.platform }}
+    timeout-minutes: 60
+
+    name: Test - ${{ matrix.crate }} - ${{ matrix.platform }} - ${{ matrix.feature }}
+    steps:
+      - uses: actions/checkout@v4
+      - name: Install nightly toolchain
+        uses: dtolnay/rust-toolchain@nightly
+        with:
+          components: rust-src
+          targets: wasm32-unknown-unknown
+      - uses: Swatinem/rust-cache@v2
+      - name: Install dependencies
+        run: make installdeps
+      - name: Build rln-wasm in parallel mode
+        run: cargo make build_parallel
+        working-directory: ${{ matrix.crate }}
+      - name: Test rln-wasm in parallel mode on browser
+        run: cargo make test_parallel --release
+        working-directory: ${{ matrix.crate }}

   lint:
     strategy:
@@ -176,25 +136,22 @@ jobs:
     name: lint - ${{ matrix.crate }} - ${{ matrix.platform }}
     steps:
       - name: Checkout sources
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: Install stable toolchain
-        uses: actions-rs/toolchain@v1
+        uses: dtolnay/rust-toolchain@stable
        with:
-          profile: minimal
-          toolchain: stable
-          override: true
          components: rustfmt, clippy
       - uses: Swatinem/rust-cache@v2
-      - name: Install Dependencies
+      - name: Install dependencies
        run: make installdeps
-      - name: cargo fmt
+      - name: Check formatting
        if: success() || failure()
        run: cargo fmt -- --check
        working-directory: ${{ matrix.crate }}
-      - name: cargo clippy
+      - name: Check clippy
        if: success() || failure()
        run: |
-          cargo clippy --release
+          cargo clippy --all-targets --release -- -D warnings
        working-directory: ${{ matrix.crate }}

   benchmark-utils:
@@ -211,7 +168,7 @@ jobs:
     name: benchmark - ${{ matrix.crate }} - ${{ matrix.platform }}
     steps:
       - name: Checkout sources
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - uses: Swatinem/rust-cache@v2
       - uses: boa-dev/criterion-compare-action@v3
        with:
@@ -226,14 +183,14 @@ jobs:
       # we run benchmark tests only on ubuntu
       platform: [ubuntu-latest]
       crate: [rln]
-      feature: ["default", "arkzkey"]
+      feature: ["default"]
     runs-on: ${{ matrix.platform }}
     timeout-minutes: 60

     name: benchmark - ${{ matrix.crate }} - ${{ matrix.platform }} - ${{ matrix.feature }}
     steps:
       - name: Checkout sources
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - uses: Swatinem/rust-cache@v2
       - uses: boa-dev/criterion-compare-action@v3
        with:
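The feature-gated test steps above splice the matrix value into a cargo-make task name. For two concrete cells of the `rln` matrix this works out to roughly the following (a sketch; the feature values are taken from the matrix list above for illustration):

```bash
# matrix.feature == "default" takes the first branch:
cargo make test --release
# matrix.feature == "stateless" takes the else branch:
cargo make test_stateless --release
```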
.github/workflows/nightly-release.yml (vendored, 120 lines changed)

@@ -6,38 +6,38 @@ on:

 jobs:
   linux:
-    name: Linux build
-    runs-on: ubuntu-latest
     strategy:
       matrix:
-        feature: [ "default", "arkzkey", "stateless" ]
+        feature:
+          - "stateless"
+          - "stateless,parallel"
+          - "pmtree-ft"
+          - "pmtree-ft,parallel"
+          - "fullmerkletree"
+          - "fullmerkletree,parallel"
+          - "optimalmerkletree"
+          - "optimalmerkletree,parallel"
         target:
           - x86_64-unknown-linux-gnu
           - aarch64-unknown-linux-gnu
           # - i686-unknown-linux-gnu
-        include:
-          - feature: stateless
-            cargo_args: --exclude rln-cli
+    name: Linux build
+    runs-on: ubuntu-latest
     steps:
       - name: Checkout sources
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: Install stable toolchain
-        uses: actions-rs/toolchain@v1
+        uses: dtolnay/rust-toolchain@stable
        with:
-          profile: minimal
-          toolchain: stable
-          override: true
          target: ${{ matrix.target }}
       - uses: Swatinem/rust-cache@v2
       - name: Install dependencies
        run: make installdeps
-      - name: cross build
+      - name: Cross build
        run: |
-          cross build --release --target ${{ matrix.target }} --features ${{ matrix.feature }} --workspace ${{ matrix.cargo_args }}
+          cross build --release --target ${{ matrix.target }} --no-default-features --features ${{ matrix.feature }} --workspace --exclude rln-cli
          mkdir release
          cp target/${{ matrix.target }}/release/librln* release/
          tar -czvf ${{ matrix.target }}-${{ matrix.feature }}-rln.tar.gz release/

       - name: Upload archive artifact
         uses: actions/upload-artifact@v4
        with:
@@ -50,33 +50,34 @@ jobs:
     runs-on: macos-latest
     strategy:
       matrix:
-        feature: [ "default", "arkzkey", "stateless" ]
+        feature:
+          - "stateless"
+          - "stateless,parallel"
+          - "pmtree-ft"
+          - "pmtree-ft,parallel"
+          - "fullmerkletree"
+          - "fullmerkletree,parallel"
+          - "optimalmerkletree"
+          - "optimalmerkletree,parallel"
         target:
           - x86_64-apple-darwin
           - aarch64-apple-darwin
-        include:
-          - feature: stateless
-            cargo_args: --exclude rln-cli
     steps:
       - name: Checkout sources
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: Install stable toolchain
-        uses: actions-rs/toolchain@v1
+        uses: dtolnay/rust-toolchain@stable
        with:
-          profile: minimal
-          toolchain: stable
-          override: true
          target: ${{ matrix.target }}
       - uses: Swatinem/rust-cache@v2
       - name: Install dependencies
        run: make installdeps
-      - name: cross build
+      - name: Cross build
        run: |
-          cross build --release --target ${{ matrix.target }} --features ${{ matrix.feature }} --workspace ${{ matrix.cargo_args }}
+          cross build --release --target ${{ matrix.target }} --no-default-features --features ${{ matrix.feature }} --workspace --exclude rln-cli
          mkdir release
          cp target/${{ matrix.target }}/release/librln* release/
          tar -czvf ${{ matrix.target }}-${{ matrix.feature }}-rln.tar.gz release/

       - name: Upload archive artifact
         uses: actions/upload-artifact@v4
        with:
@@ -85,47 +86,76 @@ jobs:
          retention-days: 2

   browser-rln-wasm:
-    name: Browser build (RLN WASM)
+    name: Browser build
     runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        feature:
+          - "default"
+          - "parallel"
     steps:
       - name: Checkout sources
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: Install stable toolchain
-        uses: actions-rs/toolchain@v1
+        uses: dtolnay/rust-toolchain@stable
        with:
-          profile: minimal
-          toolchain: stable
-          override: true
+          targets: wasm32-unknown-unknown
+      - name: Install nightly toolchain
+        uses: dtolnay/rust-toolchain@nightly
+        with:
+          targets: wasm32-unknown-unknown
+          components: rust-src
       - uses: Swatinem/rust-cache@v2
+        with:
+          key: wasm-${{ matrix.feature }}
       - name: Install dependencies
        run: make installdeps
-      - name: cross make build
-        run: |
-          cross make build
-          mkdir release
-          cp pkg/** release/
-          tar -czvf browser-rln-wasm.tar.gz release/
-        working-directory: rln-wasm
+      - name: Install wasm-pack
+        run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh
+      - name: Install binaryen
+        run: |
+          sudo apt-get update
+          sudo apt-get install -y binaryen
+      - name: Build wasm package
+        run: |
+          if [[ ${{ matrix.feature }} == *parallel* ]]; then
+            env RUSTFLAGS="-C target-feature=+atomics,+bulk-memory,+mutable-globals" \
+            rustup run nightly wasm-pack build --release --target web --scope waku \
+            --features ${{ matrix.feature }} -Z build-std=panic_abort,std
+
+            wasm-bindgen --target web --split-linked-modules --out-dir ./pkg \
+            ./target/wasm32-unknown-unknown/release/rln_wasm.wasm
+          else
+            wasm-pack build --release --target web --scope waku --features ${{ matrix.feature }}
+          fi
+
+          sed -i.bak 's/rln-wasm/zerokit-rln-wasm/g' pkg/package.json && rm pkg/package.json.bak
+
+          wasm-opt pkg/rln_wasm_bg.wasm -Oz --strip-debug --strip-dwarf \
+          --remove-unused-module-elements --vacuum -o pkg/rln_wasm_bg.wasm
+
+          mkdir release
+          cp -r pkg/* release/
+          tar -czvf browser-rln-wasm-${{ matrix.feature }}.tar.gz release/
+        working-directory: rln-wasm
       - name: Upload archive artifact
         uses: actions/upload-artifact@v4
        with:
-          name: browser-rln-wasm-archive
-          path: rln-wasm/browser-rln-wasm.tar.gz
+          name: Browser-${{ matrix.feature }}-rln-wasm-archive
+          path: rln-wasm/browser-${{ matrix.feature }}-rln-wasm.tar.gz
          retention-days: 2

   prepare-prerelease:
     name: Prepare pre-release
-    needs: [ linux, macos, browser-rln-wasm ]
+    needs: [linux, macos, browser-rln-wasm]
     runs-on: ubuntu-latest
     steps:
       - name: Checkout code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
        with:
          ref: master
       - name: Download artifacts
         uses: actions/download-artifact@v4

       - name: Delete tag
         uses: dev-drprasad/delete-tag-and-release@v0.2.1
        with:
@@ -133,7 +163,6 @@ jobs:
          tag_name: nightly
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
       - name: Create prerelease
        run: |
          start_tag=$(gh release list -L 2 --exclude-drafts | grep -v nightly | cut -d$'\t' -f3 | sed -n '1p')
@@ -145,7 +174,6 @@ jobs:
          *-archive/*.tar.gz \
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
       - name: Delete artifacts
         uses: geekyeggo/delete-artifact@v5
        with:
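For a single matrix cell, the new cross-build step expands to something like the following (a sketch; the target and feature pair are picked from the matrix lists above purely for illustration):

```bash
cross build --release --target x86_64-unknown-linux-gnu \
  --no-default-features --features stateless,parallel \
  --workspace --exclude rln-cli
```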
.github/workflows/sync-labels.yml (vendored, 2 lines changed)

@@ -9,7 +9,7 @@ jobs:
   build:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
       - uses: micnncim/action-label-syncer@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.gitignore (vendored, 4 lines changed)

@@ -7,8 +7,8 @@ rln-cli/database

 # Generated by Cargo
 # will have compiled files and executables
 debug/
-target/
+/target
 /Cargo.lock

 # Generated by Nix
 result
Cargo.lock (generated, 2490 lines changed) — diff suppressed because it is too large.
Cargo.toml (workspace)

@@ -1,6 +1,6 @@
 [workspace]
-members = ["rln", "rln-cli", "rln-wasm", "utils"]
-default-members = ["rln", "rln-cli", "rln-wasm", "utils"]
+members = ["rln", "rln-cli", "utils"]
+exclude = ["rln-wasm"]
 resolver = "2"

 # Compilation profile for any non-workspace member.
README.md

@@ -28,9 +28,6 @@ Zerokit currently focuses on RLN (Rate-Limiting Nullifier) implementation using

 ## Build and Test

-> [!IMPORTANT]
-> For WASM support or x32 architecture builds, use version `0.6.1`. The current version has dependency issues for these platforms. WASM support will return in a future release.
-
 ### Install Dependencies

 ```bash
@@ -75,8 +72,8 @@ The execution graph file used by this code has been generated by means of the sa

 > [!IMPORTANT]
 > The circom-witnesscalc code fragments have been borrowed instead of depending on this crate,
-because its types of input and output data were incompatible with the corresponding zerokit code fragments,
-and circom-witnesscalc has some dependencies, which are redundant for our purpose.
+> because its types of input and output data were incompatible with the corresponding zerokit code fragments,
+> and circom-witnesscalc has some dependencies, which are redundant for our purpose.

 ## Documentation
flake.nix

@@ -31,9 +31,7 @@
       buildPackage = pkgs.callPackage ./nix/default.nix;
       buildRln = (buildPackage { src = self; project = "rln"; }).override;
     in rec {
-      rln = buildRln {
-        features = "arkzkey";
-      };
+      rln = buildRln

       rln-linux-arm64 = buildRln {
         target-platform = "aarch64-multiplatform";
nix/default.nix

@@ -4,7 +4,6 @@
   project,
   src ? ../.,
   release ? true,
-  features ? "arkzkey",
   target-platform ? null,
   rust-target ? null,
 }:
rln-cli/Cargo.toml

@@ -13,14 +13,13 @@ path = "src/examples/stateless.rs"
 required-features = ["stateless"]

 [dependencies]
-rln = { path = "../rln", default-features = true }
-zerokit_utils = { path = "../utils" }
-clap = { version = "4.5.38", features = ["cargo", "derive", "env"] }
-color-eyre = "0.6.4"
-serde_json = "1.0"
+rln = { path = "../rln", version = "0.8.0", default-features = false }
+zerokit_utils = { path = "../utils", version = "0.6.0", default-features = false }
+clap = { version = "4.5.41", features = ["cargo", "derive", "env"] }
+color-eyre = "0.6.5"
+serde_json = "1.0.141"
 serde = { version = "1.0", features = ["derive"] }

 [features]
-default = []
-arkzkey = ["rln/arkzkey"]
-stateless = ["rln/stateless"]
+default = ["rln/pmtree-ft", "rln/parallel"]
+stateless = ["rln/stateless", "rln/parallel"]
rln-cli/README.md

@@ -27,16 +27,6 @@ If the configuration file is empty, default settings will be used, but the tree

 We recommend using the example config, as all commands (except `new` and `create-with-params`) require an initialized RLN instance.

-## Feature Flags
-
-The CLI supports optional features. To enable the **arkzkey** feature, run:
-
-```bash
-cargo run --features arkzkey -- <SUBCOMMAND> [OPTIONS]
-```
-
 For more details, refer to the [Zerokit RLN Module](../rln/README.md) documentation.

 ## Relay Example

 The following [Example](src/examples/relay.rs) demonstrates how RLN enables spam prevention in anonymous environments for multiple users.
@@ -47,12 +37,6 @@ You can run the example using the following command:

 ```bash
 cargo run --example relay
 ```

-or with the **arkzkey** feature flag:
-
-```bash
-cargo run --example relay --features arkzkey
-```
-
 You can also change **MESSAGE_LIMIT** and **TREEE_HEIGHT** in the [relay.rs](src/examples/relay.rs) file to see how the RLN instance behaves with different parameters.

 To customize the **TREEE_HEIGHT** constant to a value other than its default of `20`, follow the [Custom Circuit Compilation](../rln/README.md#advanced-custom-circuit-compilation) instructions.
@@ -66,13 +50,7 @@ This example functions similarly to the [Relay Example](#relay-example) but uses

 You can run the example using the following command:

 ```bash
-cargo run --example stateless --features stateless
-```
-
-or with the **arkzkey** feature flag:
-
-```bash
-cargo run --example stateless --features stateless,arkzkey
+cargo run --example stateless --no-default-features --features stateless
 ```

 ## CLI Commands
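With the feature-flags section removed and the crate's default features now pulling in `rln/pmtree-ft` and `rln/parallel` (see the `rln-cli/Cargo.toml` hunk above), a plain invocation needs no feature flag. A hedged sketch, reusing the placeholder syntax from the removed section:

```bash
# <SUBCOMMAND> and [OPTIONS] are placeholders, as in the CLI Commands section
cargo run -- <SUBCOMMAND> [OPTIONS]
```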
rln-cli/src/examples/relay.rs

@@ -12,7 +12,7 @@ use rln::{
     hashers::{hash_to_field, poseidon_hash},
     protocol::{keygen, prepare_prove_input, prepare_verify_input},
     public::RLN,
-    utils::{bytes_le_to_fr, fr_to_bytes_le, generate_input_buffer},
+    utils::{fr_to_bytes_le, generate_input_buffer, IdSecret},
 };

 const MESSAGE_LIMIT: u32 = 1;
@@ -44,7 +44,7 @@ enum Commands {

 #[derive(Debug, Clone)]
 struct Identity {
-    identity_secret_hash: Fr,
+    identity_secret_hash: IdSecret,
     id_commitment: Fr,
 }

@@ -68,10 +68,7 @@ impl RLNSystem {
     fn new() -> Result<Self> {
         let mut resources: Vec<Vec<u8>> = Vec::new();
         let resources_path: PathBuf = format!("../rln/resources/tree_height_{TREEE_HEIGHT}").into();
-        #[cfg(feature = "arkzkey")]
-        let filenames = ["rln_final.arkzkey", "graph.bin"];
-        #[cfg(not(feature = "arkzkey"))]
         let filenames = ["rln_final.zkey", "graph.bin"];
         for filename in filenames {
             let fullpath = resources_path.join(Path::new(filename));
             let mut file = File::open(&fullpath)?;
@@ -103,7 +100,7 @@ impl RLNSystem {
         println!("Registered users:");
         for (index, identity) in &self.local_identities {
             println!("User Index: {index}");
-            println!("+ Identity Secret Hash: {}", identity.identity_secret_hash);
+            println!("+ Identity Secret Hash: {}", *identity.identity_secret_hash);
             println!("+ Identity Commitment: {}", identity.id_commitment);
             println!();
         }
@@ -118,7 +115,7 @@ impl RLNSystem {
         match self.rln.set_next_leaf(&mut buffer) {
             Ok(_) => {
                 println!("Registered User Index: {index}");
-                println!("+ Identity secret hash: {}", identity.identity_secret_hash);
+                println!("+ Identity secret hash: {}", *identity.identity_secret_hash);
                 println!("+ Identity commitment: {},", identity.id_commitment);
                 self.local_identities.insert(index, identity);
             }
@@ -143,7 +140,7 @@ impl RLNSystem {
         };

         let serialized = prepare_prove_input(
-            identity.identity_secret_hash,
+            identity.identity_secret_hash.clone(),
             user_index,
             Fr::from(MESSAGE_LIMIT),
             Fr::from(message_id),
@@ -211,7 +208,7 @@ impl RLNSystem {
         {
             Ok(_) => {
                 let output_data = output.into_inner();
-                let (leaked_identity_secret_hash, _) = bytes_le_to_fr(&output_data);
+                let (leaked_identity_secret_hash, _) = IdSecret::from_bytes_le(&output_data);

                 if let Some((user_index, identity)) = self
                     .local_identities
@@ -221,20 +218,21 @@ impl RLNSystem {
             })
             .map(|(index, identity)| (*index, identity))
         {
-            let real_identity_secret_hash = identity.identity_secret_hash;
+            let real_identity_secret_hash = identity.identity_secret_hash.clone();
             if leaked_identity_secret_hash != real_identity_secret_hash {
-                Err(eyre!("identity secret hash mismatch {leaked_identity_secret_hash} != {real_identity_secret_hash}"))
+                Err(eyre!("identity secret hash mismatch: leaked_identity_secret_hash != real_identity_secret_hash"))
             } else {
-                println!("DUPLICATE message ID detected! Reveal identity secret hash: {leaked_identity_secret_hash}");
+                println!(
+                    "DUPLICATE message ID detected! Reveal identity secret hash: {}",
+                    *leaked_identity_secret_hash
+                );
                 self.local_identities.remove(&user_index);
                 self.rln.delete_leaf(user_index)?;
                 println!("User index {user_index} has been SLASHED");
                 Ok(())
             }
         } else {
-            Err(eyre!(
-                "user identity secret hash {leaked_identity_secret_hash} not found"
-            ))
+            Err(eyre!("user identity secret hash ******** not found"))
         }
     }
     Err(err) => Err(eyre!("Failed to recover identity secret: {err}")),
rln-cli/src/examples/stateless.rs

@@ -1,4 +1,5 @@
+#![cfg(feature = "stateless")]

 use std::{
     collections::HashMap,
     io::{stdin, stdout, Cursor, Write},
@@ -8,13 +9,12 @@ use clap::{Parser, Subcommand};
 use color_eyre::{eyre::eyre, Result};
 use rln::{
     circuit::{Fr, TEST_TREE_HEIGHT},
-    hashers::{hash_to_field, poseidon_hash},
-    poseidon_tree::PoseidonTree,
+    hashers::{hash_to_field, poseidon_hash, PoseidonHash},
     protocol::{keygen, prepare_verify_input, rln_witness_from_values, serialize_witness},
     public::RLN,
-    utils::{bytes_le_to_fr, fr_to_bytes_le},
+    utils::{fr_to_bytes_le, IdSecret},
 };
-use zerokit_utils::ZerokitMerkleTree;
+use zerokit_utils::{OptimalMerkleTree, ZerokitMerkleProof, ZerokitMerkleTree};

 const MESSAGE_LIMIT: u32 = 1;

@@ -45,7 +45,7 @@ enum Commands {

 #[derive(Debug, Clone)]
 struct Identity {
-    identity_secret_hash: Fr,
+    identity_secret_hash: IdSecret,
     id_commitment: Fr,
 }

@@ -61,7 +61,7 @@ impl Identity {

 struct RLNSystem {
     rln: RLN,
-    tree: PoseidonTree,
+    tree: OptimalMerkleTree<PoseidonHash>,
     used_nullifiers: HashMap<[u8; 32], Vec<u8>>,
     local_identities: HashMap<usize, Identity>,
 }
@@ -70,11 +70,12 @@ impl RLNSystem {
     fn new() -> Result<Self> {
         let rln = RLN::new()?;
         let default_leaf = Fr::from(0);
-        let tree = PoseidonTree::new(
+        let tree: OptimalMerkleTree<PoseidonHash> = OptimalMerkleTree::new(
             TEST_TREE_HEIGHT,
             default_leaf,
-            ConfigOf::<PoseidonTree>::default(),
-        )?;
+            ConfigOf::<OptimalMerkleTree<PoseidonHash>>::default(),
+        )
+        .unwrap();

         Ok(RLNSystem {
             rln,
@@ -93,7 +94,7 @@ impl RLNSystem {
         println!("Registered users:");
         for (index, identity) in &self.local_identities {
             println!("User Index: {index}");
-            println!("+ Identity Secret Hash: {}", identity.identity_secret_hash);
+            println!("+ Identity Secret Hash: {}", *identity.identity_secret_hash);
             println!("+ Identity Commitment: {}", identity.id_commitment);
             println!();
         }
@@ -107,7 +108,7 @@ impl RLNSystem {
         self.tree.update_next(rate_commitment)?;

         println!("Registered User Index: {index}");
-        println!("+ Identity secret hash: {}", identity.identity_secret_hash);
+        println!("+ Identity secret hash: {}", *identity.identity_secret_hash);
         println!("+ Identity commitment: {}", identity.id_commitment);

         self.local_identities.insert(index, identity);
@@ -130,8 +131,9 @@ impl RLNSystem {
         let x = hash_to_field(signal.as_bytes());

         let rln_witness = rln_witness_from_values(
-            identity.identity_secret_hash,
-            &merkle_proof,
+            identity.identity_secret_hash.clone(),
+            merkle_proof.get_path_elements(),
+            merkle_proof.get_path_index(),
             x,
             external_nullifier,
             Fr::from(MESSAGE_LIMIT),
@@ -208,7 +210,7 @@ impl RLNSystem {
         {
             Ok(_) => {
                 let output_data = output.into_inner();
-                let (leaked_identity_secret_hash, _) = bytes_le_to_fr(&output_data);
+                let (leaked_identity_secret_hash, _) = IdSecret::from_bytes_le(&output_data);

                 if let Some((user_index, identity)) = self
                     .local_identities
@@ -218,19 +220,19 @@ impl RLNSystem {
             })
             .map(|(index, identity)| (*index, identity))
         {
-            let real_identity_secret_hash = identity.identity_secret_hash;
+            let real_identity_secret_hash = identity.identity_secret_hash.clone();
             if leaked_identity_secret_hash != real_identity_secret_hash {
-                Err(eyre!("identity secret hash mismatch {leaked_identity_secret_hash} != {real_identity_secret_hash}"))
+                Err(eyre!("identity secret hash mismatch: leaked_identity_secret_hash != real_identity_secret_hash"))
             } else {
-                println!("DUPLICATE message ID detected! Reveal identity secret hash: {leaked_identity_secret_hash}");
+                println!(
+                    "DUPLICATE message ID detected! Reveal identity secret hash: ********"
+                );
                 self.local_identities.remove(&user_index);
                 println!("User index {user_index} has been SLASHED");
                 Ok(())
             }
         } else {
-            Err(eyre!(
-                "user identity secret hash {leaked_identity_secret_hash} not found"
-            ))
+            Err(eyre!("user identity secret hash ******** not found"))
         }
     }
     Err(err) => Err(eyre!("Failed to recover identity secret: {err}")),
rln-cli/src/main.rs

@@ -51,10 +51,7 @@ fn main() -> Result<()> {
             resources_path,
         }) => {
             let mut resources: Vec<Vec<u8>> = Vec::new();
-            #[cfg(feature = "arkzkey")]
-            let filenames = ["rln_final.arkzkey", "graph.bin"];
-            #[cfg(not(feature = "arkzkey"))]
             let filenames = ["rln_final.zkey", "graph.bin"];
             for filename in filenames {
                 let fullpath = resources_path.join(Path::new(filename));
                 let mut file = File::open(&fullpath)?;
@@ -141,7 +138,7 @@ fn main() -> Result<()> {
                 .ok_or(Report::msg("no RLN instance initialized"))?
                 .prove(input_data, &mut output_buffer)?;
             let proof = output_buffer.into_inner();
-            println!("proof: {:?}", proof);
+            println!("proof: {proof:?}");
             Ok(())
         }
         Some(Commands::Verify { input }) => {
@@ -150,7 +147,7 @@ fn main() -> Result<()> {
                 .rln
                 .ok_or(Report::msg("no RLN instance initialized"))?
                 .verify(input_data)?;
-            println!("verified: {:?}", verified);
+            println!("verified: {verified:?}");
             Ok(())
         }
         Some(Commands::GenerateProof { input }) => {
@@ -161,7 +158,7 @@ fn main() -> Result<()> {
                 .ok_or(Report::msg("no RLN instance initialized"))?
                 .generate_rln_proof(input_data, &mut output_buffer)?;
             let proof = output_buffer.into_inner();
-            println!("proof: {:?}", proof);
+            println!("proof: {proof:?}");
             Ok(())
         }
         Some(Commands::VerifyWithRoots { input, roots }) => {
@@ -193,7 +190,7 @@ fn main() -> Result<()> {
             let output_buffer_inner = output_buffer.into_inner();
             let (path_elements, _) = bytes_le_to_vec_fr(&output_buffer_inner)?;
             for (index, element) in path_elements.iter().enumerate() {
-                println!("path element {}: {}", index, element);
+                println!("path element {index}: {element}");
             }
             Ok(())
         }
rln-wasm/Cargo.toml

@@ -6,17 +6,19 @@ license = "MIT or Apache2"

 [lib]
 crate-type = ["cdylib", "rlib"]
-required-features = ["stateless"]

 [dependencies]
-rln = { path = "../rln", version = "0.8.0", default-features = false }
-zerokit_utils = { path = "../utils", version = "0.6.0" }
-
+rln = { path = "../rln", version = "0.8.0", default-features = false, features = [
+    "stateless",
+] }
+zerokit_utils = { path = "../utils", version = "0.6.0", default-features = false }
+num-bigint = { version = "0.4.6", default-features = false }
 js-sys = "0.3.77"
 wasm-bindgen = "0.2.100"
 serde-wasm-bindgen = "0.6.5"
-wasm-bindgen-rayon = { version = "1.2.0", optional = true }
+wasm-bindgen-rayon = { version = "1.3.0", features = [
+    "no-bundler",
+], optional = true }

 # The `console_error_panic_xhook` crate provides better debugging of panics by
 # logging them with `console.error`. This is great for development, but requires
@@ -28,7 +30,7 @@ console_error_panic_hook = { version = "0.1.7", optional = true }
 getrandom = { version = "0.2.16", features = ["js"] }

 [dev-dependencies]
-serde_json = "1.0"
+serde_json = "1.0.141"
 wasm-bindgen-test = "0.3.50"
 wasm-bindgen-futures = "0.4.50"

@@ -38,9 +40,7 @@ features = ["Window", "Navigator"]

 [features]
 default = ["console_error_panic_hook"]
-stateless = ["rln/stateless"]
-arkzkey = ["rln/arkzkey"]
-multithread = ["wasm-bindgen-rayon"]
+parallel = ["rln/parallel", "wasm-bindgen-rayon"]

 [package.metadata.docs.rs]
-all-features = true
+all-features = true
rln-wasm/Makefile.toml

@@ -2,55 +2,20 @@
 clear = true
 dependencies = ["pack_build", "pack_rename", "pack_resize"]

-[tasks.build_arkzkey]
-clear = true
-dependencies = ["pack_build_arkzkey", "pack_rename", "pack_resize"]
-
-[tasks.build_multithread]
+[tasks.build_parallel]
 clear = true
 dependencies = [
-    "pack_build_multithread",
-    "post_build_multithread",
+    "pack_build_parallel",
+    "post_build_parallel",
     "pack_rename",
     "pack_resize",
 ]

-[tasks.build_multithread_arkzkey]
-clear = true
-dependencies = [
-    "pack_build_multithread_arkzkey",
-    "post_build_multithread",
-    "pack_rename",
-    "pack_resize",
-]
-
 [tasks.pack_build]
 command = "wasm-pack"
-args = [
-    "build",
-    "--release",
-    "--target",
-    "web",
-    "--scope",
-    "waku",
-    "--features",
-    "stateless",
-]
+args = ["build", "--release", "--target", "web", "--scope", "waku"]

-[tasks.pack_build_arkzkey]
-command = "wasm-pack"
-args = [
-    "build",
-    "--release",
-    "--target",
-    "web",
-    "--scope",
-    "waku",
-    "--features",
-    "stateless,arkzkey",
-]
-
-[tasks.pack_build_multithread]
+[tasks.pack_build_parallel]
 command = "env"
 args = [
     "RUSTFLAGS=-C target-feature=+atomics,+bulk-memory,+mutable-globals",
@@ -65,36 +30,14 @@ args = [
     "--scope",
     "waku",
     "--features",
-    "stateless,multithread",
+    "parallel",
     "-Z",
     "build-std=panic_abort,std",
 ]

-[tasks.pack_build_multithread_arkzkey]
-command = "env"
-args = [
-    "RUSTFLAGS=-C target-feature=+atomics,+bulk-memory,+mutable-globals",
-    "rustup",
-    "run",
-    "nightly",
-    "wasm-pack",
-    "build",
-    "--release",
-    "--target",
-    "web",
-    "--scope",
-    "waku",
-    "--features",
-    "stateless,multithread,arkzkey",
-    "-Z",
-    "build-std=panic_abort,std",
-]
-
-[tasks.post_build_multithread]
+[tasks.post_build_parallel]
 script = '''
-wasm-bindgen --target web --split-linked-modules --out-dir ./pkg ../target/wasm32-unknown-unknown/release/rln_wasm.wasm && \
-find ./pkg/snippets -name "workerHelpers.worker.js" -exec sed -i.bak 's|from '\''\.\.\/\.\.\/\.\.\/'\'';|from "../../../rln_wasm.js";|g' {} \; -exec rm -f {}.bak \; && \
-find ./pkg/snippets -name "workerHelpers.worker.js" -exec sed -i.bak 's|await initWbg(module, memory);|await initWbg({ module, memory });|g' {} \; -exec rm -f {}.bak \;
+wasm-bindgen --target web --split-linked-modules --out-dir ./pkg ./target/wasm32-unknown-unknown/release/rln_wasm.wasm
 '''

 [tasks.pack_rename]
@@ -121,65 +64,26 @@ args = [
     "--node",
     "--target",
     "wasm32-unknown-unknown",
-    "--features",
-    "stateless",
     "--",
     "--nocapture",
 ]
 dependencies = ["build"]

-[tasks.test_arkzkey]
-command = "wasm-pack"
-args = [
-    "test",
-    "--release",
-    "--node",
-    "--target",
-    "wasm32-unknown-unknown",
-    "--features",
-    "stateless,arkzkey",
-    "--",
-    "--nocapture",
-]
-dependencies = ["build_arkzkey"]
-
 [tasks.test_browser]
 command = "wasm-pack"
 args = [
     "test",
     "--release",
     "--chrome",
     # "--firefox",
     # "--safari",
     "--headless",
     "--target",
     "wasm32-unknown-unknown",
-    "--features",
-    "stateless",
     "--",
     "--nocapture",
 ]
 dependencies = ["build"]

-[tasks.test_browser_arkzkey]
-command = "wasm-pack"
-args = [
-    "test",
-    "--release",
-    "--chrome",
-    # "--firefox",
-    # "--safari",
-    "--headless",
-    "--target",
-    "wasm32-unknown-unknown",
-    "--features",
-    "stateless,arkzkey",
-    "--",
-    "--nocapture",
-]
-dependencies = ["build_arkzkey"]
-
-[tasks.test_multithread]
+[tasks.test_parallel]
 command = "env"
 args = [
     "RUSTFLAGS=-C target-feature=+atomics,+bulk-memory,+mutable-globals",
@@ -190,44 +94,17 @@ args = [
     "test",
     "--release",
     "--chrome",
     # "--firefox",
     # "--safari",
     "--headless",
     "--target",
     "wasm32-unknown-unknown",
     "--features",
-    "stateless,multithread",
+    "parallel",
     "-Z",
     "build-std=panic_abort,std",
     "--",
     "--nocapture",
 ]
-dependencies = ["build_multithread"]
-
-[tasks.test_multithread_arkzkey]
-command = "env"
-args = [
-    "RUSTFLAGS=-C target-feature=+atomics,+bulk-memory,+mutable-globals",
-    "rustup",
-    "run",
-    "nightly",
-    "wasm-pack",
-    "test",
-    "--release",
-    "--chrome",
-    # "--firefox",
-    # "--safari",
-    "--headless",
-    "--target",
-    "wasm32-unknown-unknown",
-    "--features",
-    "stateless,multithread,arkzkey",
-    "-Z",
-    "build-std=panic_abort,std",
-    "--",
-    "--nocapture",
-]
-dependencies = ["build_multithread_arkzkey"]
+dependencies = ["build_parallel"]

 [tasks.bench]
 disabled = true
rln-wasm/README.md

@@ -72,29 +72,16 @@ Compile zerokit for `wasm32-unknown-unknown`:
 cargo make build
 ```

-Or compile with the **arkzkey** feature enabled
-
-```bash
-cargo make build_arkzkey
-```
-
 ## Running tests and benchmarks

 ```bash
 cargo make test
 ```

-Or test with the **arkzkey** feature enabled
-
-```bash
-cargo make test_arkzkey
-```
-
 If you want to run the tests in browser headless mode, you can use the following command:

 ```bash
 cargo make test_browser
-cargo make test_browser_arkzkey
 ```
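Under the hood, `cargo make test_browser` drives wasm-pack's headless Chrome runner, per the `test_browser` task in the Makefile.toml hunks above. Roughly (a sketch; the exact feature flags may differ from the task definition):

```bash
wasm-pack test --release --chrome --headless \
  --target wasm32-unknown-unknown -- --nocapture
```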

 ## Parallel computation

@@ -125,13 +112,7 @@ cargo install wasm-bindgen-cli --version=0.2.100
 To enable parallel computation for WebAssembly threads, you can use the following command:

 ```bash
-cargo make build_multithread
-```
-
-Or with the **arkzkey** feature enabled:
-
-```bash
-cargo make build_multithread_arkzkey
+cargo make build_parallel
 ```
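Per the `pack_build_parallel` and `post_build_parallel` tasks shown earlier (and the matching nightly-release step), `cargo make build_parallel` amounts to roughly the following (a sketch, not the authoritative task definition):

```bash
# nightly + build-std is needed for atomics/shared memory in wasm
RUSTFLAGS="-C target-feature=+atomics,+bulk-memory,+mutable-globals" \
  rustup run nightly wasm-pack build --release --target web --scope waku \
  --features parallel -Z build-std=panic_abort,std
# re-run wasm-bindgen to split the worker helper modules
wasm-bindgen --target web --split-linked-modules --out-dir ./pkg \
  ./target/wasm32-unknown-unknown/release/rln_wasm.wasm
```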

 ### WebAssembly Threading Support

@@ -150,7 +131,7 @@ If you're targeting [older browser versions that didn't support WebAssembly thre
 you'll likely want to create two builds - one with thread support and one without -
 and use feature detection to choose the right one on the JavaScript side.

-You can use [wasm-feature-detect](https://github.com/GoogleChromeLabs/wasm-feature-detect)library for this purpose.
+You can use [wasm-feature-detect](https://github.com/GoogleChromeLabs/wasm-feature-detect) library for this purpose.
 For example, your code might look like this:

 ```js
rln-wasm/benches/calculated_witness (new file, 1 line) — diff suppressed because one or more lines are too long.
rln-wasm/benches/index.html (new file, 561 lines)

@@ -0,0 +1,561 @@
<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>RLN WASM Benchmark</title>
    <style>
      body {
        font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto,
          sans-serif;
        max-width: 700px;
        margin: 0 auto;
        padding: 20px;
        color: #333;
      }

      h1 {
        text-align: center;
        margin-bottom: 30px;
      }

      .panel {
        background: #f8f9fa;
        border-radius: 8px;
        padding: 20px;
        margin-bottom: 20px;
        box-shadow: 0 2px 4px rgba(0, 0, 0, 0.05);
      }

      .file-input {
        margin-bottom: 15px;
      }

      label {
        display: block;
        margin-bottom: 5px;
        font-weight: 500;
      }

      button {
        background: #4361ee;
        color: white;
        border: none;
        padding: 10px 20px;
        border-radius: 4px;
        font-size: 16px;
        cursor: pointer;
        margin-top: 10px;
        width: 100%;
      }

      button:hover {
        background: #3a56d4;
      }

      button:disabled {
        background: #cccccc;
        cursor: not-allowed;
      }

      .results-table {
        width: 100%;
        border-collapse: collapse;
        margin-top: 10px;
      }

      .results-table th,
      .results-table td {
        padding: 12px;
        text-align: left;
        border-bottom: 1px solid #ddd;
      }

      .results-table th {
        font-weight: 600;
        background-color: #f1f3f5;
      }

      .operation {
        font-weight: 500;
      }

      .time {
        font-family: monospace;
        text-align: right;
      }

      .status {
        padding: 10px;
        margin-top: 15px;
        border-radius: 4px;
        text-align: center;
      }

      .success {
        background-color: #d4edda;
        color: #155724;
      }

      .error {
        background-color: #f8d7da;
        color: #721c24;
      }

      .running {
        background-color: #cce5ff;
        color: #004085;
      }

      /* Download button style */
      .download-btn {
        background: #28a745;
        margin-top: 15px;
      }

      .download-btn:hover {
        background: #218838;
      }

      /* Summary section */
      .summary {
        margin-top: 15px;
        padding-top: 15px;
        border-top: 1px solid #ddd;
      }

      .summary h3 {
        margin-top: 0;
      }

      /* Thread info */
      .thread-info {
        margin-bottom: 15px;
        padding: 10px;
        background-color: #f1f3f5;
        border-radius: 4px;
        font-weight: 500;
      }
    </style>
    <script>
      // Check if cross-origin isolation is available for SharedArrayBuffer
      document.addEventListener("DOMContentLoaded", () => {
        if (!crossOriginIsolated) {
          const statusElement = document.getElementById("status");
          if (statusElement) {
            statusElement.innerHTML =
              '<strong style="color: #721c24;">Error:</strong> ' +
              "Cross-Origin Isolation is not enabled. Please run the server.js script with:<br>" +
              "<code>node server.js</code><br>" +
              'Then access this page via <a href="http://localhost:8000">http://localhost:8000</a>';
            statusElement.className = "status error";
          }
        }
      });
    </script>
  </head>

  <body>
    <h1>RLN WASM Benchmark</h1>

    <div class="panel">
      <div class="thread-info" id="threadInfo">Detecting CPU cores...</div>

      <div
        class="thread-mode-selector"
        style="display: flex; gap: 10px; margin-bottom: 15px"
      >
        <button id="singleThreaded" class="thread-btn" style="flex: 1">
          Single-Threaded Mode
        </button>
        <button
          id="multiThreaded"
          class="thread-btn"
          style="flex: 1; background: #4361ee"
        >
          Multi-Threaded Mode
        </button>
      </div>

      <button id="initThreads" class="init-btn">Initialize Thread Pool</button>

      <div class="file-input">
        <label for="zkeyFile">zKey File:</label>
        <input type="file" id="zkeyFile" />
      </div>
      <div class="file-input">
        <label for="rootFile">Root File:</label>
        <input type="file" id="rootFile" />
      </div>
      <div class="file-input">
        <label for="witnessFile">Witness File:</label>
        <input type="file" id="witnessFile" />
      </div>
      <div class="file-input">
        <label for="messageFile">Message File:</label>
        <input type="file" id="messageFile" />
      </div>
      <div class="file-input">
        <label for="proofFile">Proof File:</label>
        <input type="file" id="proofFile" />
      </div>

      <button id="runBenchmark" disabled>Run Benchmark</button>

      <div id="status" class="status">Please initialize thread pool first</div>
    </div>

    <div class="panel">
      <h2>Results</h2>
      <table class="results-table">
        <thead>
          <tr>
            <th>Operation</th>
            <th>Time (ms)</th>
          </tr>
        </thead>
        <tbody id="results">
          <!-- Results will be populated here -->
        </tbody>
      </table>

      <div id="summarySection" class="summary" style="display: none">
        <h3>Summary</h3>
        <div id="summaryContent"></div>
        <button id="downloadResults" class="download-btn">
          Download Results
        </button>
      </div>
    </div>

    <script type="module">
      import init, * as RLN from "../pkg/rln_wasm.js";

      // Get DOM elements
      const initThreadsBtn = document.getElementById("initThreads");
      const runBtn = document.getElementById("runBenchmark");
      const results = document.getElementById("results");
      const status = document.getElementById("status");
      const summarySection = document.getElementById("summarySection");
      const summaryContent = document.getElementById("summaryContent");
      const downloadBtn = document.getElementById("downloadResults");
      const threadInfo = document.getElementById("threadInfo");

      // Track benchmark operations
      const benchmarks = [];
      let threadPoolInitialized = false;
      let cpuCount = navigator.hardwareConcurrency || 4; // Default to 4 if detection fails
      let useMultiThreaded = true; // Default to multi-threaded mode

      // Update the thread info display
      updateThreadInfo();

      // Function to update thread info display
      function updateThreadInfo() {
        if (useMultiThreaded) {
          threadInfo.textContent = `Automatically detected ${cpuCount} CPU cores for optimal performance`;
        } else {
          threadInfo.textContent = `Using single-threaded mode (1 CPU core)`;
        }
      }

      // Thread mode selection
      document
        .getElementById("singleThreaded")
        .addEventListener("click", () => {
          if (!threadPoolInitialized) {
            useMultiThreaded = false;
            document.getElementById("singleThreaded").style.background =
              "#4361ee";
            document.getElementById("multiThreaded").style.background =
              "#6c757d";
            updateThreadInfo();
          } else {
            updateStatus(
              "Thread pool already initialized. Please refresh page to change mode.",
              "error"
            );
          }
        });

      document.getElementById("multiThreaded").addEventListener("click", () => {
        if (!threadPoolInitialized) {
          useMultiThreaded = true;
          document.getElementById("multiThreaded").style.background = "#4361ee";
          document.getElementById("singleThreaded").style.background =
            "#6c757d";
          updateThreadInfo();
        } else {
          updateStatus(
            "Thread pool already initialized. Please refresh page to change mode.",
            "error"
          );
        }
      });

      // Initialize thread pool
      initThreadsBtn.addEventListener("click", async () => {
        if (!crossOriginIsolated) {
          updateStatus(
            "Cross-Origin Isolation is required. Please run with server.js",
            "error"
          );
          return;
        }

        if (threadPoolInitialized) {
          updateStatus("Thread pool already initialized.", "error");
          return;
        }

        updateStatus(
          `Initializing thread pool with ${cpuCount} cores...`,
          "running"
        );
        initThreadsBtn.disabled = true;

        try {
          const start = performance.now();
          await init();
          await RLN.initThreadPool(useMultiThreaded ? cpuCount : 1);
          const duration = performance.now() - start;

          benchmarks.push({
            name: "Initialize WASM Module",
            duration: duration,
            success: true,
          });

          updateResults();
          threadPoolInitialized = true;
          updateStatus(
            `Thread pool initialized with ${
              useMultiThreaded ? cpuCount : 1
            } cores`,
            "success"
          );
          runBtn.disabled = false;
        } catch (error) {
          console.error("Thread pool initialization error:", error);
          updateStatus(
            `Error initializing thread pool: ${error.message}`,
            "error"
          );
        } finally {
          initThreadsBtn.disabled = false;
        }
      });

      // Measure operation time
      async function benchmark(name, fn) {
        updateStatus(`Running: ${name}...`, "running");

        const start = performance.now();
        try {
          const result = await fn();
          const duration = performance.now() - start;

          // Record result
          benchmarks.push({ name, duration, success: true });
          updateResults();

          return { result, duration };
        } catch (error) {
          const duration = performance.now() - start;
          benchmarks.push({
            name: `${name} (FAILED)`,
            duration,
            success: false,
            error: error.message,
          });
          updateResults();
          throw error;
        }
      }

      // Update results table
      function updateResults() {
        results.innerHTML = "";
        benchmarks.forEach((b) => {
          const row = document.createElement("tr");

          const nameCell = document.createElement("td");
          nameCell.className = "operation";
          nameCell.textContent = b.name;

          const timeCell = document.createElement("td");
          timeCell.className = "time";
          timeCell.textContent = b.duration.toFixed(2);

          row.appendChild(nameCell);
          row.appendChild(timeCell);

          if (!b.success) {
            row.style.color = "#dc3545";
          }

          results.appendChild(row);
        });
      }

      // Update status message
      function updateStatus(message, type = "") {
        status.textContent = message;
        status.className = `status ${type}`;
      }

      // Show benchmark summary
      function showSummary() {
        if (benchmarks.length === 0) return;

        const successfulOps = benchmarks.filter((b) => b.success).length;

        let summaryHTML = `
          <p><strong>Operations:</strong> ${successfulOps}/${
          benchmarks.length
        } successful</p>
          <p><strong>CPU cores used:</strong> ${
            useMultiThreaded ? cpuCount : 1
          }</p>
          <p><strong>Mode:</strong> ${
            useMultiThreaded ? "Multi-threaded" : "Single-threaded"
          }</p>
        `;

        summaryContent.innerHTML = summaryHTML;
        summarySection.style.display = "block";
      }

      // Download results as JSON
      downloadBtn.addEventListener("click", () => {
        const dataStr = JSON.stringify(
          {
            timestamp: new Date().toISOString(),
            mode: useMultiThreaded ? "multi-threaded" : "single-threaded",
            cpuCount: useMultiThreaded ? cpuCount : 1,
            operations: benchmarks,
          },
          null,
          2
        );

        const blob = new Blob([dataStr], { type: "application/json" });
        const url = URL.createObjectURL(blob);

        const a = document.createElement("a");
        a.href = url;
        a.download = `rln-benchmark-${new Date()
          .toISOString()
          .slice(0, 19)}.json`;
        document.body.appendChild(a);
        a.click();
        document.body.removeChild(a);
        URL.revokeObjectURL(url);
      });

      // Read file as Uint8Array
      async function readFile(file) {
        const buffer = await file.arrayBuffer();
        return new Uint8Array(buffer);
      }

      // Parse witness JSON
      async function parseWitness(file) {
        const text = await file.text();
        try {
          const data = JSON.parse(text);

          if (!Array.isArray(data)) {
            throw new Error("Witness JSON must be an array");
          }

          return data.map((value) => BigInt(value));
        } catch (e) {
          throw new Error(`Failed to parse witness JSON: ${e.message}`);
        }
      }

      // Main benchmark runner
      runBtn.addEventListener("click", async () => {
        if (!threadPoolInitialized) {
          updateStatus("Please initialize thread pool first", "error");
          return;
        }

        const files = {
          zkey: document.getElementById("zkeyFile").files[0],
          root: document.getElementById("rootFile").files[0],
          witness: document.getElementById("witnessFile").files[0],
          message: document.getElementById("messageFile").files[0],
          proof: document.getElementById("proofFile").files[0],
        };

        // Validation
        if (!files.zkey || !files.root || !files.proof) {
          updateStatus("Please select zKey, Root, and Proof files", "error");
          return;
        }

        const canGenerate = files.witness && files.message;

        try {
          // Keep file references but clear previous benchmark results except initialization
          const initBenchmark = benchmarks.find(
            (b) => b.name === "Initialize WASM Module"
          );
          benchmarks.length = 0;
          if (initBenchmark) {
            benchmarks.push(initBenchmark);
          }
          updateResults();
          summarySection.style.display = "none";
          runBtn.disabled = true;
          initThreadsBtn.disabled = true;

          // Load files
          const zkeyData = await readFile(files.zkey);
          const rootData = await readFile(files.root);

          // Create RLN instance
          const { result: instance } = await benchmark(
            "Create RLN Instance",
            async () => {
              return RLN.newRLN(zkeyData);
            }
          );

          // Handle proof generation (if witness and message files provided)
          if (canGenerate) {
            const witnessData = await parseWitness(files.witness);
            const messageData = await readFile(files.message);

            await benchmark("Generate RLN Proof", async () => {
              return RLN.generateRLNProofWithWitness(
                instance,
                witnessData,
                messageData
              );
            });
          }

          // Verify uploaded proof (required)
          const proofData = await readFile(files.proof);

          await benchmark("Verify Proof", async () => {
            return RLN.verifyWithRoots(instance, proofData, rootData);
          });

          updateStatus("Benchmark completed successfully!", "success");
          showSummary();
        } catch (error) {
          console.error(error);
          updateStatus(`Error: ${error.message}`, "error");
          showSummary();
        } finally {
          runBtn.disabled = false;
          initThreadsBtn.disabled = false;
        }
      });
    </script>
  </body>
</html>
BIN rln-wasm/benches/proof_with_signal Normal file
Binary file not shown.
BIN rln-wasm/benches/rln_final.arkzkey Normal file
Binary file not shown.
BIN rln-wasm/benches/root Normal file
Binary file not shown.
BIN rln-wasm/benches/serialized_message Normal file
Binary file not shown.
83 rln-wasm/benches/server.js Normal file
@@ -0,0 +1,83 @@
const http = require("http");
const fs = require("fs");
const path = require("path");
const url = require("url");

const PORT = 8000;

// MIME type mapping
const MIME_TYPES = {
  ".html": "text/html",
  ".js": "text/javascript",
  ".css": "text/css",
  ".json": "application/json",
  ".wasm": "application/wasm",
  ".png": "image/png",
  ".jpg": "image/jpeg",
  ".gif": "image/gif",
  ".svg": "image/svg+xml",
};

// Create HTTP server
const server = http.createServer((req, res) => {
  // Set COOP and COEP headers for SharedArrayBuffer support
  res.setHeader("Cross-Origin-Opener-Policy", "same-origin");
  res.setHeader("Cross-Origin-Embedder-Policy", "require-corp");

  // Parse URL
  const parsedUrl = url.parse(req.url);
  let requestPath = parsedUrl.pathname;

  // Ignore favicon
  if (requestPath === "/favicon.ico") {
    res.writeHead(204);
    res.end();
    return;
  }

  // Handle root path
  let filePath = "." + requestPath;
  if (filePath === "./") {
    filePath = "./index.html";
  }

  // Handle pkg files (including snippets)
  if (requestPath.startsWith("/pkg/")) {
    filePath = ".." + requestPath;
  }

  // Determine content type based on file extension
  const extname = path.extname(filePath);
  const contentType = MIME_TYPES[extname] || "application/octet-stream";

  // ❗ Block directory reads
  if (fs.existsSync(filePath) && fs.statSync(filePath).isDirectory()) {
    console.error(`Attempted directory read: ${filePath}`);
    res.writeHead(403);
    res.end("Forbidden: Cannot read directory directly");
    return;
  }

  // Read and serve the file
  fs.readFile(filePath, (error, content) => {
    if (error) {
      if (error.code === "ENOENT") {
        console.error(`File not found: ${filePath}`);
        res.writeHead(404);
        res.end(`File not found: ${requestPath}`);
      } else {
        console.error(`Server error (${error.code}): ${filePath}`);
        res.writeHead(500);
        res.end(`Server Error: ${error.code}`);
      }
    } else {
      res.writeHead(200, { "Content-Type": contentType });
      res.end(content, "utf-8");
    }
  });
});

// Start the server
server.listen(PORT, () => {
  console.log(`Server running at http://localhost:${PORT}`);
});
@@ -1,5 +1,8 @@
// Node.js module compatible witness calculator
module.exports = async function builder(code, options) {
// File generated with https://github.com/iden3/circom
// following the instructions from:
// https://github.com/vacp2p/zerokit/tree/master/rln#advanced-custom-circuit-compilation

export async function builder(code, options) {
options = options || {};

let wasmModule;
@@ -102,7 +105,7 @@ module.exports = async function builder(code, options) {
// Then append the value to the message we are creating
msgStr += fromArray32(arr).toString();
}
};
}

class WitnessCalculator {
constructor(instance, sanityCheck) {
@@ -1,335 +0,0 @@
// Browser compatible witness calculator
(function (global) {
  async function builder(code, options) {
    options = options || {};

    let wasmModule;
    try {
      wasmModule = await WebAssembly.compile(code);
    } catch (err) {
      console.log(err);
      console.log(
        "\nTry to run circom --c in order to generate c++ code instead\n"
      );
      throw new Error(err);
    }

    let wc;

    let errStr = "";
    let msgStr = "";

    const instance = await WebAssembly.instantiate(wasmModule, {
      runtime: {
        exceptionHandler: function (code) {
          let err;
          if (code == 1) {
            err = "Signal not found.\n";
          } else if (code == 2) {
            err = "Too many signals set.\n";
          } else if (code == 3) {
            err = "Signal already set.\n";
          } else if (code == 4) {
            err = "Assert Failed.\n";
          } else if (code == 5) {
            err = "Not enough memory.\n";
          } else if (code == 6) {
            err = "Input signal array access exceeds the size.\n";
          } else {
            err = "Unknown error.\n";
          }
          throw new Error(err + errStr);
        },
        printErrorMessage: function () {
          errStr += getMessage() + "\n";
          // console.error(getMessage());
        },
        writeBufferMessage: function () {
          const msg = getMessage();
          // Any calls to `log()` will always end with a `\n`, so that's when we print and reset
          if (msg === "\n") {
            console.log(msgStr);
            msgStr = "";
          } else {
            // If we've buffered other content, put a space in between the items
            if (msgStr !== "") {
              msgStr += " ";
            }
            // Then append the message to the message we are creating
            msgStr += msg;
          }
        },
        showSharedRWMemory: function () {
          printSharedRWMemory();
        },
      },
    });

    const sanityCheck = options;
    // options &&
    // (
    // options.sanityCheck ||
    // options.logGetSignal ||
    // options.logSetSignal ||
    // options.logStartComponent ||
    // options.logFinishComponent
    // );

    wc = new WitnessCalculator(instance, sanityCheck);
    return wc;

    function getMessage() {
      var message = "";
      var c = instance.exports.getMessageChar();
      while (c != 0) {
        message += String.fromCharCode(c);
        c = instance.exports.getMessageChar();
      }
      return message;
    }

    function printSharedRWMemory() {
      const shared_rw_memory_size = instance.exports.getFieldNumLen32();
      const arr = new Uint32Array(shared_rw_memory_size);
      for (let j = 0; j < shared_rw_memory_size; j++) {
        arr[shared_rw_memory_size - 1 - j] =
          instance.exports.readSharedRWMemory(j);
      }

      // If we've buffered other content, put a space in between the items
      if (msgStr !== "") {
        msgStr += " ";
      }
      // Then append the value to the message we are creating
      msgStr += fromArray32(arr).toString();
    }
  }

  class WitnessCalculator {
    constructor(instance, sanityCheck) {
      this.instance = instance;

      this.version = this.instance.exports.getVersion();
      this.n32 = this.instance.exports.getFieldNumLen32();

      this.instance.exports.getRawPrime();
      const arr = new Uint32Array(this.n32);
      for (let i = 0; i < this.n32; i++) {
        arr[this.n32 - 1 - i] = this.instance.exports.readSharedRWMemory(i);
      }
      this.prime = fromArray32(arr);

      this.witnessSize = this.instance.exports.getWitnessSize();

      this.sanityCheck = sanityCheck;
    }

    circom_version() {
      return this.instance.exports.getVersion();
    }

    async _doCalculateWitness(input, sanityCheck) {
      //input is assumed to be a map from signals to arrays of bigints
      this.instance.exports.init(this.sanityCheck || sanityCheck ? 1 : 0);
      const keys = Object.keys(input);
      var input_counter = 0;
      keys.forEach((k) => {
        const h = fnvHash(k);
        const hMSB = parseInt(h.slice(0, 8), 16);
        const hLSB = parseInt(h.slice(8, 16), 16);
        const fArr = flatArray(input[k]);
        let signalSize = this.instance.exports.getInputSignalSize(hMSB, hLSB);
        if (signalSize < 0) {
          throw new Error(`Signal ${k} not found\n`);
        }
        if (fArr.length < signalSize) {
          throw new Error(`Not enough values for input signal ${k}\n`);
        }
        if (fArr.length > signalSize) {
          throw new Error(`Too many values for input signal ${k}\n`);
        }
        for (let i = 0; i < fArr.length; i++) {
          const arrFr = toArray32(BigInt(fArr[i]) % this.prime, this.n32);
          for (let j = 0; j < this.n32; j++) {
            this.instance.exports.writeSharedRWMemory(
              j,
              arrFr[this.n32 - 1 - j]
            );
          }
          try {
            this.instance.exports.setInputSignal(hMSB, hLSB, i);
            input_counter++;
          } catch (err) {
            // console.log(`After adding signal ${i} of ${k}`)
            throw new Error(err);
          }
        }
      });
      if (input_counter < this.instance.exports.getInputSize()) {
        throw new Error(
          `Not all inputs have been set. Only ${input_counter} out of ${this.instance.exports.getInputSize()}`
        );
      }
    }

    async calculateWitness(input, sanityCheck) {
      const w = [];

      await this._doCalculateWitness(input, sanityCheck);

      for (let i = 0; i < this.witnessSize; i++) {
        this.instance.exports.getWitness(i);
        const arr = new Uint32Array(this.n32);
        for (let j = 0; j < this.n32; j++) {
          arr[this.n32 - 1 - j] = this.instance.exports.readSharedRWMemory(j);
        }
        w.push(fromArray32(arr));
      }

      return w;
    }

    async calculateBinWitness(input, sanityCheck) {
      const buff32 = new Uint32Array(this.witnessSize * this.n32);
      const buff = new Uint8Array(buff32.buffer);
      await this._doCalculateWitness(input, sanityCheck);

      for (let i = 0; i < this.witnessSize; i++) {
        this.instance.exports.getWitness(i);
        const pos = i * this.n32;
        for (let j = 0; j < this.n32; j++) {
          buff32[pos + j] = this.instance.exports.readSharedRWMemory(j);
        }
      }

      return buff;
    }

    async calculateWTNSBin(input, sanityCheck) {
      const buff32 = new Uint32Array(
        this.witnessSize * this.n32 + this.n32 + 11
      );
      const buff = new Uint8Array(buff32.buffer);
      await this._doCalculateWitness(input, sanityCheck);

      //"wtns"
      buff[0] = "w".charCodeAt(0);
      buff[1] = "t".charCodeAt(0);
      buff[2] = "n".charCodeAt(0);
      buff[3] = "s".charCodeAt(0);

      //version 2
      buff32[1] = 2;

      //number of sections: 2
      buff32[2] = 2;

      //id section 1
      buff32[3] = 1;

      const n8 = this.n32 * 4;
      //id section 1 length in 64bytes
      const idSection1length = 8 + n8;
      const idSection1lengthHex = idSection1length.toString(16);
      buff32[4] = parseInt(idSection1lengthHex.slice(0, 8), 16);
      buff32[5] = parseInt(idSection1lengthHex.slice(8, 16), 16);

      //this.n32
      buff32[6] = n8;

      //prime number
      this.instance.exports.getRawPrime();

      var pos = 7;
      for (let j = 0; j < this.n32; j++) {
        buff32[pos + j] = this.instance.exports.readSharedRWMemory(j);
      }
      pos += this.n32;

      // witness size
      buff32[pos] = this.witnessSize;
      pos++;

      //id section 2
      buff32[pos] = 2;
      pos++;

      // section 2 length
      const idSection2length = n8 * this.witnessSize;
      const idSection2lengthHex = idSection2length.toString(16);
      buff32[pos] = parseInt(idSection2lengthHex.slice(0, 8), 16);
      buff32[pos + 1] = parseInt(idSection2lengthHex.slice(8, 16), 16);

      pos += 2;
      for (let i = 0; i < this.witnessSize; i++) {
        this.instance.exports.getWitness(i);
        for (let j = 0; j < this.n32; j++) {
          buff32[pos + j] = this.instance.exports.readSharedRWMemory(j);
        }
        pos += this.n32;
      }

      return buff;
    }
  }

  function toArray32(rem, size) {
    const res = []; //new Uint32Array(size); //has no unshift
    const radix = BigInt(0x100000000);
    while (rem) {
      res.unshift(Number(rem % radix));
      rem = rem / radix;
    }
    if (size) {
      var i = size - res.length;
      while (i > 0) {
        res.unshift(0);
        i--;
      }
    }
    return res;
  }

  function fromArray32(arr) {
    //returns a BigInt
    var res = BigInt(0);
    const radix = BigInt(0x100000000);
    for (let i = 0; i < arr.length; i++) {
      res = res * radix + BigInt(arr[i]);
    }
    return res;
  }

  function flatArray(a) {
    var res = [];
    fillArray(res, a);
    return res;

    function fillArray(res, a) {
      if (Array.isArray(a)) {
        for (let i = 0; i < a.length; i++) {
          fillArray(res, a[i]);
        }
      } else {
        res.push(a);
      }
    }
  }

  function fnvHash(str) {
    const uint64_max = BigInt(2) ** BigInt(64);
    let hash = BigInt("0xCBF29CE484222325");
    for (var i = 0; i < str.length; i++) {
      hash ^= BigInt(str[i].charCodeAt());
      hash *= BigInt(0x100000001b3);
      hash %= uint64_max;
    }
    let shash = hash.toString(16);
    let n = 16 - shash.length;
    shash = "0".repeat(n).concat(shash);
    return shash;
  }

  // Make it globally available
  global.witnessCalculatorBuilder = builder;
})(typeof self !== "undefined" ? self : window);
@@ -6,7 +6,7 @@ use rln::public::{hash, poseidon_hash, RLN};
use std::vec::Vec;
use wasm_bindgen::prelude::*;

#[cfg(feature = "multithread")]
#[cfg(feature = "parallel")]
pub use wasm_bindgen_rayon::init_thread_pool;

#[wasm_bindgen(js_name = initPanicHook)]

@@ -4,19 +4,20 @@
mod tests {
use js_sys::{BigInt as JsBigInt, Date, Object, Uint8Array};
use rln::circuit::{Fr, TEST_TREE_HEIGHT};
use rln::hashers::{hash_to_field, poseidon_hash};
use rln::poseidon_tree::PoseidonTree;
use rln::hashers::{hash_to_field, poseidon_hash, PoseidonHash};
use rln::protocol::{prepare_verify_input, rln_witness_from_values, serialize_witness};
use rln::utils::{bytes_le_to_fr, fr_to_bytes_le};
use rln::utils::{bytes_le_to_fr, fr_to_bytes_le, IdSecret};
use rln_wasm::{
wasm_generate_rln_proof_with_witness, wasm_key_gen, wasm_new, wasm_rln_witness_to_json,
wasm_verify_with_roots,
};
use wasm_bindgen::{prelude::wasm_bindgen, JsValue};
use wasm_bindgen_test::{console_log, wasm_bindgen_test, wasm_bindgen_test_configure};
use zerokit_utils::merkle_tree::merkle_tree::ZerokitMerkleTree;
use zerokit_utils::{
OptimalMerkleProof, OptimalMerkleTree, ZerokitMerkleProof, ZerokitMerkleTree,
};

#[cfg(feature = "multithread")]
#[cfg(feature = "parallel")]
use {rln_wasm::init_thread_pool, wasm_bindgen_futures::JsFuture, web_sys::window};

#[wasm_bindgen(inline_js = r#"
@@ -28,7 +29,15 @@ mod tests {
}

export function initWitnessCalculator(jsCode) {
eval(jsCode);
const processedCode = jsCode
.replace(/export\s+async\s+function\s+builder/, 'async function builder')
.replace(/export\s*\{\s*builder\s*\};?/g, '');

const moduleFunc = new Function(processedCode + '\nreturn { builder };');
const witnessCalculatorModule = moduleFunc();

window.witnessCalculatorBuilder = witnessCalculatorModule.builder;

if (typeof window.witnessCalculatorBuilder !== 'function') {
return false;
}
@@ -62,13 +71,10 @@ mod tests {
async fn calculateWitness(circom_data: &[u8], inputs: Object) -> Result<JsValue, JsValue>;
}

const WITNESS_CALCULATOR_JS: &str = include_str!("../resources/witness_calculator_browser.js");
const WITNESS_CALCULATOR_JS: &str = include_str!("../resources/witness_calculator.js");

#[cfg(feature = "arkzkey")]
const ZKEY_BYTES: &[u8] =
const ARKZKEY_BYTES: &[u8] =
include_bytes!("../../rln/resources/tree_height_20/rln_final.arkzkey");
#[cfg(not(feature = "arkzkey"))]
const ZKEY_BYTES: &[u8] = include_bytes!("../../rln/resources/tree_height_20/rln_final.zkey");

const CIRCOM_BYTES: &[u8] = include_bytes!("../../rln/resources/tree_height_20/rln.wasm");

@@ -77,7 +83,7 @@ mod tests {
#[wasm_bindgen_test]
pub async fn rln_wasm_benchmark() {
// Check if thread pool is supported
#[cfg(feature = "multithread")]
#[cfg(feature = "parallel")]
if !isThreadpoolSupported().expect("Failed to check thread pool support") {
panic!("Thread pool is NOT supported");
} else {
@@ -91,14 +97,14 @@ mod tests {
.expect("Failed to initialize thread pool");
}

// Initialize the witness calculator
// Initialize witness calculator
initWitnessCalculator(WITNESS_CALCULATOR_JS)
.expect("Failed to initialize witness calculator");

let mut results = String::from("\nbenchmarks:\n");
let iterations = 10;

let zkey = readFile(&ZKEY_BYTES).expect("Failed to read zkey file");
let zkey = readFile(&ARKZKEY_BYTES).expect("Failed to read zkey file");

// Benchmark wasm_new
let start_wasm_new = Date::now();
@@ -109,7 +115,8 @@ mod tests {

// Create RLN instance for other benchmarks
let rln_instance = wasm_new(zkey).expect("Failed to create RLN instance");
let mut tree = PoseidonTree::default(TEST_TREE_HEIGHT).expect("Failed to create tree");
let mut tree: OptimalMerkleTree<PoseidonHash> =
OptimalMerkleTree::default(TEST_TREE_HEIGHT).expect("Failed to create tree");

// Benchmark wasm_key_gen
let start_wasm_key_gen = Date::now();
@@ -121,7 +128,7 @@ mod tests {
// Generate identity pair for other benchmarks
let mem_keys = wasm_key_gen(rln_instance).expect("Failed to generate keys");
let id_key = mem_keys.subarray(0, 32);
let (identity_secret_hash, _) = bytes_le_to_fr(&id_key.to_vec());
let (identity_secret_hash, _) = IdSecret::from_bytes_le(&id_key.to_vec());
let (id_commitment, _) = bytes_le_to_fr(&mem_keys.subarray(32, 64).to_vec());

let epoch = hash_to_field(b"test-epoch");
@@ -140,13 +147,14 @@ mod tests {
let signal: [u8; 32] = [0; 32];
let x = hash_to_field(&signal);

let merkle_proof = tree
let merkle_proof: OptimalMerkleProof<PoseidonHash> = tree
.proof(identity_index)
.expect("Failed to generate merkle proof");

let rln_witness = rln_witness_from_values(
identity_secret_hash,
&merkle_proof,
merkle_proof.get_path_elements(),
merkle_proof.get_path_index(),
x,
external_nullifier,
user_message_limit,
@@ -1,38 +1,56 @@
#![cfg(not(feature = "multithread"))]
#![cfg(not(feature = "parallel"))]
#![cfg(target_arch = "wasm32")]

#[cfg(test)]
mod tests {
use js_sys::{BigInt as JsBigInt, Date, Object, Uint8Array};
use rln::circuit::{Fr, TEST_TREE_HEIGHT};
use rln::hashers::{hash_to_field, poseidon_hash};
use rln::poseidon_tree::PoseidonTree;
use rln::hashers::{hash_to_field, poseidon_hash, PoseidonHash};
use rln::protocol::{prepare_verify_input, rln_witness_from_values, serialize_witness};
use rln::utils::{bytes_le_to_fr, fr_to_bytes_le};
use rln::utils::{bytes_le_to_fr, fr_to_bytes_le, IdSecret};
use rln_wasm::{
wasm_generate_rln_proof_with_witness, wasm_key_gen, wasm_new, wasm_rln_witness_to_json,
wasm_verify_with_roots,
};
use wasm_bindgen::{prelude::wasm_bindgen, JsValue};
use wasm_bindgen_test::{console_log, wasm_bindgen_test};
use zerokit_utils::merkle_tree::merkle_tree::ZerokitMerkleTree;
use zerokit_utils::{
OptimalMerkleProof, OptimalMerkleTree, ZerokitMerkleProof, ZerokitMerkleTree,
};

const WITNESS_CALCULATOR_JS: &str = include_str!("../resources/witness_calculator.js");

#[wasm_bindgen(inline_js = r#"
const fs = require("fs");

let witnessCalculatorModule = null;

module.exports = {
initWitnessCalculator: function(code) {
const processedCode = code
.replace(/export\s+async\s+function\s+builder/, 'async function builder')
.replace(/export\s*\{\s*builder\s*\};?/g, '');

const moduleFunc = new Function(processedCode + '\nreturn { builder };');
witnessCalculatorModule = moduleFunc();

if (typeof witnessCalculatorModule.builder !== 'function') {
return false;
}
return true;
},

readFile: function (path) {
return fs.readFileSync(path);
},

calculateWitness: async function (circom_path, inputs) {
const wc = require("resources/witness_calculator_node.js");
const wasmFile = fs.readFileSync(circom_path);
const wasmFileBuffer = wasmFile.slice(
wasmFile.byteOffset,
wasmFile.byteOffset + wasmFile.byteLength
);
const witnessCalculator = await wc(wasmFileBuffer);
const witnessCalculator = await witnessCalculatorModule.builder(wasmFileBuffer);
const calculatedWitness = await witnessCalculator.calculateWitness(
inputs,
false
@@ -44,6 +62,9 @@ mod tests {
};
"#)]
extern "C" {
#[wasm_bindgen(catch)]
fn initWitnessCalculator(code: &str) -> Result<bool, JsValue>;

#[wasm_bindgen(catch)]
fn readFile(path: &str) -> Result<Uint8Array, JsValue>;

@@ -51,19 +72,20 @@ mod tests {
async fn calculateWitness(circom_path: &str, input: Object) -> Result<JsValue, JsValue>;
}

#[cfg(feature = "arkzkey")]
const ZKEY_PATH: &str = "../rln/resources/tree_height_20/rln_final.arkzkey";
#[cfg(not(feature = "arkzkey"))]
const ZKEY_PATH: &str = "../rln/resources/tree_height_20/rln_final.zkey";
const ARKZKEY_PATH: &str = "../rln/resources/tree_height_20/rln_final.arkzkey";

const CIRCOM_PATH: &str = "../rln/resources/tree_height_20/rln.wasm";

#[wasm_bindgen_test]
pub async fn rln_wasm_benchmark() {
// Initialize witness calculator
initWitnessCalculator(WITNESS_CALCULATOR_JS)
.expect("Failed to initialize witness calculator");

let mut results = String::from("\nbenchmarks:\n");
let iterations = 10;

let zkey = readFile(&ZKEY_PATH).expect("Failed to read zkey file");
let zkey = readFile(&ARKZKEY_PATH).expect("Failed to read zkey file");

// Benchmark wasm_new
let start_wasm_new = Date::now();
@@ -74,7 +96,8 @@ mod tests {

// Create RLN instance for other benchmarks
let rln_instance = wasm_new(zkey).expect("Failed to create RLN instance");
let mut tree = PoseidonTree::default(TEST_TREE_HEIGHT).expect("Failed to create tree");
let mut tree: OptimalMerkleTree<PoseidonHash> =
OptimalMerkleTree::default(TEST_TREE_HEIGHT).expect("Failed to create tree");

// Benchmark wasm_key_gen
let start_wasm_key_gen = Date::now();
@@ -86,7 +109,7 @@ mod tests {
// Generate identity pair for other benchmarks
let mem_keys = wasm_key_gen(rln_instance).expect("Failed to generate keys");
let id_key = mem_keys.subarray(0, 32);
let (identity_secret_hash, _) = bytes_le_to_fr(&id_key.to_vec());
let (identity_secret_hash, _) = IdSecret::from_bytes_le(&id_key.to_vec());
let (id_commitment, _) = bytes_le_to_fr(&mem_keys.subarray(32, 64).to_vec());

let epoch = hash_to_field(b"test-epoch");
@@ -105,13 +128,14 @@ mod tests {
let signal: [u8; 32] = [0; 32];
let x = hash_to_field(&signal);

let merkle_proof = tree
let merkle_proof: OptimalMerkleProof<PoseidonHash> = tree
.proof(identity_index)
.expect("Failed to generate merkle proof");

let rln_witness = rln_witness_from_values(
identity_secret_hash,
&merkle_proof,
merkle_proof.get_path_elements(),
merkle_proof.get_path_index(),
x,
external_nullifier,
user_message_limit,
@@ -19,30 +19,18 @@ doctest = false
# ZKP Generation
ark-bn254 = { version = "0.5.0", features = ["std"] }
ark-relations = { version = "0.5.1", features = ["std"] }
ark-ff = { version = "0.5.0", default-features = false, features = [
"parallel",
] }
ark-ec = { version = "0.5.0", default-features = false, features = [
"parallel",
] }

ark-std = { version = "0.5.0", default-features = false, features = [
"parallel",
] }
ark-poly = { version = "0.5.0", default-features = false, features = [
"parallel",
] }
ark-groth16 = { version = "0.5.0", default-features = false, features = [
"parallel",
] }
ark-serialize = { version = "0.5.0", default-features = false, features = [
"parallel",
] }
ark-ff = { version = "0.5.0", default-features = false }
ark-ec = { version = "0.5.0", default-features = false }
ark-std = { version = "0.5.0", default-features = false }
ark-poly = { version = "0.5.0", default-features = false }
ark-groth16 = { version = "0.5.0", default-features = false }
ark-serialize = { version = "0.5.0", default-features = false }

# error handling
thiserror = "2.0.12"

# utilities
rayon = { version = "1.10.0", optional = true }
byteorder = "1.5.0"
cfg-if = "1.0"
num-bigint = { version = "0.4.6", default-features = false, features = ["std"] }
@@ -53,39 +41,41 @@ rand = "0.8.5"
rand_chacha = "0.3.1"
ruint = { version = "1.15.0", features = ["rand", "serde", "ark-ff-04"] }
tiny-keccak = { version = "2.0.2", features = ["keccak"] }
zeroize = "1.8"
utils = { package = "zerokit_utils", version = "0.6.0", path = "../utils", default-features = false }

# serialization
prost = "0.13.5"
serde_json = "1.0"
prost = "0.14.1"
serde_json = "1.0.141"
serde = { version = "1.0", features = ["derive"] }

document-features = { version = "0.2.11", optional = true }

[dev-dependencies]
criterion = { version = "0.6.0", features = ["html_reports"] }
criterion = { version = "0.7.0", features = ["html_reports"] }

[features]
default = ["pmtree-ft"]
fullmerkletree = []
default = ["parallel", "pmtree-ft"]
stateless = []
arkzkey = []

parallel = [
"rayon",
"utils/parallel",
"ark-ff/parallel",
"ark-ec/parallel",
"ark-std/parallel",
"ark-poly/parallel",
"ark-groth16/parallel",
"ark-serialize/parallel",
]
fullmerkletree = []
optimalmerkletree = []
# Note: pmtree feature is still experimental
pmtree-ft = ["utils/pmtree-ft"]

[[bench]]
name = "circuit_loading_arkzkey_benchmark"
harness = false
required-features = ["arkzkey"]

[[bench]]
name = "circuit_loading_benchmark"
harness = false

[[bench]]
name = "pmtree_benchmark"
harness = false
required-features = ["pmtree-ft"]

[[bench]]
name = "poseidon_tree_benchmark"
@@ -8,11 +8,7 @@ args = ["test", "--release", "--", "--nocapture"]
|
||||
|
||||
[tasks.test_stateless]
|
||||
command = "cargo"
|
||||
args = ["test", "--release", "--features", "stateless"]
|
||||
|
||||
[tasks.test_arkzkey]
|
||||
command = "cargo"
|
||||
args = ["test", "--release", "--features", "arkzkey"]
|
||||
args = ["test", "--release", "--no-default-features", "--features", "stateless"]
|
||||
|
||||
[tasks.bench]
|
||||
command = "cargo"
|
||||
|
||||
@@ -31,9 +31,8 @@ rln = { git = "https://github.com/vacp2p/zerokit" }

The RLN object constructor requires the following files:

- `rln_final.arkzkey`: The proving key in arkzkey format.
- `graph.bin`: The graph file built for the input tree size
- `rln_final.zkey` or `rln_final_uncompr.arkzkey`: The proving key
- `verification_key.arkvkey`: The verification key (optional)

Additionally, `rln.wasm` is used for testing in the rln-wasm module.
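For orientation, here is a minimal loading sketch in Rust. It leans on `zkey_from_raw` and `graph_from_folder` from `rln::circuit` (both visible elsewhere in this changeset); the file path and error handling are illustrative assumptions, not the crate's canonical setup.

```rust
use rln::circuit::{graph_from_folder, zkey_from_raw};
use std::fs;

fn main() {
    // Illustrative path: point this at the proving key listed above.
    let zkey_bytes = fs::read("rln_final_uncompr.arkzkey").expect("failed to read proving key");
    let (_proving_key, _matrices) =
        zkey_from_raw(&zkey_bytes).expect("failed to parse proving key");

    // The witness-calculation graph ships embedded in the crate resources.
    let _graph: &[u8] = graph_from_folder();
}
```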
@@ -86,7 +85,7 @@ fn main() {
let signal = b"RLN is awesome";

// 6. Prepare input for generate_rln_proof API
// input_data is [ identity_secret<32> | id_index<8> | external_nullifier<32>
// | user_message_limit<32> | message_id<32> | signal_len<8> | signal<var> ]
let prove_input = prepare_prove_input(
identity_secret_hash,
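To make the serialized layout above concrete, here is a hedged sketch of assembling that buffer by hand. The field order follows the comment; the little-endian integer encoding is an assumption based on the crate's `*_le` helpers, and `serialize_prove_input` is a hypothetical name, not a crate API.

```rust
// Hypothetical helper mirroring the input_data layout documented above.
fn serialize_prove_input(
    identity_secret: [u8; 32],
    id_index: u64,
    external_nullifier: [u8; 32],
    user_message_limit: [u8; 32],
    message_id: [u8; 32],
    signal: &[u8],
) -> Vec<u8> {
    let mut buf = Vec::new();
    buf.extend_from_slice(&identity_secret);
    buf.extend_from_slice(&id_index.to_le_bytes()); // id_index<8>
    buf.extend_from_slice(&external_nullifier);
    buf.extend_from_slice(&user_message_limit);
    buf.extend_from_slice(&message_id);
    buf.extend_from_slice(&(signal.len() as u64).to_le_bytes()); // signal_len<8>
    buf.extend_from_slice(signal);
    buf
}
```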
@@ -105,7 +104,7 @@ fn main() {
.unwrap();

// We get the public outputs returned by the circuit evaluation
// The byte vector `proof_data` is serialized as
// `[ zk-proof | tree_root | external_nullifier | share_x | share_y | nullifier ]`.
let proof_data = output_buffer.into_inner();
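As an illustration of that output layout, the buffer could be split as below. The 128-byte Groth16 proof and 32-byte field elements are assumptions about the encoding; `split_proof_data` is not an API the crate exposes.

```rust
// Hypothetical splitter for the proof_data layout documented above.
fn split_proof_data(proof_data: &[u8]) -> (&[u8], Vec<&[u8]>) {
    assert_eq!(proof_data.len(), 128 + 5 * 32, "unexpected proof_data length");
    let (zk_proof, rest) = proof_data.split_at(128);
    // In order: tree_root, external_nullifier, share_x, share_y, nullifier.
    let fields: Vec<&[u8]> = rest.chunks_exact(32).collect();
    (zk_proof, fields)
}
```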
@@ -136,13 +135,11 @@ for one application to be re-used in another one.

### Features

- **Multiple Backend Support**: Choose between different zkey formats with feature flags
  - `arkzkey`: Use the optimized Arkworks-compatible zkey format (faster loading)
  - `stateless`: For stateless proof verification
- **Stateless Mode**: Allows the use of RLN without maintaining state of the Merkle tree.
- **Pre-compiled Circuits**: Ready-to-use circuits with Merkle tree depth of 20
- **Wasm Support**: WebAssembly bindings via rln-wasm crate with features like:
  - Browser and Node.js compatibility
  - Optional multi-threading support using wasm-bindgen-rayon
  - Optional parallel feature support using [wasm-bindgen-rayon](https://github.com/RReverser/wasm-bindgen-rayon)
  - Headless browser testing capabilities

## Building and Testing
@@ -164,9 +161,8 @@ cargo make build
# Test with default features
cargo make test

# Test with specific features
cargo make test_arkzkey # For arkzkey feature
cargo make test_stateless # For stateless feature
# Test with stateless features
cargo make test_stateless
```

## Advanced: Custom Circuit Compilation
@@ -175,9 +171,9 @@ The `rln` (<https://github.com/rate-limiting-nullifier/circom-rln>) repository,
which contains the RLN circuit implementation, is used for the pre-compiled RLN circuit in zerokit RLN.
If you want to compile your own RLN circuit, you can follow the instructions below.

### 1. Compile ZK Circuits for getting the zkey and verification key files
### 1. Compile ZK Circuits for getting the zkey file

This script actually generates not only the zkey and verification key files for the RLN circuit,
This script actually generates not only the zkey file for the RLN circuit,
but also the execution wasm file used for witness calculation.
However, the wasm file is not needed for the `rln` module,
because the current implementation uses the iden3 graph file for witness calculation.
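For reference, witness calculation through the graph file reduces to a single call. This sketch follows the `calculate_rln_witness` signature visible in this changeset; the helper name and the way inputs are assembled are assumptions.

```rust
use rln::circuit::{calculate_rln_witness, graph_from_folder, Fr};
use rln::utils::FrOrSecret;

// Hypothetical helper: evaluate the embedded iden3 graph on named circuit inputs.
fn witness_from_inputs(inputs: Vec<(String, Vec<FrOrSecret>)>) -> Vec<Fr> {
    calculate_rln_witness(inputs, graph_from_folder())
}
```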
@@ -271,11 +267,11 @@ cargo run --package circom_witnesscalc --bin build-circuit ../circom-rln/circuit
The `rln` module comes with [pre-compiled](https://github.com/vacp2p/zerokit/tree/master/rln/resources)
execution graph files for the RLN circuit.

### 3. Generate Arkzkey Representation for zkey and verification key files
### 3. Generate Arkzkey Representation for zkey file

For faster loading, compile the zkey file into the arkzkey format using
[ark-zkey](https://github.com/seemenkina/ark-zkey).
This is a fork of the [original](https://github.com/zkmopro/ark-zkey) repository with the uncompressed zkey support.
This is a fork of the [original](https://github.com/zkmopro/ark-zkey) repository with the uncompressed arkzkey support.

```sh
# Clone the ark-zkey repository
@@ -288,9 +284,22 @@ cd ark-zkey && cargo build
cargo run --bin arkzkey-util <path_to_rln_final.zkey>
```

This will generate the `rln_final.arkzkey` file, which is used by the `rln` module.

Currently, the `rln` module comes with
[pre-compiled](https://github.com/vacp2p/zerokit/tree/master/rln/resources) arkzkey keys for the RLN circuit.

> [!NOTE]
> You can use this [convert_zkey.sh](./convert_zkey.sh) script
> to automate the process of generating the arkzkey file from any zkey file.

Run the script as follows:

```sh
chmod +x ./convert_zkey.sh
./convert_zkey.sh <path_to_rln_final.zkey>
```

## Get involved

Zerokit RLN public and FFI APIs allow interaction with many more features than what is briefly showcased above.
@@ -1,25 +0,0 @@
use criterion::{criterion_group, criterion_main, Criterion};
use rln::circuit::{read_arkzkey_from_bytes_uncompressed, ARKZKEY_BYTES};

pub fn uncompressed_bench(c: &mut Criterion) {
    let arkzkey = ARKZKEY_BYTES.to_vec();
    let size = arkzkey.len() as f32;
    println!(
        "Size of uncompressed arkzkey: {:.2?} MB",
        size / 1024.0 / 1024.0
    );

    c.bench_function("arkzkey::arkzkey_from_raw_uncompressed", |b| {
        b.iter(|| {
            let r = read_arkzkey_from_bytes_uncompressed(&arkzkey);
            assert_eq!(r.is_ok(), true);
        })
    });
}

criterion_group! {
    name = benches;
    config = Criterion::default().sample_size(10);
    targets = uncompressed_bench
}
criterion_main!(benches);
@@ -1,24 +0,0 @@
use criterion::{criterion_group, criterion_main, Criterion};
use rln::circuit::zkey::read_zkey;
use std::io::Cursor;

pub fn zkey_load_benchmark(c: &mut Criterion) {
    let zkey = rln::circuit::ZKEY_BYTES.to_vec();
    let size = zkey.len() as f32;
    println!("Size of zkey: {:.2?} MB", size / 1024.0 / 1024.0);

    c.bench_function("zkey::zkey_from_raw", |b| {
        b.iter(|| {
            let mut reader = Cursor::new(zkey.clone());
            let r = read_zkey(&mut reader);
            assert_eq!(r.is_ok(), true);
        })
    });
}

criterion_group! {
    name = benches;
    config = Criterion::default().sample_size(10);
    targets = zkey_load_benchmark
}
criterion_main!(benches);
@@ -5,7 +5,7 @@ use utils::ZerokitMerkleTree;
pub fn pmtree_benchmark(c: &mut Criterion) {
let mut tree = PmTree::default(2).unwrap();

let leaves: Vec<Fr> = (0..4).map(|s| Fr::from(s)).collect();
let leaves: Vec<Fr> = (0..4).map(Fr::from).collect();

c.bench_function("Pmtree::set", |b| {
b.iter(|| {

@@ -6,7 +6,7 @@ use rln::{
use utils::{FullMerkleTree, OptimalMerkleTree, ZerokitMerkleTree};

pub fn get_leaves(n: u32) -> Vec<Fr> {
(0..n).map(|s| Fr::from(s)).collect()
(0..n).map(Fr::from).collect()
}

pub fn optimal_merkle_tree_poseidon_benchmark(c: &mut Criterion) {
53 rln/convert_zkey.sh Executable file
@@ -0,0 +1,53 @@
#!/bin/bash

# Convert zkey to arkzkey using /tmp directory
# Usage: ./convert_zkey.sh <path_to_zkey_file>

set -e

# Check input
if [ $# -eq 0 ]; then
  echo "Usage: $0 <path_to_zkey_file>"
  exit 1
fi

ZKEY_FILE="$1"

if [ ! -f "$ZKEY_FILE" ]; then
  echo "Error: File '$ZKEY_FILE' does not exist"
  exit 1
fi

# Get absolute path before changing directories
ZKEY_ABSOLUTE_PATH=$(realpath "$ZKEY_FILE")

# Create temp directory in /tmp
TEMP_DIR="/tmp/ark-zkey-$$"
echo "Using temp directory: $TEMP_DIR"

# Cleanup function
cleanup() {
  echo "Cleaning up temp directory: $TEMP_DIR"
  rm -rf "$TEMP_DIR"
}

# Setup cleanup trap
trap cleanup EXIT

# Create temp directory and clone ark-zkey
mkdir -p "$TEMP_DIR"
cd "$TEMP_DIR"
git clone https://github.com/seemenkina/ark-zkey.git
cd ark-zkey
cargo build

# Convert
cargo run --bin arkzkey-util "$ZKEY_ABSOLUTE_PATH"

# Check if arkzkey file was created (tool creates it in same directory as input)
ARKZKEY_FILE="${ZKEY_ABSOLUTE_PATH%.zkey}.arkzkey"

if [ ! -f "$ARKZKEY_FILE" ]; then
  echo "Could not find generated .arkzkey file at $ARKZKEY_FILE"
  exit 1
fi
Binary file not shown.
@@ -8,19 +8,33 @@ pub mod storage;
use ruint::aliases::U256;
use std::collections::HashMap;
use storage::deserialize_witnesscalc_graph;
use zeroize::zeroize_flat_type;

use crate::circuit::iden3calc::graph::fr_to_u256;
use crate::circuit::Fr;
use graph::{fr_to_u256, Node};
use crate::utils::FrOrSecret;
use graph::Node;

pub type InputSignalsInfo = HashMap<String, (usize, usize)>;

pub fn calc_witness<I: IntoIterator<Item = (String, Vec<Fr>)>>(
pub fn calc_witness<I: IntoIterator<Item = (String, Vec<FrOrSecret>)>>(
inputs: I,
graph_data: &[u8],
) -> Vec<Fr> {
let inputs: HashMap<String, Vec<U256>> = inputs
let mut inputs: HashMap<String, Vec<U256>> = inputs
.into_iter()
.map(|(key, value)| (key, value.iter().map(fr_to_u256).collect()))
.map(|(key, value)| {
(
key,
value
.iter()
.map(|f_| match f_ {
FrOrSecret::IdSecret(s) => s.to_u256(),
FrOrSecret::Fr(f) => fr_to_u256(f),
})
.collect(),
)
})
.collect();

let (nodes, signals, input_mapping): (Vec<Node>, Vec<usize>, InputSignalsInfo) =
@@ -28,8 +42,15 @@ pub fn calc_witness<I: IntoIterator<Item = (String, Vec<Fr>)>>(

let mut inputs_buffer = get_inputs_buffer(get_inputs_size(&nodes));
populate_inputs(&inputs, &input_mapping, &mut inputs_buffer);

graph::evaluate(&nodes, inputs_buffer.as_slice(), &signals)
if let Some(v) = inputs.get_mut("identitySecret") {
// ~== v[0] = U256::ZERO;
unsafe { zeroize_flat_type(v) };
}
let res = graph::evaluate(&nodes, inputs_buffer.as_slice(), &signals);
inputs_buffer.iter_mut().for_each(|i| {
unsafe { zeroize_flat_type(i) };
});
res
}

fn get_inputs_size(nodes: &[Node]) -> usize {
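The hunk above wipes the secret input and the evaluation buffer with the unsafe `zeroize_flat_type`. For context, the same `zeroize` crate (added to the dependencies in this changeset) also offers a safe trait-based pattern; a minimal sketch, not this module's code:

```rust
use zeroize::Zeroize;

fn wipe_secret_limbs() {
    let mut secret_limbs = [0xdead_beef_u64; 4];
    // Overwrites the array with zeros in a way the optimizer may not elide.
    secret_limbs.zeroize();
    assert_eq!(secret_limbs, [0u64; 4]);
}
```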
@@ -56,7 +77,7 @@ fn populate_inputs(
for (key, value) in input_list {
let (offset, len) = inputs_info[key];
if len != value.len() {
panic!("Invalid input length for {}", key);
panic!("Invalid input length for {key}");
}

for (i, v) in value.iter().enumerate() {
@@ -944,14 +944,14 @@ mod tests {
let x = M.div(uint!(2_U256));

println!("x: {:?}", x.as_limbs());
println!("x: {}", M);
println!("x: {M}");
}

#[test]
fn test_2() {
let nodes: Vec<Node> = vec![];
// let node = nodes[0];
let node = nodes.get(0);
println!("{:?}", node);
let node = nodes.first();
println!("{node:?}");
}
}
@@ -419,13 +419,13 @@ mod tests {
let mut r = WriteBackReader::new(std::io::Cursor::new(&data));

let buf = &mut [0u8; 5];
r.read(buf).unwrap();
r.read_exact(buf).unwrap();
assert_eq!(buf, &[1, 2, 3, 4, 5]);

// return [4, 5] to reader
r.write(&buf[3..]).unwrap();
r.write_all(&buf[3..]).unwrap();
// return [2, 3] to reader
r.write(&buf[1..3]).unwrap();
r.write_all(&buf[1..3]).unwrap();

buf.fill(0);
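The switch from `read`/`write` to `read_exact`/`write_all` matters because the plain variants may legally process fewer bytes than requested without reporting an error. A minimal standalone illustration:

```rust
use std::io::{Cursor, Read};

fn read_exact_demo() {
    let mut r = Cursor::new(vec![1u8, 2, 3, 4, 5]);
    let mut buf = [0u8; 5];
    // read() may fill fewer than 5 bytes; read_exact() either fills the whole
    // buffer or returns an error.
    r.read_exact(&mut buf).unwrap();
    assert_eq!(buf, [1, 2, 3, 4, 5]);
}
```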
@@ -3,7 +3,6 @@
pub mod error;
pub mod iden3calc;
pub mod qap;
pub mod zkey;

use ::lazy_static::lazy_static;
use ark_bn254::{
@@ -12,36 +11,22 @@ use ark_bn254::{
};
use ark_groth16::ProvingKey;
use ark_relations::r1cs::ConstraintMatrices;
use cfg_if::cfg_if;

use crate::circuit::error::ZKeyReadError;
use crate::circuit::iden3calc::calc_witness;

#[cfg(feature = "arkzkey")]
use {ark_ff::Field, ark_serialize::CanonicalDeserialize, ark_serialize::CanonicalSerialize};

#[cfg(not(feature = "arkzkey"))]
use {crate::circuit::zkey::read_zkey, std::io::Cursor};
use crate::utils::FrOrSecret;

#[cfg(feature = "arkzkey")]
pub const ARKZKEY_BYTES: &[u8] = include_bytes!("../../resources/tree_height_20/rln_final.arkzkey");

pub const ZKEY_BYTES: &[u8] = include_bytes!("../../resources/tree_height_20/rln_final.zkey");

#[cfg(not(target_arch = "wasm32"))]
const GRAPH_BYTES: &[u8] = include_bytes!("../../resources/tree_height_20/graph.bin");

lazy_static! {
static ref ZKEY: (ProvingKey<Curve>, ConstraintMatrices<Fr>) = {
cfg_if! {
if #[cfg(feature = "arkzkey")] {
read_arkzkey_from_bytes_uncompressed(ARKZKEY_BYTES).expect("Failed to read arkzkey")
} else {
let mut reader = Cursor::new(ZKEY_BYTES);
read_zkey(&mut reader).expect("Failed to read zkey")
}
}
};
static ref ARKZKEY: (ProvingKey<Curve>, ConstraintMatrices<Fr>) =
read_arkzkey_from_bytes_uncompressed(ARKZKEY_BYTES).expect("Failed to read arkzkey");
}

pub const TEST_TREE_HEIGHT: usize = 20;
@@ -65,15 +50,7 @@ pub fn zkey_from_raw(
return Err(ZKeyReadError::EmptyBytes);
}

let proving_key_and_matrices = match () {
#[cfg(feature = "arkzkey")]
() => read_arkzkey_from_bytes_uncompressed(zkey_data)?,
#[cfg(not(feature = "arkzkey"))]
() => {
let mut reader = Cursor::new(zkey_data);
read_zkey(&mut reader)?
}
};
let proving_key_and_matrices = read_arkzkey_from_bytes_uncompressed(zkey_data)?;

Ok(proving_key_and_matrices)
}
@@ -81,10 +58,10 @@ pub fn zkey_from_raw(
// Loads the proving key
#[cfg(not(target_arch = "wasm32"))]
pub fn zkey_from_folder() -> &'static (ProvingKey<Curve>, ConstraintMatrices<Fr>) {
&ZKEY
&ARKZKEY
}

pub fn calculate_rln_witness<I: IntoIterator<Item = (String, Vec<Fr>)>>(
pub fn calculate_rln_witness<I: IntoIterator<Item = (String, Vec<FrOrSecret>)>>(
inputs: I,
graph_data: &[u8],
) -> Vec<Fr> {
@@ -101,11 +78,9 @@ pub fn graph_from_folder() -> &'static [u8] {
// without print and allow to choose between compressed and uncompressed arkzkey
////////////////////////////////////////////////////////

#[cfg(feature = "arkzkey")]
#[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug, PartialEq)]
pub struct SerializableProvingKey(pub ProvingKey<Bn254>);

#[cfg(feature = "arkzkey")]
#[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug, PartialEq)]
pub struct SerializableConstraintMatrices<F: Field> {
pub num_instance_variables: usize,
@@ -119,13 +94,11 @@ pub struct SerializableConstraintMatrices<F: Field> {
pub c: SerializableMatrix<F>,
}

#[cfg(feature = "arkzkey")]
#[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug, PartialEq)]
pub struct SerializableMatrix<F: Field> {
pub data: Vec<Vec<(F, usize)>>,
}

#[cfg(feature = "arkzkey")]
pub fn read_arkzkey_from_bytes_uncompressed(
arkzkey_data: &[u8],
) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>), ZKeyReadError> {
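With the snarkjs zkey path removed, loading always goes through the uncompressed arkzkey reader. A usage sketch against the names visible in this diff (the framing is illustrative):

```rust
use rln::circuit::{read_arkzkey_from_bytes_uncompressed, ARKZKEY_BYTES};

fn load_bundled_key() {
    // ARKZKEY_BYTES is the proving key embedded in the crate resources.
    let (_proving_key, _matrices) =
        read_arkzkey_from_bytes_uncompressed(ARKZKEY_BYTES).expect("failed to read arkzkey");
}
```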
@@ -7,6 +7,12 @@ use ark_poly::EvaluationDomain;
use ark_relations::r1cs::{ConstraintMatrices, ConstraintSystemRef, SynthesisError};
use ark_std::{cfg_into_iter, cfg_iter, cfg_iter_mut, vec};

#[cfg(feature = "parallel")]
use rayon::iter::{
IndexedParallelIterator, IntoParallelIterator, IntoParallelRefIterator,
IntoParallelRefMutIterator, ParallelIterator,
};

/// Implements the witness map used by snarkjs. The arkworks witness map calculates the
/// coefficients of H through computing (AB-C)/Z in the evaluation domain and going back to the
/// coefficients domain. snarkjs instead precomputes the Lagrange form of the powers of tau bases
@@ -1,371 +0,0 @@
|
||||
// This file is based on the code by arkworks. Its preimage can be found here:
|
||||
// https://github.com/arkworks-rs/circom-compat/blob/3c95ed98e23a408b4d99a53e483a9bba39685a4e/src/zkey.rs
|
||||
|
||||
//! ZKey Parsing
|
||||
//!
|
||||
//! Each ZKey file is broken into sections:
|
||||
//! Header(1)
|
||||
//! Prover Type 1 Groth
|
||||
//! HeaderGroth(2)
|
||||
//! n8q
|
||||
//! q
|
||||
//! n8r
|
||||
//! r
|
||||
//! NVars
|
||||
//! NPub
|
||||
//! DomainSize (multiple of 2
|
||||
//! alpha1
|
||||
//! beta1
|
||||
//! delta1
|
||||
//! beta2
|
||||
//! gamma2
|
||||
//! delta2
|
||||
//! IC(3)
|
||||
//! Coefs(4)
|
||||
//! PointsA(5)
|
||||
//! PointsB1(6)
|
||||
//! PointsB2(7)
|
||||
//! PointsC(8)
|
||||
//! PointsH(9)
|
||||
//! Contributions(10)
|
||||
use ark_ff::{BigInteger256, PrimeField};
|
||||
use ark_relations::r1cs::ConstraintMatrices;
|
||||
use ark_serialize::{CanonicalDeserialize, SerializationError};
|
||||
use ark_std::log2;
|
||||
use byteorder::{LittleEndian, ReadBytesExt};
|
||||
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
io::{Read, Seek, SeekFrom},
|
||||
};
|
||||
|
||||
use ark_bn254::{Bn254, Fq, Fq2, Fr, G1Affine, G2Affine};
|
||||
use ark_groth16::{ProvingKey, VerifyingKey};
|
||||
use num_traits::Zero;
|
||||
|
||||
type IoResult<T> = Result<T, SerializationError>;
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
struct Section {
|
||||
position: u64,
|
||||
#[allow(dead_code)]
|
||||
size: usize,
|
||||
}
|
||||
|
||||
/// Reads a SnarkJS ZKey file into an Arkworks ProvingKey.
|
||||
pub fn read_zkey<R: Read + Seek>(
|
||||
reader: &mut R,
|
||||
) -> IoResult<(ProvingKey<Bn254>, ConstraintMatrices<Fr>)> {
|
||||
let mut binfile = BinFile::new(reader)?;
|
||||
let proving_key = binfile.proving_key()?;
|
||||
let matrices = binfile.matrices()?;
|
||||
Ok((proving_key, matrices))
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct BinFile<'a, R> {
|
||||
#[allow(dead_code)]
|
||||
ftype: String,
|
||||
#[allow(dead_code)]
|
||||
version: u32,
|
||||
sections: HashMap<u32, Vec<Section>>,
|
||||
reader: &'a mut R,
|
||||
}
|
||||
|
||||
impl<'a, R: Read + Seek> BinFile<'a, R> {
|
||||
fn new(reader: &'a mut R) -> IoResult<Self> {
|
||||
let mut magic = [0u8; 4];
|
||||
reader.read_exact(&mut magic)?;
|
||||
|
||||
let version = reader.read_u32::<LittleEndian>()?;
|
||||
|
||||
let num_sections = reader.read_u32::<LittleEndian>()?;
|
||||
|
||||
let mut sections = HashMap::new();
|
||||
for _ in 0..num_sections {
|
||||
let section_id = reader.read_u32::<LittleEndian>()?;
|
||||
let section_length = reader.read_u64::<LittleEndian>()?;
|
||||
|
||||
let section = sections.entry(section_id).or_insert_with(Vec::new);
|
||||
section.push(Section {
|
||||
position: reader.stream_position()?,
|
||||
size: section_length as usize,
|
||||
});
|
||||
|
||||
reader.seek(SeekFrom::Current(section_length as i64))?;
|
||||
}
|
||||
|
||||
Ok(Self {
|
||||
ftype: std::str::from_utf8(&magic[..]).unwrap().to_string(),
|
||||
version,
|
||||
sections,
|
||||
reader,
|
||||
})
|
||||
}
|
||||
|
||||
fn proving_key(&mut self) -> IoResult<ProvingKey<Bn254>> {
|
||||
let header = self.groth_header()?;
|
||||
let ic = self.ic(header.n_public)?;
|
||||
|
||||
let a_query = self.a_query(header.n_vars)?;
|
||||
let b_g1_query = self.b_g1_query(header.n_vars)?;
|
||||
let b_g2_query = self.b_g2_query(header.n_vars)?;
|
||||
let l_query = self.l_query(header.n_vars - header.n_public - 1)?;
|
||||
let h_query = self.h_query(header.domain_size as usize)?;
|
||||
|
||||
let vk = VerifyingKey::<Bn254> {
|
||||
alpha_g1: header.verifying_key.alpha_g1,
|
||||
beta_g2: header.verifying_key.beta_g2,
|
||||
gamma_g2: header.verifying_key.gamma_g2,
|
||||
delta_g2: header.verifying_key.delta_g2,
|
||||
gamma_abc_g1: ic,
|
||||
};
|
||||
|
||||
let pk = ProvingKey::<Bn254> {
|
||||
vk,
|
||||
beta_g1: header.verifying_key.beta_g1,
|
||||
delta_g1: header.verifying_key.delta_g1,
|
||||
a_query,
|
||||
b_g1_query,
|
||||
b_g2_query,
|
||||
h_query,
|
||||
l_query,
|
||||
};
|
||||
|
||||
Ok(pk)
|
||||
}
|
||||
|
||||
fn get_section(&self, id: u32) -> Section {
|
||||
self.sections.get(&id).unwrap()[0].clone()
|
||||
}
|
||||
|
||||
fn groth_header(&mut self) -> IoResult<HeaderGroth> {
|
||||
let section = self.get_section(2);
|
||||
let header = HeaderGroth::new(&mut self.reader, §ion)?;
|
||||
Ok(header)
|
||||
}
|
||||
|
||||
fn ic(&mut self, n_public: usize) -> IoResult<Vec<G1Affine>> {
|
||||
// the range is non-inclusive so we do +1 to get all inputs
|
||||
self.g1_section(n_public + 1, 3)
|
||||
}
|
||||
|
||||
/// Returns the [`ConstraintMatrices`] corresponding to the zkey
|
||||
pub fn matrices(&mut self) -> IoResult<ConstraintMatrices<Fr>> {
|
||||
let header = self.groth_header()?;
|
||||
|
||||
let section = self.get_section(4);
|
||||
self.reader.seek(SeekFrom::Start(section.position))?;
|
||||
let num_coeffs: u32 = self.reader.read_u32::<LittleEndian>()?;
|
||||
|
||||
// insantiate AB
|
||||
let mut matrices = vec![vec![vec![]; header.domain_size as usize]; 2];
|
||||
let mut max_constraint_index = 0;
|
||||
for _ in 0..num_coeffs {
|
||||
let matrix: u32 = self.reader.read_u32::<LittleEndian>()?;
|
||||
let constraint: u32 = self.reader.read_u32::<LittleEndian>()?;
|
||||
let signal: u32 = self.reader.read_u32::<LittleEndian>()?;
|
||||
|
||||
let value: Fr = deserialize_field_fr(&mut self.reader)?;
|
||||
max_constraint_index = std::cmp::max(max_constraint_index, constraint);
|
||||
matrices[matrix as usize][constraint as usize].push((value, signal as usize));
|
||||
}
|
||||
|
||||
let num_constraints = max_constraint_index as usize - header.n_public;
|
||||
// Remove the public input constraints, Arkworks adds them later
|
||||
matrices.iter_mut().for_each(|m| {
|
||||
m.truncate(num_constraints);
|
||||
});
|
||||
// This is taken from Arkworks' to_matrices() function
|
||||
let a = matrices[0].clone();
|
||||
let b = matrices[1].clone();
|
||||
let a_num_non_zero: usize = a.iter().map(|lc| lc.len()).sum();
|
||||
let b_num_non_zero: usize = b.iter().map(|lc| lc.len()).sum();
|
||||
let matrices = ConstraintMatrices {
|
||||
num_instance_variables: header.n_public + 1,
|
||||
num_witness_variables: header.n_vars - header.n_public,
|
||||
num_constraints,
|
||||
|
||||
a_num_non_zero,
|
||||
b_num_non_zero,
|
||||
c_num_non_zero: 0,
|
||||
|
||||
a,
|
||||
b,
|
||||
c: vec![],
|
||||
};
|
||||
|
||||
Ok(matrices)
|
||||
}
|
||||
|
||||
fn a_query(&mut self, n_vars: usize) -> IoResult<Vec<G1Affine>> {
|
||||
self.g1_section(n_vars, 5)
|
||||
}
|
||||
|
||||
fn b_g1_query(&mut self, n_vars: usize) -> IoResult<Vec<G1Affine>> {
|
||||
self.g1_section(n_vars, 6)
|
||||
}
|
||||
|
||||
fn b_g2_query(&mut self, n_vars: usize) -> IoResult<Vec<G2Affine>> {
|
||||
self.g2_section(n_vars, 7)
|
||||
}
|
||||
|
||||
fn l_query(&mut self, n_vars: usize) -> IoResult<Vec<G1Affine>> {
|
||||
self.g1_section(n_vars, 8)
|
||||
}
|
||||
|
||||
fn h_query(&mut self, n_vars: usize) -> IoResult<Vec<G1Affine>> {
|
||||
self.g1_section(n_vars, 9)
|
||||
}
|
||||
|
||||
fn g1_section(&mut self, num: usize, section_id: usize) -> IoResult<Vec<G1Affine>> {
|
||||
let section = self.get_section(section_id as u32);
|
||||
self.reader.seek(SeekFrom::Start(section.position))?;
|
||||
deserialize_g1_vec(self.reader, num as u32)
|
||||
}
|
||||
|
||||
fn g2_section(&mut self, num: usize, section_id: usize) -> IoResult<Vec<G2Affine>> {
|
||||
let section = self.get_section(section_id as u32);
|
||||
self.reader.seek(SeekFrom::Start(section.position))?;
|
||||
deserialize_g2_vec(self.reader, num as u32)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Clone, Debug, CanonicalDeserialize)]
|
||||
pub struct ZVerifyingKey {
|
||||
alpha_g1: G1Affine,
|
||||
beta_g1: G1Affine,
|
||||
beta_g2: G2Affine,
|
||||
gamma_g2: G2Affine,
|
||||
delta_g1: G1Affine,
|
||||
delta_g2: G2Affine,
|
||||
}
|
||||
|
||||
impl ZVerifyingKey {
|
||||
fn new<R: Read>(reader: &mut R) -> IoResult<Self> {
|
||||
let alpha_g1 = deserialize_g1(reader)?;
|
||||
let beta_g1 = deserialize_g1(reader)?;
|
||||
let beta_g2 = deserialize_g2(reader)?;
|
||||
let gamma_g2 = deserialize_g2(reader)?;
|
||||
let delta_g1 = deserialize_g1(reader)?;
|
||||
let delta_g2 = deserialize_g2(reader)?;
|
||||
|
||||
Ok(Self {
|
||||
alpha_g1,
|
||||
beta_g1,
|
||||
beta_g2,
|
||||
gamma_g2,
|
||||
delta_g1,
|
||||
delta_g2,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
struct HeaderGroth {
|
||||
#[allow(dead_code)]
|
||||
n8q: u32,
|
||||
#[allow(dead_code)]
|
||||
q: BigInteger256,
|
||||
#[allow(dead_code)]
|
||||
n8r: u32,
|
||||
#[allow(dead_code)]
|
||||
r: BigInteger256,
|
||||
|
||||
n_vars: usize,
|
||||
n_public: usize,
|
||||
|
||||
domain_size: u32,
|
||||
#[allow(dead_code)]
|
||||
power: u32,
|
||||
|
||||
verifying_key: ZVerifyingKey,
|
||||
}
|
||||
|
||||
impl HeaderGroth {
|
||||
fn new<R: Read + Seek>(reader: &mut R, section: &Section) -> IoResult<Self> {
|
||||
reader.seek(SeekFrom::Start(section.position))?;
|
||||
Self::read(reader)
|
||||
}
|
||||
|
||||
fn read<R: Read>(mut reader: &mut R) -> IoResult<Self> {
|
||||
// TODO: Impl From<u32> in Arkworks
|
||||
let n8q: u32 = u32::deserialize_uncompressed(&mut reader)?;
|
||||
// group order r of Bn254
|
||||
let q = BigInteger256::deserialize_uncompressed(&mut reader)?;
|
||||
|
||||
let n8r: u32 = u32::deserialize_uncompressed(&mut reader)?;
|
||||
// Prime field modulus
|
||||
let r = BigInteger256::deserialize_uncompressed(&mut reader)?;
|
||||
|
||||
let n_vars = u32::deserialize_uncompressed(&mut reader)? as usize;
|
||||
let n_public = u32::deserialize_uncompressed(&mut reader)? as usize;
|
||||
|
||||
let domain_size: u32 = u32::deserialize_uncompressed(&mut reader)?;
|
||||
let power = log2(domain_size as usize);
|
||||
|
||||
let verifying_key = ZVerifyingKey::new(&mut reader)?;
|
||||
|
||||
Ok(Self {
|
||||
n8q,
|
||||
q,
|
||||
n8r,
|
||||
r,
|
||||
n_vars,
|
||||
n_public,
|
||||
domain_size,
|
||||
power,
|
||||
verifying_key,
|
||||
})
|
||||
}
|
||||
}

// need to divide by R, since snarkjs outputs the zkey with coefficients
// multiplied by R^2
fn deserialize_field_fr<R: Read>(reader: &mut R) -> IoResult<Fr> {
    let bigint = BigInteger256::deserialize_uncompressed(reader)?;
    Ok(Fr::new_unchecked(Fr::new_unchecked(bigint).into_bigint()))
}

// skips the multiplication by R because Circom points are already in Montgomery form
fn deserialize_field<R: Read>(reader: &mut R) -> IoResult<Fq> {
    let bigint = BigInteger256::deserialize_uncompressed(reader)?;
    // if you use Fq::new it multiplies by R
    Ok(Fq::new_unchecked(bigint))
}
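
// A hedged aside (editor's addition, not part of the diff): the choice between
// `new` and `new_unchecked` is the whole trick here. `Fq::new` interprets the
// limbs as a canonical integer and converts into Montgomery form (one
// multiplication by R); `Fq::new_unchecked` stores the limbs verbatim. Since
// snarkjs already serializes Montgomery limbs, the readers above use
// `new_unchecked`, and `deserialize_field_fr` round-trips through
// `into_bigint()` to strip the extra factor of R baked into zkey coefficients.
// Minimal sketch, assuming the ark-bn254/ark-ff types imported in this file:
fn decode_montgomery_limbs_sketch(raw: BigInteger256) -> Fq {
    // Correct for zkey data: limbs are already in Montgomery form.
    // Fq::new(raw) would apply R a second time and decode the wrong value.
    Fq::new_unchecked(raw)
}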

pub fn deserialize_field2<R: Read>(reader: &mut R) -> IoResult<Fq2> {
    let c0 = deserialize_field(reader)?;
    let c1 = deserialize_field(reader)?;
    Ok(Fq2::new(c0, c1))
}

fn deserialize_g1<R: Read>(reader: &mut R) -> IoResult<G1Affine> {
    let x = deserialize_field(reader)?;
    let y = deserialize_field(reader)?;
    let infinity = x.is_zero() && y.is_zero();
    if infinity {
        Ok(G1Affine::identity())
    } else {
        Ok(G1Affine::new(x, y))
    }
}
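
// A hedged test sketch (editor's addition, not in the diff): the pair (0, 0)
// is the zkey encoding for the point at infinity, which deserialize_g1 maps
// to the group identity. Module and test names are illustrative.
#[cfg(test)]
mod point_encoding_sketch {
    use super::*;

    #[test]
    fn zero_coordinates_decode_to_the_identity() {
        // Two 32-byte little-endian field elements, both zero.
        let bytes = [0u8; 64];
        let point = deserialize_g1(&mut &bytes[..]).unwrap();
        assert_eq!(point, G1Affine::identity());
    }
}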

fn deserialize_g2<R: Read>(reader: &mut R) -> IoResult<G2Affine> {
    let f1 = deserialize_field2(reader)?;
    let f2 = deserialize_field2(reader)?;
    let infinity = f1.is_zero() && f2.is_zero();
    if infinity {
        Ok(G2Affine::identity())
    } else {
        Ok(G2Affine::new(f1, f2))
    }
}

fn deserialize_g1_vec<R: Read>(reader: &mut R, n_vars: u32) -> IoResult<Vec<G1Affine>> {
    (0..n_vars).map(|_| deserialize_g1(reader)).collect()
}

fn deserialize_g2_vec<R: Read>(reader: &mut R, n_vars: u32) -> IoResult<Vec<G2Affine>> {
    (0..n_vars).map(|_| deserialize_g2(reader)).collect()
}

@@ -5,9 +5,31 @@ pub mod ffi;
pub mod hashers;
#[cfg(feature = "pmtree-ft")]
pub mod pm_tree_adapter;
#[cfg(not(feature = "stateless"))]
pub mod poseidon_tree;
pub mod protocol;
pub mod public;
#[cfg(test)]
pub mod public_api_tests;
pub mod utils;

// Ensure that only one Merkle tree feature is enabled at a time
#[cfg(any(
    all(feature = "fullmerkletree", feature = "optimalmerkletree"),
    all(feature = "fullmerkletree", feature = "pmtree-ft"),
    all(feature = "optimalmerkletree", feature = "pmtree-ft"),
))]
compile_error!(
    "Only one of `fullmerkletree`, `optimalmerkletree`, or `pmtree-ft` can be enabled at a time."
);

// Ensure that the `stateless` feature is not enabled with any Merkle tree features
#[cfg(all(
    feature = "stateless",
    any(
        feature = "fullmerkletree",
        feature = "optimalmerkletree",
        feature = "pmtree-ft"
    )
))]
compile_error!("Cannot enable any Merkle tree features with stateless");

@@ -261,6 +261,11 @@ impl ZerokitMerkleTree for PmTree {
            .map_err(ZerokitMerkleTreeError::PmtreeErrorKind)
    }

    /// Delete a leaf in the Merkle tree given its index
    ///
    /// Deleting a leaf is done by resetting it to its default value. Note that the next_index
    /// field will not change: a previously used index cannot be reused, which avoids replay
    /// attacks and other unexpected, hard-to-tackle issues.
    fn delete(&mut self, index: usize) -> Result<(), ZerokitMerkleTreeError> {
        self.tree
            .delete(index)
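
// A hedged usage sketch (editor's addition, not in the diff): `delete` resets
// the leaf to the default value, but `leaves_set` and the internal next index
// never move backwards, so the slot stays consumed. Types and trait methods
// are the ones used elsewhere in this diff (PmTree, set, delete, leaves_set).
fn delete_keeps_index_consumed(tree: &mut PmTree) -> Result<(), ZerokitMerkleTreeError> {
    tree.set(0, Fr::from(42u64))?;
    let used = tree.leaves_set(); // one slot consumed
    tree.delete(0)?; // leaf 0 holds the default value again
    assert_eq!(tree.leaves_set(), used); // still consumed: index 0 is not reusable
    Ok(())
}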

@@ -1,37 +1,32 @@
// This crate defines the RLN module default Merkle tree implementation and its Hasher
// Implementation inspired by https://github.com/worldcoin/semaphore-rs/blob/d462a4372f1fd9c27610f2acfe4841fab1d396aa/src/poseidon_tree.rs

// Implementation inspired by https://github.com/worldcoin/semaphore-rs/blob/d462a4372f1fd9c27610f2acfe4841fab1d396aa/src/poseidon_tree.rs (no differences)
#![cfg(not(feature = "stateless"))]

use cfg_if::cfg_if;

// The zerokit RLN default Merkle tree implementation is the OptimalMerkleTree.
// To switch to FullMerkleTree implementation, it is enough to enable the fullmerkletree feature
// The zerokit RLN default Merkle tree implementation is the PMTree from the vacp2p_pmtree crate
// To switch to FullMerkleTree or OptimalMerkleTree, enable the corresponding feature in the Cargo.toml file

cfg_if! {
    if #[cfg(feature = "fullmerkletree")] {
        use utils::{
            FullMerkleTree,
            FullMerkleProof,
        };
        use utils::{FullMerkleTree, FullMerkleProof};
        use crate::hashers::PoseidonHash;

        pub type PoseidonTree = FullMerkleTree<PoseidonHash>;
        pub type MerkleProof = FullMerkleProof<PoseidonHash>;
    } else if #[cfg(feature = "pmtree-ft")] {
        use crate::pm_tree_adapter::{PmTree, PmTreeProof};

        pub type PoseidonTree = PmTree;
        pub type MerkleProof = PmTreeProof;
    } else {
        use crate::hashers::{PoseidonHash};
        use utils::{
            OptimalMerkleTree,
            OptimalMerkleProof,
        };
    } else if #[cfg(feature = "optimalmerkletree")] {
        use utils::{OptimalMerkleTree, OptimalMerkleProof};
        use crate::hashers::PoseidonHash;

        pub type PoseidonTree = OptimalMerkleTree<PoseidonHash>;
        pub type MerkleProof = OptimalMerkleProof<PoseidonHash>;
    } else if #[cfg(feature = "pmtree-ft")] {
        use crate::pm_tree_adapter::{PmTree, PmTreeProof};

        pub type PoseidonTree = PmTree;
        pub type MerkleProof = PmTreeProof;
    } else {
        compile_error!("One of the features `fullmerkletree`, `optimalmerkletree`, or `pmtree-ft` must be enabled.");
    }
}
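
// A hedged usage sketch (editor's addition): whichever backend the feature
// flags select, downstream code only touches the PoseidonTree and MerkleProof
// aliases, so it compiles unchanged under fullmerkletree, optimalmerkletree,
// or pmtree-ft. Mirrors the test setup that appears later in this diff.
//
// use crate::circuit::Fr;
// use utils::ZerokitMerkleTree;
//
// type ConfigOf<T> = <T as ZerokitMerkleTree>::Config;
//
// fn build_default_tree(height: usize) -> PoseidonTree {
//     PoseidonTree::new(height, Fr::from(0), ConfigOf::<PoseidonTree>::default())
//         .expect("tree construction should succeed")
// }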

@@ -1,7 +1,22 @@
// This crate collects all the underlying primitives used to implement RLN

use ark_bn254::Fr;
use ark_ff::AdditiveGroup;
#[cfg(not(feature = "stateless"))]
use {
    crate::error::ConversionError,
    crate::poseidon_tree::PoseidonTree,
    utils::{ZerokitMerkleProof, ZerokitMerkleTree},
};

use crate::circuit::{calculate_rln_witness, qap::CircomReduction, Curve};
use crate::error::{ComputeIdSecretError, ProofError, ProtocolError};
use crate::hashers::{hash_to_field, poseidon_hash};
use crate::public::RLN_IDENTIFIER;
use crate::utils::{
    bytes_le_to_fr, bytes_le_to_vec_fr, bytes_le_to_vec_u8, fr_byte_size, fr_to_bytes_le,
    normalize_usize, to_bigint, vec_fr_to_bytes_le, vec_u8_to_bytes_le, FrOrSecret, IdSecret,
};
use ark_bn254::{Fr, FrConfig};
use ark_ff::{AdditiveGroup, Fp, MontBackend};
use ark_groth16::{prepare_verifying_key, Groth16, Proof as ArkProof, ProvingKey, VerifyingKey};
use ark_relations::r1cs::ConstraintMatrices;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};

@@ -13,17 +28,7 @@ use serde::{Deserialize, Serialize};
#[cfg(test)]
use std::time::Instant;
use tiny_keccak::{Hasher as _, Keccak};

use crate::circuit::{calculate_rln_witness, qap::CircomReduction, Curve};
use crate::error::{ComputeIdSecretError, ConversionError, ProofError, ProtocolError};
use crate::hashers::{hash_to_field, poseidon_hash};
use crate::poseidon_tree::{MerkleProof, PoseidonTree};
use crate::public::RLN_IDENTIFIER;
use crate::utils::{
    bytes_le_to_fr, bytes_le_to_vec_fr, bytes_le_to_vec_u8, fr_byte_size, fr_to_bytes_le,
    normalize_usize, to_bigint, vec_fr_to_bytes_le, vec_u8_to_bytes_le,
};
use utils::{ZerokitMerkleProof, ZerokitMerkleTree};
use zeroize::Zeroize;
///////////////////////////////////////////////////////
// RLN Witness data structure and utility functions
///////////////////////////////////////////////////////

@@ -31,7 +36,7 @@ use utils::{ZerokitMerkleProof, ZerokitMerkleTree};
#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub struct RLNWitnessInput {
    #[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
    identity_secret: Fr,
    identity_secret: IdSecret,
    #[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
    user_message_limit: Fr,
    #[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]

@@ -113,7 +118,7 @@ pub fn serialize_witness(rln_witness: &RLNWitnessInput) -> Result<Vec<u8>, Proto
        fr_byte_size() * (5 + rln_witness.path_elements.len())
            + rln_witness.identity_path_index.len(),
    );
    serialized.extend_from_slice(&fr_to_bytes_le(&rln_witness.identity_secret));
    serialized.extend_from_slice(&rln_witness.identity_secret.to_bytes_le());
    serialized.extend_from_slice(&fr_to_bytes_le(&rln_witness.user_message_limit));
    serialized.extend_from_slice(&fr_to_bytes_le(&rln_witness.message_id));
    serialized.extend_from_slice(&vec_fr_to_bytes_le(&rln_witness.path_elements));

@@ -132,7 +137,7 @@ pub fn serialize_witness(rln_witness: &RLNWitnessInput) -> Result<Vec<u8>, Proto
pub fn deserialize_witness(serialized: &[u8]) -> Result<(RLNWitnessInput, usize), ProtocolError> {
    let mut all_read: usize = 0;

    let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..]);
    let (identity_secret, read) = IdSecret::from_bytes_le(&serialized[all_read..]);
    all_read += read;

    let (user_message_limit, read) = bytes_le_to_fr(&serialized[all_read..]);

@@ -177,13 +182,14 @@ pub fn deserialize_witness(serialized: &[u8]) -> Result<(RLNWitnessInput, usize)
// https://github.com/kilic/rln/blob/7ac74183f8b69b399e3bc96c1ae8ab61c026dc43/src/public.rs#L148
// input_data is [ identity_secret<32> | id_index<8> | user_message_limit<32> | message_id<32> | external_nullifier<32> | signal_len<8> | signal<var> ]
// return value is a rln witness populated according to this information
#[cfg(not(feature = "stateless"))]
pub fn proof_inputs_to_rln_witness(
    tree: &mut PoseidonTree,
    serialized: &[u8],
) -> Result<(RLNWitnessInput, usize), ProtocolError> {
    let mut all_read: usize = 0;

    let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..]);
    let (identity_secret, read) = IdSecret::from_bytes_le(&serialized[all_read..]);
    all_read += read;

    let id_index = usize::try_from(u64::from_le_bytes(

@@ -239,8 +245,9 @@ pub fn proof_inputs_to_rln_witness(
///
/// Returns an error if `message_id` is not within `user_message_limit`.
pub fn rln_witness_from_values(
    identity_secret: Fr,
    merkle_proof: &MerkleProof,
    identity_secret: IdSecret,
    path_elements: Vec<Fp<MontBackend<FrConfig, 4>, 4>>,
    identity_path_index: Vec<u8>,
    x: Fr,
    external_nullifier: Fr,
    user_message_limit: Fr,

@@ -248,9 +255,6 @@ pub fn rln_witness_from_values(
) -> Result<RLNWitnessInput, ProtocolError> {
    message_id_range_check(&message_id, &user_message_limit)?;

    let path_elements = merkle_proof.get_path_elements();
    let identity_path_index = merkle_proof.get_path_index();

    Ok(RLNWitnessInput {
        identity_secret,
        path_elements,

@@ -265,7 +269,7 @@ pub fn rln_witness_from_values(
pub fn random_rln_witness(tree_height: usize) -> RLNWitnessInput {
    let mut rng = thread_rng();

    let identity_secret = hash_to_field(&rng.gen::<[u8; 32]>());
    let identity_secret = IdSecret::rand(&mut rng);
    let x = hash_to_field(&rng.gen::<[u8; 32]>());
    let epoch = hash_to_field(&rng.gen::<[u8; 32]>());
    let rln_identifier = hash_to_field(RLN_IDENTIFIER); //hash_to_field(&rng.gen::<[u8; 32]>());

@@ -298,12 +302,18 @@ pub fn proof_values_from_witness(
    message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;

    // y share
    let a_0 = rln_witness.identity_secret;
    let a_1 = poseidon_hash(&[a_0, rln_witness.external_nullifier, rln_witness.message_id]);
    let y = a_0 + rln_witness.x * a_1;
    let a_0 = &rln_witness.identity_secret;
    let mut to_hash = [
        *(a_0.clone()),
        rln_witness.external_nullifier,
        rln_witness.message_id,
    ];
    let a_1 = poseidon_hash(&to_hash);
    let y = *(a_0.clone()) + rln_witness.x * a_1;

    // Nullifier
    let nullifier = poseidon_hash(&[a_1]);
    to_hash[0].zeroize();

    // Merkle tree root computations
    let root = compute_tree_root(

@@ -371,7 +381,7 @@ pub fn deserialize_proof_values(serialized: &[u8]) -> (RLNProofValues, usize) {

// input_data is [ identity_secret<32> | id_index<8> | user_message_limit<32> | message_id<32> | external_nullifier<32> | signal_len<8> | signal<var> ]
pub fn prepare_prove_input(
    identity_secret: Fr,
    identity_secret: IdSecret,
    id_index: usize,
    user_message_limit: Fr,
    message_id: Fr,

@@ -384,7 +394,7 @@ pub fn prepare_prove_input(
    // - variable length signal data
    let mut serialized = Vec::with_capacity(fr_byte_size() * 4 + 16 + signal.len()); // length of 4 fr elements + 16 bytes (id_index + len) + signal length

    serialized.extend_from_slice(&fr_to_bytes_le(&identity_secret));
    serialized.extend_from_slice(&identity_secret.to_bytes_le());
    serialized.extend_from_slice(&normalize_usize(id_index));
    serialized.extend_from_slice(&fr_to_bytes_le(&user_message_limit));
    serialized.extend_from_slice(&fr_to_bytes_le(&message_id));

@@ -415,12 +425,15 @@ pub fn prepare_verify_input(proof_data: Vec<u8>, signal: &[u8]) -> Vec<u8> {
///////////////////////////////////////////////////////

pub fn compute_tree_root(
    identity_secret: &Fr,
    identity_secret: &IdSecret,
    user_message_limit: &Fr,
    path_elements: &[Fr],
    identity_path_index: &[u8],
) -> Fr {
    let id_commitment = poseidon_hash(&[*identity_secret]);
    let mut to_hash = [*identity_secret.clone()];
    let id_commitment = poseidon_hash(&to_hash);
    to_hash[0].zeroize();

    let mut root = poseidon_hash(&[id_commitment, *user_message_limit]);

    for i in 0..identity_path_index.len() {

@@ -441,10 +454,12 @@ pub fn compute_tree_root(
// Generates a tuple (identity_secret_hash, id_commitment) where
// identity_secret_hash is random and id_commitment = PoseidonHash(identity_secret_hash)
// RNG is instantiated using thread_rng()
pub fn keygen() -> (Fr, Fr) {
pub fn keygen() -> (IdSecret, Fr) {
    let mut rng = thread_rng();
    let identity_secret_hash = Fr::rand(&mut rng);
    let id_commitment = poseidon_hash(&[identity_secret_hash]);
    let identity_secret_hash = IdSecret::rand(&mut rng);
    let mut to_hash = [*identity_secret_hash.clone()];
    let id_commitment = poseidon_hash(&to_hash);
    to_hash[0].zeroize();
    (identity_secret_hash, id_commitment)
}
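
// A hedged sketch (editor's addition) of the new call shape: the secret half
// of the pair is now an IdSecret, so in-crate serialization goes through the
// zeroizing to_bytes_le() helper (pub(crate)) rather than fr_to_bytes_le.
fn keygen_usage_sketch() -> Fr {
    let (identity_secret_hash, id_commitment) = keygen();
    // Public values stay plain Fr and can be hashed freely.
    let user_message_limit = Fr::from(100);
    let rate_commitment = poseidon_hash(&[id_commitment, user_message_limit]);
    // The returned buffer is Zeroizing, so it wipes itself when dropped.
    let secret_bytes = identity_secret_hash.to_bytes_le();
    assert_eq!(secret_bytes.len(), fr_byte_size());
    rate_commitment
}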

@@ -512,7 +527,10 @@ pub fn extended_seeded_keygen(signal: &[u8]) -> (Fr, Fr, Fr, Fr) {
    )
}

pub fn compute_id_secret(share1: (Fr, Fr), share2: (Fr, Fr)) -> Result<Fr, ComputeIdSecretError> {
pub fn compute_id_secret(
    share1: (Fr, Fr),
    share2: (Fr, Fr),
) -> Result<IdSecret, ComputeIdSecretError> {
    // Assuming a0 is the identity secret and a1 = poseidonHash([a0, external_nullifier]),
    // a (x,y) share satisfies the following relation
    // y = a_0 + x * a_1

@@ -525,10 +543,11 @@ pub fn compute_id_secret(share1: (Fr, Fr), share2: (Fr, Fr)) -> Result<Fr, Compu

    if (x1 - x2) != Fr::ZERO {
        let a_1 = (y1 - y2) / (x1 - x2);
        let a_0 = y1 - x1 * a_1;
        let mut a_0 = y1 - x1 * a_1;

        // If shares come from the same polynomial, a0 is correctly recovered and a1 = poseidonHash([a0, external_nullifier])
        Ok(a_0)
        let id_secret = IdSecret::from(&mut a_0);
        Ok(id_secret)
    } else {
        Err(ComputeIdSecretError::DivisionByZero)
    }
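
// A worked sketch (editor's addition, plain Fr arithmetic, no RLN types):
// recovery is ordinary degree-one Lagrange interpolation, so two distinct
// evaluations of y = a_0 + x * a_1 pin the line down exactly.
#[cfg(test)]
mod share_recovery_sketch {
    use ark_bn254::Fr;

    #[test]
    fn two_shares_recover_the_line() {
        let (a_0, a_1) = (Fr::from(7u64), Fr::from(3u64));
        let (x1, x2) = (Fr::from(1u64), Fr::from(2u64));
        let (y1, y2) = (a_0 + x1 * a_1, a_0 + x2 * a_1);

        let rec_a_1 = (y1 - y2) / (x1 - x2);
        let rec_a_0 = y1 - x1 * rec_a_1;
        assert_eq!((rec_a_0, rec_a_1), (a_0, a_1));
    }
}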

@@ -608,7 +627,7 @@ pub fn generate_proof_with_witness(
/// Returns an error if `rln_witness.message_id` is not within `rln_witness.user_message_limit`.
pub fn inputs_for_witness_calculation(
    rln_witness: &RLNWitnessInput,
) -> Result<[(&str, Vec<Fr>); 7], ProtocolError> {
) -> Result<[(&str, Vec<FrOrSecret>); 7], ProtocolError> {
    message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;

    let mut identity_path_index = Vec::with_capacity(rln_witness.identity_path_index.len());

@@ -618,13 +637,33 @@ pub fn inputs_for_witness_calculation(
        .for_each(|v| identity_path_index.push(Fr::from(*v)));

    Ok([
        ("identitySecret", vec![rln_witness.identity_secret]),
        ("userMessageLimit", vec![rln_witness.user_message_limit]),
        ("messageId", vec![rln_witness.message_id]),
        ("pathElements", rln_witness.path_elements.clone()),
        ("identityPathIndex", identity_path_index),
        ("x", vec![rln_witness.x]),
        ("externalNullifier", vec![rln_witness.external_nullifier]),
        (
            "identitySecret",
            vec![rln_witness.identity_secret.clone().into()],
        ),
        (
            "userMessageLimit",
            vec![rln_witness.user_message_limit.into()],
        ),
        ("messageId", vec![rln_witness.message_id.into()]),
        (
            "pathElements",
            rln_witness
                .path_elements
                .iter()
                .cloned()
                .map(Into::into)
                .collect(),
        ),
        (
            "identityPathIndex",
            identity_path_index.into_iter().map(Into::into).collect(),
        ),
        ("x", vec![rln_witness.x.into()]),
        (
            "externalNullifier",
            vec![rln_witness.external_nullifier.into()],
        ),
    ])
}

@@ -2,17 +2,16 @@ use crate::circuit::{zkey_from_raw, Curve, Fr};
use crate::hashers::{hash_to_field, poseidon_hash as utils_poseidon_hash};
use crate::protocol::{
    compute_id_secret, deserialize_proof_values, deserialize_witness, extended_keygen,
    extended_seeded_keygen, generate_proof, keygen, proof_inputs_to_rln_witness,
    proof_values_from_witness, rln_witness_to_bigint_json, rln_witness_to_json, seeded_keygen,
    serialize_proof_values, serialize_witness, verify_proof,
};
use crate::utils::{
    bytes_le_to_fr, bytes_le_to_vec_fr, bytes_le_to_vec_u8, fr_byte_size, fr_to_bytes_le,
    vec_fr_to_bytes_le, vec_u8_to_bytes_le,
    extended_seeded_keygen, keygen, proof_values_from_witness, rln_witness_to_bigint_json,
    rln_witness_to_json, seeded_keygen, serialize_proof_values, verify_proof,
};
use crate::utils::{bytes_le_to_fr, bytes_le_to_vec_fr, fr_byte_size, fr_to_bytes_le};
#[cfg(not(target_arch = "wasm32"))]
use {
    crate::circuit::{graph_from_folder, zkey_from_folder},
    crate::{
        circuit::{graph_from_folder, zkey_from_folder},
        protocol::generate_proof,
    },
    std::default::Default,
};

@@ -23,9 +22,12 @@ use crate::protocol::generate_proof_with_witness;
/// used by tests etc. as well
#[cfg(not(feature = "stateless"))]
use {
    crate::protocol::{proof_inputs_to_rln_witness, serialize_witness},
    crate::utils::{bytes_le_to_vec_u8, vec_fr_to_bytes_le, vec_u8_to_bytes_le},
    crate::{circuit::TEST_TREE_HEIGHT, poseidon_tree::PoseidonTree},
    serde_json::{json, Value},
    std::str::FromStr,
    utils::error::ZerokitMerkleTreeError,
    utils::{Hasher, ZerokitMerkleProof, ZerokitMerkleTree},
};

@@ -36,7 +38,6 @@ use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Read, Write};
#[cfg(target_arch = "wasm32")]
use num_bigint::BigInt;
use std::io::Cursor;
use utils::error::ZerokitMerkleTreeError;

/// The application-specific RLN identifier.
///

@@ -134,7 +135,7 @@ impl RLN {
    ///
    /// Input parameters are
    /// - `tree_height`: the height of the internal Merkle tree
    /// - `zkey_vec`: a byte vector containing the proving key (`rln_final.zkey`) or (`rln_final.arkzkey`) as binary file
    /// - `zkey_vec`: a byte vector containing the proving key (`rln_final.arkzkey`) as binary file
    /// - `graph_data`: a byte vector containing the graph data (`graph.bin`) as binary file
    /// - `tree_config_input`: a reader for a string containing a JSON with the Merkle tree configuration
    ///

@@ -147,7 +148,7 @@ impl RLN {
    /// let resources_folder = "./resources/tree_height_20/";
    ///
    /// let mut resources: Vec<Vec<u8>> = Vec::new();
    /// for filename in ["rln_final.zkey", "graph.bin"] {
    /// for filename in ["rln_final.arkzkey", "graph.bin"] {
    ///     let fullpath = format!("{resources_folder}{filename}");
    ///     let mut file = File::open(&fullpath).expect("no file found");
    ///     let metadata = std::fs::metadata(&fullpath).expect("unable to read metadata");

@@ -205,7 +206,7 @@ impl RLN {
    /// Creates a new stateless RLN object by passing circuit resources as byte vectors.
    ///
    /// Input parameters are
    /// - `zkey_vec`: a byte vector containing the proving key (`rln_final.zkey`) or (`rln_final.arkzkey`) as binary file
    /// - `zkey_vec`: a byte vector containing the proving key (`rln_final.arkzkey`) as binary file
    /// - `graph_data`: a byte vector containing the graph data (`graph.bin`) as binary file
    ///
    /// Example:

@@ -216,7 +217,7 @@ impl RLN {
    /// let resources_folder = "./resources/tree_height_20/";
    ///
    /// let mut resources: Vec<Vec<u8>> = Vec::new();
    /// for filename in ["rln_final.zkey", "graph.bin"] {
    /// for filename in ["rln_final.arkzkey", "graph.bin"] {
    ///     let fullpath = format!("{resources_folder}{filename}");
    ///     let mut file = File::open(&fullpath).expect("no file found");
    ///     let metadata = std::fs::metadata(&fullpath).expect("unable to read metadata");

@@ -245,14 +246,14 @@ impl RLN {
    /// Creates a new stateless RLN object by passing circuit resources as a byte vector.
    ///
    /// Input parameters are
    /// - `zkey_vec`: a byte vector containing the proving key (`rln_final.zkey`) or (`rln_final.arkzkey`) as binary file
    /// - `zkey_vec`: a byte vector containing the proving key (`rln_final.arkzkey`) as binary file
    ///
    /// Example:
    /// ```
    /// use std::fs::File;
    /// use std::io::Read;
    ///
    /// let zkey_path = "./resources/tree_height_20/rln_final.zkey";
    /// let zkey_path = "./resources/tree_height_20/rln_final.arkzkey";
    ///
    /// let mut file = File::open(zkey_path).expect("Failed to open file");
    /// let metadata = std::fs::metadata(zkey_path).expect("Failed to read metadata");

@@ -1133,7 +1134,7 @@ impl RLN {
    /// ```
    pub fn key_gen<W: Write>(&self, mut output_data: W) -> Result<(), RLNError> {
        let (identity_secret_hash, id_commitment) = keygen();
        output_data.write_all(&fr_to_bytes_le(&identity_secret_hash))?;
        output_data.write_all(&identity_secret_hash.to_bytes_le())?;
        output_data.write_all(&fr_to_bytes_le(&id_commitment))?;

        Ok(())

@@ -1324,7 +1325,7 @@ impl RLN {
            compute_id_secret(share1, share2).map_err(RLNError::RecoverSecret)?;

        // If an identity secret hash is recovered, we write it to output_data, otherwise nothing will be written.
        output_data.write_all(&fr_to_bytes_le(&recovered_identity_secret_hash))?;
        output_data.write_all(&recovered_identity_secret_hash.to_bytes_le())?;
    }

    Ok(())

@@ -4,16 +4,18 @@ use crate::protocol::{
    verify_proof, RLNProofValues,
};
use crate::public::RLN;
use crate::utils::{generate_input_buffer, str_to_fr};
use crate::utils::str_to_fr;
use ark_groth16::Proof as ArkProof;
use ark_serialize::CanonicalDeserialize;
use serde_json::{json, Value};
use std::io::Cursor;
use std::str::FromStr;

use serde_json::{json, Value};
#[cfg(not(feature = "stateless"))]
use crate::utils::generate_input_buffer;

fn fq_from_str(s: &str) -> ark_bn254::Fq {
    ark_bn254::Fq::from_str(&s).unwrap()
    ark_bn254::Fq::from_str(s).unwrap()
}

fn g1_from_str(g1: &[String]) -> ark_bn254::G1Affine {

@@ -43,7 +45,7 @@ fn value_to_string_vec(value: &Value) -> Vec<String> {
    value
        .as_array()
        .unwrap()
        .into_iter()
        .iter()
        .map(|val| val.as_str().unwrap().to_string())
        .collect()
}

@@ -90,7 +92,7 @@ fn test_groth16_proof_hardcoded() {
                .as_array()
                .unwrap()
                .iter()
                .map(|item| value_to_string_vec(item))
                .map(value_to_string_vec)
                .collect::<Vec<Vec<String>>>(),
        ),
        c: g1_from_str(&value_to_string_vec(&valid_snarkjs_proof["pi_c"])),

@@ -202,7 +204,7 @@ mod tree_test {
            // We check if the number of leaves set is consistent
            assert_eq!(rln.tree.leaves_set(), i);

            let mut buffer = Cursor::new(fr_to_bytes_le(&leaf));
            let mut buffer = Cursor::new(fr_to_bytes_le(leaf));
            rln.set_leaf(i, &mut buffer).unwrap();
        }

@@ -216,7 +218,7 @@ mod tree_test {

        // We add leaves one by one using the internal index (new leaves go in the next available position)
        for leaf in &leaves {
            let mut buffer = Cursor::new(fr_to_bytes_le(&leaf));
            let mut buffer = Cursor::new(fr_to_bytes_le(leaf));
            rln.set_next_leaf(&mut buffer).unwrap();
        }

@@ -328,7 +330,7 @@ mod tree_test {

        // We add leaves one by one using the internal index (new leaves go in the next available position)
        for leaf in &leaves {
            let mut buffer = Cursor::new(fr_to_bytes_le(&leaf));
            let mut buffer = Cursor::new(fr_to_bytes_le(leaf));
            rln.set_next_leaf(&mut buffer).unwrap();
        }

@@ -826,7 +828,7 @@ mod tree_test {
            .verify_with_roots(&mut input_buffer.clone(), &mut roots_buffer)
            .unwrap();

        assert_eq!(verified, false);
        assert!(!verified);

        // We get the root of the tree obtained adding one leaf per time
        let mut buffer = Cursor::new(Vec::<u8>::new());

@@ -880,7 +882,7 @@ mod tree_test {
        // We prepare input for generate_rln_proof API
        // input_data is [ identity_secret<32> | id_index<8> | user_message_limit<32> | message_id<32> | external_nullifier<32> | signal_len<8> | signal<var> ]
        let prove_input1 = prepare_prove_input(
            identity_secret_hash,
            identity_secret_hash.clone(),
            identity_index,
            user_message_limit,
            message_id,

@@ -889,7 +891,7 @@ mod tree_test {
        );

        let prove_input2 = prepare_prove_input(
            identity_secret_hash,
            identity_secret_hash.clone(),
            identity_index,
            user_message_limit,
            message_id,

@@ -932,7 +934,7 @@ mod tree_test {

        // We check if the recovered identity secret hash corresponds to the original one
        let (recovered_identity_secret_hash, _) = bytes_le_to_fr(&serialized_identity_secret_hash);
        assert_eq!(recovered_identity_secret_hash, identity_secret_hash);
        assert_eq!(recovered_identity_secret_hash, *identity_secret_hash);

        // We now test that computing identity_secret_hash is unsuccessful if shares computed from two different identity secret hashes but within same epoch are passed

@@ -982,20 +984,22 @@ mod tree_test {

        // ensure that the recovered secret does not match with either of the
        // used secrets in proof generation
        assert_ne!(recovered_identity_secret_hash_new, identity_secret_hash_new);
        assert_ne!(
            recovered_identity_secret_hash_new,
            *identity_secret_hash_new
        );
    }
}

#[cfg(feature = "stateless")]
mod stateless_test {
    use crate::circuit::{Fr, TEST_TREE_HEIGHT};
    use crate::hashers::{hash_to_field, poseidon_hash as utils_poseidon_hash};
    use crate::poseidon_tree::PoseidonTree;
    use crate::hashers::{hash_to_field, poseidon_hash as utils_poseidon_hash, PoseidonHash};
    use crate::protocol::*;
    use crate::public::RLN;
    use crate::utils::*;
    use std::io::Cursor;
    use utils::ZerokitMerkleTree;
    use utils::{OptimalMerkleTree, ZerokitMerkleProof, ZerokitMerkleTree};

    use ark_std::{rand::thread_rng, UniformRand};
    use rand::Rng;

@@ -1008,10 +1012,10 @@ mod stateless_test {
        let mut rln = RLN::new().unwrap();

        let default_leaf = Fr::from(0);
        let mut tree = PoseidonTree::new(
        let mut tree: OptimalMerkleTree<PoseidonHash> = OptimalMerkleTree::new(
            TEST_TREE_HEIGHT,
            default_leaf,
            ConfigOf::<PoseidonTree>::default(),
            ConfigOf::<OptimalMerkleTree<PoseidonHash>>::default(),
        )
        .unwrap();

@@ -1042,7 +1046,8 @@ mod stateless_test {

        let rln_witness = rln_witness_from_values(
            identity_secret_hash,
            &merkle_proof,
            merkle_proof.get_path_elements(),
            merkle_proof.get_path_index(),
            x,
            external_nullifier,
            user_message_limit,

@@ -1084,7 +1089,7 @@ mod stateless_test {
            .verify_with_roots(&mut input_buffer.clone(), &mut roots_buffer)
            .unwrap();

        assert_eq!(verified, false);
        assert!(!verified);

        // We get the root of the tree obtained adding one leaf per time
        let root = tree.root();

@@ -1105,10 +1110,10 @@ mod stateless_test {
        let mut rln = RLN::new().unwrap();

        let default_leaf = Fr::from(0);
        let mut tree = PoseidonTree::new(
        let mut tree: OptimalMerkleTree<PoseidonHash> = OptimalMerkleTree::new(
            TEST_TREE_HEIGHT,
            default_leaf,
            ConfigOf::<PoseidonTree>::default(),
            ConfigOf::<OptimalMerkleTree<PoseidonHash>>::default(),
        )
        .unwrap();

@@ -1136,8 +1141,9 @@ mod stateless_test {
        let merkle_proof = tree.proof(identity_index).expect("proof should exist");

        let rln_witness1 = rln_witness_from_values(
            identity_secret_hash,
            &merkle_proof,
            identity_secret_hash.clone(),
            merkle_proof.get_path_elements(),
            merkle_proof.get_path_index(),
            x1,
            external_nullifier,
            user_message_limit,

@@ -1146,8 +1152,9 @@ mod stateless_test {
        .unwrap();

        let rln_witness2 = rln_witness_from_values(
            identity_secret_hash,
            &merkle_proof,
            identity_secret_hash.clone(),
            merkle_proof.get_path_elements(),
            merkle_proof.get_path_index(),
            x2,
            external_nullifier,
            user_message_limit,

@@ -1186,7 +1193,7 @@ mod stateless_test {

        // We check if the recovered identity secret hash corresponds to the original one
        let (recovered_identity_secret_hash, _) = bytes_le_to_fr(&serialized_identity_secret_hash);
        assert_eq!(recovered_identity_secret_hash, identity_secret_hash);
        assert_eq!(recovered_identity_secret_hash, *identity_secret_hash);

        // We now test that computing identity_secret_hash is unsuccessful if shares computed from two different identity secret hashes but within same epoch are passed

@@ -1202,8 +1209,9 @@ mod stateless_test {
        let merkle_proof_new = tree.proof(identity_index_new).expect("proof should exist");

        let rln_witness3 = rln_witness_from_values(
            identity_secret_hash_new,
            &merkle_proof_new,
            identity_secret_hash_new.clone(),
            merkle_proof_new.get_path_elements(),
            merkle_proof_new.get_path_index(),
            x3,
            external_nullifier,
            user_message_limit,

@@ -1230,7 +1238,7 @@ mod stateless_test {

        let serialized_identity_secret_hash = output_buffer.into_inner();
        let (recovered_identity_secret_hash_new, _) =
            bytes_le_to_fr(&serialized_identity_secret_hash);
            IdSecret::from_bytes_le(&serialized_identity_secret_hash);

        // ensure that the recovered secret does not match with either of the
        // used secrets in proof generation

@@ -3,10 +3,16 @@
use crate::circuit::Fr;
use crate::error::ConversionError;
use ark_ff::PrimeField;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_std::UniformRand;
use num_bigint::{BigInt, BigUint};
use num_traits::Num;
use rand::Rng;
use ruint::aliases::U256;
use serde_json::json;
use std::io::Cursor;
use std::ops::Deref;
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};

#[inline(always)]
pub fn to_bigint(el: &Fr) -> BigInt {

@@ -14,7 +20,7 @@ pub fn to_bigint(el: &Fr) -> BigInt {
}

#[inline(always)]
pub fn fr_byte_size() -> usize {
pub const fn fr_byte_size() -> usize {
    let mbs = <Fr as PrimeField>::MODULUS_BIT_SIZE;
    ((mbs + 64 - (mbs % 64)) / 8) as usize
}
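
// Editor's note: with the const fn change this is checkable at compile time.
// Worked example for BN254: MODULUS_BIT_SIZE = 254 and 254 % 64 = 62, so
// fr_byte_size() = (254 + 64 - 62) / 8 = 256 / 8 = 32 bytes (four u64 limbs).
const _: () = assert!(fr_byte_size() == 32);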

@@ -106,10 +112,9 @@ pub fn bytes_le_to_vec_u8(input: &[u8]) -> Result<(Vec<u8>, usize), ConversionEr
#[inline(always)]
pub fn bytes_le_to_vec_fr(input: &[u8]) -> Result<(Vec<Fr>, usize), ConversionError> {
    let mut read: usize = 0;
    let mut res: Vec<Fr> = Vec::new();

    let len = usize::try_from(u64::from_le_bytes(input[0..8].try_into()?))?;
    read += 8;
    let mut res: Vec<Fr> = Vec::with_capacity(len);

    let el_size = fr_byte_size();
    for i in 0..len {

@@ -150,3 +155,81 @@ pub fn normalize_usize(input: usize) -> [u8; 8] {
pub fn generate_input_buffer() -> Cursor<String> {
    Cursor::new(json!({}).to_string())
}

#[derive(
    Debug, Zeroize, ZeroizeOnDrop, Clone, PartialEq, CanonicalSerialize, CanonicalDeserialize,
)]
pub struct IdSecret(ark_bn254::Fr);

impl IdSecret {
    pub fn rand<R: Rng + ?Sized>(rng: &mut R) -> Self {
        let mut fr = Fr::rand(rng);
        let res = Self::from(&mut fr);
        // No need to zeroize fr (already zeroized in the From implementation)
        #[allow(clippy::let_and_return)]
        res
    }

    pub fn from_bytes_le(input: &[u8]) -> (Self, usize) {
        let el_size = fr_byte_size();
        let b_uint = BigUint::from_bytes_le(&input[0..el_size]);
        let mut fr = Fr::from(b_uint);
        let res = IdSecret::from(&mut fr);
        // Note: no zeroize on b_uint as it has been moved
        (res, el_size)
    }

    pub(crate) fn to_bytes_le(&self) -> Zeroizing<Vec<u8>> {
        let input_biguint: BigUint = self.0.into();
        let mut res = input_biguint.to_bytes_le();
        res.resize(fr_byte_size(), 0);
        Zeroizing::new(res)
    }

    /// Warning: this can leak the secret value
    /// Warning: the leaked value is of type `U256`, which implements `Copy` (copies will not be zeroized)
    pub(crate) fn to_u256(&self) -> U256 {
        let mut big_int = self.0.into_bigint();
        let res = U256::from_limbs(big_int.0);
        big_int.zeroize();
        res
    }
}

impl From<&mut Fr> for IdSecret {
    fn from(value: &mut Fr) -> Self {
        let id_secret = Self(*value);
        value.zeroize();
        id_secret
    }
}

impl Deref for IdSecret {
    type Target = Fr;

    /// Deref to &Fr
    ///
    /// Warning: this can leak the secret value
    /// Warning: the leaked value is of type `Fr`, which implements `Copy` (copies will not be zeroized)
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

#[derive(Debug, Zeroize, ZeroizeOnDrop)]
pub enum FrOrSecret {
    IdSecret(IdSecret),
    Fr(Fr),
}

impl From<Fr> for FrOrSecret {
    fn from(value: Fr) -> Self {
        FrOrSecret::Fr(value)
    }
}

impl From<IdSecret> for FrOrSecret {
    fn from(value: IdSecret) -> Self {
        FrOrSecret::IdSecret(value)
    }
}
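
// A hedged round-trip sketch (editor's addition, in-crate since to_bytes_le
// is pub(crate)): construct via rand or From<&mut Fr> (which zeroizes the
// source), serialize through the Zeroizing buffer, then deserialize.
#[cfg(test)]
mod id_secret_sketch {
    use super::*;

    #[test]
    fn bytes_round_trip() {
        let mut rng = rand::thread_rng();
        let secret = IdSecret::rand(&mut rng);

        let bytes = secret.to_bytes_le(); // wiped when dropped
        let (restored, read) = IdSecret::from_bytes_le(&bytes);

        assert_eq!(read, fr_byte_size());
        assert_eq!(restored, secret);
    }
}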

124
rln/tests/ffi.rs
@@ -14,6 +14,7 @@ mod test {
    use std::io::Read;
    use std::mem::MaybeUninit;
    use std::time::{Duration, Instant};
    use zeroize::Zeroize;

    const NO_OF_LEAVES: usize = 256;

@@ -27,7 +28,7 @@ mod test {
    }

    fn set_leaves_init(rln_pointer: &mut RLN, leaves: &[Fr]) {
        let leaves_ser = vec_fr_to_bytes_le(&leaves);
        let leaves_ser = vec_fr_to_bytes_le(leaves);
        let input_buffer = &Buffer::from(leaves_ser.as_ref());
        let success = init_tree_with_leaves(rln_pointer, input_buffer);
        assert!(success, "init tree with leaves call failed");

@@ -49,14 +50,14 @@ mod test {
        root
    }

    fn identity_pair_gen(rln_pointer: &mut RLN) -> (Fr, Fr) {
    fn identity_pair_gen(rln_pointer: &mut RLN) -> (IdSecret, Fr) {
        let mut output_buffer = MaybeUninit::<Buffer>::uninit();
        let success = key_gen(rln_pointer, output_buffer.as_mut_ptr());
        assert!(success, "key gen call failed");
        let output_buffer = unsafe { output_buffer.assume_init() };
        let result_data = <&[u8]>::from(&output_buffer).to_vec();
        let (identity_secret_hash, read) = bytes_le_to_fr(&result_data);
        let (id_commitment, _) = bytes_le_to_fr(&result_data[read..].to_vec());
        let (identity_secret_hash, read) = IdSecret::from_bytes_le(&result_data);
        let (id_commitment, _) = bytes_le_to_fr(&result_data[read..]);
        (identity_secret_hash, id_commitment)
    }

@@ -80,7 +81,7 @@ mod test {
        // We first add leaves one by one specifying the index
        for (i, leaf) in leaves.iter().enumerate() {
            // We prepare the rate_commitment and we set the leaf at provided index
            let leaf_ser = fr_to_bytes_le(&leaf);
            let leaf_ser = fr_to_bytes_le(leaf);
            let input_buffer = &Buffer::from(leaf_ser.as_ref());
            let success = set_leaf(rln_pointer, i, input_buffer);
            assert!(success, "set leaf call failed");

@@ -95,7 +96,7 @@ mod test {

        // We add leaves one by one using the internal index (new leaves go in the next available position)
        for leaf in &leaves {
            let leaf_ser = fr_to_bytes_le(&leaf);
            let leaf_ser = fr_to_bytes_le(leaf);
            let input_buffer = &Buffer::from(leaf_ser.as_ref());
            let success = set_next_leaf(rln_pointer, input_buffer);
            assert!(success, "set next leaf call failed");

@@ -156,7 +157,7 @@ mod test {
        // random number between 0..no_of_leaves
        let mut rng = thread_rng();
        let set_index = rng.gen_range(0..NO_OF_LEAVES) as usize;
        println!("set_index: {}", set_index);
        println!("set_index: {set_index}");

        // We add leaves in a batch into the tree
        set_leaves_init(rln_pointer, &leaves);

@@ -188,7 +189,7 @@ mod test {

        // We add leaves one by one using the internal index (new leaves go in the next available position)
        for leaf in &leaves {
            let leaf_ser = fr_to_bytes_le(&leaf);
            let leaf_ser = fr_to_bytes_le(leaf);
            let input_buffer = &Buffer::from(leaf_ser.as_ref());
            let success = set_next_leaf(rln_pointer, input_buffer);
            assert!(success, "set next leaf call failed");

@@ -225,12 +226,7 @@ mod test {
        let leaves = vec_fr_to_bytes_le(&last_leaf);
        let leaves_buffer = &Buffer::from(leaves.as_ref());

        let success = atomic_operation(
            rln_pointer,
            last_leaf_index as usize,
            leaves_buffer,
            indices_buffer,
        );
        let success = atomic_operation(rln_pointer, last_leaf_index, leaves_buffer, indices_buffer);
        assert!(success, "atomic operation call failed");

        // We get the root of the tree obtained after a no-op

@@ -271,8 +267,11 @@ mod test {
        let rln_pointer = create_rln_instance();

        // generate identity
        let identity_secret_hash = hash_to_field(b"test-merkle-proof");
        let id_commitment = utils_poseidon_hash(&[identity_secret_hash]);
        let mut identity_secret_hash_ = hash_to_field(b"test-merkle-proof");
        let identity_secret_hash = IdSecret::from(&mut identity_secret_hash_);
        let mut to_hash = [*identity_secret_hash.clone()];
        let id_commitment = utils_poseidon_hash(&to_hash);
        to_hash[0].zeroize();
        let user_message_limit = Fr::from(100);
        let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit]);

@@ -305,7 +304,7 @@ mod test {
        let result_data = <&[u8]>::from(&output_buffer).to_vec();

        let (path_elements, read) = bytes_le_to_vec_fr(&result_data).unwrap();
        let (identity_path_index, _) = bytes_le_to_vec_u8(&result_data[read..].to_vec()).unwrap();
        let (identity_path_index, _) = bytes_le_to_vec_u8(&result_data[read..]).unwrap();

        // We check correct computation of the path and indexes
        let expected_path_elements: Vec<Fr> = [

@@ -393,7 +392,7 @@ mod test {
            let success = verify(rln_pointer, input_buffer, proof_is_valid_ptr);
            verify_time += now.elapsed().as_nanos();
            assert!(success, "verify call failed");
            assert_eq!(proof_is_valid, true);
            assert!(proof_is_valid);
        }

        println!(

@@ -415,12 +414,9 @@ mod test {
        // We obtain the root from the RLN instance
        let root_rln_folder = get_tree_root(rln_pointer);

        #[cfg(feature = "arkzkey")]
        let zkey_path = "./resources/tree_height_20/rln_final.arkzkey";
        #[cfg(not(feature = "arkzkey"))]
        let zkey_path = "./resources/tree_height_20/rln_final.zkey";
        let mut zkey_file = File::open(&zkey_path).expect("no file found");
        let metadata = std::fs::metadata(&zkey_path).expect("unable to read metadata");
        let mut zkey_file = File::open(zkey_path).expect("no file found");
        let metadata = std::fs::metadata(zkey_path).expect("unable to read metadata");
        let mut zkey_buffer = vec![0; metadata.len() as usize];
        zkey_file
            .read_exact(&mut zkey_buffer)

@@ -429,8 +425,8 @@ mod test {
        let zkey_data = &Buffer::from(&zkey_buffer[..]);

        let graph_data = "./resources/tree_height_20/graph.bin";
        let mut graph_file = File::open(&graph_data).expect("no file found");
        let metadata = std::fs::metadata(&graph_data).expect("unable to read metadata");
        let mut graph_file = File::open(graph_data).expect("no file found");
        let metadata = std::fs::metadata(graph_data).expect("unable to read metadata");
        let mut graph_buffer = vec![0; metadata.len() as usize];
        graph_file
            .read_exact(&mut graph_buffer)

@@ -525,7 +521,7 @@ mod test {
        let proof_is_valid_ptr = &mut proof_is_valid as *mut bool;
        let success = verify_rln_proof(rln_pointer, input_buffer, proof_is_valid_ptr);
        assert!(success, "verify call failed");
        assert_eq!(proof_is_valid, true);
        assert!(proof_is_valid);
    }

    #[test]

@@ -600,7 +596,7 @@ mod test {
            verify_with_roots(rln_pointer, input_buffer, roots_buffer, proof_is_valid_ptr);
        assert!(success, "verify call failed");
        // Proof should be valid
        assert_eq!(proof_is_valid, true);
        assert!(proof_is_valid);

        // We then try to verify against some random values not containing the correct one.
        for _ in 0..5 {

@@ -614,7 +610,7 @@ mod test {
            verify_with_roots(rln_pointer, input_buffer, roots_buffer, proof_is_valid_ptr);
        assert!(success, "verify call failed");
        // Proof should be invalid.
        assert_eq!(proof_is_valid, false);
        assert!(!proof_is_valid);

        // We finally include the correct root
        // We get the root of the tree obtained adding one leaf per time

@@ -630,7 +626,7 @@ mod test {
            verify_with_roots(rln_pointer, input_buffer, roots_buffer, proof_is_valid_ptr);
        assert!(success, "verify call failed");
        // Proof should be valid.
        assert_eq!(proof_is_valid, true);
        assert!(proof_is_valid);
    }

    #[test]

@@ -674,7 +670,7 @@ mod test {
        // We prepare input for generate_rln_proof API
        // input_data is [ identity_secret<32> | id_index<8> | user_message_limit<32> | message_id<32> | external_nullifier<32> | signal_len<8> | signal<var> ]
        let prove_input1 = prepare_prove_input(
            identity_secret_hash,
            identity_secret_hash.clone(),
            identity_index,
            user_message_limit,
            message_id,

@@ -683,7 +679,7 @@ mod test {
        );

        let prove_input2 = prepare_prove_input(
            identity_secret_hash,
            identity_secret_hash.clone(),
            identity_index,
            user_message_limit,
            message_id,

@@ -718,7 +714,7 @@ mod test {

        // We check if the recovered identity secret hash corresponds to the original one
        let (recovered_identity_secret_hash, _) = bytes_le_to_fr(&serialized_identity_secret_hash);
        assert_eq!(recovered_identity_secret_hash, identity_secret_hash);
        assert_eq!(recovered_identity_secret_hash, *identity_secret_hash);

        // We now test that computing identity_secret_hash is unsuccessful if shares computed from two different identity secret hashes but within same epoch are passed

@@ -772,7 +768,10 @@ mod test {

        // ensure that the recovered secret does not match with either of the
        // used secrets in proof generation
        assert_ne!(recovered_identity_secret_hash_new, identity_secret_hash_new);
        assert_ne!(
            recovered_identity_secret_hash_new,
            *identity_secret_hash_new
        );
    }

    #[test]

@@ -790,7 +789,7 @@ mod test {
        let output_buffer = unsafe { output_buffer.assume_init() };
        let result_data = <&[u8]>::from(&output_buffer).to_vec();
        let (identity_secret_hash, read) = bytes_le_to_fr(&result_data);
        let (id_commitment, _) = bytes_le_to_fr(&result_data[read..].to_vec());
        let (id_commitment, _) = bytes_le_to_fr(&result_data[read..]);

        // We check against expected values
        let expected_identity_secret_hash_seed_bytes = str_to_fr(

@@ -991,14 +990,15 @@ mod stateless_test {
    use rln::circuit::*;
    use rln::ffi::generate_rln_proof_with_witness;
    use rln::ffi::{hash as ffi_hash, poseidon_hash as ffi_poseidon_hash, *};
    use rln::hashers::{hash_to_field, poseidon_hash as utils_poseidon_hash, ROUND_PARAMS};
    use rln::poseidon_tree::PoseidonTree;
    use rln::hashers::{
        hash_to_field, poseidon_hash as utils_poseidon_hash, PoseidonHash, ROUND_PARAMS,
    };
    use rln::protocol::*;
    use rln::public::RLN;
    use rln::utils::*;
    use std::mem::MaybeUninit;
    use std::time::{Duration, Instant};
    use utils::ZerokitMerkleTree;
    use utils::{OptimalMerkleTree, ZerokitMerkleProof, ZerokitMerkleTree};

    type ConfigOf<T> = <T as ZerokitMerkleTree>::Config;

@@ -1009,14 +1009,14 @@ mod stateless_test {
        unsafe { &mut *rln_pointer.assume_init() }
    }

    fn identity_pair_gen(rln_pointer: &mut RLN) -> (Fr, Fr) {
    fn identity_pair_gen(rln_pointer: &mut RLN) -> (IdSecret, Fr) {
        let mut output_buffer = MaybeUninit::<Buffer>::uninit();
        let success = key_gen(rln_pointer, output_buffer.as_mut_ptr());
        assert!(success, "key gen call failed");
        let output_buffer = unsafe { output_buffer.assume_init() };
        let result_data = <&[u8]>::from(&output_buffer).to_vec();
        let (identity_secret_hash, read) = bytes_le_to_fr(&result_data);
        let (id_commitment, _) = bytes_le_to_fr(&result_data[read..].to_vec());
        let (identity_secret_hash, read) = IdSecret::from_bytes_le(&result_data);
        let (id_commitment, _) = bytes_le_to_fr(&result_data[read..]);
        (identity_secret_hash, id_commitment)
    }

@@ -1033,10 +1033,10 @@ mod stateless_test {
    #[test]
    fn test_recover_id_secret_stateless_ffi() {
        let default_leaf = Fr::from(0);
        let mut tree = PoseidonTree::new(
        let mut tree: OptimalMerkleTree<PoseidonHash> = OptimalMerkleTree::new(
            TEST_TREE_HEIGHT,
            default_leaf,
            ConfigOf::<PoseidonTree>::default(),
            ConfigOf::<OptimalMerkleTree<PoseidonHash>>::default(),
        )
        .unwrap();

@@ -1068,8 +1068,9 @@ mod stateless_test {

        // We prepare input for generate_rln_proof API
        let rln_witness1 = rln_witness_from_values(
            identity_secret_hash,
            &merkle_proof,
            identity_secret_hash.clone(),
            merkle_proof.get_path_elements(),
            merkle_proof.get_path_index(),
            x1,
            external_nullifier,
            user_message_limit,

@@ -1079,8 +1080,9 @@ mod stateless_test {
        let serialized1 = serialize_witness(&rln_witness1).unwrap();

        let rln_witness2 = rln_witness_from_values(
            identity_secret_hash,
            &merkle_proof,
            identity_secret_hash.clone(),
            merkle_proof.get_path_elements(),
            merkle_proof.get_path_index(),
            x2,
            external_nullifier,
            user_message_limit,

@@ -1115,7 +1117,8 @@ mod stateless_test {
        assert!(!serialized_identity_secret_hash.is_empty());

        // We check if the recovered identity secret hash corresponds to the original one
        let (recovered_identity_secret_hash, _) = bytes_le_to_fr(&serialized_identity_secret_hash);
        let (recovered_identity_secret_hash, _) =
            IdSecret::from_bytes_le(&serialized_identity_secret_hash);
        assert_eq!(recovered_identity_secret_hash, identity_secret_hash);

        // We now test that computing identity_secret_hash is unsuccessful if shares computed from two different identity secret hashes but within same epoch are passed

@@ -1133,8 +1136,9 @@ mod stateless_test {
        let merkle_proof_new = tree.proof(identity_index_new).expect("proof should exist");

        let rln_witness3 = rln_witness_from_values(
            identity_secret_hash_new,
            &merkle_proof_new,
            identity_secret_hash_new.clone(),
            merkle_proof_new.get_path_elements(),
            merkle_proof_new.get_path_index(),
            x3,
            external_nullifier,
            user_message_limit,

@@ -1166,16 +1170,19 @@ mod stateless_test {

        // ensure that the recovered secret does not match with either of the
        // used secrets in proof generation
        assert_ne!(recovered_identity_secret_hash_new, identity_secret_hash_new);
        assert_ne!(
            recovered_identity_secret_hash_new,
            *identity_secret_hash_new
        );
    }

    #[test]
    fn test_verify_with_roots_stateless_ffi() {
        let default_leaf = Fr::from(0);
        let mut tree = PoseidonTree::new(
        let mut tree: OptimalMerkleTree<PoseidonHash> = OptimalMerkleTree::new(
            TEST_TREE_HEIGHT,
            default_leaf,
            ConfigOf::<PoseidonTree>::default(),
            ConfigOf::<OptimalMerkleTree<PoseidonHash>>::default(),
        )
        .unwrap();

@@ -1205,7 +1212,8 @@ mod stateless_test {
        // We prepare input for generate_rln_proof API
        let rln_witness = rln_witness_from_values(
            identity_secret_hash,
            &merkle_proof,
            merkle_proof.get_path_elements(),
            merkle_proof.get_path_index(),
            x,
            external_nullifier,
            user_message_limit,

@@ -1229,7 +1237,7 @@ mod stateless_test {
            verify_with_roots(rln_pointer, input_buffer, roots_buffer, proof_is_valid_ptr);
        assert!(success, "verify call failed");
        // Proof should be valid
        assert_eq!(proof_is_valid, true);
        assert!(proof_is_valid);

        // We serialize in the roots buffer some random values and we check that the proof is not verified since it doesn't contain the correct root the proof refers to
        for _ in 0..5 {

@@ -1243,7 +1251,7 @@ mod stateless_test {
            verify_with_roots(rln_pointer, input_buffer, roots_buffer, proof_is_valid_ptr);
        assert!(success, "verify call failed");
        // Proof should be invalid.
        assert_eq!(proof_is_valid, false);
        assert!(!proof_is_valid);

        // We get the root of the tree obtained adding one leaf per time
        let root = tree.root();

@@ -1258,7 +1266,7 @@ mod stateless_test {
            verify_with_roots(rln_pointer, input_buffer, roots_buffer, proof_is_valid_ptr);
        assert!(success, "verify call failed");
        // Proof should be valid.
        assert_eq!(proof_is_valid, true);
        assert!(proof_is_valid);
    }

    #[test]

@@ -1303,7 +1311,7 @@ mod stateless_test {
            let success = verify(rln_pointer, input_buffer, proof_is_valid_ptr);
            verify_time += now.elapsed().as_nanos();
            assert!(success, "verify call failed");
            assert_eq!(proof_is_valid, true);
            assert!(proof_is_valid);
        }

        println!(

@@ -1331,7 +1339,7 @@ mod stateless_test {
        let output_buffer = unsafe { output_buffer.assume_init() };
        let result_data = <&[u8]>::from(&output_buffer).to_vec();
        let (identity_secret_hash, read) = bytes_le_to_fr(&result_data);
        let (id_commitment, _) = bytes_le_to_fr(&result_data[read..].to_vec());
        let (id_commitment, _) = bytes_le_to_fr(&result_data[read..]);

        // We check against expected values
        let expected_identity_secret_hash_seed_bytes = str_to_fr(

@@ -1,7 +1,9 @@
////////////////////////////////////////////////////////////
/// Tests
// Tests
////////////////////////////////////////////////////////////

#![cfg(not(feature = "stateless"))]

#[cfg(test)]
mod test {
use rln::hashers::{poseidon_hash, PoseidonHash};
@@ -12,20 +14,24 @@ mod test {
use utils::{FullMerkleTree, OptimalMerkleTree, ZerokitMerkleProof, ZerokitMerkleTree};

#[test]
// The test is checked correctness for `FullMerkleTree` and `OptimalMerkleTree` with Poseidon hash
// The test checked correctness for `FullMerkleTree` and `OptimalMerkleTree` with Poseidon hash
fn test_zerokit_merkle_implementations() {
let sample_size = 100;
let leaves: Vec<Fr> = (0..sample_size).map(|s| Fr::from(s)).collect();
let leaves: Vec<Fr> = (0..sample_size).map(Fr::from).collect();

let mut tree_full = FullMerkleTree::<PoseidonHash>::default(TEST_TREE_HEIGHT).unwrap();
let mut tree_opt = OptimalMerkleTree::<PoseidonHash>::default(TEST_TREE_HEIGHT).unwrap();

for i in 0..sample_size.try_into().unwrap() {
tree_full.set(i, leaves[i]).unwrap();
for (i, leave) in leaves
.into_iter()
.enumerate()
.take(sample_size.try_into().unwrap())
{
tree_full.set(i, leave).unwrap();
let proof = tree_full.proof(i).expect("index should be set");
assert_eq!(proof.leaf_index(), i);

tree_opt.set(i, leaves[i]).unwrap();
tree_opt.set(i, leave).unwrap();
assert_eq!(tree_opt.root(), tree_full.root());
let proof = tree_opt.proof(i).expect("index should be set");
assert_eq!(proof.leaf_index(), i);
@@ -101,7 +107,7 @@ mod test {

// check remove_indices_and_set_leaves inside override_range function
assert!(tree.get_empty_leaves_indices().is_empty());
let leaves_2: Vec<Fr> = (0..2).map(|s| Fr::from(s as i32)).collect();
let leaves_2: Vec<Fr> = (0..2).map(Fr::from).collect();
tree.override_range(0, leaves_2.clone().into_iter(), [0, 1, 2, 3].into_iter())
.unwrap();
assert_eq!(tree.get_empty_leaves_indices(), vec![2, 3]);
@@ -116,7 +122,7 @@ mod test {
.unwrap();
assert_eq!(tree.get_empty_leaves_indices(), vec![2, 3]);

let leaves_4: Vec<Fr> = (0..4).map(|s| Fr::from(s as i32)).collect();
let leaves_4: Vec<Fr> = (0..4).map(Fr::from).collect();
// check if the indexes for write and delete are the same
tree.override_range(0, leaves_4.clone().into_iter(), [0, 1, 2, 3].into_iter())
.unwrap();
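For reference, the API the refactored loop exercises is unchanged: build a tree, set a leaf, fetch a proof, compare roots. A minimal sketch of that usage with the same types as above (the height literal and the `ark_bn254::Fr` import are assumptions; the tests obtain `Fr` and `TEST_TREE_HEIGHT` through rln):

    use ark_bn254::Fr;
    use rln::hashers::PoseidonHash;
    use utils::{FullMerkleTree, ZerokitMerkleProof, ZerokitMerkleTree};

    fn main() {
        // Height 20 is an arbitrary stand-in for TEST_TREE_HEIGHT.
        let mut tree = FullMerkleTree::<PoseidonHash>::default(20).unwrap();
        tree.set(0, Fr::from(42)).unwrap();
        let proof = tree.proof(0).expect("index should be set");
        assert_eq!(proof.leaf_index(), 0);
        println!("root = {}", tree.root());
    }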
@@ -1,3 +1,5 @@
#![cfg(not(feature = "stateless"))]

#[cfg(test)]
mod test {
use ark_ff::BigInt;
@@ -34,7 +36,7 @@ mod test {
ConfigOf::<PoseidonTree>::default(),
)
.unwrap();
tree.set(leaf_index, rate_commitment.into()).unwrap();
tree.set(leaf_index, rate_commitment).unwrap();

// We check correct computation of the root
let root = tree.root();
@@ -105,7 +107,7 @@ mod test {
ConfigOf::<PoseidonTree>::default(),
)
.unwrap();
tree.set(leaf_index, rate_commitment.into()).unwrap();
tree.set(leaf_index, rate_commitment).unwrap();

let merkle_proof = tree.proof(leaf_index).expect("proof should exist");

@@ -119,7 +121,8 @@ mod test {

rln_witness_from_values(
identity_secret_hash,
&merkle_proof,
merkle_proof.get_path_elements(),
merkle_proof.get_path_index(),
x,
external_nullifier,
user_message_limit,
@@ -142,11 +145,11 @@ mod test {
assert_eq!(rln_witness_deser, rln_witness);

// Let's generate a zkSNARK proof
let proof = generate_proof(&proving_key, &rln_witness_deser, &graph_data).unwrap();
let proof = generate_proof(proving_key, &rln_witness_deser, graph_data).unwrap();
let proof_values = proof_values_from_witness(&rln_witness_deser).unwrap();

// Let's verify the proof
let verified = verify_proof(&verification_key, &proof, &proof_values);
let verified = verify_proof(verification_key, &proof, &proof_values);

assert!(verified.unwrap());
}
@@ -165,12 +168,12 @@ mod test {
let graph_data = graph_from_folder();

// Let's generate a zkSNARK proof
let proof = generate_proof(&proving_key, &rln_witness_deser, &graph_data).unwrap();
let proof = generate_proof(proving_key, &rln_witness_deser, graph_data).unwrap();

let proof_values = proof_values_from_witness(&rln_witness_deser).unwrap();

// Let's verify the proof
let success = verify_proof(&verification_key, &proof, &proof_values).unwrap();
let success = verify_proof(verification_key, &proof, &proof_values).unwrap();

assert!(success);
}
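The `generate_proof` and `verify_proof` hunks drop one layer of `&`: when a binding is already a reference, prefixing it with `&` again passes `&&T`. A generic illustration of the distinction, unrelated to rln's actual key types:

    struct Key(u32);

    fn use_key(key: &Key) -> u32 {
        key.0
    }

    fn main() {
        let owned = Key(7);
        let key: &Key = &owned;
        // `use_key(&key)` would hand over a `&&Key`; deref coercion papers
        // over it here, but generic bounds often reject the extra borrow,
        // so passing the existing reference directly is the cleaner call.
        assert_eq!(use_key(key), 7);
    }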
@@ -3,7 +3,15 @@ mod test {
#[cfg(not(feature = "stateless"))]
use {
ark_ff::BigInt,
rln::{circuit::TEST_TREE_HEIGHT, protocol::compute_tree_root},
rln::{
circuit::TEST_TREE_HEIGHT,
protocol::compute_tree_root,
utils::{
bytes_le_to_vec_fr, bytes_le_to_vec_u8, bytes_le_to_vec_usize, fr_to_bytes_le,
generate_input_buffer, IdSecret,
},
},
zeroize::Zeroize,
};

use ark_std::{rand::thread_rng, UniformRand};
@@ -12,10 +20,7 @@ mod test {
use rln::hashers::{hash_to_field, poseidon_hash as utils_poseidon_hash, ROUND_PARAMS};
use rln::protocol::deserialize_identity_tuple;
use rln::public::{hash as public_hash, poseidon_hash as public_poseidon_hash, RLN};
use rln::utils::{
bytes_le_to_fr, bytes_le_to_vec_fr, bytes_le_to_vec_u8, bytes_le_to_vec_usize,
fr_to_bytes_le, generate_input_buffer, str_to_fr, vec_fr_to_bytes_le,
};
use rln::utils::{bytes_le_to_fr, str_to_fr, vec_fr_to_bytes_le};
use std::io::Cursor;

#[test]
@@ -28,8 +33,13 @@ mod test {
let mut rln = RLN::new(TEST_TREE_HEIGHT, generate_input_buffer()).unwrap();

// generate identity
let identity_secret_hash = hash_to_field(b"test-merkle-proof");
let id_commitment = utils_poseidon_hash(&vec![identity_secret_hash]);
let mut identity_secret_hash_ = hash_to_field(b"test-merkle-proof");
let identity_secret_hash = IdSecret::from(&mut identity_secret_hash_);

let mut to_hash = [*identity_secret_hash.clone()];
let id_commitment = utils_poseidon_hash(&to_hash);
to_hash[0].zeroize();

let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit.into()]);

// check that leaves indices is empty
@@ -69,7 +79,7 @@ mod test {

let buffer_inner = buffer.into_inner();
let (path_elements, read) = bytes_le_to_vec_fr(&buffer_inner).unwrap();
let (identity_path_index, _) = bytes_le_to_vec_u8(&buffer_inner[read..].to_vec()).unwrap();
let (identity_path_index, _) = bytes_le_to_vec_u8(&buffer_inner[read..]).unwrap();

// We check correct computation of the path and indexes
let expected_path_elements: Vec<Fr> = [
@@ -151,7 +161,7 @@ mod test {
let serialized_output = output_buffer.into_inner();

let (identity_secret_hash, read) = bytes_le_to_fr(&serialized_output);
let (id_commitment, _) = bytes_le_to_fr(&serialized_output[read..].to_vec());
let (id_commitment, _) = bytes_le_to_fr(&serialized_output[read..]);

// We check against expected values
let expected_identity_secret_hash_seed_bytes = str_to_fr(
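The identity hunk above wraps the secret in `IdSecret` and scrubs the temporary array passed to Poseidon. A self-contained sketch of that scrub-after-use pattern, using the `zeroize` crate directly, with plain bytes and a placeholder hash standing in for `Fr` and `poseidon_hash`:

    use zeroize::Zeroize;

    // Placeholder hash; the tests hash field elements with Poseidon instead.
    fn placeholder_hash(input: &[u8]) -> u64 {
        input
            .iter()
            .fold(0u64, |acc, b| acc.wrapping_mul(31).wrapping_add(u64::from(*b)))
    }

    // Derive the commitment from a copy of the secret, then zeroize the copy
    // so the sensitive bytes do not linger, mirroring `to_hash[0].zeroize()`.
    fn commitment_from_secret(secret: &[u8]) -> u64 {
        let mut to_hash = secret.to_vec();
        let commitment = placeholder_hash(&to_hash);
        to_hash.zeroize();
        commitment
    }

    fn main() {
        let a = commitment_from_secret(b"test-merkle-proof");
        let b = commitment_from_secret(b"test-merkle-proof");
        assert_eq!(a, b); // deterministic commitment, secret copies scrubbed
    }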
@@ -12,16 +12,15 @@ repository = "https://github.com/vacp2p/zerokit"
bench = false

[dependencies]
ark-ff = { version = "0.5.0", default-features = false, features = [
"parallel",
] }
ark-ff = { version = "0.5.0", default-features = false }
num-bigint = { version = "0.4.6", default-features = false }
pmtree = { package = "vacp2p_pmtree", version = "2.0.2", optional = true }
# pmtree = { package = "vacp2p_pmtree", version = "2.0.2", optional = true }
pmtree = { git = "https://github.com/vacp2p/pmtree", branch = "upgrade-rayon-version", package = "vacp2p_pmtree", optional = true }
sled = "0.34.7"
serde_json = "1.0"
serde_json = "1.0.141"
lazy_static = "1.5.0"
hex = "0.4.3"
rayon = "1.7.0"
rayon = "1.10.0"
thiserror = "2.0"

[dev-dependencies]
@@ -29,10 +28,11 @@ ark-bn254 = { version = "0.5.0", features = ["std"] }
num-traits = "0.2.19"
hex-literal = "0.4.1"
tiny-keccak = { version = "2.0.2", features = ["keccak"] }
criterion = { version = "0.6.0", features = ["html_reports"] }
criterion = { version = "0.7.0", features = ["html_reports"] }

[features]
default = []
parallel = ["ark-ff/parallel"]
pmtree-ft = ["pmtree"]

[[bench]]
@@ -44,4 +44,4 @@ name = "poseidon_benchmark"
harness = false

[package.metadata.docs.rs]
all-features = true
all-features = true
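Since the `[[bench]]` targets set `harness = false`, criterion owns the benchmark binary's entry point. A minimal skeleton of such a bench under criterion 0.7 (the bench and function names here are illustrative, not the crate's actual `poseidon_benchmark`):

    use criterion::{criterion_group, criterion_main, Criterion};

    fn bench_example(c: &mut Criterion) {
        // Measure a stand-in closure; a real bench would call the code under test.
        c.bench_function("example", |b| b.iter(|| 2 + 2));
    }

    criterion_group!(benches, bench_example);
    criterion_main!(benches);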