Mirror of https://github.com/vacp2p/zerokit.git (synced 2026-01-09 13:47:58 -05:00)

Compare commits: release/v0 ... test-tree- (214 commits)
| SHA1 |
|---|
| b5afb847f1 |
| a7d58926e4 |
| e160ac2524 |
| eb8eedfdb4 |
| c78f1f1534 |
| 4d62a4d60d |
| 57b694db5d |
| fd568c17b3 |
| cf845c6a74 |
| fe566b3314 |
| 0b00c639a0 |
| 7c801a804e |
| 9da80dd807 |
| bcbd6a97af |
| 6965cf2852 |
| 578e0507b3 |
| bf1e184da9 |
| 4473688efa |
| c80569d518 |
| fd99b6af74 |
| 65f53e3da3 |
| 042f8a9739 |
| baf474e747 |
| dc0b31752c |
| 36013bf4ba |
| 211b2d4830 |
| 5f4bcb74ce |
| de5fd36add |
| 19c0f551c8 |
| 4133f1f8c3 |
| 149096f7a6 |
| 7023e85fce |
| a4cafa6adc |
| 4077357e3f |
| 84d9799d09 |
| c576af8e62 |
| 81470b9678 |
| 9d4198c205 |
| c60e0c33fc |
| ba467d370c |
| ffd5851d7d |
| 759d312680 |
| fb0ffd74a3 |
| 9d8372be39 |
| de9c0d5072 |
| 5c60ec7cce |
| 8793965650 |
| 1930ca1610 |
| 4b4169d7a7 |
| 8a3e33be41 |
| 7bb2444ba4 |
| 00f8d039a8 |
| e39f156fff |
| 8b04930583 |
| b9d27039c3 |
| 49e2517e15 |
| 6621efd0bb |
| 4a74ff0d6c |
| fc823e7187 |
| 0d5642492a |
| c4579e1917 |
| e6238fd722 |
| 5540ddc993 |
| d8f813bc2e |
| c6493bd10f |
| dd5edd6818 |
| 85d71a5427 |
| 7790954c4a |
| 820240d8c0 |
| fe2b224981 |
| d3d85c3e3c |
| 0005b1d61f |
| 4931b25237 |
| 652cc3647e |
| 51939be4a8 |
| cd60af5b52 |
| 8581ac0b78 |
| 5937a67ee6 |
| d96eb59e92 |
| a372053047 |
| b450bfdb37 |
| 0521c7349e |
| d91a5b3568 |
| cf9dbb419d |
| aaa12db70d |
| 30d5f94181 |
| ccd2ead847 |
| 7669d72f9b |
| b5760697bc |
| 5c4e3fc13c |
| a92d6428d6 |
| e6db05f27c |
| 25f822e779 |
| 0997d15d33 |
| 8614b2a33a |
| b903d8d740 |
| f73c83b571 |
| a86b859b75 |
| f8fc455d08 |
| b51896c3a7 |
| 0c5ef6abcf |
| a1c292cb2e |
| c6c1bfde91 |
| bf3d1d3309 |
| 7110e00674 |
| 99966d1a6e |
| 7d63912ace |
| ef1da42d94 |
| ecb4d9307f |
| d1414a44c5 |
| 6d58320077 |
| be2dccfdd0 |
| 9d4ed68450 |
| 5cf2b2e05e |
| 36158e8d08 |
| c8cf033f32 |
| 23d2331b78 |
| c6b7a8c0a4 |
| 4ec93c5e1f |
| c83c9902d7 |
| 131cacab35 |
| 8a365f0c9e |
| c561741339 |
| 90fdfb9d78 |
| 56b9285fef |
| be88a432d7 |
| 8cfd83de54 |
| 2793fe0e24 |
| 0d35571215 |
| 9cc86e526e |
| ecd056884c |
| 96497db7c5 |
| ba8f011cc1 |
| 9dc92ec1ce |
| 75d760c179 |
| 72a3ce1770 |
| b841e725a0 |
| 3177e3ae74 |
| 2c4de0484a |
| fcd4854037 |
| d68dc1ad8e |
| 8c3d60ed01 |
| c2d386cb74 |
| 8f2c9e3586 |
| 584c2cf4c0 |
| 2c4b399126 |
| c4b699ddff |
| 33d3732922 |
| 654c77dcf6 |
| 783f875d3b |
| fd7d7d9318 |
| 4f98fd8028 |
| 9931e901e5 |
| 0fb7e0bbcb |
| 672287b77b |
| 2e868d6cbf |
| 39bea35a6d |
| 6ff4eeb237 |
| 1f983bb232 |
| 13a2c61355 |
| 2bbb710e83 |
| 8cd4baba8a |
| 9045e31006 |
| 9e44bb64dc |
| bb7dfb80ee |
| c319f32a1e |
| bf2aa16a71 |
| c423bdea61 |
| 5eb98d4b33 |
| b698153e28 |
| a6c8090c93 |
| 7ee7675d52 |
| 062055dc5e |
| 55b00fd653 |
| 62018b4eba |
| 48fa1b9b3d |
| a6145ab201 |
| e21e9954ac |
| de5eb2066a |
| 7aba62ff51 |
| cbf8c541c2 |
| 5bcbc6c22f |
| 01fdba6d88 |
| 1502315605 |
| 92c431c98f |
| 005393d696 |
| 89ea87a98a |
| 32f3202e9d |
| e69f6a67d8 |
| 9e1355d36a |
| 3551435d60 |
| 60e3369621 |
| 284e51483c |
| 3427729f7e |
| e1c16c9c3f |
| bc69e25359 |
| 6a7808d911 |
| 25bcb7991b |
| 3d943bccb6 |
| fba905f45d |
| 490206aa44 |
| afa4a09bba |
| b95b151a1c |
| b77facc5e9 |
| 5d429ca031 |
| 1df6c53ca0 |
| 878c3c5c5f |
| a5aa4e8d4f |
| bbacc9dcce |
| c42fcfe644 |
| 99a7eb003f |
| 14f41d5340 |
| c401c0b21d |
| 4f08818d7a |
.github/labels.yml (vendored, 5 lines changed)
@@ -90,11 +90,6 @@
  description: go-waku-productionization track (Waku Product)
  color: 9DEA79

# Tracks within zk-WASM project
- name: track:kickoff
  description: Kickoff track (zk-WASM)
  color: 06B6C8

# Tracks within RAD project
- name: track:waku-specs
  description: Waku specs track (RAD)
.github/workflows/ci.yml (vendored, 245 lines changed)
@@ -2,44 +2,233 @@ on:
  push:
    branches:
      - master
    paths-ignore:
      - "**.md"
      - "!.github/workflows/*.yml"
      - "!rln-wasm/**"
      - "!rln/src/**"
      - "!rln/resources/**"
      - "!utils/src/**"
      - "!rln-wasm-utils/**"
  pull_request:
    types: [opened, synchronize, reopened, ready_for_review]
    paths-ignore:
      - "**.md"
      - "!.github/workflows/*.yml"
      - "!rln-wasm/**"
      - "!rln/src/**"
      - "!rln/resources/**"
      - "!utils/src/**"
      - "!rln-wasm-utils/**"

name: Tests
name: CI

jobs:
  tests:
    runs-on: ubuntu-latest
  utils-test:
    # skip tests on draft PRs
    if: github.event_name == 'push' || (github.event_name == 'pull_request' && !github.event.pull_request.draft)
    strategy:
      matrix:
        platform: [ubuntu-latest, macos-latest]
        crate: [utils]
    runs-on: ${{ matrix.platform }}
    timeout-minutes: 60

    name: Test - ${{ matrix.crate }} - ${{ matrix.platform }}
    steps:
      - name: Checkout sources
        uses: actions/checkout@v2
        uses: actions/checkout@v4
      - name: Install stable toolchain
        uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          override: true
      - name: Update git submodules
        run: git submodule update --init --recursive
      - name: cargo test
        uses: dtolnay/rust-toolchain@stable
      - uses: Swatinem/rust-cache@v2
      - name: Install dependencies
        run: make installdeps
      - name: Test utils
        run: |
          cargo test
  lint:
    runs-on: ubuntu-latest
          cargo make test --release
        working-directory: ${{ matrix.crate }}

  rln-test:
    # skip tests on draft PRs
    if: github.event_name == 'push' || (github.event_name == 'pull_request' && !github.event.pull_request.draft)
    strategy:
      matrix:
        platform: [ubuntu-latest, macos-latest]
        crate: [rln]
        feature: ["default", "stateless"]
    runs-on: ${{ matrix.platform }}
    timeout-minutes: 60

    name: Test - ${{ matrix.crate }} - ${{ matrix.platform }} - ${{ matrix.feature }}
    steps:
      - name: Checkout sources
        uses: actions/checkout@v2
        uses: actions/checkout@v4
      - name: Install stable toolchain
        uses: actions-rs/toolchain@v1
        uses: dtolnay/rust-toolchain@stable
      - uses: Swatinem/rust-cache@v2
      - name: Install dependencies
        run: make installdeps
      - name: Test rln
        run: |
          if [ ${{ matrix.feature }} == default ]; then
            cargo make test --release
          else
            cargo make test_${{ matrix.feature }} --release
          fi
        working-directory: ${{ matrix.crate }}

  rln-wasm-test:
    # skip tests on draft PRs
    if: github.event_name == 'push' || (github.event_name == 'pull_request' && !github.event.pull_request.draft)
    strategy:
      matrix:
        platform: [ubuntu-latest, macos-latest]
        crate: [rln-wasm]
        feature: ["default"]
    runs-on: ${{ matrix.platform }}
    timeout-minutes: 60

    name: Test - ${{ matrix.crate }} - ${{ matrix.platform }} - ${{ matrix.feature }}
    steps:
      - uses: actions/checkout@v4
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
      - uses: Swatinem/rust-cache@v2
      - name: Install dependencies
        run: make installdeps
      - name: Build rln-wasm
        run: cargo make build
        working-directory: ${{ matrix.crate }}
      - name: Test rln-wasm on node
        run: cargo make test --release
        working-directory: ${{ matrix.crate }}
      - name: Test rln-wasm on browser
        run: cargo make test_browser --release
        working-directory: ${{ matrix.crate }}

  rln-wasm-parallel-test:
    # skip tests on draft PRs
    if: github.event_name == 'push' || (github.event_name == 'pull_request' && !github.event.pull_request.draft)
    strategy:
      matrix:
        platform: [ubuntu-latest, macos-latest]
        crate: [rln-wasm]
        feature: ["parallel"]
    runs-on: ${{ matrix.platform }}
    timeout-minutes: 60

    name: Test - ${{ matrix.crate }} - ${{ matrix.platform }} - ${{ matrix.feature }}
    steps:
      - uses: actions/checkout@v4
      - name: Install nightly toolchain
        uses: dtolnay/rust-toolchain@nightly
        with:
          components: rust-src
          targets: wasm32-unknown-unknown
      - uses: Swatinem/rust-cache@v2
      - name: Install dependencies
        run: make installdeps
      - name: Build rln-wasm in parallel mode
        run: cargo make build_parallel
        working-directory: ${{ matrix.crate }}
      - name: Test rln-wasm in parallel mode on browser
        run: cargo make test_parallel --release
        working-directory: ${{ matrix.crate }}

  rln-wasm-utils-test:
    # skip tests on draft PRs
    if: github.event_name == 'push' || (github.event_name == 'pull_request' && !github.event.pull_request.draft)
    strategy:
      matrix:
        platform: [ubuntu-latest, macos-latest]
        crate: [rln-wasm-utils]
    runs-on: ${{ matrix.platform }}
    timeout-minutes: 60

    name: Test - ${{ matrix.crate }} - ${{ matrix.platform }}
    steps:
      - uses: actions/checkout@v4
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
      - uses: Swatinem/rust-cache@v2
      - name: Install dependencies
        run: make installdeps
      - name: Test rln-wasm-utils
        run: cargo make test --release
        working-directory: ${{ matrix.crate }}

  lint:
    # run on both ready and draft PRs
    if: github.event_name == 'push' || (github.event_name == 'pull_request' && !github.event.pull_request.draft)
    strategy:
      matrix:
        # we run lint tests only on ubuntu
        platform: [ubuntu-latest]
        crate: [rln, rln-wasm, rln-wasm-utils, utils]
    runs-on: ${{ matrix.platform }}
    timeout-minutes: 60

    name: Lint - ${{ matrix.crate }} - ${{ matrix.platform }}
    steps:
      - name: Checkout sources
        uses: actions/checkout@v4
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
          profile: minimal
          toolchain: stable
          override: true
          components: rustfmt, clippy
      - name: Update git submodules
        run: git submodule update --init --recursive
      - name: cargo fmt
        run: cargo fmt --all -- --check
      - name: cargo clippy
        run: cargo clippy
        # Currently not treating warnings as error, too noisy
        # -- -D warnings
      - uses: Swatinem/rust-cache@v2
      - name: Install dependencies
        run: make installdeps
      - name: Check formatting
        if: success() || failure()
        run: cargo fmt -- --check
        working-directory: ${{ matrix.crate }}
      - name: Check clippy
        if: success() || failure()
        run: |
          cargo clippy --all-targets --release -- -D warnings
        working-directory: ${{ matrix.crate }}

  benchmark-utils:
    # run only on ready pull requests
    if: github.event_name == 'pull_request' && !github.event.pull_request.draft
    strategy:
      matrix:
        # we run benchmark tests only on ubuntu
        platform: [ubuntu-latest]
        crate: [utils]
    runs-on: ${{ matrix.platform }}
    timeout-minutes: 60

    name: Benchmark - ${{ matrix.crate }} - ${{ matrix.platform }}
    steps:
      - name: Checkout sources
        uses: actions/checkout@v4
      - uses: Swatinem/rust-cache@v2
      - uses: boa-dev/criterion-compare-action@v3
        with:
          branchName: ${{ github.base_ref }}
          cwd: ${{ matrix.crate }}

  benchmark-rln:
    # run only on ready pull requests
    if: github.event_name == 'pull_request' && !github.event.pull_request.draft
    strategy:
      matrix:
        # we run benchmark tests only on ubuntu
        platform: [ubuntu-latest]
        crate: [rln]
        feature: ["default"]
    runs-on: ${{ matrix.platform }}
    timeout-minutes: 60

    name: Benchmark - ${{ matrix.crate }} - ${{ matrix.platform }} - ${{ matrix.feature }}
    steps:
      - name: Checkout sources
        uses: actions/checkout@v4
      - uses: Swatinem/rust-cache@v2
      - uses: boa-dev/criterion-compare-action@v3
        with:
          branchName: ${{ github.base_ref }}
          cwd: ${{ matrix.crate }}
          features: ${{ matrix.feature }}
.github/workflows/nightly-release.yml (vendored, new file, 225 lines)
@@ -0,0 +1,225 @@
name: Nightly build
on:
  schedule:
    - cron: "0 0 * * *"
  workflow_dispatch:

jobs:
  linux:
    name: Linux build
    runs-on: ubuntu-latest
    strategy:
      matrix:
        feature:
          - "stateless"
          - "stateless,parallel"
          - "pmtree-ft"
          - "pmtree-ft,parallel"
          - "fullmerkletree"
          - "fullmerkletree,parallel"
          - "optimalmerkletree"
          - "optimalmerkletree,parallel"
        target:
          - x86_64-unknown-linux-gnu
          - aarch64-unknown-linux-gnu
    steps:
      - name: Checkout sources
        uses: actions/checkout@v4
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
          target: ${{ matrix.target }}
      - uses: Swatinem/rust-cache@v2
      - name: Install dependencies
        run: make installdeps
      - name: Cross build
        run: |
          cross build --release --target ${{ matrix.target }} --no-default-features --features ${{ matrix.feature }} --workspace
          mkdir release
          cp target/${{ matrix.target }}/release/librln* release/
          tar -czvf ${{ matrix.target }}-${{ matrix.feature }}-rln.tar.gz release/
      - name: Upload archive artifact
        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.target }}-${{ matrix.feature }}-archive
          path: ${{ matrix.target }}-${{ matrix.feature }}-rln.tar.gz
          retention-days: 2

  macos:
    name: MacOS build
    runs-on: macos-latest
    strategy:
      matrix:
        feature:
          - "stateless"
          - "stateless,parallel"
          - "pmtree-ft"
          - "pmtree-ft,parallel"
          - "fullmerkletree"
          - "fullmerkletree,parallel"
          - "optimalmerkletree"
          - "optimalmerkletree,parallel"
        target:
          - x86_64-apple-darwin
          - aarch64-apple-darwin
    steps:
      - name: Checkout sources
        uses: actions/checkout@v4
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
          target: ${{ matrix.target }}
      - uses: Swatinem/rust-cache@v2
      - name: Install dependencies
        run: make installdeps
      - name: Cross build
        run: |
          cross build --release --target ${{ matrix.target }} --no-default-features --features ${{ matrix.feature }} --workspace
          mkdir release
          cp target/${{ matrix.target }}/release/librln* release/
          tar -czvf ${{ matrix.target }}-${{ matrix.feature }}-rln.tar.gz release/
      - name: Upload archive artifact
        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.target }}-${{ matrix.feature }}-archive
          path: ${{ matrix.target }}-${{ matrix.feature }}-rln.tar.gz
          retention-days: 2

  rln-wasm:
    name: Build rln-wasm
    runs-on: ubuntu-latest
    strategy:
      matrix:
        feature:
          - "default"
          - "parallel"
    steps:
      - name: Checkout sources
        uses: actions/checkout@v4
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
          targets: wasm32-unknown-unknown
      - name: Install nightly toolchain
        uses: dtolnay/rust-toolchain@nightly
        with:
          targets: wasm32-unknown-unknown
          components: rust-src
      - uses: Swatinem/rust-cache@v2
        with:
          key: rln-wasm-${{ matrix.feature }}
      - name: Install dependencies
        run: make installdeps
      - name: Install wasm-pack
        run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh
      - name: Install binaryen
        run: |
          sudo apt-get update
          sudo apt-get install -y binaryen
      - name: Build rln-wasm package
        run: |
          if [[ ${{ matrix.feature }} == *parallel* ]]; then
            env RUSTFLAGS="-C target-feature=+atomics,+bulk-memory,+mutable-globals" \
              rustup run nightly wasm-pack build --release --target web --scope waku \
              --features ${{ matrix.feature }} -Z build-std=panic_abort,std
          else
            wasm-pack build --release --target web --scope waku
          fi

          sed -i.bak 's/rln-wasm/zerokit-rln-wasm/g' pkg/package.json && rm pkg/package.json.bak

          wasm-opt pkg/rln_wasm_bg.wasm -Oz --strip-debug --strip-dwarf \
            --remove-unused-module-elements --vacuum -o pkg/rln_wasm_bg.wasm

          mkdir release
          cp -r pkg/* release/
          tar -czvf rln-wasm-${{ matrix.feature }}.tar.gz release/
        working-directory: rln-wasm
      - name: Upload archive artifact
        uses: actions/upload-artifact@v4
        with:
          name: rln-wasm-${{ matrix.feature }}-archive
          path: rln-wasm/rln-wasm-${{ matrix.feature }}.tar.gz
          retention-days: 2

  rln-wasm-utils:
    name: Build rln-wasm-utils
    runs-on: ubuntu-latest
    steps:
      - name: Checkout sources
        uses: actions/checkout@v4
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
          targets: wasm32-unknown-unknown
      - name: Install nightly toolchain
        uses: dtolnay/rust-toolchain@nightly
        with:
          targets: wasm32-unknown-unknown
          components: rust-src
      - uses: Swatinem/rust-cache@v2
        with:
          key: rln-wasm-utils
      - name: Install dependencies
        run: make installdeps
      - name: Install wasm-pack
        run: curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh
      - name: Install binaryen
        run: |
          sudo apt-get update
          sudo apt-get install -y binaryen
      - name: Build rln-wasm-utils package
        run: |
          wasm-pack build --release --target web --scope waku

          sed -i.bak 's/rln-wasm-utils/zerokit-rln-wasm-utils/g' pkg/package.json && rm pkg/package.json.bak

          wasm-opt pkg/rln_wasm_utils_bg.wasm -Oz --strip-debug --strip-dwarf \
            --remove-unused-module-elements --vacuum -o pkg/rln_wasm_utils_bg.wasm

          mkdir release
          cp -r pkg/* release/
          tar -czvf rln-wasm-utils.tar.gz release/
        working-directory: rln-wasm-utils
      - name: Upload archive artifact
        uses: actions/upload-artifact@v4
        with:
          name: rln-wasm-utils-archive
          path: rln-wasm-utils/rln-wasm-utils.tar.gz
          retention-days: 2

  prepare-prerelease:
    name: Prepare pre-release
    needs: [linux, macos, rln-wasm, rln-wasm-utils]
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          ref: master
      - name: Download artifacts
        uses: actions/download-artifact@v4
      - name: Delete tag
        uses: dev-drprasad/delete-tag-and-release@v0.2.1
        with:
          delete_release: true
          tag_name: nightly
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Create prerelease
        run: |
          start_tag=$(gh release list -L 2 --exclude-drafts | grep -v nightly | cut -d$'\t' -f3 | sed -n '1p')
          gh release create nightly --prerelease --target master \
            --title 'Nightly build ("master" branch)' \
            --generate-notes \
            --draft=false \
            --notes-start-tag $start_tag \
            *-archive/*.tar.gz
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Delete artifacts
        uses: geekyeggo/delete-artifact@v5
        with:
          failOnError: false
          name: |
            *-archive
.github/workflows/sync-labels.yml (vendored, 2 lines changed)
@@ -9,7 +9,7 @@ jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/checkout@v4
      - uses: micnncim/action-label-syncer@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.gitignore (vendored, 15 lines changed)
@@ -1,16 +1,17 @@
# Common files to ignore in Rust projects
.DS_Store
.idea
*.log
tmp/

# Generated by Cargo
# will have compiled files and executables
debug/
target/
# Generated by Cargo will have compiled files and executables
/target

# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
Cargo.lock
# Generated by rln-cli
rln-cli/database

# Generated by Nix
result

# These are backup files generated by rustfmt
**/*.rs.bk
.gitmodules (vendored, deleted, 8 lines)
@@ -1,8 +0,0 @@
[submodule "rln/vendor/rln"]
	path = rln/vendor/rln
	ignore = dirty
	url = https://github.com/Rate-Limiting-Nullifier/rln_circuits
[submodule "semaphore/vendor/semaphore"]
	path = semaphore/vendor/semaphore
	ignore = dirty
	url = https://github.com/appliedzkp/semaphore.git
CHANGELOG.md (new file, 29 lines)
@@ -0,0 +1,29 @@
# CHANGE LOG

## 2023-02-28 v0.2

This release contains:

- Improved code quality
- Allows consumers of zerokit RLN to set leaves to the Merkle Tree from an arbitrary index. Useful for batching updates to the Merkle Tree.
- Improved performance for proof generation and verification
- rln_wasm which allows for the consumption of RLN through a WebAssembly interface
- Refactored to generate Semaphore-compatible credentials
- Dual License under Apache 2.0 and MIT
- RLN compiles as a static library, which can be consumed through a C FFI

## 2022-09-19 v0.1

Initial beta release.

This release contains:

- RLN Module with API to manage, compute and verify [RLN](https://rfc.vac.dev/spec/32/) zkSNARK proofs and RLN primitives.
- This can be consumed either as a Rust API or as a C FFI. The latter means it can be easily consumed through other environments, such as [Go](https://github.com/status-im/go-zerokit-rln/blob/master/rln/librln.h) or [Nim](https://github.com/status-im/nwaku/blob/4745c7872c69b5fd5c6ddab36df9c5c3d55f57c3/waku/v2/protocol/waku_rln_relay/waku_rln_relay_types.nim).

It also contains the following examples and experiments:

- Basic [example wrapper](https://github.com/vacp2p/zerokit/tree/master/multiplier) around a simple Circom circuit to show Circom integration through ark-circom and FFI.
- Experimental [Semaphore wrapper](https://github.com/vacp2p/zerokit/tree/master/semaphore).

Feedback welcome! You can either [open an issue](https://github.com/vacp2p/zerokit/issues) or come talk to us in our [Vac Discord](https://discord.gg/PQFdubGt6d) #zerokit channel.
CONTRIBUTING.md (new file, 205 lines)
@@ -0,0 +1,205 @@
# Contributing to Zerokit

Thank you for your interest in contributing to Zerokit!
This guide will discuss how the Zerokit team handles [Commits](#commits),
[Pull Requests](#pull-requests) and [Merging](#merging).

**Note:** We won't force external contributors to follow this verbatim,
but following these guidelines helps us accept your contributions.

## Getting Started

1. Fork the repository
2. Create a feature branch: `git checkout -b fix/your-bug-fix` or `git checkout -b feat/your-feature-name`
3. Make your changes following our guidelines
4. Ensure relevant tests pass (see [testing guidelines](#building-and-testing))
5. Commit your changes (signed commits are highly encouraged - see [commit guidelines](#commits))
6. Push and create a Pull Request

## Development Setup

### Prerequisites

Install the required dependencies:

```bash
make installdeps
```

Or use Nix:

```bash
nix develop
```

### Building and Testing

```bash
# Build all crates
make build

# Run standard tests
make test

# Module-specific testing
cd rln && cargo make test_stateless      # Test stateless features
cd rln-wasm && cargo make test_browser   # Test in browser headless mode
cd rln-wasm && cargo make test_parallel  # Test parallel features
```

Choose the appropriate test commands based on your changes:

- Core RLN changes: `make test`
- Stateless features: `cargo make test_stateless`
- WASM/browser features: `cargo make test_browser`
- Parallel computation: `cargo make test_parallel`

### Tools

We recommend using the [markdownlint extension](https://marketplace.visualstudio.com/items?itemName=DavidAnson.vscode-markdownlint)
for VS Code to maintain consistent documentation formatting.

## Commits

We want to keep our commits small and focused.
This allows for easily reviewing individual commits and/or
splitting up pull requests when they grow too big.
Additionally, this allows us to merge smaller changes quicker and release more often.

**All commits must be GPG signed.**
This ensures the authenticity and integrity of contributions.

### Conventional Commits

When making the commit, write the commit message
following the [Conventional Commits (v1.0.0)](https://www.conventionalcommits.org/en/v1.0.0/) specification.
Following this convention allows us to provide an automated release process
that also generates a detailed Changelog.

As described by the specification, our commit messages should be written as:

```markdown
<type>[optional scope]: <description>

[optional body]

[optional footer(s)]
```

Some examples of this pattern include:

```markdown
feat(rln): add parallel witness calculation support
```

```markdown
fix(rln-wasm): resolve memory leak in browser threading
```

```markdown
docs: update RLN protocol flow documentation
```

#### Scopes

Use scopes to improve the Changelog:

- `rln` - Core RLN implementation
- `rln-cli` - Command-line interface
- `rln-wasm` - WebAssembly bindings
- `rln-wasm-utils` - WebAssembly utilities
- `utils` - Cryptographic utilities (Merkle trees, Poseidon hash)
- `ci` - Continuous integration

#### Breaking Changes

Mark breaking changes by adding `!` after the type:

```markdown
feat(rln)!: change proof generation API
```

## Pull Requests

Before creating a pull request, search for related issues.
If none exist, create an issue describing the problem you're solving.

### CI Flow

Our continuous integration automatically runs when you create a Pull Request:

- **Build verification**: All crates compile successfully
- **Test execution**: Comprehensive testing across all modules and feature combinations
- **Code formatting**: `cargo fmt` compliance
- **Linting**: `cargo clippy` checks
- **Cross-platform builds**: Testing on multiple platforms

Ensure the following commands pass before submitting:

```bash
# Format code
cargo fmt --all

# Check for common mistakes
cargo clippy --all-targets

# Run all tests
make test
```

### Adding Tests

Include tests for new functionality:

- **Unit tests** for specific functions
- **Integration tests** for broader functionality
- **WASM tests** for browser compatibility

### Typos and Small Changes

For minor fixes like typos, please report them as issues instead of opening PRs.
This helps us manage resources effectively and ensures meaningful contributions.

## Merging

We use "squash merging" for all pull requests.
This combines all commits into one commit, so keep pull requests small and focused.

### Requirements

- CI checks must pass
- At least one maintainer review and approval
- All review feedback addressed

### Squash Guidelines

When squashing, update the commit title to be a proper Conventional Commit and
include any other relevant commits in the body:

```markdown
feat(rln): implement parallel witness calculation (#123)

fix(tests): resolve memory leak in test suite
chore(ci): update rust toolchain version
```

## Roadmap Alignment

Please refer to our [project roadmap](https://roadmap.vac.dev/) for current development priorities.
Consider how your changes align with these strategic goals when contributing.

## Getting Help

- **Issues**: Create a GitHub issue for bugs or feature requests
- **Discussions**: Use GitHub Discussions for questions
- **Documentation**: Check existing docs and unit tests for examples

## License

By contributing to Zerokit, you agree that your contributions will be licensed under both MIT and
Apache 2.0 licenses, consistent with the project's dual licensing.

## Additional Resources

- [Conventional Commits Guide](https://www.conventionalcommits.org/en/v1.0.0/)
- [Project GitHub Repository](https://github.com/vacp2p/zerokit)
Cargo.lock (generated, new file, 2118 lines)
File diff suppressed because it is too large.
Cargo.toml (14 lines changed)
@@ -1,6 +1,10 @@
[workspace]
members = [
    "multiplier",
    "semaphore",
    "rln",
]
members = ["rln", "utils"]
exclude = ["rln-cli", "rln-wasm", "rln-wasm-utils"]
resolver = "2"

# Compilation profile for any non-workspace member.
# Dependencies are optimized, even in a dev build. This improves dev performance
# while having negligible impact on incremental build times.
[profile.dev.package."*"]
opt-level = 3
Cross.toml (new file, 35 lines)
@@ -0,0 +1,35 @@
[target.x86_64-pc-windows-gnu]
image = "ghcr.io/cross-rs/x86_64-pc-windows-gnu:latest"

[target.aarch64-unknown-linux-gnu]
image = "ghcr.io/cross-rs/aarch64-unknown-linux-gnu:latest"

[target.x86_64-unknown-linux-gnu]
image = "ghcr.io/cross-rs/x86_64-unknown-linux-gnu:latest"

[target.arm-unknown-linux-gnueabi]
image = "ghcr.io/cross-rs/arm-unknown-linux-gnueabi:latest"

[target.i686-pc-windows-gnu]
image = "ghcr.io/cross-rs/i686-pc-windows-gnu:latest"

[target.i686-unknown-linux-gnu]
image = "ghcr.io/cross-rs/i686-unknown-linux-gnu:latest"

[target.arm-unknown-linux-gnueabihf]
image = "ghcr.io/cross-rs/arm-unknown-linux-gnueabihf:latest"

[target.mips-unknown-linux-gnu]
image = "ghcr.io/cross-rs/mips-unknown-linux-gnu:latest"

[target.mips64-unknown-linux-gnuabi64]
image = "ghcr.io/cross-rs/mips64-unknown-linux-gnuabi64:latest"

[target.mips64el-unknown-linux-gnuabi64]
image = "ghcr.io/cross-rs/mips64el-unknown-linux-gnuabi64:latest"

[target.mipsel-unknown-linux-gnu]
image = "ghcr.io/cross-rs/mipsel-unknown-linux-gnu:latest"

[target.aarch64-linux-android]
image = "ghcr.io/cross-rs/aarch64-linux-android:edge"
LICENSE-APACHE (new file, 203 lines)
@@ -0,0 +1,203 @@
Copyright (c) 2022 Vac Research

                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright [yyyy] [name of copyright owner]

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
LICENSE-MIT (new file, 25 lines)
@@ -0,0 +1,25 @@
Copyright (c) 2022 Vac Research

Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:

The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
Makefile (new file, 38 lines)
@@ -0,0 +1,38 @@
.PHONY: all installdeps build test bench clean

all: installdeps build

.fetch-submodules:
	@git submodule update --init --recursive

.pre-build: .fetch-submodules
	@cargo install cargo-make
ifdef CI
	@cargo install cross --git https://github.com/cross-rs/cross.git --rev 1511a28
endif

installdeps: .pre-build
ifeq ($(shell uname),Darwin)
	@brew install ninja binaryen
else ifeq ($(shell uname),Linux)
	@if [ -f /etc/os-release ] && grep -q "ID=nixos" /etc/os-release; then \
		echo "Detected NixOS, skipping apt-get installation."; \
	else \
		sudo apt-get install -y cmake ninja-build binaryen; \
	fi
endif
	@which wasm-pack > /dev/null && wasm-pack --version | grep -q "0.13.1" || cargo install wasm-pack --version=0.13.1
	@test -s "$$HOME/.nvm/nvm.sh" || curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.40.2/install.sh | bash
	@bash -c '. "$$HOME/.nvm/nvm.sh"; [ "$$(node -v 2>/dev/null)" = "v22.14.0" ] || nvm install 22.14.0; nvm use 22.14.0; nvm alias default 22.14.0'

build: installdeps
	@cargo make build

test: build
	@cargo make test

bench: build
	@cargo make bench

clean:
	@cargo clean
Makefile.toml (new file, 2 lines)
@@ -0,0 +1,2 @@
[env]
CARGO_MAKE_EXTEND_WORKSPACE_MAKEFILE = true
README.md (85 lines changed)
@@ -1,19 +1,86 @@
# Zerokit

A set of Zero Knowledge modules, written in Rust and designed to be used in other system programming environments.
[crates.io](https://crates.io/crates/rln)
[CI](https://github.com/vacp2p/zerokit/actions)
[License: MIT](https://opensource.org/licenses/MIT)
[License: Apache 2.0](https://opensource.org/licenses/Apache-2.0)

## Initial scope
A collection of Zero Knowledge modules written in Rust and designed to be used in other system programming environments.

Focus on RLN and being able to use [Circom](https://iden3.io/circom) based
version through ark-circom, as opposed to the native one that currently exists
in Rust.
## Overview

Zerokit provides zero-knowledge cryptographic primitives with a focus on performance, security, and usability.
The current focus is on the Rate-Limiting Nullifier [RLN](https://github.com/Rate-Limiting-Nullifier) implementation.

The current implementation is based on the following
[specification](https://github.com/vacp2p/rfc-index/blob/main/vac/raw/rln-v2.md)
and focused on RLNv2, which allows setting a rate limit for the number of messages a user can send.

## Features

- **RLN Implementation**: Efficient Rate-Limiting Nullifier using zkSNARKs
- **Circom Compatibility**: Uses Circom-based circuits for RLN
- **Cross-Platform**: Support for multiple architectures (see compatibility note below)
- **FFI-Friendly**: Easy to integrate with other languages

## Architecture

Zerokit currently focuses on RLN (Rate-Limiting Nullifier) implementation using [Circom](https://iden3.io/circom)
circuits through ark-circom, providing an alternative to existing native Rust implementations.

## Build and Test

### Install Dependencies

```bash
make installdeps
```

#### Use Nix to install dependencies

```bash
nix develop
```

### Build and Test All Crates

```bash
make build
make test
```

## Release Assets

We use [`cross-rs`](https://github.com/cross-rs/cross) to cross-compile and generate release assets:

```bash
# Example: Build for specific target
cross build --target x86_64-unknown-linux-gnu --release -p rln
```

## Used By

Zerokit powers zero-knowledge functionality in:

- [**nwaku**](https://github.com/waku-org/nwaku) - Nim implementation of the Waku v2 protocol
- [**js-rln**](https://github.com/waku-org/js-rln) - JavaScript bindings for RLN

## Acknowledgements

- Uses [ark-circom](https://github.com/gakonst/ark-circom), a Rust wrapper around Circom.
- Inspired by [Applied ZKP](https://zkp.science/) group work, including [zk-kit](https://github.com/appliedzkp/zk-kit)
- Uses [ark-circom](https://github.com/gakonst/ark-circom) for zkey and Groth16 proof generation
- Witness calculation based on [circom-witnesscalc](https://github.com/iden3/circom-witnesscalc) by iden3.
  The execution graph file used by this code has been generated by means of the same iden3 software.

- Inspired by Applied ZKP group work, e.g. [zk-kit](https://github.com/appliedzkp/zk-kit).
> [!IMPORTANT]
> The circom-witnesscalc code fragments have been borrowed instead of depending on this crate,
> because its types of input and output data were incompatible with the corresponding zerokit code fragments,
> and circom-witnesscalc has some dependencies, which are redundant for our purpose.

- [RLN library](https://github.com/kilic/rln) written in Rust based on Bellman.
## Documentation

- [semaphore-rs](https://github.com/worldcoin/semaphore-rs) written in Rust based on ark-circom.
For detailed documentation on each module:

```bash
cargo doc --open
```
flake.lock (generated, new file, 48 lines)
@@ -0,0 +1,48 @@
{
  "nodes": {
    "nixpkgs": {
      "locked": {
        "lastModified": 1740603184,
        "narHash": "sha256-t+VaahjQAWyA+Ctn2idyo1yxRIYpaDxMgHkgCNiMJa4=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "f44bd8ca21e026135061a0a57dcf3d0775b67a49",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "f44bd8ca21e026135061a0a57dcf3d0775b67a49",
        "type": "github"
      }
    },
    "root": {
      "inputs": {
        "nixpkgs": "nixpkgs",
        "rust-overlay": "rust-overlay"
      }
    },
    "rust-overlay": {
      "inputs": {
        "nixpkgs": [
          "nixpkgs"
        ]
      },
      "locked": {
        "lastModified": 1748399823,
        "narHash": "sha256-kahD8D5hOXOsGbNdoLLnqCL887cjHkx98Izc37nDjlA=",
        "owner": "oxalica",
        "repo": "rust-overlay",
        "rev": "d68a69dc71bc19beb3479800392112c2f6218159",
        "type": "github"
      },
      "original": {
        "owner": "oxalica",
        "repo": "rust-overlay",
        "type": "github"
      }
    }
  },
  "root": "root",
  "version": 7
}
flake.nix (new file, 69 lines)
@@ -0,0 +1,69 @@
{
  description = "A flake for building zerokit";

  inputs = {
    # Version 24.11
    nixpkgs.url = "github:NixOS/nixpkgs?rev=f44bd8ca21e026135061a0a57dcf3d0775b67a49";
    rust-overlay = {
      url = "github:oxalica/rust-overlay";
      inputs.nixpkgs.follows = "nixpkgs";
    };
  };

  outputs = { self, nixpkgs, rust-overlay }:
    let
      stableSystems = [
        "x86_64-linux" "aarch64-linux"
        "x86_64-darwin" "aarch64-darwin"
        "x86_64-windows" "i686-linux"
        "i686-windows"
      ];
      forAllSystems = nixpkgs.lib.genAttrs stableSystems;
      overlays = [
        (import rust-overlay)
        (f: p: { inherit rust-overlay; })
      ];
      pkgsFor = forAllSystems (system: import nixpkgs { inherit system overlays; });
    in rec
    {
      packages = forAllSystems (system: let
        pkgs = pkgsFor.${system};
        buildPackage = pkgs.callPackage ./nix/default.nix;
        buildRln = (buildPackage { src = self; project = "rln"; }).override;
      in rec {
        rln = buildRln { };

        rln-linux-arm64 = buildRln {
          target-platform = "aarch64-multiplatform";
          rust-target = "aarch64-unknown-linux-gnu";
        };

        rln-android-arm64 = buildRln {
          target-platform = "aarch64-android-prebuilt";
          rust-target = "aarch64-linux-android";
        };

        rln-ios-arm64 = buildRln {
          target-platform = "aarch64-darwin";
          rust-target = "aarch64-apple-ios";
        };

        # TODO: Remove legacy name for RLN android library
        zerokit-android-arm64 = rln-android-arm64;

        default = rln;
      });

      devShells = forAllSystems (system: let
        pkgs = pkgsFor.${system};
      in {
        default = pkgs.mkShell {
          buildInputs = with pkgs; [
            git cmake cargo-make rustup
            binaryen ninja gnuplot
            rust-bin.stable.latest.default
          ];
        };
      });
    };
}
multiplier/Cargo.toml (deleted, 35 lines; path inferred from the package contents)
@@ -1,35 +0,0 @@
[package]
name = "multiplier"
version = "0.1.0"
edition = "2018"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]

# WASM operations
# wasmer = { version = "2.0" }
# fnv = { version = "1.0.3", default-features = false }
# num = { version = "0.4.0" }
# num-traits = { version = "0.2.0", default-features = false }
num-bigint = { version = "0.4", default-features = false, features = ["rand"] }

# ZKP Generation
ark-ec = { version = "0.3.0", default-features = false, features = ["parallel"] }
# ark-ff = { version = "0.3.0", default-features = false, features = ["parallel", "asm"] }
ark-std = { version = "0.3.0", default-features = false, features = ["parallel"] }
ark-bn254 = { version = "0.3.0" }
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", features = ["parallel"] }
# ark-poly = { version = "^0.3.0", default-features = false, features = ["parallel"] }
ark-relations = { version = "0.3.0", default-features = false }
ark-serialize = { version = "0.3.0", default-features = false }

ark-circom = { git = "https://github.com/gakonst/ark-circom", features = ["circom-2"] }

# error handling
# thiserror = "1.0.26"
color-eyre = "0.5"

# decoding of data
# hex = "0.4.3"
# byteorder = "1.4.3"
@@ -1,13 +0,0 @@
# Multiplier example

Example wrapper around a basic Circom circuit to test Circom 2 integration
through ark-circom and FFI.

# FFI

To generate C or Nim bindings from Rust FFI, use `cbindgen` or `nbindgen`:

```
cbindgen . -o target/multiplier.h
nbindgen . -o target/multiplier.nim
```
Binary file not shown.
Binary file not shown.
@@ -1,77 +0,0 @@
use crate::public::Multiplier;
use std::slice;

/// Buffer struct is taken from
/// https://github.com/celo-org/celo-threshold-bls-rs/blob/master/crates/threshold-bls-ffi/src/ffi.rs
///
/// Also heavily inspired by https://github.com/kilic/rln/blob/master/src/ffi.rs

#[repr(C)]
#[derive(Clone, Debug, PartialEq)]
pub struct Buffer {
    pub ptr: *const u8,
    pub len: usize,
}

impl From<&[u8]> for Buffer {
    fn from(src: &[u8]) -> Self {
        Self {
            ptr: &src[0] as *const u8,
            len: src.len(),
        }
    }
}

impl<'a> From<&Buffer> for &'a [u8] {
    fn from(src: &Buffer) -> &'a [u8] {
        unsafe { slice::from_raw_parts(src.ptr, src.len) }
    }
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn new_circuit(ctx: *mut *mut Multiplier) -> bool {
    println!("multiplier ffi: new");
    let mul = Multiplier::new();

    unsafe { *ctx = Box::into_raw(Box::new(mul)) };

    true
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn prove(ctx: *const Multiplier, output_buffer: *mut Buffer) -> bool {
    println!("multiplier ffi: prove");
    let mul = unsafe { &*ctx };
    let mut output_data: Vec<u8> = Vec::new();

    match mul.prove(&mut output_data) {
        Ok(proof_data) => proof_data,
        Err(_) => return false,
    };
    unsafe { *output_buffer = Buffer::from(&output_data[..]) };
    std::mem::forget(output_data);
    true
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn verify(
    ctx: *const Multiplier,
    proof_buffer: *const Buffer,
    result_ptr: *mut u32,
) -> bool {
    println!("multiplier ffi: verify");
    let mul = unsafe { &*ctx };
    let proof_data = <&[u8]>::from(unsafe { &*proof_buffer });
    if match mul.verify(proof_data) {
        Ok(verified) => verified,
        Err(_) => return false,
    } {
        unsafe { *result_ptr = 0 };
    } else {
        unsafe { *result_ptr = 1 };
    };
    true
}
@@ -1,2 +0,0 @@
pub mod ffi;
pub mod public;
@@ -1,48 +0,0 @@
use ark_circom::{CircomBuilder, CircomConfig};
use ark_std::rand::thread_rng;
use color_eyre::Result;

use ark_bn254::Bn254;
use ark_groth16::{
    create_random_proof as prove, generate_random_parameters, prepare_verifying_key, verify_proof,
};

fn groth16_proof_example() -> Result<()> {
    let cfg = CircomConfig::<Bn254>::new(
        "./resources/circom2_multiplier2.wasm",
        "./resources/circom2_multiplier2.r1cs",
    )?;

    let mut builder = CircomBuilder::new(cfg);
    builder.push_input("a", 3);
    builder.push_input("b", 11);

    // create an empty instance for setting it up
    let circom = builder.setup();

    let mut rng = thread_rng();
    let params = generate_random_parameters::<Bn254, _, _>(circom, &mut rng)?;

    let circom = builder.build()?;

    let inputs = circom.get_public_inputs().unwrap();

    let proof = prove(circom, &params, &mut rng)?;

    let pvk = prepare_verifying_key(&params.vk);

    let verified = verify_proof(&pvk, &proof, &inputs)?;

    assert!(verified);

    Ok(())
}

fn main() {
    println!("Hello, world!");

    match groth16_proof_example() {
        Ok(_) => println!("Success"),
        Err(_) => println!("Error"),
    }
}
@@ -1,98 +0,0 @@
use ark_circom::{CircomBuilder, CircomCircuit, CircomConfig};
use ark_std::rand::thread_rng;

use ark_bn254::Bn254;
use ark_groth16::{
    create_random_proof as prove, generate_random_parameters, prepare_verifying_key, verify_proof,
    Proof, ProvingKey,
};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
// , SerializationError};

use std::io::{self, Read, Write};

pub struct Multiplier {
    circom: CircomCircuit<Bn254>,
    params: ProvingKey<Bn254>,
}

impl Multiplier {
    // TODO Break this apart here
    pub fn new() -> Multiplier {
        let cfg = CircomConfig::<Bn254>::new(
            "./resources/circom2_multiplier2.wasm",
            "./resources/circom2_multiplier2.r1cs",
        )
        .unwrap();

        let mut builder = CircomBuilder::new(cfg);
        builder.push_input("a", 3);
        builder.push_input("b", 11);

        // create an empty instance for setting it up
        let circom = builder.setup();

        let mut rng = thread_rng();

        let params = generate_random_parameters::<Bn254, _, _>(circom, &mut rng).unwrap();

        let circom = builder.build().unwrap();

        //let inputs = circom.get_public_inputs().unwrap();

        Multiplier { circom, params }
    }

    // TODO Input Read
    pub fn prove<W: Write>(&self, result_data: W) -> io::Result<()> {
        let mut rng = thread_rng();

        // XXX: There's probably a better way to do this
        let circom = self.circom.clone();
        let params = self.params.clone();

        let proof = prove(circom, &params, &mut rng).unwrap();

        // XXX: Unclear if this is different from other serialization(s)
        let _ = proof.serialize(result_data).unwrap();

        Ok(())
    }

    pub fn verify<R: Read>(&self, input_data: R) -> io::Result<bool> {
        let proof = Proof::deserialize(input_data).unwrap();

        let pvk = prepare_verifying_key(&self.params.vk);

        // XXX Part of input data?
        let inputs = self.circom.get_public_inputs().unwrap();

        let verified = verify_proof(&pvk, &proof, &inputs).unwrap();

        Ok(verified)
    }
}

impl Default for Multiplier {
    fn default() -> Self {
        Self::new()
    }
}

#[test]
fn multiplier_proof() {
    let mul = Multiplier::new();
    //let inputs = mul.circom.get_public_inputs().unwrap();

    let mut output_data: Vec<u8> = Vec::new();
    let _ = mul.prove(&mut output_data);

    let proof_data = &output_data[..];

    // XXX Pass as arg?
    //let pvk = prepare_verifying_key(&mul.params.vk);

    let verified = mul.verify(proof_data).unwrap();

    assert!(verified);
}
64  nix/default.nix  Normal file
@@ -0,0 +1,64 @@
{
  pkgs,
  rust-overlay,
  project,
  src ? ../.,
  release ? true,
  target-platform ? null,
  rust-target ? null,
  features ? null,
}:

let
  # Use cross-compilation if target-platform is specified.
  targetPlatformPkgs = if target-platform != null
    then pkgs.pkgsCross.${target-platform}
    else pkgs;

  rust-bin = rust-overlay.lib.mkRustBin { } targetPlatformPkgs.buildPackages;

  # Use Rust and Cargo versions from rust-overlay.
  rustPlatform = targetPlatformPkgs.makeRustPlatform {
    cargo = rust-bin.stable.latest.minimal;
    rustc = rust-bin.stable.latest.minimal;
  };
in rustPlatform.buildRustPackage {
  pname = "zerokit";
  version = if src ? rev then src.rev else "nightly";

  # Improve caching of sources
  src = builtins.path { path = src; name = "zerokit"; };

  cargoLock = {
    lockFile = src + "/Cargo.lock";
    allowBuiltinFetchGit = true;
  };

  nativeBuildInputs = [ pkgs.rust-cbindgen ];

  doCheck = false;

  CARGO_HOME = "/tmp";

  buildPhase = ''
    cargo build --lib \
      ${if release then "--release" else ""} \
      ${if rust-target != null then "--target=${rust-target}" else ""} \
      ${if features != null then "--features=${features}" else ""} \
      --manifest-path ${project}/Cargo.toml
  '';

  installPhase = ''
    set -eu
    mkdir -p $out/lib
    find target -type f -name 'librln.*' -not -path '*/deps/*' -exec cp -v '{}' "$out/lib/" \;
    mkdir -p $out/include
    cbindgen ${src}/rln -l c > "$out/include/rln.h"
  '';

  meta = with pkgs.lib; {
    description = "Zerokit";
    license = licenses.mit;
  };
}
25  rln-cli/Cargo.toml  Normal file
@@ -0,0 +1,25 @@
[package]
name = "rln-cli"
version = "0.4.0"
edition = "2021"

[[example]]
name = "relay"
path = "src/examples/relay.rs"

[[example]]
name = "stateless"
path = "src/examples/stateless.rs"
required-features = ["stateless"]

[dependencies]
rln = { path = "../rln", version = "0.8.0", default-features = false }
zerokit_utils = { path = "../utils", version = "0.6.0", default-features = false }
clap = { version = "4.5.41", features = ["cargo", "derive", "env"] }
color-eyre = "0.6.5"
serde_json = "1.0.141"
serde = { version = "1.0", features = ["derive"] }

[features]
default = ["rln/pmtree-ft", "rln/parallel"]
stateless = ["rln/stateless", "rln/parallel"]
9  rln-cli/Makefile.toml  Normal file
@@ -0,0 +1,9 @@
[tasks.build]
command = "cargo"
args = ["build"]

[tasks.test]
disabled = true

[tasks.bench]
disabled = true
148  rln-cli/README.md  Normal file
@@ -0,0 +1,148 @@
# Zerokit RLN-CLI

The Zerokit RLN-CLI provides a command-line interface for interacting with the public API of the [Zerokit RLN Module](../rln/README.md).

It also contains:

+ a [Relay Example](#relay-example) demonstrating the use of the RLN module for spam prevention.
+ a [Stateless Example](#stateless-example) demonstrating the use of the RLN module's stateless features.

## Configuration

The CLI can be configured using a JSON configuration file (see the [example](example.config.json)).

You can specify the configuration file path using the `RLN_CONFIG_PATH` environment variable:

```bash
export RLN_CONFIG_PATH=example.config.json
```

Alternatively, you can provide the configuration file path as an argument for each command:

```bash
RLN_CONFIG_PATH=example.config.json cargo run -- <SUBCOMMAND> [OPTIONS]
```

If the configuration file is empty, default settings are used, but the tree data folder will be temporary and not saved to the preconfigured path.

We recommend using the example config, as all commands (except `new` and `new-with-params`) require an initialized RLN instance. A typical session is sketched below.
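As a rough end-to-end sketch using the subcommands documented under [CLI Commands](#cli-commands) (`rate_commitment.bin` is a placeholder file name, not something shipped with the CLI):

```bash
export RLN_CONFIG_PATH=example.config.json

# Create an RLN instance backed by the configured tree database
cargo run new --tree-depth 20

# Insert a member's rate commitment at the next free leaf
cargo run set-next-leaf --input rate_commitment.bin

# Inspect the resulting Merkle root
cargo run get-root
```
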
## Relay Example

The following [example](src/examples/relay.rs) demonstrates how RLN enables spam prevention in anonymous environments for multiple users.

You can run the example using the following command:

```bash
cargo run --example relay
```

You can also change **MESSAGE_LIMIT** and **TREE_DEPTH** in the [relay.rs](src/examples/relay.rs) file to see how the RLN instance behaves with different parameters.

To customize the **TREE_DEPTH** constant to a value other than the default of `20`, follow the [Custom Circuit Compilation](../rln/README.md#advanced-custom-circuit-compilation) instructions.

## Stateless Example

The following [example](src/examples/stateless.rs) demonstrates how RLN can be used for stateless features by creating the Merkle tree outside of the RLN instance.

This example functions similarly to the [Relay Example](#relay-example) but uses a stateless RLN instance and a separate Merkle tree.

You can run the example using the following command:

```bash
cargo run --example stateless --no-default-features --features stateless
```
## CLI Commands

### Instance Management

To initialize a new RLN instance:

```bash
cargo run new --tree-depth <DEPTH>
```

To initialize an RLN instance with custom parameters:

```bash
cargo run new-with-params --resources-path <PATH> --tree-depth <DEPTH>
```

To update the Merkle tree depth:

```bash
cargo run set-tree --tree-depth <DEPTH>
```

### Leaf Operations

To set a single leaf:

```bash
cargo run set-leaf --index <INDEX> --input <INPUT_PATH>
```

To set multiple leaves:

```bash
cargo run set-multiple-leaves --index <START_INDEX> --input <INPUT_PATH>
```

To reset multiple leaves:

```bash
cargo run reset-multiple-leaves --input <INPUT_PATH>
```

To set the next available leaf:

```bash
cargo run set-next-leaf --input <INPUT_PATH>
```

To delete a specific leaf:

```bash
cargo run delete-leaf --index <INDEX>
```

### Proof Operations

To generate a proof:

```bash
cargo run prove --input <INPUT_PATH>
```

To generate an RLN proof:

```bash
cargo run generate-proof --input <INPUT_PATH>
```

To verify a proof:

```bash
cargo run verify --input <PROOF_PATH>
```

To verify a proof with multiple Merkle roots:

```bash
cargo run verify-with-roots --input <INPUT_PATH> --roots <ROOTS_PATH>
```

### Tree Information

To retrieve the current Merkle root:

```bash
cargo run get-root
```

To obtain a Merkle proof for a specific index:

```bash
cargo run get-proof --index <INDEX>
```
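For example, the proof commands can be chained into a generate-then-verify round trip; this is a sketch in which `proof_input.bin` and `proof.bin` are placeholder files prepared beforehand:

```bash
# Generate an RLN proof from a serialized witness input (printed to stdout)
cargo run generate-proof --input proof_input.bin

# Verify a previously serialized proof
cargo run verify --input proof.bin
```
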
9  rln-cli/example.config.json  Normal file
@@ -0,0 +1,9 @@
{
  "path": "database",
  "temporary": false,
  "cache_capacity": 1073741824,
  "flush_every_ms": 500,
  "mode": "HighThroughput",
  "use_compression": false,
  "tree_depth": 20
}
69  rln-cli/src/commands.rs  Normal file
@@ -0,0 +1,69 @@
use std::path::PathBuf;

use clap::Subcommand;
use rln::circuit::TEST_TREE_DEPTH;

#[derive(Subcommand)]
pub(crate) enum Commands {
    New {
        #[arg(short, long, default_value_t = TEST_TREE_DEPTH)]
        tree_depth: usize,
    },
    NewWithParams {
        #[arg(short, long, default_value_t = TEST_TREE_DEPTH)]
        tree_depth: usize,
        #[arg(short, long, default_value = "../rln/resources/tree_depth_30")]
        resources_path: PathBuf,
    },
    SetTree {
        #[arg(short, long, default_value_t = TEST_TREE_DEPTH)]
        tree_depth: usize,
    },
    SetLeaf {
        #[arg(short, long)]
        index: usize,
        #[arg(short, long)]
        input: PathBuf,
    },
    SetMultipleLeaves {
        #[arg(short, long)]
        index: usize,
        #[arg(short, long)]
        input: PathBuf,
    },
    ResetMultipleLeaves {
        #[arg(short, long)]
        input: PathBuf,
    },
    SetNextLeaf {
        #[arg(short, long)]
        input: PathBuf,
    },
    DeleteLeaf {
        #[arg(short, long)]
        index: usize,
    },
    GetRoot,
    GetProof {
        #[arg(short, long)]
        index: usize,
    },
    Prove {
        #[arg(short, long)]
        input: PathBuf,
    },
    Verify {
        #[arg(short, long)]
        input: PathBuf,
    },
    GenerateProof {
        #[arg(short, long)]
        input: PathBuf,
    },
    VerifyWithRoots {
        #[arg(short, long)]
        input: PathBuf,
        #[arg(short, long)]
        roots: PathBuf,
    },
}
31  rln-cli/src/config.rs  Normal file
@@ -0,0 +1,31 @@
use std::{fs::File, io::Read, path::PathBuf};

use color_eyre::Result;
use serde::{Deserialize, Serialize};
use serde_json::Value;

pub const RLN_CONFIG_PATH: &str = "RLN_CONFIG_PATH";

#[derive(Serialize, Deserialize)]
pub(crate) struct Config {
    pub tree_config: Option<String>,
}

impl Config {
    pub(crate) fn load_config() -> Result<Config> {
        match std::env::var(RLN_CONFIG_PATH) {
            Ok(env) => {
                let path = PathBuf::from(env);
                let mut file = File::open(path)?;
                let mut contents = String::new();
                file.read_to_string(&mut contents)?;
                let tree_config: Value = serde_json::from_str(&contents)?;
                println!("Initializing RLN with custom config");
                Ok(Config {
                    tree_config: Some(tree_config.to_string()),
                })
            }
            Err(_) => Ok(Config { tree_config: None }),
        }
    }
}
315  rln-cli/src/examples/relay.rs  Normal file
@@ -0,0 +1,315 @@
use std::{
    collections::HashMap,
    fs::File,
    io::{stdin, stdout, Cursor, Read, Write},
    path::{Path, PathBuf},
};

use clap::{Parser, Subcommand};
use color_eyre::{eyre::eyre, Report, Result};
use rln::{
    circuit::Fr,
    hashers::{hash_to_field_le, poseidon_hash},
    protocol::{keygen, prepare_prove_input, prepare_verify_input},
    public::RLN,
    utils::{fr_to_bytes_le, generate_input_buffer, IdSecret},
};

const MESSAGE_LIMIT: u32 = 1;

const TREE_DEPTH: usize = 30;

#[derive(Parser)]
#[command(author, version, about, long_about = None)]
struct Cli {
    #[command(subcommand)]
    command: Commands,
}

#[derive(Subcommand)]
enum Commands {
    List,
    Register,
    Send {
        #[arg(short, long)]
        user_index: usize,
        #[arg(short, long)]
        message_id: u32,
        #[arg(short, long)]
        signal: String,
    },
    Clear,
    Exit,
}

#[derive(Debug, Clone)]
struct Identity {
    identity_secret_hash: IdSecret,
    id_commitment: Fr,
}

impl Identity {
    fn new() -> Self {
        let (identity_secret_hash, id_commitment) = keygen();
        Identity {
            identity_secret_hash,
            id_commitment,
        }
    }
}

struct RLNSystem {
    rln: RLN,
    used_nullifiers: HashMap<[u8; 32], Vec<u8>>,
    local_identities: HashMap<usize, Identity>,
}

impl RLNSystem {
    fn new() -> Result<Self> {
        let mut resources: Vec<Vec<u8>> = Vec::new();
        let resources_path: PathBuf = format!("../rln/resources/tree_depth_{TREE_DEPTH}").into();
        let filenames = ["rln_final.arkzkey", "graph.bin"];
        for filename in filenames {
            let fullpath = resources_path.join(Path::new(filename));
            let mut file = File::open(&fullpath)?;
            let metadata = std::fs::metadata(&fullpath)?;
            let mut output_buffer = vec![0; metadata.len() as usize];
            file.read_exact(&mut output_buffer)?;
            resources.push(output_buffer);
        }
        let rln = RLN::new_with_params(
            TREE_DEPTH,
            resources[0].clone(),
            resources[1].clone(),
            generate_input_buffer(),
        )?;
        println!("RLN instance initialized successfully");
        Ok(RLNSystem {
            rln,
            used_nullifiers: HashMap::new(),
            local_identities: HashMap::new(),
        })
    }

    fn list_users(&self) {
        if self.local_identities.is_empty() {
            println!("No users registered yet.");
            return;
        }

        println!("Registered users:");
        for (index, identity) in &self.local_identities {
            println!("User Index: {index}");
            println!("+ Identity Secret Hash: {}", *identity.identity_secret_hash);
            println!("+ Identity Commitment: {}", identity.id_commitment);
            println!();
        }
    }

    fn register_user(&mut self) -> Result<usize> {
        let index = self.rln.leaves_set();
        let identity = Identity::new();

        let rate_commitment = poseidon_hash(&[identity.id_commitment, Fr::from(MESSAGE_LIMIT)]);
        let mut buffer = Cursor::new(fr_to_bytes_le(&rate_commitment));
        match self.rln.set_next_leaf(&mut buffer) {
            Ok(_) => {
                println!("Registered User Index: {index}");
                println!("+ Identity secret hash: {}", *identity.identity_secret_hash);
                println!("+ Identity commitment: {}", identity.id_commitment);
                self.local_identities.insert(index, identity);
            }
            Err(_) => {
                println!("Maximum user limit reached: 2^{TREE_DEPTH}");
            }
        };

        Ok(index)
    }

    fn generate_proof(
        &mut self,
        user_index: usize,
        message_id: u32,
        signal: &str,
        external_nullifier: Fr,
    ) -> Result<Vec<u8>> {
        let identity = match self.local_identities.get(&user_index) {
            Some(identity) => identity,
            None => return Err(eyre!("user index {user_index} not found")),
        };

        let serialized = prepare_prove_input(
            identity.identity_secret_hash.clone(),
            user_index,
            Fr::from(MESSAGE_LIMIT),
            Fr::from(message_id),
            external_nullifier,
            signal.as_bytes(),
        );
        let mut input_buffer = Cursor::new(serialized);
        let mut output_buffer = Cursor::new(Vec::new());
        self.rln
            .generate_rln_proof(&mut input_buffer, &mut output_buffer)?;

        println!("Proof generated successfully:");
        println!("+ User Index: {user_index}");
        println!("+ Message ID: {message_id}");
        println!("+ Signal: {signal}");

        Ok(output_buffer.into_inner())
    }

    fn verify_proof(&mut self, proof_data: Vec<u8>, signal: &str) -> Result<()> {
        let proof_with_signal = prepare_verify_input(proof_data.clone(), signal.as_bytes());
        let mut input_buffer = Cursor::new(proof_with_signal);

        match self.rln.verify_rln_proof(&mut input_buffer) {
            Ok(true) => {
                let nullifier = &proof_data[256..288];
                let nullifier_key: [u8; 32] = nullifier.try_into()?;

                if let Some(previous_proof) = self.used_nullifiers.get(&nullifier_key) {
                    self.handle_duplicate_message_id(previous_proof.clone(), proof_data)?;
                    return Ok(());
                }
                self.used_nullifiers.insert(nullifier_key, proof_data);
                println!("Message verified and accepted");
            }
            Ok(false) => {
                println!("Verification failed: message_id must be unique within the epoch and satisfy 0 <= message_id < MESSAGE_LIMIT: {MESSAGE_LIMIT}");
            }
            Err(err) => return Err(Report::new(err)),
        }
        Ok(())
    }

    fn handle_duplicate_message_id(
        &mut self,
        previous_proof: Vec<u8>,
        current_proof: Vec<u8>,
    ) -> Result<()> {
        let x = &current_proof[192..224];
        let y = &current_proof[224..256];

        let prev_x = &previous_proof[192..224];
        let prev_y = &previous_proof[224..256];
        if x == prev_x && y == prev_y {
            return Err(eyre!("this exact message and signal has already been sent"));
        }

        let mut proof1 = Cursor::new(previous_proof);
        let mut proof2 = Cursor::new(current_proof);
        let mut output = Cursor::new(Vec::new());

        match self
            .rln
            .recover_id_secret(&mut proof1, &mut proof2, &mut output)
        {
            Ok(_) => {
                let output_data = output.into_inner();
                let (leaked_identity_secret_hash, _) = IdSecret::from_bytes_le(&output_data);

                if let Some((user_index, identity)) = self
                    .local_identities
                    .iter()
                    .find(|(_, identity)| {
                        identity.identity_secret_hash == leaked_identity_secret_hash
                    })
                    .map(|(index, identity)| (*index, identity))
                {
                    let real_identity_secret_hash = identity.identity_secret_hash.clone();
                    if leaked_identity_secret_hash != real_identity_secret_hash {
                        Err(eyre!("identity secret hash mismatch: leaked_identity_secret_hash != real_identity_secret_hash"))
                    } else {
                        println!(
                            "DUPLICATE message ID detected! Revealed identity secret hash: {}",
                            *leaked_identity_secret_hash
                        );
                        self.local_identities.remove(&user_index);
                        self.rln.delete_leaf(user_index)?;
                        println!("User index {user_index} has been SLASHED");
                        Ok(())
                    }
                } else {
                    Err(eyre!("user identity secret hash ******** not found"))
                }
            }
            Err(err) => Err(eyre!("Failed to recover identity secret: {err}")),
        }
    }
}

fn main() -> Result<()> {
    println!("Initializing RLN instance...");
    print!("\x1B[2J\x1B[1;1H");
    let mut rln_system = RLNSystem::new()?;
    let rln_epoch = hash_to_field_le(b"epoch");
    let rln_identifier = hash_to_field_le(b"rln-identifier");
    let external_nullifier = poseidon_hash(&[rln_epoch, rln_identifier]);
    println!("RLN Relay Example:");
    println!("Message Limit: {MESSAGE_LIMIT}");
    println!("----------------------------------");
    println!();
    show_commands();
    loop {
        print!("\n> ");
        stdout().flush()?;
        let mut input = String::new();
        stdin().read_line(&mut input)?;
        let trimmed = input.trim();
        let args = std::iter::once("").chain(trimmed.split_whitespace());

        match Cli::try_parse_from(args) {
            Ok(cli) => match cli.command {
                Commands::List => {
                    rln_system.list_users();
                }
                Commands::Register => {
                    rln_system.register_user()?;
                }
                Commands::Send {
                    user_index,
                    message_id,
                    signal,
                } => {
                    match rln_system.generate_proof(
                        user_index,
                        message_id,
                        &signal,
                        external_nullifier,
                    ) {
                        Ok(proof) => {
                            if let Err(err) = rln_system.verify_proof(proof, &signal) {
                                println!("Verification error: {err}");
                            };
                        }
                        Err(err) => {
                            println!("Proof generation error: {err}");
                        }
                    }
                }
                Commands::Clear => {
                    print!("\x1B[2J\x1B[1;1H");
                    show_commands();
                }
                Commands::Exit => {
                    break;
                }
            },
            Err(err) => {
                eprintln!("Command error: {err}");
            }
        }
    }
    Ok(())
}

fn show_commands() {
    println!("Available commands:");
    println!("  list - List registered users");
    println!("  register - Register a new user index");
    println!("  send -u <index> -m <message_id> -s <signal> - Send a message with proof");
    println!("  clear - Clear the screen");
    println!("  exit - Exit the program");
}
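A sample interactive session with this example (commands as listed by `show_commands`; with `MESSAGE_LIMIT = 1`, reusing message ID 0 is what triggers the slashing path):

```bash
cargo run --example relay
# At the prompt:
#   register                       # insert a new rate commitment
#   send -u 0 -m 0 -s "hello"      # first message: verified and accepted
#   send -u 0 -m 0 -s "bye"        # same nullifier: secret recovered, user slashed
#   exit
```
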
316  rln-cli/src/examples/stateless.rs  Normal file
@@ -0,0 +1,316 @@
#![cfg(feature = "stateless")]

use std::{
    collections::HashMap,
    io::{stdin, stdout, Cursor, Write},
};

use clap::{Parser, Subcommand};
use color_eyre::{eyre::eyre, Result};
use rln::{
    circuit::{Fr, TEST_TREE_DEPTH},
    hashers::{hash_to_field_le, poseidon_hash, PoseidonHash},
    protocol::{keygen, prepare_verify_input, rln_witness_from_values, serialize_witness},
    public::RLN,
    utils::{fr_to_bytes_le, IdSecret},
};
use zerokit_utils::{OptimalMerkleTree, ZerokitMerkleProof, ZerokitMerkleTree};

const MESSAGE_LIMIT: u32 = 1;

type ConfigOf<T> = <T as ZerokitMerkleTree>::Config;

#[derive(Parser)]
#[command(author, version, about, long_about = None)]
struct Cli {
    #[command(subcommand)]
    command: Commands,
}

#[derive(Subcommand)]
enum Commands {
    List,
    Register,
    Send {
        #[arg(short, long)]
        user_index: usize,
        #[arg(short, long)]
        message_id: u32,
        #[arg(short, long)]
        signal: String,
    },
    Clear,
    Exit,
}

#[derive(Debug, Clone)]
struct Identity {
    identity_secret_hash: IdSecret,
    id_commitment: Fr,
}

impl Identity {
    fn new() -> Self {
        let (identity_secret_hash, id_commitment) = keygen();
        Identity {
            identity_secret_hash,
            id_commitment,
        }
    }
}

struct RLNSystem {
    rln: RLN,
    tree: OptimalMerkleTree<PoseidonHash>,
    used_nullifiers: HashMap<[u8; 32], Vec<u8>>,
    local_identities: HashMap<usize, Identity>,
}

impl RLNSystem {
    fn new() -> Result<Self> {
        let rln = RLN::new()?;
        let default_leaf = Fr::from(0);
        let tree: OptimalMerkleTree<PoseidonHash> = OptimalMerkleTree::new(
            TEST_TREE_DEPTH,
            default_leaf,
            ConfigOf::<OptimalMerkleTree<PoseidonHash>>::default(),
        )
        .unwrap();

        Ok(RLNSystem {
            rln,
            tree,
            used_nullifiers: HashMap::new(),
            local_identities: HashMap::new(),
        })
    }

    fn list_users(&self) {
        if self.local_identities.is_empty() {
            println!("No users registered yet.");
            return;
        }

        println!("Registered users:");
        for (index, identity) in &self.local_identities {
            println!("User Index: {index}");
            println!("+ Identity Secret Hash: {}", *identity.identity_secret_hash);
            println!("+ Identity Commitment: {}", identity.id_commitment);
            println!();
        }
    }

    fn register_user(&mut self) -> Result<usize> {
        let index = self.tree.leaves_set();
        let identity = Identity::new();

        let rate_commitment = poseidon_hash(&[identity.id_commitment, Fr::from(MESSAGE_LIMIT)]);
        self.tree.update_next(rate_commitment)?;

        println!("Registered User Index: {index}");
        println!("+ Identity secret hash: {}", *identity.identity_secret_hash);
        println!("+ Identity commitment: {}", identity.id_commitment);

        self.local_identities.insert(index, identity);
        Ok(index)
    }

    fn generate_proof(
        &mut self,
        user_index: usize,
        message_id: u32,
        signal: &str,
        external_nullifier: Fr,
    ) -> Result<Vec<u8>> {
        let identity = match self.local_identities.get(&user_index) {
            Some(identity) => identity,
            None => return Err(eyre!("user index {user_index} not found")),
        };

        let merkle_proof = self.tree.proof(user_index)?;
        let x = hash_to_field_le(signal.as_bytes());

        let rln_witness = rln_witness_from_values(
            identity.identity_secret_hash.clone(),
            merkle_proof.get_path_elements(),
            merkle_proof.get_path_index(),
            x,
            external_nullifier,
            Fr::from(MESSAGE_LIMIT),
            Fr::from(message_id),
        )?;

        let serialized = serialize_witness(&rln_witness)?;
        let mut input_buffer = Cursor::new(serialized);
        let mut output_buffer = Cursor::new(Vec::new());

        self.rln
            .generate_rln_proof_with_witness(&mut input_buffer, &mut output_buffer)?;

        println!("Proof generated successfully:");
        println!("+ User Index: {user_index}");
        println!("+ Message ID: {message_id}");
        println!("+ Signal: {signal}");

        Ok(output_buffer.into_inner())
    }

    fn verify_proof(&mut self, proof_data: Vec<u8>, signal: &str) -> Result<()> {
        let proof_with_signal = prepare_verify_input(proof_data.clone(), signal.as_bytes());
        let mut input_buffer = Cursor::new(proof_with_signal);

        let root = self.tree.root();
        let roots_serialized = fr_to_bytes_le(&root);
        let mut roots_buffer = Cursor::new(roots_serialized);

        match self
            .rln
            .verify_with_roots(&mut input_buffer, &mut roots_buffer)
        {
            Ok(true) => {
                let nullifier = &proof_data[256..288];
                let nullifier_key: [u8; 32] = nullifier.try_into()?;

                if let Some(previous_proof) = self.used_nullifiers.get(&nullifier_key) {
                    self.handle_duplicate_message_id(previous_proof.clone(), proof_data)?;
                    return Ok(());
                }
                self.used_nullifiers.insert(nullifier_key, proof_data);
                println!("Message verified and accepted");
            }
            Ok(false) => {
                println!("Verification failed: message_id must be unique within the epoch and satisfy 0 <= message_id < MESSAGE_LIMIT: {MESSAGE_LIMIT}");
            }
            Err(err) => return Err(err.into()),
        }
        Ok(())
    }

    fn handle_duplicate_message_id(
        &mut self,
        previous_proof: Vec<u8>,
        current_proof: Vec<u8>,
    ) -> Result<()> {
        let x = &current_proof[192..224];
        let y = &current_proof[224..256];

        let prev_x = &previous_proof[192..224];
        let prev_y = &previous_proof[224..256];
        if x == prev_x && y == prev_y {
            return Err(eyre!("this exact message and signal has already been sent"));
        }

        let mut proof1 = Cursor::new(previous_proof);
        let mut proof2 = Cursor::new(current_proof);
        let mut output = Cursor::new(Vec::new());

        match self
            .rln
            .recover_id_secret(&mut proof1, &mut proof2, &mut output)
        {
            Ok(_) => {
                let output_data = output.into_inner();
                let (leaked_identity_secret_hash, _) = IdSecret::from_bytes_le(&output_data);

                if let Some((user_index, identity)) = self
                    .local_identities
                    .iter()
                    .find(|(_, identity)| {
                        identity.identity_secret_hash == leaked_identity_secret_hash
                    })
                    .map(|(index, identity)| (*index, identity))
                {
                    let real_identity_secret_hash = identity.identity_secret_hash.clone();
                    if leaked_identity_secret_hash != real_identity_secret_hash {
                        Err(eyre!("identity secret hash mismatch: leaked_identity_secret_hash != real_identity_secret_hash"))
                    } else {
                        println!(
                            "DUPLICATE message ID detected! Revealed identity secret hash: ********"
                        );
                        self.local_identities.remove(&user_index);
                        println!("User index {user_index} has been SLASHED");
                        Ok(())
                    }
                } else {
                    Err(eyre!("user identity secret hash ******** not found"))
                }
            }
            Err(err) => Err(eyre!("Failed to recover identity secret: {err}")),
        }
    }
}

fn main() -> Result<()> {
    println!("Initializing RLN instance...");
    print!("\x1B[2J\x1B[1;1H");
    let mut rln_system = RLNSystem::new()?;
    let rln_epoch = hash_to_field_le(b"epoch");
    let rln_identifier = hash_to_field_le(b"rln-identifier");
    let external_nullifier = poseidon_hash(&[rln_epoch, rln_identifier]);
    println!("RLN Stateless Relay Example:");
    println!("Message Limit: {MESSAGE_LIMIT}");
    println!("----------------------------------");
    println!();
    show_commands();

    loop {
        print!("\n> ");
        stdout().flush()?;
        let mut input = String::new();
        stdin().read_line(&mut input)?;
        let trimmed = input.trim();
        let args = std::iter::once("").chain(trimmed.split_whitespace());

        match Cli::try_parse_from(args) {
            Ok(cli) => match cli.command {
                Commands::List => {
                    rln_system.list_users();
                }
                Commands::Register => {
                    rln_system.register_user()?;
                }
                Commands::Send {
                    user_index,
                    message_id,
                    signal,
                } => {
                    match rln_system.generate_proof(
                        user_index,
                        message_id,
                        &signal,
                        external_nullifier,
                    ) {
                        Ok(proof) => {
                            if let Err(err) = rln_system.verify_proof(proof, &signal) {
                                println!("Verification error: {err}");
                            };
                        }
                        Err(err) => {
                            println!("Proof generation error: {err}");
                        }
                    }
                }
                Commands::Clear => {
                    print!("\x1B[2J\x1B[1;1H");
                    show_commands();
                }
                Commands::Exit => {
                    break;
                }
            },
            Err(err) => {
                eprintln!("Command error: {err}");
            }
        }
    }
    Ok(())
}

fn show_commands() {
    println!("Available commands:");
    println!("  list - List registered users");
    println!("  register - Register a new user index");
    println!("  send -u <index> -m <message_id> -s <signal> - Send a message with proof");
    println!("  clear - Clear the screen");
    println!("  exit - Exit the program");
}
195  rln-cli/src/main.rs  Normal file
@@ -0,0 +1,195 @@
use std::{
    fs::File,
    io::{Cursor, Read},
    path::Path,
};

use clap::Parser;
use color_eyre::{eyre::Report, Result};
use commands::Commands;
use config::Config;
use rln::{
    public::RLN,
    utils::{bytes_le_to_fr, bytes_le_to_vec_fr},
};
use serde_json::json;
use state::State;

mod commands;
mod config;
mod state;

#[derive(Parser)]
#[command(author, version, about, long_about = None)]
struct Cli {
    #[command(subcommand)]
    command: Option<Commands>,
}

fn main() -> Result<()> {
    let cli = Cli::parse();

    let mut state = match &cli.command {
        Some(Commands::New { .. }) | Some(Commands::NewWithParams { .. }) => State::default(),
        _ => State::load_state()?,
    };

    match cli.command {
        Some(Commands::New { tree_depth }) => {
            let config = Config::load_config()?;
            state.rln = if let Some(tree_config) = config.tree_config {
                println!("Initializing RLN with custom config");
                Some(RLN::new(tree_depth, Cursor::new(tree_config.as_bytes()))?)
            } else {
                println!("Initializing RLN with default config");
                Some(RLN::new(tree_depth, Cursor::new(json!({}).to_string()))?)
            };
            Ok(())
        }
        Some(Commands::NewWithParams {
            tree_depth,
            resources_path,
        }) => {
            let mut resources: Vec<Vec<u8>> = Vec::new();
            let filenames = ["rln_final.arkzkey", "graph.bin"];
            for filename in filenames {
                let fullpath = resources_path.join(Path::new(filename));
                let mut file = File::open(&fullpath)?;
                let metadata = std::fs::metadata(&fullpath)?;
                let mut buffer = vec![0; metadata.len() as usize];
                file.read_exact(&mut buffer)?;
                resources.push(buffer);
            }
            let config = Config::load_config()?;
            if let Some(tree_config) = config.tree_config {
                println!("Initializing RLN with custom config");
                state.rln = Some(RLN::new_with_params(
                    tree_depth,
                    resources[0].clone(),
                    resources[1].clone(),
                    Cursor::new(tree_config.to_string().as_bytes()),
                )?)
            } else {
                println!("Initializing RLN with default config");
                state.rln = Some(RLN::new_with_params(
                    tree_depth,
                    resources[0].clone(),
                    resources[1].clone(),
                    Cursor::new(json!({}).to_string()),
                )?)
            };
            Ok(())
        }
        Some(Commands::SetTree { tree_depth }) => {
            state
                .rln
                .ok_or(Report::msg("no RLN instance initialized"))?
                .set_tree(tree_depth)?;
            Ok(())
        }
        Some(Commands::SetLeaf { index, input }) => {
            let input_data = File::open(input)?;
            state
                .rln
                .ok_or(Report::msg("no RLN instance initialized"))?
                .set_leaf(index, input_data)?;
            Ok(())
        }
        Some(Commands::SetMultipleLeaves { index, input }) => {
            let input_data = File::open(input)?;
            state
                .rln
                .ok_or(Report::msg("no RLN instance initialized"))?
                .set_leaves_from(index, input_data)?;
            Ok(())
        }
        Some(Commands::ResetMultipleLeaves { input }) => {
            let input_data = File::open(input)?;
            state
                .rln
                .ok_or(Report::msg("no RLN instance initialized"))?
                .init_tree_with_leaves(input_data)?;
            Ok(())
        }
        Some(Commands::SetNextLeaf { input }) => {
            let input_data = File::open(input)?;
            state
                .rln
                .ok_or(Report::msg("no RLN instance initialized"))?
                .set_next_leaf(input_data)?;
            Ok(())
        }
        Some(Commands::DeleteLeaf { index }) => {
            state
                .rln
                .ok_or(Report::msg("no RLN instance initialized"))?
                .delete_leaf(index)?;
            Ok(())
        }
        Some(Commands::Prove { input }) => {
            let input_data = File::open(input)?;
            let mut output_buffer = Cursor::new(Vec::<u8>::new());
            state
                .rln
                .ok_or(Report::msg("no RLN instance initialized"))?
                .prove(input_data, &mut output_buffer)?;
            let proof = output_buffer.into_inner();
            println!("proof: {proof:?}");
            Ok(())
        }
        Some(Commands::Verify { input }) => {
            let input_data = File::open(input)?;
            let verified = state
                .rln
                .ok_or(Report::msg("no RLN instance initialized"))?
                .verify(input_data)?;
            println!("verified: {verified:?}");
            Ok(())
        }
        Some(Commands::GenerateProof { input }) => {
            let input_data = File::open(input)?;
            let mut output_buffer = Cursor::new(Vec::<u8>::new());
            state
                .rln
                .ok_or(Report::msg("no RLN instance initialized"))?
                .generate_rln_proof(input_data, &mut output_buffer)?;
            let proof = output_buffer.into_inner();
            println!("proof: {proof:?}");
            Ok(())
        }
        Some(Commands::VerifyWithRoots { input, roots }) => {
            let input_data = File::open(input)?;
            let roots_data = File::open(roots)?;
            state
                .rln
                .ok_or(Report::msg("no RLN instance initialized"))?
                .verify_with_roots(input_data, roots_data)?;
            Ok(())
        }
        Some(Commands::GetRoot) => {
            let mut output_buffer = Cursor::new(Vec::<u8>::new());
            state
                .rln
                .ok_or(Report::msg("no RLN instance initialized"))?
                .get_root(&mut output_buffer)
                .unwrap();
            let (root, _) = bytes_le_to_fr(&output_buffer.into_inner());
            println!("root: {root}");
            Ok(())
        }
        Some(Commands::GetProof { index }) => {
            let mut output_buffer = Cursor::new(Vec::<u8>::new());
            state
                .rln
                .ok_or(Report::msg("no RLN instance initialized"))?
                .get_proof(index, &mut output_buffer)?;
            let output_buffer_inner = output_buffer.into_inner();
            let (path_elements, _) = bytes_le_to_vec_fr(&output_buffer_inner)?;
            for (index, element) in path_elements.iter().enumerate() {
                println!("path element {index}: {element}");
            }
            Ok(())
        }
        None => Ok(()),
    }
}
31  rln-cli/src/state.rs  Normal file
@@ -0,0 +1,31 @@
use std::io::Cursor;

use color_eyre::Result;
use rln::{circuit::TEST_TREE_DEPTH, public::RLN};
use serde_json::Value;

use crate::config::Config;

#[derive(Default)]
pub(crate) struct State {
    pub rln: Option<RLN>,
}

impl State {
    pub(crate) fn load_state() -> Result<State> {
        let config = Config::load_config()?;
        let rln = if let Some(tree_config) = config.tree_config {
            let config_json: Value = serde_json::from_str(&tree_config)?;
            let tree_depth = config_json["tree_depth"]
                .as_u64()
                .unwrap_or(TEST_TREE_DEPTH as u64);
            Some(RLN::new(
                tree_depth as usize,
                Cursor::new(tree_config.as_bytes()),
            )?)
        } else {
            None
        };
        Ok(State { rln })
    }
}
21  rln-wasm-utils/.gitignore  vendored  Normal file
@@ -0,0 +1,21 @@
# Common files to ignore in Rust projects
.DS_Store
.idea
*.log
tmp/

# Generated by Cargo; will contain compiled files and executables
/target
Cargo.lock

# Generated by rln-wasm
pkg/

# Generated by Nix
result

# These are backup files generated by rustfmt
**/*.rs.bk

# MSVC Windows builds of rustc generate these, which store debugging information
*.pdb
35  rln-wasm-utils/Cargo.toml  Normal file
@@ -0,0 +1,35 @@
[package]
name = "rln-wasm-utils"
version = "0.1.0"
edition = "2024"

[lib]
crate-type = ["cdylib", "rlib"]

[dependencies]
# TODO: remove this once we have a proper release
rln = { path = "../rln", default-features = false, features = ["stateless"] }
js-sys = "0.3.77"
wasm-bindgen = "0.2.100"
rand = "0.8.5"

# The `console_error_panic_hook` crate provides better debugging of panics by
# logging them with `console.error`. This is great for development, but requires
# all the `std::fmt` and `std::panicking` infrastructure, so isn't great for
# code size when deploying.
console_error_panic_hook = { version = "0.1.7", optional = true }

[target.'cfg(target_arch = "wasm32")'.dependencies]
getrandom = { version = "0.2.16", features = ["js"] }

[dev-dependencies]
wasm-bindgen-test = "0.3.50"
web-sys = { version = "0.3.77", features = ["console"] }
ark-std = { version = "0.5.0", default-features = false }

[features]
default = ["console_error_panic_hook"]

[package.metadata.docs.rs]
all-features = true
36  rln-wasm-utils/Makefile.toml  Normal file
@@ -0,0 +1,36 @@
[tasks.build]
clear = true
dependencies = ["pack_build", "pack_rename", "pack_resize"]

[tasks.pack_build]
command = "wasm-pack"
args = ["build", "--release", "--target", "web", "--scope", "waku"]

[tasks.pack_rename]
script = "sed -i.bak 's/rln-wasm-utils/zerokit-rln-wasm-utils/g' pkg/package.json && rm pkg/package.json.bak"

[tasks.pack_resize]
command = "wasm-opt"
args = [
  "pkg/rln_wasm_utils_bg.wasm",
  "-Oz",
  "--strip-debug",
  "--strip-dwarf",
  "--remove-unused-module-elements",
  "--vacuum",
  "-o",
  "pkg/rln_wasm_utils_bg.wasm",
]

[tasks.test]
command = "wasm-pack"
args = [
  "test",
  "--release",
  "--node",
  "--target",
  "wasm32-unknown-unknown",
  "--",
  "--nocapture",
]
dependencies = ["build"]
206  rln-wasm-utils/README.md  Normal file
@@ -0,0 +1,206 @@
# RLN WASM Utils

[](https://badge.fury.io/js/@waku%2Fzerokit-rln-wasm-utils)
[](https://opensource.org/licenses/MIT)
[](https://opensource.org/licenses/Apache-2.0)

The Zerokit RLN WASM Utils Module provides WebAssembly bindings for Rate-Limiting Nullifier [RLN](https://rfc.vac.dev/spec/32/) cryptographic primitives.
This module offers comprehensive functionality for the identity generation and hashing needed by RLN applications.

## Features

### Identity Generation

- **Random Identity Generation**: Generate cryptographically secure random identities
- **Seeded Identity Generation**: Generate deterministic identities from seeds
- **Extended Identity Generation**: Generate extended identities with additional parameters
- **Seeded Extended Identity Generation**: Generate deterministic extended identities from seeds
- **Endianness Support**: Both little-endian and big-endian serialization support

### Hashing

- **Standard Hashing**: Hash arbitrary data to field elements
- **Poseidon Hashing**: Advanced cryptographic hashing using the Poseidon hash function
- **Endianness Support**: Both little-endian and big-endian serialization support

## API Reference

### Identity Generation Functions

#### `generateMembershipKey(isLittleEndian: boolean): Uint8Array`

Generates a random membership key pair (identity secret and commitment).

**Inputs:**

- `isLittleEndian`: Boolean indicating endianness for serialization

**Outputs:** Serialized identity pair as `Uint8Array` in the corresponding endianness

#### `generateExtendedMembershipKey(isLittleEndian: boolean): Uint8Array`

Generates an extended membership key with additional parameters.

**Inputs:**

- `isLittleEndian`: Boolean indicating endianness for serialization

**Outputs:** Serialized extended identity tuple as `Uint8Array` in the corresponding endianness

#### `generateSeededMembershipKey(seed: Uint8Array, isLittleEndian: boolean): Uint8Array`

Generates a deterministic membership key from a seed.

**Inputs:**

- `seed`: Seed data as `Uint8Array`
- `isLittleEndian`: Boolean indicating endianness for serialization

**Outputs:** Serialized identity pair as `Uint8Array` in the corresponding endianness

#### `generateSeededExtendedMembershipKey(seed: Uint8Array, isLittleEndian: boolean): Uint8Array`

Generates a deterministic extended membership key from a seed.

**Inputs:**

- `seed`: Seed data as `Uint8Array`
- `isLittleEndian`: Boolean indicating endianness for serialization

**Outputs:** Serialized extended identity tuple as `Uint8Array` in the corresponding endianness

### Hashing Functions

#### `hash(input: Uint8Array, isLittleEndian: boolean): Uint8Array`

Hashes input data to a field element.

**Inputs:**

- `input`: Input data as `Uint8Array`
- `isLittleEndian`: Boolean indicating endianness for serialization

**Outputs:** Serialized hash result as `Uint8Array` in the corresponding endianness

#### `poseidonHash(input: Uint8Array, isLittleEndian: boolean): Uint8Array`

Computes the Poseidon hash of input field elements.

**Inputs:**

- `input`: Serialized field elements as `Uint8Array` (format: length + field elements)
- `isLittleEndian`: Boolean indicating endianness for serialization

**Outputs:** Serialized hash result as `Uint8Array` in the corresponding endianness

## Usage Examples

### JavaScript/TypeScript

```javascript
import init, {
  generateMembershipKey,
  generateSeededMembershipKey,
  hash,
  poseidonHash
} from '@waku/zerokit-rln-wasm-utils';

// Initialize the WASM module
await init();

// Generate a random membership key
const membershipKey = generateMembershipKey(true); // little-endian
console.log('Membership key:', membershipKey);

// Generate a deterministic membership key from seed
const seed = new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
const seededKey = generateSeededMembershipKey(seed, true);
console.log('Seeded key:', seededKey);

// Hash some data
const input = new Uint8Array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
const hashResult = hash(input, true);
console.log('Hash result:', hashResult);

// Poseidon hash with field elements
const fieldElements = new Uint8Array([
  // Length (8 bytes) + field elements (32 bytes each)
  1, 0, 0, 0, 0, 0, 0, 0, // length = 1
  // field element data...
]);
const poseidonResult = poseidonHash(fieldElements, true);
console.log('Poseidon hash:', poseidonResult);
```

## Install Dependencies

> [!NOTE]
> This project requires the following tools:
>
> - `wasm-pack` - for compiling Rust to WebAssembly
> - `cargo-make` - for running build commands
> - `nvm` - to install and manage Node.js
>
> Ensure all dependencies are installed before proceeding.

### Manually

#### Install `wasm-pack`

```bash
cargo install wasm-pack --version=0.13.1
```

#### Install `cargo-make`

```bash
cargo install cargo-make
```

#### Install `Node.js`

If you don't have `nvm` (Node Version Manager), install it by following
the [installation instructions](https://github.com/nvm-sh/nvm?tab=readme-ov-file#install--update-script).

After installing `nvm`, install and use Node.js `v22.14.0`:

```bash
nvm install 22.14.0
nvm use 22.14.0
nvm alias default 22.14.0
```

If you already have Node.js installed,
check your version with the `node -v` command; the version must be strictly greater than 22.

### Or install everything

You can run the following command from the root of the repository to install all required dependencies for `zerokit`:

```bash
make installdeps
```

## Building the library

First, navigate to the rln-wasm-utils directory:

```bash
cd rln-wasm-utils
```

Compile rln-wasm-utils for `wasm32-unknown-unknown`:

```bash
cargo make build
```

## Running tests

```bash
cargo make test
```

## License

This project is licensed under both MIT and Apache 2.0 licenses. See the LICENSE files for details.
112  rln-wasm-utils/src/lib.rs  Normal file
@@ -0,0 +1,112 @@
|
||||
#![cfg(target_arch = "wasm32")]

use js_sys::Uint8Array;
use rln::public::{
    extended_key_gen, hash, key_gen, poseidon_hash, seeded_extended_key_gen, seeded_key_gen,
};
use std::vec::Vec;
use wasm_bindgen::prelude::*;

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateMembershipKey)]
pub fn wasm_key_gen(is_little_endian: bool) -> Result<Uint8Array, String> {
    let mut output_data: Vec<u8> = Vec::new();
    if let Err(err) = key_gen(&mut output_data, is_little_endian) {
        std::mem::forget(output_data);
        Err(format!(
            "Msg: could not generate membership keys, Error: {:#?}",
            err
        ))
    } else {
        let result = Uint8Array::from(&output_data[..]);
        std::mem::forget(output_data);
        Ok(result)
    }
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateExtendedMembershipKey)]
pub fn wasm_extended_key_gen(is_little_endian: bool) -> Result<Uint8Array, String> {
    let mut output_data: Vec<u8> = Vec::new();
    if let Err(err) = extended_key_gen(&mut output_data, is_little_endian) {
        std::mem::forget(output_data);
        Err(format!(
            "Msg: could not generate membership keys, Error: {:#?}",
            err
        ))
    } else {
        let result = Uint8Array::from(&output_data[..]);
        std::mem::forget(output_data);
        Ok(result)
    }
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateSeededMembershipKey)]
pub fn wasm_seeded_key_gen(seed: Uint8Array, is_little_endian: bool) -> Result<Uint8Array, String> {
    let mut output_data: Vec<u8> = Vec::new();
    let input_data = &seed.to_vec()[..];
    if let Err(err) = seeded_key_gen(input_data, &mut output_data, is_little_endian) {
        std::mem::forget(output_data);
        Err(format!(
            "Msg: could not generate membership key, Error: {:#?}",
            err
        ))
    } else {
        let result = Uint8Array::from(&output_data[..]);
        std::mem::forget(output_data);
        Ok(result)
    }
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateSeededExtendedMembershipKey)]
pub fn wasm_seeded_extended_key_gen(
    seed: Uint8Array,
    is_little_endian: bool,
) -> Result<Uint8Array, String> {
    let mut output_data: Vec<u8> = Vec::new();
    let input_data = &seed.to_vec()[..];
    if let Err(err) = seeded_extended_key_gen(input_data, &mut output_data, is_little_endian) {
        std::mem::forget(output_data);
        Err(format!(
            "Msg: could not generate membership key, Error: {:#?}",
            err
        ))
    } else {
        let result = Uint8Array::from(&output_data[..]);
        std::mem::forget(output_data);
        Ok(result)
    }
}

#[wasm_bindgen(js_name = hash)]
pub fn wasm_hash(input: Uint8Array, is_little_endian: bool) -> Result<Uint8Array, String> {
    let mut output_data: Vec<u8> = Vec::new();
    let input_data = &input.to_vec()[..];
    if let Err(err) = hash(input_data, &mut output_data, is_little_endian) {
        std::mem::forget(output_data);
        Err(format!("Msg: could not generate hash, Error: {:#?}", err))
    } else {
        let result = Uint8Array::from(&output_data[..]);
        std::mem::forget(output_data);
        Ok(result)
    }
}

#[wasm_bindgen(js_name = poseidonHash)]
pub fn wasm_poseidon_hash(input: Uint8Array, is_little_endian: bool) -> Result<Uint8Array, String> {
    let mut output_data: Vec<u8> = Vec::new();
    let input_data = &input.to_vec()[..];
    if let Err(err) = poseidon_hash(input_data, &mut output_data, is_little_endian) {
        std::mem::forget(output_data);
        Err(format!(
            "Msg: could not generate poseidon hash, Error: {:#?}",
            err
        ))
    } else {
        let result = Uint8Array::from(&output_data[..]);
        std::mem::forget(output_data);
        Ok(result)
    }
}
114 rln-wasm-utils/tests/wasm_utils_test.rs Normal file

@@ -0,0 +1,114 @@
#![cfg(target_arch = "wasm32")]

#[cfg(test)]
mod test {
    use ark_std::{UniformRand, rand::thread_rng};
    use rand::Rng;
    use rln::circuit::Fr;
    use rln::hashers::{ROUND_PARAMS, hash_to_field_le, poseidon_hash};
    use rln::protocol::{
        deserialize_identity_pair_be, deserialize_identity_pair_le, deserialize_identity_tuple_be,
        deserialize_identity_tuple_le,
    };
    use rln::utils::{bytes_le_to_fr, vec_fr_to_bytes_le};
    use rln_wasm_utils::{
        wasm_extended_key_gen, wasm_hash, wasm_key_gen, wasm_poseidon_hash,
        wasm_seeded_extended_key_gen, wasm_seeded_key_gen,
    };
    use wasm_bindgen_test::*;

    #[wasm_bindgen_test]
    fn test_wasm_key_gen() {
        let result_le = wasm_key_gen(true);
        assert!(result_le.is_ok());
        deserialize_identity_pair_le(result_le.unwrap().to_vec());

        let result_be = wasm_key_gen(false);
        assert!(result_be.is_ok());
        deserialize_identity_pair_be(result_be.unwrap().to_vec());
    }

    #[wasm_bindgen_test]
    fn test_wasm_extended_key_gen() {
        let result_le = wasm_extended_key_gen(true);
        assert!(result_le.is_ok());
        deserialize_identity_tuple_le(result_le.unwrap().to_vec());

        let result_be = wasm_extended_key_gen(false);
        assert!(result_be.is_ok());
        deserialize_identity_tuple_be(result_be.unwrap().to_vec());
    }

    #[wasm_bindgen_test]
    fn test_wasm_seeded_key_gen() {
        // Create a test seed
        let seed_data = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
        let seed = js_sys::Uint8Array::from(&seed_data[..]);

        let result_le = wasm_seeded_key_gen(seed.clone(), true);
        assert!(result_le.is_ok());
        let fr_le = deserialize_identity_pair_le(result_le.unwrap().to_vec());

        let result_be = wasm_seeded_key_gen(seed, false);
        assert!(result_be.is_ok());
        let fr_be = deserialize_identity_pair_be(result_be.unwrap().to_vec());

        assert_eq!(fr_le, fr_be);
    }

    #[wasm_bindgen_test]
    fn test_wasm_seeded_extended_key_gen() {
        // Create a test seed
        let seed_data = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
        let seed = js_sys::Uint8Array::from(&seed_data[..]);

        let result_le = wasm_seeded_extended_key_gen(seed.clone(), true);
        assert!(result_le.is_ok());
        let fr_le = deserialize_identity_tuple_le(result_le.unwrap().to_vec());

        let result_be = wasm_seeded_extended_key_gen(seed, false);
        assert!(result_be.is_ok());
        let fr_be = deserialize_identity_tuple_be(result_be.unwrap().to_vec());

        assert_eq!(fr_le, fr_be);
    }

    #[wasm_bindgen_test]
    fn test_wasm_hash() {
        // Create test input data
        let signal: [u8; 32] = [0; 32];
        let input = js_sys::Uint8Array::from(&signal[..]);

        let result_le = wasm_hash(input.clone(), true);
        assert!(result_le.is_ok());

        let serialized_hash = result_le.unwrap().to_vec();
        let (hash1, _) = bytes_le_to_fr(&serialized_hash);

        let hash2 = hash_to_field_le(&signal);

        assert_eq!(hash1, hash2);
    }

    #[wasm_bindgen_test]
    fn test_wasm_poseidon_hash() {
        let mut rng = thread_rng();
        let number_of_inputs = rng.gen_range(1..ROUND_PARAMS.len());
        let mut inputs = Vec::with_capacity(number_of_inputs);
        for _ in 0..number_of_inputs {
            inputs.push(Fr::rand(&mut rng));
        }
        let inputs_ser = vec_fr_to_bytes_le(&inputs);
        let input = js_sys::Uint8Array::from(&inputs_ser[..]);

        let expected_hash = poseidon_hash(inputs.as_ref());

        let result_le = wasm_poseidon_hash(input.clone(), true);
        assert!(result_le.is_ok());

        let serialized_hash = result_le.unwrap().to_vec();
        let (received_hash, _) = bytes_le_to_fr(&serialized_hash);

        assert_eq!(received_hash, expected_hash);
    }
}
21 rln-wasm/.gitignore vendored Normal file

@@ -0,0 +1,21 @@
# Common files to ignore in Rust projects
.DS_Store
.idea
*.log
tmp/

# Generated by Cargo; contains compiled files and executables
/target
Cargo.lock

# Generated by rln-wasm
pkg/

# Generated by Nix
result

# These are backup files generated by rustfmt
**/*.rs.bk

# MSVC Windows builds of rustc generate these, which store debugging information
*.pdb
47 rln-wasm/Cargo.toml Normal file

@@ -0,0 +1,47 @@
[package]
name = "rln-wasm"
version = "0.2.0"
edition = "2021"
license = "MIT or Apache2"

[lib]
crate-type = ["cdylib", "rlib"]

[dependencies]
rln = { path = "../rln", version = "0.8.0", default-features = false, features = [
    "stateless",
] }
rln-wasm-utils = { path = "../rln-wasm-utils", version = "0.1.0", default-features = false }
zerokit_utils = { path = "../utils", version = "0.6.0", default-features = false }
num-bigint = { version = "0.4.6", default-features = false }
js-sys = "0.3.77"
wasm-bindgen = "0.2.100"
serde-wasm-bindgen = "0.6.5"
wasm-bindgen-rayon = { version = "1.3.0", features = [
    "no-bundler",
], optional = true }

# The `console_error_panic_hook` crate provides better debugging of panics by
# logging them with `console.error`. This is great for development, but requires
# all the `std::fmt` and `std::panicking` infrastructure, so isn't great for
# code size when deploying.
console_error_panic_hook = { version = "0.1.7", optional = true }

[target.'cfg(target_arch = "wasm32")'.dependencies]
getrandom = { version = "0.2.16", features = ["js"] }

[dev-dependencies]
serde_json = "1.0.141"
wasm-bindgen-test = "0.3.50"
wasm-bindgen-futures = "0.4.50"

[dev-dependencies.web-sys]
version = "0.3.77"
features = ["Window", "Navigator"]

[features]
default = ["console_error_panic_hook"]
parallel = ["rln/parallel", "wasm-bindgen-rayon"]

[package.metadata.docs.rs]
all-features = true
99 rln-wasm/Makefile.toml Normal file

@@ -0,0 +1,99 @@
[tasks.build]
clear = true
dependencies = ["pack_build", "pack_rename", "pack_resize"]

[tasks.build_parallel]
clear = true
dependencies = ["pack_build_parallel", "pack_rename", "pack_resize"]

[tasks.pack_build]
command = "wasm-pack"
args = ["build", "--release", "--target", "web", "--scope", "waku"]

[tasks.pack_build_parallel]
command = "env"
args = [
    "RUSTFLAGS=-C target-feature=+atomics,+bulk-memory,+mutable-globals",
    "rustup",
    "run",
    "nightly",
    "wasm-pack",
    "build",
    "--release",
    "--target",
    "web",
    "--scope",
    "waku",
    "--features",
    "parallel",
    "-Z",
    "build-std=panic_abort,std",
]
[tasks.pack_rename]
script = "sed -i.bak 's/rln-wasm/zerokit-rln-wasm/g' pkg/package.json && rm pkg/package.json.bak"

[tasks.pack_resize]
command = "wasm-opt"
args = [
    "pkg/rln_wasm_bg.wasm",
    "-Oz",
    "--strip-debug",
    "--strip-dwarf",
    "--remove-unused-module-elements",
    "--vacuum",
    "-o",
    "pkg/rln_wasm_bg.wasm",
]

[tasks.test]
command = "wasm-pack"
args = [
    "test",
    "--release",
    "--node",
    "--target",
    "wasm32-unknown-unknown",
    "--",
    "--nocapture",
]
dependencies = ["build"]

[tasks.test_browser]
command = "wasm-pack"
args = [
    "test",
    "--release",
    "--chrome",
    "--headless",
    "--target",
    "wasm32-unknown-unknown",
    "--",
    "--nocapture",
]
dependencies = ["build"]

[tasks.test_parallel]
command = "env"
args = [
    "RUSTFLAGS=-C target-feature=+atomics,+bulk-memory,+mutable-globals",
    "rustup",
    "run",
    "nightly",
    "wasm-pack",
    "test",
    "--release",
    "--chrome",
    "--headless",
    "--target",
    "wasm32-unknown-unknown",
    "--features",
    "parallel",
    "-Z",
    "build-std=panic_abort,std",
    "--",
    "--nocapture",
]
dependencies = ["build_parallel"]

[tasks.bench]
disabled = true
146 rln-wasm/README.md Normal file

@@ -0,0 +1,146 @@
# RLN for WASM

[](https://badge.fury.io/js/@waku%2Fzerokit-rln-wasm)
[](https://opensource.org/licenses/MIT)
[](https://opensource.org/licenses/Apache-2.0)

The Zerokit RLN WASM Module provides WebAssembly bindings for working with
Rate-Limiting Nullifier [RLN](https://rfc.vac.dev/spec/32/) zkSNARK proofs and primitives.
This module is used by [waku-org/js-rln](https://github.com/waku-org/js-rln/) to enable
RLN functionality in JavaScript/TypeScript applications.

## Install Dependencies

> [!NOTE]
> This project requires the following tools:
>
> - `wasm-pack` - for compiling Rust to WebAssembly
> - `cargo-make` - for running build commands
> - `nvm` - to install and manage Node.js
>
> Ensure all dependencies are installed before proceeding.

### Manually

#### Install `wasm-pack`

```bash
cargo install wasm-pack --version=0.13.1
```

#### Install `cargo-make`

```bash
cargo install cargo-make
```

#### Install `Node.js`

If you don't have `nvm` (Node Version Manager), install it by following
the [installation instructions](https://github.com/nvm-sh/nvm?tab=readme-ov-file#install--update-script).

After installing `nvm`, install and use Node.js `v22.14.0`:

```bash
nvm install 22.14.0
nvm use 22.14.0
nvm alias default 22.14.0
```

If you already have Node.js installed,
check your version with the `node -v` command — the version must be strictly greater than 22.

### Or install everything

You can run the following command from the root of the repository to install all required dependencies for `zerokit`:

```bash
make installdeps
```

## Building the library

First, navigate to the rln-wasm directory:

```bash
cd rln-wasm
```

Compile zerokit for `wasm32-unknown-unknown`:

```bash
cargo make build
```

## Running tests and benchmarks

```bash
cargo make test
```

If you want to run the tests in headless browser mode, you can use the following command:

```bash
cargo make test_browser
```

## Parallel computation

The library supports parallel computation using the `wasm-bindgen-rayon` crate,
enabling multi-threaded execution in the browser.

> [!NOTE]
> Parallel support is not enabled by default due to WebAssembly and browser limitations. \
> Compiling this feature requires `nightly` Rust.

### Build Setup

#### Install `nightly` Rust

```bash
rustup install nightly
```

### Build Commands

To enable parallel computation for WebAssembly threads, you can use the following command:

```bash
cargo make build_parallel
```

### WebAssembly Threading Support

Most modern browsers support WebAssembly threads,
but they require the following headers to enable `SharedArrayBuffer`, which is necessary for multithreading:

- Cross-Origin-Opener-Policy: same-origin
- Cross-Origin-Embedder-Policy: require-corp

Without these, the application will fall back to single-threaded mode.

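As one minimal sketch of how a development server might attach these headers, the snippet below uses only Node.js built-ins; the port and file-serving logic are illustrative assumptions, not part of this module:

```js
// Minimal dev-server sketch (illustrative): serves files from the current
// directory with the two headers required to enable SharedArrayBuffer.
import { createServer } from 'node:http';
import { readFile } from 'node:fs/promises';

createServer(async (req, res) => {
  res.setHeader('Cross-Origin-Opener-Policy', 'same-origin');
  res.setHeader('Cross-Origin-Embedder-Policy', 'require-corp');
  try {
    const path = req.url === '/' ? '/index.html' : req.url;
    res.end(await readFile(`.${path}`));
  } catch {
    res.statusCode = 404;
    res.end('not found');
  }
}).listen(8080);
```

In production, the same two headers would typically be set in the web server or CDN configuration instead.
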
## Feature detection

If you're targeting [older browser versions that didn't support WebAssembly threads yet](https://webassembly.org/roadmap/),
you'll likely want to create two builds - one with thread support and one without -
and use feature detection to choose the right one on the JavaScript side.

You can use the [wasm-feature-detect](https://github.com/GoogleChromeLabs/wasm-feature-detect) library for this purpose.
For example, your code might look like this:

```js
import { threads } from 'wasm-feature-detect';

let wasmPkg;

if (await threads()) {
  wasmPkg = await import('./pkg-with-threads/index.js');
  await wasmPkg.default();
  await wasmPkg.initThreadPool(navigator.hardwareConcurrency);
} else {
  wasmPkg = await import('./pkg-without-threads/index.js');
  await wasmPkg.default();
}

wasmPkg.nowCallAnyExportedFuncs();
```
328 rln-wasm/resources/witness_calculator.js Normal file

@@ -0,0 +1,328 @@
// File generated with https://github.com/iden3/circom
// following the instructions from:
// https://github.com/vacp2p/zerokit/tree/master/rln#advanced-custom-circuit-compilation

export async function builder(code, options) {
  options = options || {};

  let wasmModule;
  try {
    wasmModule = await WebAssembly.compile(code);
  } catch (err) {
    console.log(err);
    console.log(
      "\nTry to run circom --c in order to generate c++ code instead\n"
    );
    throw new Error(err);
  }

  let wc;

  let errStr = "";
  let msgStr = "";

  const instance = await WebAssembly.instantiate(wasmModule, {
    runtime: {
      exceptionHandler: function (code) {
        let err;
        if (code == 1) {
          err = "Signal not found.\n";
        } else if (code == 2) {
          err = "Too many signals set.\n";
        } else if (code == 3) {
          err = "Signal already set.\n";
        } else if (code == 4) {
          err = "Assert Failed.\n";
        } else if (code == 5) {
          err = "Not enough memory.\n";
        } else if (code == 6) {
          err = "Input signal array access exceeds the size.\n";
        } else {
          err = "Unknown error.\n";
        }
        throw new Error(err + errStr);
      },
      printErrorMessage: function () {
        errStr += getMessage() + "\n";
        // console.error(getMessage());
      },
      writeBufferMessage: function () {
        const msg = getMessage();
        // Any calls to `log()` will always end with a `\n`, so that's when we print and reset
        if (msg === "\n") {
          console.log(msgStr);
          msgStr = "";
        } else {
          // If we've buffered other content, put a space in between the items
          if (msgStr !== "") {
            msgStr += " ";
          }
          // Then append the message to the message we are creating
          msgStr += msg;
        }
      },
      showSharedRWMemory: function () {
        printSharedRWMemory();
      },
    },
  });

  const sanityCheck = options;
  // options &&
  // (
  //   options.sanityCheck ||
  //   options.logGetSignal ||
  //   options.logSetSignal ||
  //   options.logStartComponent ||
  //   options.logFinishComponent
  // );

  wc = new WitnessCalculator(instance, sanityCheck);
  return wc;

  function getMessage() {
    var message = "";
    var c = instance.exports.getMessageChar();
    while (c != 0) {
      message += String.fromCharCode(c);
      c = instance.exports.getMessageChar();
    }
    return message;
  }

  function printSharedRWMemory() {
    const shared_rw_memory_size = instance.exports.getFieldNumLen32();
    const arr = new Uint32Array(shared_rw_memory_size);
    for (let j = 0; j < shared_rw_memory_size; j++) {
      arr[shared_rw_memory_size - 1 - j] =
        instance.exports.readSharedRWMemory(j);
    }

    // If we've buffered other content, put a space in between the items
    if (msgStr !== "") {
      msgStr += " ";
    }
    // Then append the value to the message we are creating
    msgStr += fromArray32(arr).toString();
  }
}

class WitnessCalculator {
  constructor(instance, sanityCheck) {
    this.instance = instance;

    this.version = this.instance.exports.getVersion();
    this.n32 = this.instance.exports.getFieldNumLen32();

    this.instance.exports.getRawPrime();
    const arr = new Uint32Array(this.n32);
    for (let i = 0; i < this.n32; i++) {
      arr[this.n32 - 1 - i] = this.instance.exports.readSharedRWMemory(i);
    }
    this.prime = fromArray32(arr);

    this.witnessSize = this.instance.exports.getWitnessSize();

    this.sanityCheck = sanityCheck;
  }

  circom_version() {
    return this.instance.exports.getVersion();
  }

  async _doCalculateWitness(input, sanityCheck) {
    //input is assumed to be a map from signals to arrays of bigints
    this.instance.exports.init(this.sanityCheck || sanityCheck ? 1 : 0);
    const keys = Object.keys(input);
    var input_counter = 0;
    keys.forEach((k) => {
      const h = fnvHash(k);
      const hMSB = parseInt(h.slice(0, 8), 16);
      const hLSB = parseInt(h.slice(8, 16), 16);
      const fArr = flatArray(input[k]);
      let signalSize = this.instance.exports.getInputSignalSize(hMSB, hLSB);
      if (signalSize < 0) {
        throw new Error(`Signal ${k} not found\n`);
      }
      if (fArr.length < signalSize) {
        throw new Error(`Not enough values for input signal ${k}\n`);
      }
      if (fArr.length > signalSize) {
        throw new Error(`Too many values for input signal ${k}\n`);
      }
      for (let i = 0; i < fArr.length; i++) {
        const arrFr = toArray32(BigInt(fArr[i]) % this.prime, this.n32);
        for (let j = 0; j < this.n32; j++) {
          this.instance.exports.writeSharedRWMemory(j, arrFr[this.n32 - 1 - j]);
        }
        try {
          this.instance.exports.setInputSignal(hMSB, hLSB, i);
          input_counter++;
        } catch (err) {
          // console.log(`After adding signal ${i} of ${k}`)
          throw new Error(err);
        }
      }
    });
    if (input_counter < this.instance.exports.getInputSize()) {
      throw new Error(
        `Not all inputs have been set. Only ${input_counter} out of ${this.instance.exports.getInputSize()}`
      );
    }
  }

  async calculateWitness(input, sanityCheck) {
    const w = [];

    await this._doCalculateWitness(input, sanityCheck);

    for (let i = 0; i < this.witnessSize; i++) {
      this.instance.exports.getWitness(i);
      const arr = new Uint32Array(this.n32);
      for (let j = 0; j < this.n32; j++) {
        arr[this.n32 - 1 - j] = this.instance.exports.readSharedRWMemory(j);
      }
      w.push(fromArray32(arr));
    }

    return w;
  }

  async calculateBinWitness(input, sanityCheck) {
    const buff32 = new Uint32Array(this.witnessSize * this.n32);
    const buff = new Uint8Array(buff32.buffer);
    await this._doCalculateWitness(input, sanityCheck);

    for (let i = 0; i < this.witnessSize; i++) {
      this.instance.exports.getWitness(i);
      const pos = i * this.n32;
      for (let j = 0; j < this.n32; j++) {
        buff32[pos + j] = this.instance.exports.readSharedRWMemory(j);
      }
    }

    return buff;
  }

  async calculateWTNSBin(input, sanityCheck) {
    const buff32 = new Uint32Array(this.witnessSize * this.n32 + this.n32 + 11);
    const buff = new Uint8Array(buff32.buffer);
    await this._doCalculateWitness(input, sanityCheck);

    //"wtns"
    buff[0] = "w".charCodeAt(0);
    buff[1] = "t".charCodeAt(0);
    buff[2] = "n".charCodeAt(0);
    buff[3] = "s".charCodeAt(0);

    //version 2
    buff32[1] = 2;

    //number of sections: 2
    buff32[2] = 2;

    //id section 1
    buff32[3] = 1;

    const n8 = this.n32 * 4;
    //id section 1 length in 64bytes
    const idSection1length = 8 + n8;
    const idSection1lengthHex = idSection1length.toString(16);
    buff32[4] = parseInt(idSection1lengthHex.slice(0, 8), 16);
    buff32[5] = parseInt(idSection1lengthHex.slice(8, 16), 16);

    //this.n32
    buff32[6] = n8;

    //prime number
    this.instance.exports.getRawPrime();

    var pos = 7;
    for (let j = 0; j < this.n32; j++) {
      buff32[pos + j] = this.instance.exports.readSharedRWMemory(j);
    }
    pos += this.n32;

    // witness size
    buff32[pos] = this.witnessSize;
    pos++;

    //id section 2
    buff32[pos] = 2;
    pos++;

    // section 2 length
    const idSection2length = n8 * this.witnessSize;
    const idSection2lengthHex = idSection2length.toString(16);
    buff32[pos] = parseInt(idSection2lengthHex.slice(0, 8), 16);
    buff32[pos + 1] = parseInt(idSection2lengthHex.slice(8, 16), 16);

    pos += 2;
    for (let i = 0; i < this.witnessSize; i++) {
      this.instance.exports.getWitness(i);
      for (let j = 0; j < this.n32; j++) {
        buff32[pos + j] = this.instance.exports.readSharedRWMemory(j);
      }
      pos += this.n32;
    }

    return buff;
  }
}

function toArray32(rem, size) {
  const res = []; //new Uint32Array(size); //has no unshift
  const radix = BigInt(0x100000000);
  while (rem) {
    res.unshift(Number(rem % radix));
    rem = rem / radix;
  }
  if (size) {
    var i = size - res.length;
    while (i > 0) {
      res.unshift(0);
      i--;
    }
  }
  return res;
}

function fromArray32(arr) {
  //returns a BigInt
  var res = BigInt(0);
  const radix = BigInt(0x100000000);
  for (let i = 0; i < arr.length; i++) {
    res = res * radix + BigInt(arr[i]);
  }
  return res;
}

function flatArray(a) {
  var res = [];
  fillArray(res, a);
  return res;

  function fillArray(res, a) {
    if (Array.isArray(a)) {
      for (let i = 0; i < a.length; i++) {
        fillArray(res, a[i]);
      }
    } else {
      res.push(a);
    }
  }
}

function fnvHash(str) {
  const uint64_max = BigInt(2) ** BigInt(64);
  let hash = BigInt("0xCBF29CE484222325");
  for (var i = 0; i < str.length; i++) {
    hash ^= BigInt(str[i].charCodeAt());
    hash *= BigInt(0x100000001b3);
    hash %= uint64_max;
  }
  let shash = hash.toString(16);
  let n = 16 - shash.length;
  shash = "0".repeat(n).concat(shash);
  return shash;
}
212 rln-wasm/src/lib.rs Normal file

@@ -0,0 +1,212 @@
#![cfg(target_arch = "wasm32")]

use js_sys::{BigInt as JsBigInt, Object, Uint8Array};
use num_bigint::BigInt;
use rln::public::RLN;
use std::vec::Vec;
use wasm_bindgen::prelude::*;

#[cfg(feature = "parallel")]
pub use wasm_bindgen_rayon::init_thread_pool;

#[wasm_bindgen(js_name = initPanicHook)]
pub fn init_panic_hook() {
    console_error_panic_hook::set_once();
}

#[wasm_bindgen(js_name = RLN)]
pub struct RLNWrapper {
    // The purpose of this wrapper is to hold an RLN instance with the 'static lifetime
    // because wasm_bindgen does not allow returning elements with lifetimes
    instance: RLN,
}

// Macro to call methods with an arbitrary number of arguments,
// where the last argument of the underlying method is the output buffer.
// The first argument to the macro is the context,
// the second is the actual method on `RLN`,
// the third is the error message to report on failure,
// and the rest are all other arguments to the method.
macro_rules! call_with_output_and_error_msg {
    // this variant is needed for the case when
    // there are zero other arguments
    ($instance:expr, $method:ident, $error_msg:expr) => {
        {
            let mut output_data: Vec<u8> = Vec::new();
            let new_instance = $instance.process();
            if let Err(err) = new_instance.instance.$method(&mut output_data) {
                std::mem::forget(output_data);
                Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
            } else {
                let result = Uint8Array::from(&output_data[..]);
                std::mem::forget(output_data);
                Ok(result)
            }
        }
    };
    ($instance:expr, $method:ident, $error_msg:expr, $( $arg:expr ),* ) => {
        {
            let mut output_data: Vec<u8> = Vec::new();
            let new_instance = $instance.process();
            if let Err(err) = new_instance.instance.$method($($arg.process()),*, &mut output_data) {
                std::mem::forget(output_data);
                Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
            } else {
                let result = Uint8Array::from(&output_data[..]);
                std::mem::forget(output_data);
                Ok(result)
            }
        }
    };
}

macro_rules! call {
    ($instance:expr, $method:ident $(, $arg:expr)*) => {
        {
            let new_instance: &mut RLNWrapper = $instance.process();
            new_instance.instance.$method($($arg.process()),*)
        }
    }
}

macro_rules! call_bool_method_with_error_msg {
    ($instance:expr, $method:ident, $error_msg:expr $(, $arg:expr)*) => {
        {
            let new_instance: &RLNWrapper = $instance.process();
            new_instance.instance.$method($($arg.process()),*).map_err(|err| format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
        }
    }
}

trait ProcessArg {
    type ReturnType;
    fn process(self) -> Self::ReturnType;
}

impl ProcessArg for usize {
    type ReturnType = usize;
    fn process(self) -> Self::ReturnType {
        self
    }
}

impl<T> ProcessArg for Vec<T> {
    type ReturnType = Vec<T>;
    fn process(self) -> Self::ReturnType {
        self
    }
}

impl ProcessArg for *const RLN {
    type ReturnType = &'static RLN;
    fn process(self) -> Self::ReturnType {
        unsafe { &*self }
    }
}

impl ProcessArg for *const RLNWrapper {
    type ReturnType = &'static RLNWrapper;
    fn process(self) -> Self::ReturnType {
        unsafe { &*self }
    }
}

impl ProcessArg for *mut RLNWrapper {
    type ReturnType = &'static mut RLNWrapper;
    fn process(self) -> Self::ReturnType {
        unsafe { &mut *self }
    }
}

impl<'a> ProcessArg for &'a [u8] {
    type ReturnType = &'a [u8];

    fn process(self) -> Self::ReturnType {
        self
    }
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = newRLN)]
pub fn wasm_new(zkey: Uint8Array) -> Result<*mut RLNWrapper, String> {
    let instance = RLN::new_with_params(zkey.to_vec()).map_err(|err| format!("{:#?}", err))?;
    let wrapper = RLNWrapper { instance };
    Ok(Box::into_raw(Box::new(wrapper)))
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = rlnWitnessToJson)]
pub fn wasm_rln_witness_to_json(
    ctx: *mut RLNWrapper,
    serialized_witness: Uint8Array,
) -> Result<Object, String> {
    let inputs = call!(
        ctx,
        get_rln_witness_bigint_json,
        &serialized_witness.to_vec()[..]
    )
    .map_err(|err| err.to_string())?;
    let js_value = serde_wasm_bindgen::to_value(&inputs).map_err(|err| err.to_string())?;
    Object::from_entries(&js_value).map_err(|err| format!("{:#?}", err))
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateRLNProofWithWitness)]
pub fn wasm_generate_rln_proof_with_witness(
    ctx: *mut RLNWrapper,
    calculated_witness: Vec<JsBigInt>,
    serialized_witness: Uint8Array,
) -> Result<Uint8Array, String> {
    let mut witness_vec: Vec<BigInt> = vec![];

    for v in calculated_witness {
        witness_vec.push(
            v.to_string(10)
                .map_err(|err| format!("{:#?}", err))?
                .as_string()
                .ok_or("not a string error")?
                .parse::<BigInt>()
                .map_err(|err| format!("{:#?}", err))?,
        );
    }

    call_with_output_and_error_msg!(
        ctx,
        generate_rln_proof_with_witness,
        "could not generate proof",
        witness_vec,
        serialized_witness.to_vec()
    )
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = recovedIDSecret)]
pub fn wasm_recover_id_secret(
    ctx: *const RLNWrapper,
    input_proof_data_1: Uint8Array,
    input_proof_data_2: Uint8Array,
) -> Result<Uint8Array, String> {
    call_with_output_and_error_msg!(
        ctx,
        recover_id_secret,
        "could not recover id secret",
        &input_proof_data_1.to_vec()[..],
        &input_proof_data_2.to_vec()[..]
    )
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = verifyWithRoots)]
pub fn wasm_verify_with_roots(
    ctx: *const RLNWrapper,
    proof: Uint8Array,
    roots: Uint8Array,
) -> Result<bool, String> {
    call_bool_method_with_error_msg!(
        ctx,
        verify_with_roots,
        "error while verifying proof with roots".to_string(),
        &proof.to_vec()[..],
        &roots.to_vec()[..]
    )
}
266 rln-wasm/tests/browser.rs Normal file

@@ -0,0 +1,266 @@
#![cfg(target_arch = "wasm32")]

#[cfg(test)]
mod tests {
    use js_sys::{BigInt as JsBigInt, Date, Object, Uint8Array};
    use rln::circuit::{Fr, TEST_TREE_DEPTH};
    use rln::hashers::{hash_to_field_le, poseidon_hash, PoseidonHash};
    use rln::protocol::{prepare_verify_input, rln_witness_from_values, serialize_witness};
    use rln::utils::{bytes_le_to_fr, fr_to_bytes_le, IdSecret};
    use rln_wasm::{
        wasm_generate_rln_proof_with_witness, wasm_new, wasm_rln_witness_to_json,
        wasm_verify_with_roots,
    };
    use rln_wasm_utils::wasm_key_gen;
    use wasm_bindgen::{prelude::wasm_bindgen, JsValue};
    use wasm_bindgen_test::{console_log, wasm_bindgen_test, wasm_bindgen_test_configure};
    use zerokit_utils::{
        OptimalMerkleProof, OptimalMerkleTree, ZerokitMerkleProof, ZerokitMerkleTree,
    };

    #[cfg(feature = "parallel")]
    use {rln_wasm::init_thread_pool, wasm_bindgen_futures::JsFuture, web_sys::window};

    #[wasm_bindgen(inline_js = r#"
    export function isThreadpoolSupported() {
        return typeof SharedArrayBuffer !== 'undefined' &&
            typeof Atomics !== 'undefined' &&
            typeof crossOriginIsolated !== 'undefined' &&
            crossOriginIsolated;
    }

    export function initWitnessCalculator(jsCode) {
        const processedCode = jsCode
            .replace(/export\s+async\s+function\s+builder/, 'async function builder')
            .replace(/export\s*\{\s*builder\s*\};?/g, '');

        const moduleFunc = new Function(processedCode + '\nreturn { builder };');
        const witnessCalculatorModule = moduleFunc();

        window.witnessCalculatorBuilder = witnessCalculatorModule.builder;

        if (typeof window.witnessCalculatorBuilder !== 'function') {
            return false;
        }
        return true;
    }

    export function readFile(data) {
        return new Uint8Array(data);
    }

    export async function calculateWitness(circom_data, inputs) {
        const wasmBuffer = circom_data instanceof Uint8Array ? circom_data : new Uint8Array(circom_data);
        const witnessCalculator = await window.witnessCalculatorBuilder(wasmBuffer);
        const calculatedWitness = await witnessCalculator.calculateWitness(inputs, false);
        return JSON.stringify(calculatedWitness, (key, value) =>
            typeof value === "bigint" ? value.toString() : value
        );
    }
    "#)]
    extern "C" {
        #[wasm_bindgen(catch)]
        fn isThreadpoolSupported() -> Result<bool, JsValue>;

        #[wasm_bindgen(catch)]
        fn initWitnessCalculator(js: &str) -> Result<bool, JsValue>;

        #[wasm_bindgen(catch)]
        fn readFile(data: &[u8]) -> Result<Uint8Array, JsValue>;

        #[wasm_bindgen(catch)]
        async fn calculateWitness(circom_data: &[u8], inputs: Object) -> Result<JsValue, JsValue>;
    }

    const WITNESS_CALCULATOR_JS: &str = include_str!("../resources/witness_calculator.js");

    const ARKZKEY_BYTES: &[u8] =
        include_bytes!("../../rln/resources/tree_depth_30/rln_final.arkzkey");

    const CIRCOM_BYTES: &[u8] = include_bytes!("../../rln/resources/tree_depth_30/rln.wasm");

    wasm_bindgen_test_configure!(run_in_browser);

    #[wasm_bindgen_test]
    pub async fn rln_wasm_benchmark() {
        // Check if thread pool is supported
        #[cfg(feature = "parallel")]
        if !isThreadpoolSupported().expect("Failed to check thread pool support") {
            panic!("Thread pool is NOT supported");
        } else {
            // Initialize thread pool
            let cpu_count = window()
                .expect("Failed to get window")
                .navigator()
                .hardware_concurrency() as usize;
            JsFuture::from(init_thread_pool(cpu_count))
                .await
                .expect("Failed to initialize thread pool");
        }

        // Initialize witness calculator
        initWitnessCalculator(WITNESS_CALCULATOR_JS)
            .expect("Failed to initialize witness calculator");

        let mut results = String::from("\nbenchmarks:\n");
        let iterations = 10;

        let zkey = readFile(&ARKZKEY_BYTES).expect("Failed to read zkey file");

        // Benchmark wasm_new
        let start_wasm_new = Date::now();
        for _ in 0..iterations {
            let _ = wasm_new(zkey.clone()).expect("Failed to create RLN instance");
        }
        let wasm_new_result = Date::now() - start_wasm_new;

        // Create RLN instance for other benchmarks
        let rln_instance = wasm_new(zkey).expect("Failed to create RLN instance");
        let mut tree: OptimalMerkleTree<PoseidonHash> =
            OptimalMerkleTree::default(TEST_TREE_DEPTH).expect("Failed to create tree");

        // Benchmark wasm_key_gen
        let start_wasm_key_gen = Date::now();
        for _ in 0..iterations {
            let _ = wasm_key_gen(true).expect("Failed to generate keys");
        }
        let wasm_key_gen_result = Date::now() - start_wasm_key_gen;

        // Generate identity pair for other benchmarks
        let mem_keys = wasm_key_gen(true).expect("Failed to generate keys");
        let id_key = mem_keys.subarray(0, 32);
        let (identity_secret_hash, _) = IdSecret::from_bytes_le(&id_key.to_vec());
        let (id_commitment, _) = bytes_le_to_fr(&mem_keys.subarray(32, 64).to_vec());

        let epoch = hash_to_field_le(b"test-epoch");
        let rln_identifier = hash_to_field_le(b"test-rln-identifier");
        let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);

        let identity_index = tree.leaves_set();

        let user_message_limit = Fr::from(100);

        let rate_commitment = poseidon_hash(&[id_commitment, user_message_limit]);
        tree.update_next(rate_commitment)
            .expect("Failed to update tree");

        let message_id = Fr::from(0);
        let signal: [u8; 32] = [0; 32];
        let x = hash_to_field_le(&signal);

        let merkle_proof: OptimalMerkleProof<PoseidonHash> = tree
            .proof(identity_index)
            .expect("Failed to generate merkle proof");

        let rln_witness = rln_witness_from_values(
            identity_secret_hash,
            merkle_proof.get_path_elements(),
            merkle_proof.get_path_index(),
            x,
            external_nullifier,
            user_message_limit,
            message_id,
        )
        .expect("Failed to create RLN witness");

        let serialized_witness =
            serialize_witness(&rln_witness).expect("Failed to serialize witness");
        let witness_buffer = Uint8Array::from(&serialized_witness[..]);

        let json_inputs = wasm_rln_witness_to_json(rln_instance, witness_buffer.clone())
            .expect("Failed to convert witness to JSON");

        // Benchmark calculateWitness
        let start_calculate_witness = Date::now();
        for _ in 0..iterations {
            let _ = calculateWitness(&CIRCOM_BYTES, json_inputs.clone())
                .await
                .expect("Failed to calculate witness");
        }
        let calculate_witness_result = Date::now() - start_calculate_witness;

        // Calculate witness for other benchmarks
        let calculated_witness_json = calculateWitness(&CIRCOM_BYTES, json_inputs)
            .await
            .expect("Failed to calculate witness")
            .as_string()
            .expect("Failed to convert calculated witness to string");
        let calculated_witness_vec_str: Vec<String> =
            serde_json::from_str(&calculated_witness_json).expect("Failed to parse JSON");
        let calculated_witness: Vec<JsBigInt> = calculated_witness_vec_str
            .iter()
            .map(|x| JsBigInt::new(&x.into()).expect("Failed to create JsBigInt"))
            .collect();

        // Benchmark wasm_generate_rln_proof_with_witness
        let start_wasm_generate_rln_proof_with_witness = Date::now();
        for _ in 0..iterations {
            let _ = wasm_generate_rln_proof_with_witness(
                rln_instance,
                calculated_witness.clone(),
                witness_buffer.clone(),
            )
            .expect("Failed to generate proof");
        }
        let wasm_generate_rln_proof_with_witness_result =
            Date::now() - start_wasm_generate_rln_proof_with_witness;

        // Generate a proof for other benchmarks
        let proof =
            wasm_generate_rln_proof_with_witness(rln_instance, calculated_witness, witness_buffer)
                .expect("Failed to generate proof");

        let proof_data = proof.to_vec();
        let verify_input = prepare_verify_input(proof_data, &signal);
        let input_buffer = Uint8Array::from(&verify_input[..]);

        let root = tree.root();
        let roots_serialized = fr_to_bytes_le(&root);
        let roots_buffer = Uint8Array::from(&roots_serialized[..]);

        // Benchmark wasm_verify_with_roots
        let start_wasm_verify_with_roots = Date::now();
        for _ in 0..iterations {
            let _ =
                wasm_verify_with_roots(rln_instance, input_buffer.clone(), roots_buffer.clone())
                    .expect("Failed to verify proof");
        }
        let wasm_verify_with_roots_result = Date::now() - start_wasm_verify_with_roots;

        // Verify the proof with the root
        let is_proof_valid = wasm_verify_with_roots(rln_instance, input_buffer, roots_buffer)
            .expect("Failed to verify proof");
        assert!(is_proof_valid, "verification failed");

        // Format and display results
        let format_duration = |duration_ms: f64| -> String {
            let avg_ms = duration_ms / (iterations as f64);
            if avg_ms >= 1000.0 {
                format!("{:.3} s", avg_ms / 1000.0)
            } else {
                format!("{:.3} ms", avg_ms)
            }
        };

        results.push_str(&format!("wasm_new: {}\n", format_duration(wasm_new_result)));
        results.push_str(&format!(
            "wasm_key_gen: {}\n",
            format_duration(wasm_key_gen_result)
        ));
        results.push_str(&format!(
            "calculateWitness: {}\n",
            format_duration(calculate_witness_result)
        ));
        results.push_str(&format!(
            "wasm_generate_rln_proof_with_witness: {}\n",
            format_duration(wasm_generate_rln_proof_with_witness_result)
        ));
        results.push_str(&format!(
            "wasm_verify_with_roots: {}\n",
            format_duration(wasm_verify_with_roots_result)
        ));

        // Log the results
        console_log!("{results}");
    }
}
247 rln-wasm/tests/node.rs Normal file

@@ -0,0 +1,247 @@
#![cfg(not(feature = "parallel"))]
#![cfg(target_arch = "wasm32")]

#[cfg(test)]
mod tests {
    use js_sys::{BigInt as JsBigInt, Date, Object, Uint8Array};
    use rln::circuit::{Fr, TEST_TREE_DEPTH};
    use rln::hashers::{hash_to_field_le, poseidon_hash, PoseidonHash};
    use rln::protocol::{prepare_verify_input, rln_witness_from_values, serialize_witness};
    use rln::utils::{bytes_le_to_fr, fr_to_bytes_le, IdSecret};
    use rln_wasm::{
        wasm_generate_rln_proof_with_witness, wasm_new, wasm_rln_witness_to_json,
        wasm_verify_with_roots,
    };
    use rln_wasm_utils::wasm_key_gen;
    use wasm_bindgen::{prelude::wasm_bindgen, JsValue};
    use wasm_bindgen_test::{console_log, wasm_bindgen_test};
    use zerokit_utils::{
        OptimalMerkleProof, OptimalMerkleTree, ZerokitMerkleProof, ZerokitMerkleTree,
    };

    const WITNESS_CALCULATOR_JS: &str = include_str!("../resources/witness_calculator.js");

    #[wasm_bindgen(inline_js = r#"
    const fs = require("fs");

    let witnessCalculatorModule = null;

    module.exports = {
        initWitnessCalculator: function(code) {
            const processedCode = code
                .replace(/export\s+async\s+function\s+builder/, 'async function builder')
                .replace(/export\s*\{\s*builder\s*\};?/g, '');

            const moduleFunc = new Function(processedCode + '\nreturn { builder };');
            witnessCalculatorModule = moduleFunc();

            if (typeof witnessCalculatorModule.builder !== 'function') {
                return false;
            }
            return true;
        },

        readFile: function (path) {
            return fs.readFileSync(path);
        },

        calculateWitness: async function (circom_path, inputs) {
            const wasmFile = fs.readFileSync(circom_path);
            const wasmFileBuffer = wasmFile.slice(
                wasmFile.byteOffset,
                wasmFile.byteOffset + wasmFile.byteLength
            );
            const witnessCalculator = await witnessCalculatorModule.builder(wasmFileBuffer);
            const calculatedWitness = await witnessCalculator.calculateWitness(
                inputs,
                false
            );
            return JSON.stringify(calculatedWitness, (key, value) =>
                typeof value === "bigint" ? value.toString() : value
            );
        },
    };
    "#)]
    extern "C" {
        #[wasm_bindgen(catch)]
        fn initWitnessCalculator(code: &str) -> Result<bool, JsValue>;

        #[wasm_bindgen(catch)]
        fn readFile(path: &str) -> Result<Uint8Array, JsValue>;

        #[wasm_bindgen(catch)]
        async fn calculateWitness(circom_path: &str, input: Object) -> Result<JsValue, JsValue>;
    }

    const ARKZKEY_PATH: &str = "../rln/resources/tree_depth_30/rln_final.arkzkey";

    const CIRCOM_PATH: &str = "../rln/resources/tree_depth_30/rln.wasm";

    #[wasm_bindgen_test]
    pub async fn rln_wasm_benchmark() {
        // Initialize witness calculator
        initWitnessCalculator(WITNESS_CALCULATOR_JS)
            .expect("Failed to initialize witness calculator");

        let mut results = String::from("\nbenchmarks:\n");
        let iterations = 10;

        let zkey = readFile(&ARKZKEY_PATH).expect("Failed to read zkey file");

        // Benchmark wasm_new
        let start_wasm_new = Date::now();
        for _ in 0..iterations {
            let _ = wasm_new(zkey.clone()).expect("Failed to create RLN instance");
        }
        let wasm_new_result = Date::now() - start_wasm_new;

        // Create RLN instance for other benchmarks
        let rln_instance = wasm_new(zkey).expect("Failed to create RLN instance");
        let mut tree: OptimalMerkleTree<PoseidonHash> =
            OptimalMerkleTree::default(TEST_TREE_DEPTH).expect("Failed to create tree");

        // Benchmark wasm_key_gen
        let start_wasm_key_gen = Date::now();
        for _ in 0..iterations {
            let _ = wasm_key_gen(true).expect("Failed to generate keys");
        }
        let wasm_key_gen_result = Date::now() - start_wasm_key_gen;

        // Generate identity pair for other benchmarks
        let mem_keys = wasm_key_gen(true).expect("Failed to generate keys");
        let id_key = mem_keys.subarray(0, 32);
        let (identity_secret_hash, _) = IdSecret::from_bytes_le(&id_key.to_vec());
        let (id_commitment, _) = bytes_le_to_fr(&mem_keys.subarray(32, 64).to_vec());

        let epoch = hash_to_field_le(b"test-epoch");
        let rln_identifier = hash_to_field_le(b"test-rln-identifier");
        let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);

        let identity_index = tree.leaves_set();

        let user_message_limit = Fr::from(100);

        let rate_commitment = poseidon_hash(&[id_commitment, user_message_limit]);
        tree.update_next(rate_commitment)
            .expect("Failed to update tree");

        let message_id = Fr::from(0);
        let signal: [u8; 32] = [0; 32];
        let x = hash_to_field_le(&signal);

        let merkle_proof: OptimalMerkleProof<PoseidonHash> = tree
            .proof(identity_index)
            .expect("Failed to generate merkle proof");

        let rln_witness = rln_witness_from_values(
            identity_secret_hash,
            merkle_proof.get_path_elements(),
            merkle_proof.get_path_index(),
            x,
            external_nullifier,
            user_message_limit,
            message_id,
        )
        .expect("Failed to create RLN witness");

        let serialized_witness =
            serialize_witness(&rln_witness).expect("Failed to serialize witness");
        let witness_buffer = Uint8Array::from(&serialized_witness[..]);

        let json_inputs = wasm_rln_witness_to_json(rln_instance, witness_buffer.clone())
            .expect("Failed to convert witness to JSON");

        // Benchmark calculateWitness
        let start_calculate_witness = Date::now();
        for _ in 0..iterations {
            let _ = calculateWitness(&CIRCOM_PATH, json_inputs.clone())
                .await
                .expect("Failed to calculate witness");
        }
        let calculate_witness_result = Date::now() - start_calculate_witness;

        // Calculate witness for other benchmarks
        let calculated_witness_json = calculateWitness(&CIRCOM_PATH, json_inputs)
            .await
            .expect("Failed to calculate witness")
            .as_string()
            .expect("Failed to convert calculated witness to string");
        let calculated_witness_vec_str: Vec<String> =
            serde_json::from_str(&calculated_witness_json).expect("Failed to parse JSON");
        let calculated_witness: Vec<JsBigInt> = calculated_witness_vec_str
            .iter()
            .map(|x| JsBigInt::new(&x.into()).expect("Failed to create JsBigInt"))
            .collect();

        // Benchmark wasm_generate_rln_proof_with_witness
        let start_wasm_generate_rln_proof_with_witness = Date::now();
        for _ in 0..iterations {
            let _ = wasm_generate_rln_proof_with_witness(
                rln_instance,
                calculated_witness.clone(),
                witness_buffer.clone(),
            )
            .expect("Failed to generate proof");
        }
        let wasm_generate_rln_proof_with_witness_result =
            Date::now() - start_wasm_generate_rln_proof_with_witness;

        // Generate a proof for other benchmarks
        let proof =
            wasm_generate_rln_proof_with_witness(rln_instance, calculated_witness, witness_buffer)
                .expect("Failed to generate proof");

        let proof_data = proof.to_vec();
        let verify_input = prepare_verify_input(proof_data, &signal);
        let input_buffer = Uint8Array::from(&verify_input[..]);

        let root = tree.root();
        let roots_serialized = fr_to_bytes_le(&root);
        let roots_buffer = Uint8Array::from(&roots_serialized[..]);

        // Benchmark wasm_verify_with_roots
        let start_wasm_verify_with_roots = Date::now();
        for _ in 0..iterations {
            let _ =
                wasm_verify_with_roots(rln_instance, input_buffer.clone(), roots_buffer.clone())
                    .expect("Failed to verify proof");
        }
        let wasm_verify_with_roots_result = Date::now() - start_wasm_verify_with_roots;

        // Verify the proof with the root
        let is_proof_valid = wasm_verify_with_roots(rln_instance, input_buffer, roots_buffer)
            .expect("Failed to verify proof");
        assert!(is_proof_valid, "verification failed");

        // Format and display results
        let format_duration = |duration_ms: f64| -> String {
            let avg_ms = duration_ms / (iterations as f64);
            if avg_ms >= 1000.0 {
                format!("{:.3} s", avg_ms / 1000.0)
            } else {
                format!("{:.3} ms", avg_ms)
            }
        };

        results.push_str(&format!("wasm_new: {}\n", format_duration(wasm_new_result)));
        results.push_str(&format!(
            "wasm_key_gen: {}\n",
            format_duration(wasm_key_gen_result)
        ));
        results.push_str(&format!(
            "calculate_witness: {}\n",
            format_duration(calculate_witness_result)
        ));
        results.push_str(&format!(
            "wasm_generate_rln_proof_with_witness: {}\n",
            format_duration(wasm_generate_rln_proof_with_witness_result)
        ));
        results.push_str(&format!(
            "wasm_verify_with_roots: {}\n",
            format_duration(wasm_verify_with_roots_result)
        ));

        // Log the results
        console_log!("{results}");
    }
}
rln/Cargo.toml

@@ -1,42 +1,85 @@
[package]
|
||||
name = "rln"
|
||||
version = "0.1.0"
|
||||
version = "0.8.0"
|
||||
edition = "2021"
|
||||
license = "MIT OR Apache-2.0"
|
||||
description = "APIs to manage, compute and verify zkSNARK proofs and RLN primitives"
|
||||
documentation = "https://github.com/vacp2p/zerokit"
|
||||
homepage = "https://vac.dev"
|
||||
repository = "https://github.com/vacp2p/zerokit"
|
||||
|
||||
[lib]
|
||||
crate-type = ["cdylib", "rlib", "staticlib"]
|
||||
crate-type = ["rlib", "staticlib", "cdylib"]
|
||||
bench = false
|
||||
|
||||
# This flag disable cargo doctests, i.e. testing example code-snippets in documentation
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
|
||||
# ZKP Generation
|
||||
ark-ff = { version = "0.3.0", default-features = false, features = ["parallel", "asm"] }
|
||||
ark-std = { version = "0.3.0", default-features = false, features = ["parallel"] }
|
||||
ark-bn254 = { version = "0.3.0" }
|
||||
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", features = ["parallel"] }
|
||||
ark-relations = { version = "0.3.0", default-features = false, features = [ "std" ] }
|
||||
ark-serialize = { version = "0.3.0", default-features = false }
|
||||
ark-circom = { git = "https://github.com/gakonst/ark-circom", rev = "06eb075", features = ["circom-2"] }
|
||||
#ark-circom = { git = "https://github.com/vacp2p/ark-circom", branch = "no-ethers-core", features = ["circom-2"] }
|
||||
wasmer = "2.3.0"
|
||||
ark-bn254 = { version = "0.5.0", features = ["std"] }
|
||||
ark-relations = { version = "0.5.1", features = ["std"] }
|
||||
ark-ff = { version = "0.5.0", default-features = false }
|
||||
ark-ec = { version = "0.5.0", default-features = false }
|
||||
ark-std = { version = "0.5.0", default-features = false }
|
||||
ark-poly = { version = "0.5.0", default-features = false }
|
||||
ark-groth16 = { version = "0.5.0", default-features = false }
|
||||
ark-serialize = { version = "0.5.0", default-features = false }
|
||||
|
||||
# error handling
|
||||
color-eyre = "0.5.11"
|
||||
thiserror = "1.0.0"
|
||||
thiserror = "2.0.12"
|
||||
|
||||
# utilities
|
||||
rayon = { version = "1.10.0", optional = true }
|
||||
byteorder = "1.5.0"
|
||||
cfg-if = "1.0"
|
||||
num-bigint = { version = "0.4.3", default-features = false, features = ["rand"] }
|
||||
num-traits = "0.2.11"
|
||||
once_cell = "1.14.0"
|
||||
rand = "0.8"
|
||||
num-bigint = { version = "0.4.6", default-features = false, features = ["std"] }
|
||||
num-traits = "0.2.19"
|
||||
once_cell = "1.21.3"
|
||||
lazy_static = "1.5.0"
|
||||
rand = "0.8.5"
|
||||
rand_chacha = "0.3.1"
|
||||
ruint = { version = "1.15.0", features = ["rand", "serde", "ark-ff-04"] }
|
||||
tiny-keccak = { version = "2.0.2", features = ["keccak"] }
|
||||
zeroize = "1.8"
|
||||
tempfile = "3.21.0"
|
||||
utils = { package = "zerokit_utils", version = "0.6.0", path = "../utils", default-features = false }
|
||||
|
||||
# serialization
|
||||
serde_json = "1.0.48"
|
||||
prost = "0.14.1"
|
||||
serde_json = "1.0.141"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
|
||||
document-features = { version = "0.2.11", optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
|
||||
hex-literal = "0.3.4"
|
||||
criterion = { version = "0.7.0", features = ["html_reports"] }
|
||||
|
||||
[features]
|
||||
fullmerkletree = []
|
||||
default = ["parallel", "pmtree-ft"]
|
||||
stateless = []
|
||||
parallel = [
|
||||
"rayon",
|
||||
"utils/parallel",
|
||||
"ark-ff/parallel",
|
||||
"ark-ec/parallel",
|
||||
"ark-std/parallel",
|
||||
"ark-poly/parallel",
|
||||
"ark-groth16/parallel",
|
||||
"ark-serialize/parallel",
|
||||
]
|
||||
fullmerkletree = [] # Pre-allocated tree, fastest access
|
||||
optimalmerkletree = [] # Sparse storage, memory efficient
|
||||
pmtree-ft = ["utils/pmtree-ft"] # Persistent storage, disk-based
|
||||
|
||||
[[bench]]
|
||||
name = "pmtree_benchmark"
|
||||
harness = false
|
||||
required-features = ["pmtree-ft"]
|
||||
|
||||
[[bench]]
|
||||
name = "poseidon_tree_benchmark"
|
||||
harness = false
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
all-features = true
|
||||
|
||||
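The new `stateless` feature above is a plain cfg flag with no dependencies of its own; downstream code would typically gate tree-dependent paths on it, along these lines (a generic illustration, not the crate's actual source):

```rust
// With `--no-default-features --features stateless`, the second item compiles.
#[cfg(not(feature = "stateless"))]
pub fn root_source() -> &'static str {
    "locally maintained Merkle tree"
}

#[cfg(feature = "stateless")]
pub fn root_source() -> &'static str {
    "caller-provided roots"
}
```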
15
rln/Makefile.toml
Normal file
@@ -0,0 +1,15 @@
[tasks.build]
command = "cargo"
args = ["build", "--release"]

[tasks.test]
command = "cargo"
args = ["test", "--release", "--", "--nocapture"]

[tasks.test_stateless]
command = "cargo"
args = ["test", "--release", "--no-default-features", "--features", "stateless"]

[tasks.bench]
command = "cargo"
args = ["bench"]
335
rln/README.md
@@ -1,43 +1,334 @@
 # Zerokit RLN Module

-This module provides APIs to manage, compute and verify [RLN](https://rfc.vac.dev/spec/32/) zkSNARK proofs and RLN primitives.
+[](https://crates.io/crates/rln)
+[](https://opensource.org/licenses/MIT)
+[](https://opensource.org/licenses/Apache-2.0)

-Currently, this module comes with three [pre-compiled](https://github.com/vacp2p/zerokit/tree/master/rln/resources) RLN circuits having Merkle tree of height `15`, `19` and `20`, respectively.
+The Zerokit RLN Module provides a Rust implementation for working with
+Rate-Limiting Nullifier [RLN](https://rfc.vac.dev/spec/32/) zkSNARK proofs and primitives.
+This module allows you to:

-Implemented tests can be executed by running within the module folder
+- Generate and verify RLN proofs
+- Work with Merkle trees for commitment storage
+- Implement rate-limiting mechanisms for distributed systems

-`cargo test --release`
+## Quick Start

-## Compiling circuits
+> [!IMPORTANT]
+> Version 0.7.0 is the only version that does not support WASM and the x32 architecture.
+> WASM support is available in version 0.8.0 and above.

-The `rln` (https://github.com/privacy-scaling-explorations/rln) repo with circuits is contained as a submodule.
+### Add RLN as dependency

-``` sh
-# Update submodules
-git submodule update --init --recursive
+We start by adding zerokit RLN to our `Cargo.toml`:

-# Install rln dependencies
-cd vendor/rln/ && npm install
-```
+```toml
+[dependencies]
+rln = { git = "https://github.com/vacp2p/zerokit" }
+```

+## Basic Usage Example
+
+The RLN object constructor requires the following files:
+
+- `rln_final.arkzkey`: the proving key in arkzkey format.
+- `graph.bin`: the graph file built for the input tree size.
+
+Additionally, `rln.wasm` is used for testing in the rln-wasm module.
+
+In the following we will use [cursors](https://doc.rust-lang.org/std/io/struct.Cursor.html)
+as readers/writers for interfacing with the RLN public APIs.
+
+```rust
+use std::io::Cursor;
+
+use rln::{
+    circuit::Fr,
+    hashers::{hash_to_field, poseidon_hash},
+    protocol::{keygen, prepare_prove_input, prepare_verify_input},
+    public::RLN,
+    utils::fr_to_bytes_le,
+};
+use serde_json::json;
+
+fn main() {
+    // 1. Initialize RLN with parameters:
+    //    - the tree depth;
+    //    - the tree config; if it is not defined, the default value will be set
+    let tree_depth = 30;
+    let input = Cursor::new(json!({}).to_string());
+    let mut rln = RLN::new(tree_depth, input).unwrap();
+
+    // 2. Generate an identity keypair
+    let (identity_secret_hash, id_commitment) = keygen();
+
+    // 3. Add a rate commitment to the Merkle tree
+    let id_index = 10;
+    let user_message_limit = Fr::from(10);
+    let rate_commitment = poseidon_hash(&[id_commitment, user_message_limit]);
+    let mut buffer = Cursor::new(fr_to_bytes_le(&rate_commitment));
+    rln.set_leaf(id_index, &mut buffer).unwrap();
+
+    // 4. Set up the external nullifier (epoch + app identifier)
+    // We generate the epoch from a date seed and ensure it is
+    // mapped to a field element by hashing-to-field its content
+    let epoch = hash_to_field(b"Today at noon, this year");
+    // We generate the rln_identifier from a seed and ensure it is
+    // mapped to a field element by hashing-to-field its content
+    let rln_identifier = hash_to_field(b"test-rln-identifier");
+    // We generate an external nullifier
+    let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);
+    // We choose a message_id satisfying 0 <= message_id < user_message_limit
+    let message_id = Fr::from(1);
+
+    // 5. Generate and verify a proof for a message
+    let signal = b"RLN is awesome";
+
+    // 6. Prepare input for the generate_rln_proof API
+    // input_data is [ identity_secret<32> | id_index<8> | external_nullifier<32>
+    //               | user_message_limit<32> | message_id<32> | signal_len<8> | signal<var> ]
+    let prove_input = prepare_prove_input(
+        identity_secret_hash,
+        id_index,
+        user_message_limit,
+        message_id,
+        external_nullifier,
+        signal,
+    );
+
+    // 7. Generate an RLN proof
+    // We generate an RLN proof for prove_input
+    let mut input_buffer = Cursor::new(prove_input);
+    let mut output_buffer = Cursor::new(Vec::<u8>::new());
+    rln.generate_rln_proof(&mut input_buffer, &mut output_buffer)
+        .unwrap();
+
+    // We get the public outputs returned by the circuit evaluation
+    // The byte vector `proof_data` is serialized as
+    // `[ zk-proof | tree_root | external_nullifier | share_x | share_y | nullifier ]`.
+    let proof_data = output_buffer.into_inner();
+
+    // 8. Verify an RLN proof
+    // The input buffer is serialized as `[ proof_data | signal_len | signal ]`,
+    // where `proof_data` is the output obtained from `generate_rln_proof`.
+    let verify_data = prepare_verify_input(proof_data, signal);
+
+    // We verify the zk-proof against the provided proof values
+    let mut input_buffer = Cursor::new(verify_data);
+    let verified = rln.verify_rln_proof(&mut input_buffer).unwrap();
+
+    // We ensure the proof is valid
+    assert!(verified);
+}
+```
+
+### Comments on point 4 of the code above
+
+The external nullifier includes two parameters.
+
+The first one is `epoch`, used to identify messages received in a certain time frame.
+It usually corresponds to the current UNIX time, but it can also be set to a random value or generated from a seed,
+provided that it corresponds to a field element.
+
+The second one is `rln_identifier`, used to prevent an RLN ZK proof generated
+for one application from being re-used in another one.
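As an illustration of the epoch parameter, an application might quantize UNIX time into fixed windows before hashing to a field element. A minimal sketch, assuming the `hash_to_field` helper used in the example above; the window length is a hypothetical application choice:

```rust
use std::time::{SystemTime, UNIX_EPOCH};

use rln::circuit::Fr;
use rln::hashers::hash_to_field;

// Quantize the current UNIX time into fixed windows, then map the
// window counter to a field element, as done for `epoch` in step 4 above.
fn current_epoch(window_secs: u64) -> Fr {
    let now = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .unwrap()
        .as_secs();
    hash_to_field(&(now / window_secs).to_le_bytes())
}
```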
+### Features
+
+- **Stateless Mode**: Allows the use of RLN without maintaining the state of the Merkle tree.
+- **Pre-compiled Circuits**: Ready-to-use circuits with a Merkle tree depth of 20.
+- **Wasm Support**: WebAssembly bindings via the rln-wasm crate, with features like:
+  - Browser and Node.js compatibility
+  - Optional parallel feature support using [wasm-bindgen-rayon](https://github.com/RReverser/wasm-bindgen-rayon)
+  - Headless browser testing capabilities
+- **Merkle Tree Implementations**: Multiple tree variants optimized for different use cases (see the sketch after this list):
+  - **Full Merkle Tree**: Fastest access with a complete pre-allocated tree in memory. Best for frequent random access (enable with the `fullmerkletree` feature).
+  - **Optimal Merkle Tree**: Memory-efficient sparse storage using a HashMap. Ideal for partially populated trees (enable with the `optimalmerkletree` feature).
+  - **Persistent Merkle Tree**: Disk-based storage with [sled](https://github.com/spacejam/sled) for persistence across application restarts and large datasets (enable with the `pmtree-ft` feature).
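A minimal sketch of selecting one tree backend via Cargo features (feature names as defined in `rln/Cargo.toml` shown earlier; the choice of `optimalmerkletree` here is just an example):

```toml
[dependencies]
rln = { git = "https://github.com/vacp2p/zerokit", default-features = false, features = ["parallel", "optimalmerkletree"] }
```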
+## Building and Testing
+
+### Prerequisites
+
+```sh
+git clone https://github.com/vacp2p/zerokit.git
+make installdeps
+cd zerokit/rln
+```
+
+### Build Commands
+
+```sh
+# Build with default features
+cargo make build
+
+# Test with default features
+cargo make test
+
+# Test with the stateless feature
+cargo make test_stateless
+```
+
+## Advanced: Custom Circuit Compilation
+
+The `rln` repository (<https://github.com/rate-limiting-nullifier/circom-rln>),
+which contains the RLN circuit implementation, is used to produce the pre-compiled RLN circuit shipped with zerokit RLN.
+If you want to compile your own RLN circuit, you can follow the instructions below.
+
+### 1. Compile ZK circuits to get the zkey file
+
+This script generates not only the zkey file for the RLN circuit,
+but also the execution wasm file used for witness calculation.
+However, the wasm file is not needed for the `rln` module,
+because the current implementation uses the iden3 graph file for witness calculation.
+This graph file is generated by the `circom-witnesscalc` tool in [step 2](#2-generate-witness-calculation-graph).
+
+To customize the circuit parameters, modify `circom-rln/circuits/rln.circom`:
+
+```circom
+pragma circom 2.1.0;
+include "./rln.circom";
+component main { public [x, externalNullifier] } = RLN(N, M);
+```
+
+Where:
+
+- `N`: Merkle tree depth, determining the maximum membership capacity (2^N members).
+- `M`: bit size for range checks, setting an upper bound for the number of messages per epoch (2^M messages).
+
+> [!NOTE]
+> However, if `N` is too big, this might require a larger Powers of Tau ceremony
+> than the one hardcoded in `./scripts/build-circuits.sh`, which is `2^14`.
+> In such a case, we refer to the official
+> [Circom documentation](https://docs.circom.io/getting-started/proving-circuits/#powers-of-tau)
+> for instructions on how to run an appropriate Powers of Tau ceremony and Phase 2 in order to compile the desired circuit. \
+> Additionally, while `M` sets an upper bound on the number of messages per epoch (`2^M`),
+> you can configure a lower message limit for your use case, as long as it satisfies `user_message_limit ≤ 2^M`. \
+> Currently, the `rln` module comes with a [pre-compiled](https://github.com/vacp2p/zerokit/tree/master/rln/resources)
+> RLN circuit with a Merkle tree of depth `20` and a bit size of `16`,
+> allowing up to `2^20` registered members and a `2^16` message limit per epoch.
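For instance, a hypothetical instantiation matching the shipped circuit (depth 20, 16-bit message range) would look like this:

```circom
pragma circom 2.1.0;
include "./rln.circom";
// N = 20 (up to 2^20 members), M = 16 (up to 2^16 messages per epoch)
component main { public [x, externalNullifier] } = RLN(20, 16);
```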
+#### Install circom compiler
+
+You can follow the instructions below or refer to the
+[installing Circom](https://docs.circom.io/getting-started/installation/#installing-circom) guide for more details,
+but make sure to use the specific version `v2.1.0`.
+
+```sh
+# Clone the circom repository
+git clone https://github.com/iden3/circom.git
+
+# Checkout the specific version
+cd circom && git checkout v2.1.0
+
+# Build the circom compiler
+cargo build --release
+
+# Install the circom binary globally
+cargo install --path circom
+
+# Check the circom version to ensure it's v2.1.0
+circom --version
+```
+
+#### Example: generate the zkey and verification key files
+
+```sh
+# Clone the circom-rln repository
+git clone https://github.com/rate-limiting-nullifier/circom-rln
+
+# Install dependencies
+cd circom-rln && npm install
+
+# Build circuits
+./scripts/build-circuits.sh rln
+
-# Copy over assets
-cp build/zkeyFiles/rln-final.zkey ../../resources/tree_height_15
-cp build/zkeyFiles/rln.wasm ../../resources/tree_height_15
+# Use the generated zkey file in subsequent steps
+cp zkeyFiles/rln/final.zkey <path_to_rln_final.zkey>
+```

-Note that the above code snippet will compile a RLN circuit with a Merkle tree of height equal `15` based on the default value set in `rln/circuit/rln.circom`.
-
-To compile a RLN circuit with Merkle tree height `N`, it suffices to change `rln/circuit/rln.circom` to
-
-```
-pragma circom 2.0.0;
-
-include "./rln-base.circom";
-
-component main {public [x, epoch, rln_identifier ]} = RLN(N);
-```
-
-However, if `N` is too big, this might require a bigger Powers of Tau ceremony than the one hardcoded in `./scripts/build-circuits.sh`, which is `2^14`.
-In such case we refer to the official [Circom documentation](https://docs.circom.io/getting-started/proving-circuits/#powers-of-tau) for instructions on how to run an appropriate Powers of Tau ceremony and Phase 2 in order to compile the desired circuit.
+### 2. Generate Witness Calculation Graph
+
+The execution graph file used for witness calculation can be compiled following the instructions
+in the [circom-witnesscalc](https://github.com/iden3/circom-witnesscalc) repository.
+As mentioned in step 1, we should use the `rln.circom` file from the `circom-rln` repository.
+
+```sh
+# Clone the circom-witnesscalc repository
+git clone https://github.com/iden3/circom-witnesscalc
+
+# Load the submodules
+cd circom-witnesscalc && git submodule update --init --recursive
+
+# Build the circom-witnesscalc tool
+cargo build
+
+# Generate the witness calculation graph
+cargo run --package circom_witnesscalc --bin build-circuit ../circom-rln/circuits/rln.circom <path_to_graph.bin>
+```
+
+The `rln` module comes with [pre-compiled](https://github.com/vacp2p/zerokit/tree/master/rln/resources)
+execution graph files for the RLN circuit.
+
+### 3. Generate Arkzkey Representation for zkey file
+
+For faster loading, compile the zkey file into the arkzkey format using
+[ark-zkey](https://github.com/seemenkina/ark-zkey).
+This is a fork of the [original](https://github.com/zkmopro/ark-zkey) repository with uncompressed arkzkey support.
+
+```sh
+# Clone the ark-zkey repository
+git clone https://github.com/seemenkina/ark-zkey.git
+
+# Build the ark-zkey tool
+cd ark-zkey && cargo build
+
+# Generate the arkzkey representation of the zkey file
+cargo run --bin arkzkey-util <path_to_rln_final.zkey>
+```
+
+This will generate the `rln_final.arkzkey` file, which is used by the `rln` module.
+
+Currently, the `rln` module comes with
+[pre-compiled](https://github.com/vacp2p/zerokit/tree/master/rln/resources) arkzkey keys for the RLN circuit.
+
+> [!NOTE]
+> You can use the [convert_zkey.sh](./convert_zkey.sh) script
+> to automate the process of generating the arkzkey file from any zkey file.
+
+Run the script as follows:
+
+```sh
+chmod +x ./convert_zkey.sh
+./convert_zkey.sh <path_to_rln_final.zkey>
+```

-## Get involved
-
-Zerokit RLN public and FFI APIs allow interaction with many more features than what briefly showcased above.
-
-We invite you to check our API documentation by running
-
-```rust
-cargo doc --no-deps
-```
-
-and look at unit tests to have an hint on how to interface and use them.
+## Detailed Protocol Flow
+
+1. **Identity Creation**: Generate a secret key and commitment
+2. **Rate Commitment**: Add the commitment to a Merkle tree
+3. **External Nullifier Setup**: Combine the epoch and an application identifier
+4. **Proof Generation**: Create a zkSNARK proof that:
+   - Proves membership in the Merkle tree
+   - Ensures rate-limiting constraints are satisfied
+   - Generates a nullifier to prevent double-usage (see the recovery sketch after this list)
+5. **Proof Verification**: Verify the proof without revealing the prover's identity
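To illustrate the rate-limiting property behind step 4: if a user publishes two messages under the same nullifier in one epoch, the two leaked Shamir shares reveal the identity secret. A minimal sketch of that recovery, illustrative only (the `share_x`/`share_y` values come from the `proof_data` layout shown earlier; field arithmetic via `rln::circuit::Fr`):

```rust
use rln::circuit::Fr;

// Two shares (x, y) leaked by two messages in the same epoch lie on the
// line y = k*x + identity_secret, so two points suffice to recover it.
fn recover_identity_secret(x1: Fr, y1: Fr, x2: Fr, y2: Fr) -> Fr {
    let slope = (y2 - y1) / (x2 - x1); // panics if x1 == x2
    y1 - slope * x1 // the intercept is the identity secret
}
```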
+## Getting Involved
+
+- Check the [unit tests](https://github.com/vacp2p/zerokit/tree/master/rln/tests) for more usage examples
+- [RFC specification](https://rfc.vac.dev/spec/32/) for the Rate-Limiting Nullifier protocol
+- [GitHub repository](https://github.com/vacp2p/zerokit) for the latest updates
50
rln/benches/pmtree_benchmark.rs
Normal file
@@ -0,0 +1,50 @@
use criterion::{criterion_group, criterion_main, Criterion};
use rln::{circuit::Fr, pm_tree_adapter::PmTree};
use utils::ZerokitMerkleTree;

pub fn pmtree_benchmark(c: &mut Criterion) {
    let mut tree = PmTree::default(2).unwrap();

    let leaves: Vec<Fr> = (0..4).map(Fr::from).collect();

    c.bench_function("Pmtree::set", |b| {
        b.iter(|| {
            tree.set(0, leaves[0]).unwrap();
        })
    });

    c.bench_function("Pmtree::delete", |b| {
        b.iter(|| {
            tree.delete(0).unwrap();
        })
    });

    c.bench_function("Pmtree::override_range", |b| {
        b.iter(|| {
            tree.override_range(0, leaves.clone().into_iter(), [0, 1, 2, 3].into_iter())
                .unwrap();
        })
    });

    c.bench_function("Pmtree::get", |b| {
        b.iter(|| {
            tree.get(0).unwrap();
        })
    });

    // Check the intermediate node getter, which requires additional computation of the sub-root index
    c.bench_function("Pmtree::get_subtree_root", |b| {
        b.iter(|| {
            tree.get_subtree_root(1, 0).unwrap();
        })
    });

    c.bench_function("Pmtree::get_empty_leaves_indices", |b| {
        b.iter(|| {
            tree.get_empty_leaves_indices();
        })
    });
}

criterion_group!(benches, pmtree_benchmark);
criterion_main!(benches);
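A note on invocation: per the `[[bench]]` entries in the manifest above, this benchmark is gated on the `pmtree-ft` feature (part of the default set), so from the `rln` directory it can be run with:

```sh
cargo bench --bench pmtree_benchmark
```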
79
rln/benches/poseidon_tree_benchmark.rs
Normal file
@@ -0,0 +1,79 @@
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
use rln::{
    circuit::{Fr, TEST_TREE_DEPTH},
    hashers::PoseidonHash,
};
use utils::{FullMerkleTree, OptimalMerkleTree, ZerokitMerkleTree};

pub fn get_leaves(n: u32) -> Vec<Fr> {
    (0..n).map(Fr::from).collect()
}

pub fn optimal_merkle_tree_poseidon_benchmark(c: &mut Criterion) {
    c.bench_function("OptimalMerkleTree::<Poseidon>::full_depth_gen", |b| {
        b.iter(|| {
            OptimalMerkleTree::<PoseidonHash>::default(TEST_TREE_DEPTH).unwrap();
        })
    });

    let mut group = c.benchmark_group("Set");
    for &n in [1u32, 10, 100].iter() {
        let leaves = get_leaves(n);

        let mut tree = OptimalMerkleTree::<PoseidonHash>::default(TEST_TREE_DEPTH).unwrap();
        group.bench_function(
            BenchmarkId::new("OptimalMerkleTree::<Poseidon>::set", n),
            |b| {
                b.iter(|| {
                    for (i, l) in leaves.iter().enumerate() {
                        let _ = tree.set(i, *l);
                    }
                })
            },
        );

        group.bench_function(
            BenchmarkId::new("OptimalMerkleTree::<Poseidon>::set_range", n),
            |b| b.iter(|| tree.set_range(0, leaves.iter().cloned())),
        );
    }
    group.finish();
}

pub fn full_merkle_tree_poseidon_benchmark(c: &mut Criterion) {
    c.bench_function("FullMerkleTree::<Poseidon>::full_depth_gen", |b| {
        b.iter(|| {
            FullMerkleTree::<PoseidonHash>::default(TEST_TREE_DEPTH).unwrap();
        })
    });

    let mut group = c.benchmark_group("Set");
    for &n in [1u32, 10, 100].iter() {
        let leaves = get_leaves(n);

        let mut tree = FullMerkleTree::<PoseidonHash>::default(TEST_TREE_DEPTH).unwrap();
        group.bench_function(
            BenchmarkId::new("FullMerkleTree::<Poseidon>::set", n),
            |b| {
                b.iter(|| {
                    for (i, l) in leaves.iter().enumerate() {
                        let _ = tree.set(i, *l);
                    }
                })
            },
        );

        group.bench_function(
            BenchmarkId::new("FullMerkleTree::<Poseidon>::set_range", n),
            |b| b.iter(|| tree.set_range(0, leaves.iter().cloned())),
        );
    }
    group.finish();
}

criterion_group!(
    benches,
    optimal_merkle_tree_poseidon_benchmark,
    full_merkle_tree_poseidon_benchmark
);
criterion_main!(benches);
53
rln/convert_zkey.sh
Executable file
@@ -0,0 +1,53 @@
#!/bin/bash

# Convert a zkey to arkzkey using the /tmp directory
# Usage: ./convert_zkey.sh <path_to_zkey_file>

set -e

# Check input
if [ $# -eq 0 ]; then
    echo "Usage: $0 <path_to_zkey_file>"
    exit 1
fi

ZKEY_FILE="$1"

if [ ! -f "$ZKEY_FILE" ]; then
    echo "Error: File '$ZKEY_FILE' does not exist"
    exit 1
fi

# Get the absolute path before changing directories
ZKEY_ABSOLUTE_PATH=$(realpath "$ZKEY_FILE")

# Create a temp directory in /tmp
TEMP_DIR="/tmp/ark-zkey-$$"
echo "Using temp directory: $TEMP_DIR"

# Cleanup function
cleanup() {
    echo "Cleaning up temp directory: $TEMP_DIR"
    rm -rf "$TEMP_DIR"
}

# Set up the cleanup trap
trap cleanup EXIT

# Create the temp directory and clone ark-zkey
mkdir -p "$TEMP_DIR"
cd "$TEMP_DIR"
git clone https://github.com/seemenkina/ark-zkey.git
cd ark-zkey
cargo build

# Convert
cargo run --bin arkzkey-util "$ZKEY_ABSOLUTE_PATH"

# Check if the arkzkey file was created (the tool creates it in the same directory as the input)
ARKZKEY_FILE="${ZKEY_ABSOLUTE_PATH%.zkey}.arkzkey"

if [ ! -f "$ARKZKEY_FILE" ]; then
    echo "Could not find generated .arkzkey file at $ARKZKEY_FILE"
    exit 1
fi
BIN
rln/resources/tree_depth_20/graph.bin
Normal file
Binary file not shown.
BIN
rln/resources/tree_depth_20/rln.wasm
Normal file
Binary file not shown.
BIN
rln/resources/tree_depth_20/rln_final.arkzkey
Normal file
Binary file not shown.
BIN
rln/resources/tree_depth_30/graph.bin
Normal file
Binary file not shown.
BIN
rln/resources/tree_depth_30/rln.wasm
Normal file
Binary file not shown.
BIN
rln/resources/tree_depth_30/rln_final.arkzkey
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,119 +0,0 @@
{
  "protocol": "groth16",
  "curve": "bn128",
  "nPublic": 6,
  "vk_alpha_1": [
    "1805378556360488226980822394597799963030511477964155500103132920745199284516",
    "11990395240534218699464972016456017378439762088320057798320175886595281336136",
    "1"
  ],
  "vk_beta_2": [
    [
      "11031529986141021025408838211017932346992429731488270384177563837022796743627",
      "16042159910707312759082561183373181639420894978640710177581040523252926273854"
    ],
    [
      "20112698439519222240302944148895052359035104222313380895334495118294612255131",
      "19441583024670359810872018179190533814486480928824742448673677460151702019379"
    ],
    [
      "1",
      "0"
    ]
  ],
  "vk_gamma_2": [
    [
      "10857046999023057135944570762232829481370756359578518086990519993285655852781",
      "11559732032986387107991004021392285783925812861821192530917403151452391805634"
    ],
    [
      "8495653923123431417604973247489272438418190587263600148770280649306958101930",
      "4082367875863433681332203403145435568316851327593401208105741076214120093531"
    ],
    [
      "1",
      "0"
    ]
  ],
  "vk_delta_2": [
    [
      "1342791402398183550129987853701397066695422166542200371137242980909975744720",
      "19885954793721639146517398722913034453263197732511169431324269951156805454588"
    ],
    [
      "16612518449808520746616592899100682320852224744311197908486719118388461103870",
      "13039435290897389787786546960964558630619663289413586834851804020863949546009"
    ],
    [
      "1",
      "0"
    ]
  ],
  "vk_alphabeta_12": [
    [
      [
        "5151991366823434428398919091000210787450832786814248297320989361921939794156",
        "15735191313289001022885148627913534790382722933676436876510746491415970766821"
      ],
      [
        "3387907257437913904447588318761906430938415556102110876587455322225272831272",
        "1998779853452712881084781956683721603875246565720647583735935725110674288056"
      ],
      [
        "14280074182991498185075387990446437410077692353432005297922275464876153151820",
        "17092408446352310039633488224969232803092763095456307462247653153107223117633"
      ]
    ],
    [
      [
        "4359046709531668109201634396816565829237358165496082832279660960675584351266",
        "4511888308846208349307186938266411423935335853916317436093178288331845821336"
      ],
      [
        "11429499807090785857812316277335883295048773373068683863667725283965356423273",
        "16232274853200678548795010078253506586114563833318973594428907292096178657392"
      ],
      [
        "18068999605870933925311275504102553573815570223888590384919752303726860800970",
        "17309569111965782732372130116757295842160193489132771344011460471298173784984"
      ]
    ]
  ],
  "IC": [
    [
      "15907620619058468322652190166474219459106695372760190199814463422116003944385",
      "15752765921940703867480319151728055971288798043197983667046402260506178676501",
      "1"
    ],
    [
      "12004081423498474638814710157503496372594892372197913146719480190853290407272",
      "17759993271504587923309435837545182941635937261719294500288793819648071033469",
      "1"
    ],
    [
      "878120019311612655450010384994897394984265086410869146105626241891073100410",
      "17631186298933191134732246976686754514124819009836710500647157641262968661294",
      "1"
    ],
    [
      "14710016919630225372037989028011020715054625029990218653012745498368446893907",
      "2581293501049347486538806758240731445964309309490885835380825245889909387041",
      "1"
    ],
    [
      "766327921864693063481261933507417084013182964450768912480746815296334678928",
      "18104222034822903557262264275808261481286672296559910954337205847153944954509",
      "1"
    ],
    [
      "8877686447180479408315100041907552504213694351585462004774320248566787828012",
      "15836202093850379814510995758762098170932781831518064786308541653541698178373",
      "1"
    ],
    [
      "19567388833538990982537236781224917793757180861915757860561618079730704818311",
      "3535132838196675082818592669173684593624477421910576112671761297886253127546",
      "1"
    ]
  ]
}
Binary file not shown.
Binary file not shown.
@@ -1,119 +0,0 @@
{
  "protocol": "groth16",
  "curve": "bn128",
  "nPublic": 6,
  "vk_alpha_1": [
    "1805378556360488226980822394597799963030511477964155500103132920745199284516",
    "11990395240534218699464972016456017378439762088320057798320175886595281336136",
    "1"
  ],
  "vk_beta_2": [
    [
      "11031529986141021025408838211017932346992429731488270384177563837022796743627",
      "16042159910707312759082561183373181639420894978640710177581040523252926273854"
    ],
    [
      "20112698439519222240302944148895052359035104222313380895334495118294612255131",
      "19441583024670359810872018179190533814486480928824742448673677460151702019379"
    ],
    [
      "1",
      "0"
    ]
  ],
  "vk_gamma_2": [
    [
      "10857046999023057135944570762232829481370756359578518086990519993285655852781",
      "11559732032986387107991004021392285783925812861821192530917403151452391805634"
    ],
    [
      "8495653923123431417604973247489272438418190587263600148770280649306958101930",
      "4082367875863433681332203403145435568316851327593401208105741076214120093531"
    ],
    [
      "1",
      "0"
    ]
  ],
  "vk_delta_2": [
    [
      "1948496782571164085469528023647105317580208688174386157591917599801657832035",
      "20445814069256658101339037520922621162739470138213615104905368409238414511981"
    ],
    [
      "10024680869920840984813249386422727863826862577760330492647062850849851925340",
      "10512156247842686783409460795717734694774542185222602679117887145206209285142"
    ],
    [
      "1",
      "0"
    ]
  ],
  "vk_alphabeta_12": [
    [
      [
        "5151991366823434428398919091000210787450832786814248297320989361921939794156",
        "15735191313289001022885148627913534790382722933676436876510746491415970766821"
      ],
      [
        "3387907257437913904447588318761906430938415556102110876587455322225272831272",
        "1998779853452712881084781956683721603875246565720647583735935725110674288056"
      ],
      [
        "14280074182991498185075387990446437410077692353432005297922275464876153151820",
        "17092408446352310039633488224969232803092763095456307462247653153107223117633"
      ]
    ],
    [
      [
        "4359046709531668109201634396816565829237358165496082832279660960675584351266",
        "4511888308846208349307186938266411423935335853916317436093178288331845821336"
      ],
      [
        "11429499807090785857812316277335883295048773373068683863667725283965356423273",
        "16232274853200678548795010078253506586114563833318973594428907292096178657392"
      ],
      [
        "18068999605870933925311275504102553573815570223888590384919752303726860800970",
        "17309569111965782732372130116757295842160193489132771344011460471298173784984"
      ]
    ]
  ],
  "IC": [
    [
      "18693301901828818437917730940595978397160482710354161265484535387752523310572",
      "17985273354976640088538673802000794244421192643855111089693820179790551470769",
      "1"
    ],
    [
      "21164641723988537620541455173278629777250883365474191521194244273980931825942",
      "998385854410718613441067082771678946155853656328717326195057262123686425518",
      "1"
    ],
    [
      "21666968581672145768705229094968410656430989593283335488162701230986314747515",
      "17996457608540683483506630273632100555125353447506062045735279661096094677264",
      "1"
    ],
    [
      "20137761979695192602424300886442379728165712610493092740175904438282083668117",
      "19184814924890679891263780109959113289320127263583260218200636509492157834679",
      "1"
    ],
    [
      "10943171273393803842589314082509655332154393332394322726077270895078286354146",
      "10872472035685319847811233167729172672344935625121511932198535224727331126439",
      "1"
    ],
    [
      "13049169779481227658517545034348883391527506091990880778783387628208561946597",
      "10083689369261379027228809473568899816311684698866922944902456565434209079955",
      "1"
    ],
    [
      "19633516378466409167014413361365552102431118630694133723053441455184566611083",
      "8059525100726933978719058611146131904598011633549012007359165766216730722269",
      "1"
    ]
  ]
}
@@ -1,239 +0,0 @@
// This crate provides interfaces for the zero-knowledge circuit and keys

use ark_bn254::{
    Bn254, Fq as ArkFq, Fq2 as ArkFq2, Fr as ArkFr, G1Affine as ArkG1Affine,
    G1Projective as ArkG1Projective, G2Affine as ArkG2Affine, G2Projective as ArkG2Projective,
};
use ark_circom::{read_zkey, WitnessCalculator};
use ark_groth16::{ProvingKey, VerifyingKey};
use ark_relations::r1cs::ConstraintMatrices;
use num_bigint::BigUint;
use once_cell::sync::OnceCell;
use serde_json::Value;
use std::fs::File;
use std::io::{Cursor, Error, ErrorKind, Result};
use std::path::Path;
use std::str::FromStr;
use std::sync::Mutex;
use wasmer::{Module, Store};

const ZKEY_FILENAME: &str = "rln_final.zkey";
const VK_FILENAME: &str = "verifying_key.json";
const WASM_FILENAME: &str = "rln.wasm";

// These parameters are used for tests
// Note that the circuit and keys in TEST_RESOURCES_FOLDER are compiled for Merkle trees of height 15, 19 and 20
// Changing these parameters to values other than these defaults will cause zkSNARK proof verification to fail
pub const TEST_PARAMETERS_INDEX: usize = 2;
pub const TEST_TREE_HEIGHT: usize = [15, 19, 20][TEST_PARAMETERS_INDEX];
pub const TEST_RESOURCES_FOLDER: &str = [
    "./resources/tree_height_15/",
    "./resources/tree_height_19/",
    "./resources/tree_height_20/",
][TEST_PARAMETERS_INDEX];

// The following types define the pairing-friendly elliptic curve; the underlying finite fields and groups default to this module
// Note that proofs are serialized assuming Fr to be 4x8 = 32 bytes in size. Hence, changing to a curve with a different encoding will make proof verification fail
pub type Curve = Bn254;
pub type Fr = ArkFr;
pub type Fq = ArkFq;
pub type Fq2 = ArkFq2;
pub type G1Affine = ArkG1Affine;
pub type G1Projective = ArkG1Projective;
pub type G2Affine = ArkG2Affine;
pub type G2Projective = ArkG2Projective;

// Loads the proving key from a bytes vector
pub fn zkey_from_raw(zkey_data: &Vec<u8>) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
    if !zkey_data.is_empty() {
        let mut c = Cursor::new(zkey_data);
        let proving_key_and_matrices = read_zkey(&mut c)?;
        Ok(proving_key_and_matrices)
    } else {
        Err(Error::new(ErrorKind::NotFound, "No proving key found!"))
    }
}

// Loads the proving key
pub fn zkey_from_folder(
    resources_folder: &str,
) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
    let zkey_path = format!("{resources_folder}{ZKEY_FILENAME}");
    if Path::new(&zkey_path).exists() {
        let mut file = File::open(&zkey_path)?;
        let proving_key_and_matrices = read_zkey(&mut file)?;
        Ok(proving_key_and_matrices)
    } else {
        Err(Error::new(ErrorKind::NotFound, "No proving key found!"))
    }
}

// Loads the verification key from a bytes vector
pub fn vk_from_raw(vk_data: &Vec<u8>, zkey_data: &Vec<u8>) -> Result<VerifyingKey<Curve>> {
    let verifying_key: VerifyingKey<Curve>;

    if !vk_data.is_empty() {
        verifying_key = vk_from_vector(vk_data);
        Ok(verifying_key)
    } else if !zkey_data.is_empty() {
        let (proving_key, _matrices) = zkey_from_raw(zkey_data)?;
        verifying_key = proving_key.vk;
        Ok(verifying_key)
    } else {
        Err(Error::new(
            ErrorKind::NotFound,
            "No proving/verification key found!",
        ))
    }
}

// Loads the verification key
pub fn vk_from_folder(resources_folder: &str) -> Result<VerifyingKey<Curve>> {
    let vk_path = format!("{resources_folder}{VK_FILENAME}");
    let zkey_path = format!("{resources_folder}{ZKEY_FILENAME}");

    let verifying_key: VerifyingKey<Curve>;

    if Path::new(&vk_path).exists() {
        verifying_key = vk_from_json(&vk_path);
        Ok(verifying_key)
    } else if Path::new(&zkey_path).exists() {
        let (proving_key, _matrices) = zkey_from_folder(resources_folder)?;
        verifying_key = proving_key.vk;
        Ok(verifying_key)
    } else {
        Err(Error::new(
            ErrorKind::NotFound,
            "No proving/verification key found!",
        ))
    }
}

static WITNESS_CALCULATOR: OnceCell<Mutex<WitnessCalculator>> = OnceCell::new();

// Initializes the witness calculator using a bytes vector
pub fn circom_from_raw(wasm_buffer: Vec<u8>) -> &'static Mutex<WitnessCalculator> {
    WITNESS_CALCULATOR.get_or_init(|| {
        let store = Store::default();
        let module = Module::new(&store, wasm_buffer).unwrap();
        let result =
            WitnessCalculator::from_module(module).expect("Failed to create witness calculator");
        Mutex::new(result)
    })
}

// Initializes the witness calculator
pub fn circom_from_folder(resources_folder: &str) -> &'static Mutex<WitnessCalculator> {
    // We read the wasm file
    let wasm_path = format!("{resources_folder}{WASM_FILENAME}");
    let wasm_buffer = std::fs::read(&wasm_path).unwrap();
    circom_from_raw(wasm_buffer)
}

// The following function implementations are taken/adapted from https://github.com/gakonst/ark-circom/blob/1732e15d6313fe176b0b1abb858ac9e095d0dbd7/src/zkey.rs

// Utilities to convert a JSON verification key into a groth16::VerificationKey
fn fq_from_str(s: &str) -> Fq {
    Fq::try_from(BigUint::from_str(s).unwrap()).unwrap()
}

// Extracts the element in G1 corresponding to its JSON serialization
fn json_to_g1(json: &Value, key: &str) -> G1Affine {
    let els: Vec<String> = json
        .get(key)
        .unwrap()
        .as_array()
        .unwrap()
        .iter()
        .map(|i| i.as_str().unwrap().to_string())
        .collect();
    G1Affine::from(G1Projective::new(
        fq_from_str(&els[0]),
        fq_from_str(&els[1]),
        fq_from_str(&els[2]),
    ))
}

// Extracts the vector of G1 elements corresponding to its JSON serialization
fn json_to_g1_vec(json: &Value, key: &str) -> Vec<G1Affine> {
    let els: Vec<Vec<String>> = json
        .get(key)
        .unwrap()
        .as_array()
        .unwrap()
        .iter()
        .map(|i| {
            i.as_array()
                .unwrap()
                .iter()
                .map(|x| x.as_str().unwrap().to_string())
                .collect::<Vec<String>>()
        })
        .collect();

    els.iter()
        .map(|coords| {
            G1Affine::from(G1Projective::new(
                fq_from_str(&coords[0]),
                fq_from_str(&coords[1]),
                fq_from_str(&coords[2]),
            ))
        })
        .collect()
}

// Extracts the element in G2 corresponding to its JSON serialization
fn json_to_g2(json: &Value, key: &str) -> G2Affine {
    let els: Vec<Vec<String>> = json
        .get(key)
        .unwrap()
        .as_array()
        .unwrap()
        .iter()
        .map(|i| {
            i.as_array()
                .unwrap()
                .iter()
                .map(|x| x.as_str().unwrap().to_string())
                .collect::<Vec<String>>()
        })
        .collect();

    let x = Fq2::new(fq_from_str(&els[0][0]), fq_from_str(&els[0][1]));
    let y = Fq2::new(fq_from_str(&els[1][0]), fq_from_str(&els[1][1]));
    let z = Fq2::new(fq_from_str(&els[2][0]), fq_from_str(&els[2][1]));
    G2Affine::from(G2Projective::new(x, y, z))
}

// Converts JSON to a VerifyingKey
fn to_verifying_key(json: serde_json::Value) -> VerifyingKey<Curve> {
    VerifyingKey {
        alpha_g1: json_to_g1(&json, "vk_alpha_1"),
        beta_g2: json_to_g2(&json, "vk_beta_2"),
        gamma_g2: json_to_g2(&json, "vk_gamma_2"),
        delta_g2: json_to_g2(&json, "vk_delta_2"),
        gamma_abc_g1: json_to_g1_vec(&json, "IC"),
    }
}

// Computes the verification key from its JSON serialization
fn vk_from_json(vk_path: &str) -> VerifyingKey<Curve> {
    let json = std::fs::read_to_string(vk_path).unwrap();
    let json: Value = serde_json::from_str(&json).unwrap();

    to_verifying_key(json)
}

// Computes the verification key from a bytes vector containing its JSON serialization
fn vk_from_vector(vk: &[u8]) -> VerifyingKey<Curve> {
    let json = String::from_utf8(vk.to_vec()).expect("Found invalid UTF-8");
    let json: Value = serde_json::from_str(&json).unwrap();

    to_verifying_key(json)
}

// Checks that the verification key is correct with respect to the proving key
pub fn check_vk_from_zkey(resources_folder: &str, verifying_key: VerifyingKey<Curve>) {
    let (proving_key, _matrices) = zkey_from_folder(resources_folder).unwrap();
    assert_eq!(proving_key.vk, verifying_key);
}
7
rln/src/circuit/error.rs
Normal file
@@ -0,0 +1,7 @@
#[derive(Debug, thiserror::Error)]
pub enum ZKeyReadError {
    #[error("No proving key found!")]
    EmptyBytes,
    #[error("{0}")]
    SerializationError(#[from] ark_serialize::SerializationError),
}
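A brief sketch of how a reader function might surface `ZKeyReadError` (illustrative only: `load_zkey_bytes` is a hypothetical helper, not the crate's actual API):

```rust
// Hypothetical reader, shown next to the error type it would return:
// empty input maps to EmptyBytes, while any ark-serialize failure
// converts into SerializationError via the #[from] attribute.
fn load_zkey_bytes(bytes: &[u8]) -> Result<Vec<u8>, ZKeyReadError> {
    if bytes.is_empty() {
        return Err(ZKeyReadError::EmptyBytes);
    }
    // ... deserialize with ark-serialize here; `?` performs the conversion
    Ok(bytes.to_vec())
}
```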
94
rln/src/circuit/iden3calc.rs
Normal file
@@ -0,0 +1,94 @@
// This file is based on the code by iden3. Its preimage can be found here:
// https://github.com/iden3/circom-witnesscalc/blob/5cb365b6e4d9052ecc69d4567fcf5bc061c20e94/src/lib.rs

pub mod graph;
pub mod proto;
pub mod storage;

use ruint::aliases::U256;
use std::collections::HashMap;
use storage::deserialize_witnesscalc_graph;
use zeroize::zeroize_flat_type;

use crate::circuit::iden3calc::graph::fr_to_u256;
use crate::circuit::Fr;
use crate::utils::FrOrSecret;
use graph::Node;

pub type InputSignalsInfo = HashMap<String, (usize, usize)>;

pub fn calc_witness<I: IntoIterator<Item = (String, Vec<FrOrSecret>)>>(
    inputs: I,
    graph_data: &[u8],
) -> Vec<Fr> {
    let mut inputs: HashMap<String, Vec<U256>> = inputs
        .into_iter()
        .map(|(key, value)| {
            (
                key,
                value
                    .iter()
                    .map(|f_| match f_ {
                        FrOrSecret::IdSecret(s) => s.to_u256(),
                        FrOrSecret::Fr(f) => fr_to_u256(f),
                    })
                    .collect(),
            )
        })
        .collect();

    let (nodes, signals, input_mapping): (Vec<Node>, Vec<usize>, InputSignalsInfo) =
        deserialize_witnesscalc_graph(std::io::Cursor::new(graph_data)).unwrap();

    let mut inputs_buffer = get_inputs_buffer(get_inputs_size(&nodes));
    populate_inputs(&inputs, &input_mapping, &mut inputs_buffer);
    if let Some(v) = inputs.get_mut("identitySecret") {
        // Effectively `v[0] = U256::ZERO`: wipe the identity secret from the
        // inputs map now that it has been copied into inputs_buffer
        unsafe { zeroize_flat_type(v) };
    }
    let res = graph::evaluate(&nodes, inputs_buffer.as_slice(), &signals);
    // Wipe the inputs buffer as well, since it also holds the secret
    inputs_buffer.iter_mut().for_each(|i| {
        unsafe { zeroize_flat_type(i) };
    });
    res
}

fn get_inputs_size(nodes: &[Node]) -> usize {
    let mut start = false;
    let mut max_index = 0usize;
    for &node in nodes.iter() {
        if let Node::Input(i) = node {
            if i > max_index {
                max_index = i;
            }
            start = true
        } else if start {
            break;
        }
    }
    max_index + 1
}

fn populate_inputs(
    input_list: &HashMap<String, Vec<U256>>,
    inputs_info: &InputSignalsInfo,
    input_buffer: &mut [U256],
) {
    for (key, value) in input_list {
        let (offset, len) = inputs_info[key];
        if len != value.len() {
            panic!("Invalid input length for {key}");
        }

        for (i, v) in value.iter().enumerate() {
            input_buffer[offset + i] = *v;
        }
    }
}

/// Allocates the inputs vec with position 0 set to 1
fn get_inputs_buffer(size: usize) -> Vec<U256> {
    let mut inputs = vec![U256::ZERO; size];
    inputs[0] = U256::from(1);
    inputs
}
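A minimal sketch of driving `calc_witness` (hypothetical: the signal name `"x"` and the module path depend on the circuit and on how `iden3calc` is exported; the graph path refers to the shipped resources):

```rust
use rln::circuit::iden3calc::calc_witness;
use rln::circuit::Fr;
use rln::utils::FrOrSecret;

fn main() {
    // Load the witness-calculation graph shipped with the crate, then
    // evaluate it on named input signals.
    let graph_data = std::fs::read("resources/tree_depth_20/graph.bin").unwrap();
    let inputs = vec![("x".to_string(), vec![FrOrSecret::Fr(Fr::from(1u64))])];
    let witness: Vec<Fr> = calc_witness(inputs, &graph_data);
    println!("witness has {} elements", witness.len());
}
```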
957
rln/src/circuit/iden3calc/graph.rs
Normal file
957
rln/src/circuit/iden3calc/graph.rs
Normal file
@@ -0,0 +1,957 @@
|
||||
// This file is based on the code by iden3. Its preimage can be found here:
|
||||
// https://github.com/iden3/circom-witnesscalc/blob/5cb365b6e4d9052ecc69d4567fcf5bc061c20e94/src/graph.rs
|
||||
|
||||
use ark_ff::{BigInt, BigInteger, One, PrimeField, Zero};
|
||||
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress, Validate};
|
||||
use rand::Rng;
|
||||
use ruint::{aliases::U256, uint};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
cmp::Ordering,
|
||||
collections::HashMap,
|
||||
error::Error,
|
||||
ops::{Deref, Shl, Shr},
|
||||
};
|
||||
|
||||
use crate::circuit::iden3calc::proto;
|
||||
use crate::circuit::Fr;
|
||||
|
||||
pub const M: U256 =
|
||||
uint!(21888242871839275222246405745257275088548364400416034343698204186575808495617_U256);
|
||||
|
||||
fn ark_se<S, A: CanonicalSerialize>(a: &A, s: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
let mut bytes = vec![];
|
||||
a.serialize_with_mode(&mut bytes, Compress::Yes)
|
||||
.map_err(serde::ser::Error::custom)?;
|
||||
s.serialize_bytes(&bytes)
|
||||
}
|
||||
|
||||
fn ark_de<'de, D, A: CanonicalDeserialize>(data: D) -> Result<A, D::Error>
|
||||
where
|
||||
D: serde::de::Deserializer<'de>,
|
||||
{
|
||||
let s: Vec<u8> = serde::de::Deserialize::deserialize(data)?;
|
||||
let a = A::deserialize_with_mode(s.as_slice(), Compress::Yes, Validate::Yes);
|
||||
a.map_err(serde::de::Error::custom)
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn fr_to_u256(x: &Fr) -> U256 {
|
||||
U256::from_limbs(x.into_bigint().0)
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
pub fn u256_to_fr(x: &U256) -> Fr {
|
||||
Fr::from_bigint(BigInt::new(x.into_limbs())).expect("Failed to convert U256 to Fr")
|
||||
}
|
||||
|
||||
#[derive(Hash, PartialEq, Eq, Debug, Clone, Copy, Serialize, Deserialize)]
|
||||
pub enum Operation {
|
||||
Mul,
|
||||
Div,
|
||||
Add,
|
||||
Sub,
|
||||
Pow,
|
||||
Idiv,
|
||||
Mod,
|
||||
Eq,
|
||||
Neq,
|
||||
Lt,
|
||||
Gt,
|
||||
Leq,
|
||||
Geq,
|
||||
Land,
|
||||
Lor,
|
||||
Shl,
|
||||
Shr,
|
||||
Bor,
|
||||
Band,
|
||||
Bxor,
|
||||
}
|
||||
|
||||
impl Operation {
|
||||
// TODO: rewrite to &U256 type
|
||||
pub fn eval(&self, a: U256, b: U256) -> U256 {
|
||||
use Operation::*;
|
||||
match self {
|
||||
Mul => a.mul_mod(b, M),
|
||||
Div => {
|
||||
if b == U256::ZERO {
|
||||
// as we are simulating a circuit execution with signals
|
||||
// values all equal to 0, just return 0 here in case of
|
||||
// division by zero
|
||||
U256::ZERO
|
||||
} else {
|
||||
a.mul_mod(b.inv_mod(M).unwrap(), M)
|
||||
}
|
||||
}
|
||||
Add => a.add_mod(b, M),
|
||||
Sub => a.add_mod(M - b, M),
|
||||
Pow => a.pow_mod(b, M),
|
||||
Mod => a.div_rem(b).1,
|
||||
Eq => U256::from(a == b),
|
||||
Neq => U256::from(a != b),
|
||||
Lt => u_lt(&a, &b),
|
||||
Gt => u_gt(&a, &b),
|
||||
Leq => u_lte(&a, &b),
|
||||
Geq => u_gte(&a, &b),
|
||||
Land => U256::from(a != U256::ZERO && b != U256::ZERO),
|
||||
Lor => U256::from(a != U256::ZERO || b != U256::ZERO),
|
||||
Shl => compute_shl_uint(a, b),
|
||||
Shr => compute_shr_uint(a, b),
|
||||
// TODO test with conner case when it is possible to get the number
|
||||
// bigger then modulus
|
||||
Bor => a.bitor(b),
|
||||
Band => a.bitand(b),
|
||||
// TODO test with conner case when it is possible to get the number
|
||||
// bigger then modulus
|
||||
Bxor => a.bitxor(b),
|
||||
Idiv => a / b,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn eval_fr(&self, a: Fr, b: Fr) -> Fr {
|
||||
use Operation::*;
|
||||
match self {
|
||||
Mul => a * b,
|
||||
// We always should return something on the circuit execution.
|
||||
// So in case of division by 0 we would return 0. And the proof
|
||||
// should be invalid in the end.
|
||||
Div => {
|
||||
if b.is_zero() {
|
||||
Fr::zero()
|
||||
} else {
|
||||
a / b
|
||||
}
|
||||
}
|
||||
Add => a + b,
|
||||
Sub => a - b,
|
||||
Idiv => {
|
||||
if b.is_zero() {
|
||||
Fr::zero()
|
||||
} else {
|
||||
let a_u256 = fr_to_u256(&a);
|
||||
let b_u256 = fr_to_u256(&b);
|
||||
u256_to_fr(&(a_u256 / b_u256))
|
||||
}
|
||||
}
|
||||
Mod => {
|
||||
if b.is_zero() {
|
||||
Fr::zero()
|
||||
} else {
|
||||
let a_u256 = fr_to_u256(&a);
|
||||
let b_u256 = fr_to_u256(&b);
|
||||
u256_to_fr(&(a_u256 % b_u256))
|
||||
}
|
||||
}
|
||||
Eq => match a.cmp(&b) {
|
||||
Ordering::Equal => Fr::one(),
|
||||
_ => Fr::zero(),
|
||||
},
|
||||
Neq => match a.cmp(&b) {
|
||||
Ordering::Equal => Fr::zero(),
|
||||
_ => Fr::one(),
|
||||
},
|
||||
Lt => u256_to_fr(&u_lt(&fr_to_u256(&a), &fr_to_u256(&b))),
|
||||
Gt => u256_to_fr(&u_gt(&fr_to_u256(&a), &fr_to_u256(&b))),
|
||||
Leq => u256_to_fr(&u_lte(&fr_to_u256(&a), &fr_to_u256(&b))),
|
||||
Geq => u256_to_fr(&u_gte(&fr_to_u256(&a), &fr_to_u256(&b))),
|
||||
Land => {
|
||||
if a.is_zero() || b.is_zero() {
|
||||
Fr::zero()
|
||||
} else {
|
||||
Fr::one()
|
||||
}
|
||||
}
|
||||
Lor => {
|
||||
if a.is_zero() && b.is_zero() {
|
||||
Fr::zero()
|
||||
} else {
|
||||
Fr::one()
|
||||
}
|
||||
}
|
||||
Shl => shl(a, b),
|
||||
Shr => shr(a, b),
|
||||
Bor => bit_or(a, b),
|
||||
Band => bit_and(a, b),
|
||||
Bxor => bit_xor(a, b),
|
||||
// TODO implement other operators
|
||||
_ => unimplemented!("operator {:?} not implemented for Montgomery", self),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&Operation> for proto::DuoOp {
|
||||
fn from(v: &Operation) -> Self {
|
||||
match v {
|
||||
Operation::Mul => proto::DuoOp::Mul,
|
||||
Operation::Div => proto::DuoOp::Div,
|
||||
Operation::Add => proto::DuoOp::Add,
|
||||
Operation::Sub => proto::DuoOp::Sub,
|
||||
Operation::Pow => proto::DuoOp::Pow,
|
||||
Operation::Idiv => proto::DuoOp::Idiv,
|
||||
Operation::Mod => proto::DuoOp::Mod,
|
||||
Operation::Eq => proto::DuoOp::Eq,
|
||||
Operation::Neq => proto::DuoOp::Neq,
|
||||
Operation::Lt => proto::DuoOp::Lt,
|
||||
Operation::Gt => proto::DuoOp::Gt,
|
||||
Operation::Leq => proto::DuoOp::Leq,
|
||||
Operation::Geq => proto::DuoOp::Geq,
|
||||
Operation::Land => proto::DuoOp::Land,
|
||||
Operation::Lor => proto::DuoOp::Lor,
|
||||
Operation::Shl => proto::DuoOp::Shl,
|
||||
Operation::Shr => proto::DuoOp::Shr,
|
||||
Operation::Bor => proto::DuoOp::Bor,
|
||||
Operation::Band => proto::DuoOp::Band,
|
||||
Operation::Bxor => proto::DuoOp::Bxor,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Hash, PartialEq, Eq, Debug, Clone, Copy, Serialize, Deserialize)]
pub enum UnoOperation {
    Neg,
    Id, // identity - just return self
}

impl UnoOperation {
    pub fn eval(&self, a: U256) -> U256 {
        match self {
            UnoOperation::Neg => {
                if a == U256::ZERO {
                    U256::ZERO
                } else {
                    M - a
                }
            }
            UnoOperation::Id => a,
        }
    }

    pub fn eval_fr(&self, a: Fr) -> Fr {
        match self {
            UnoOperation::Neg => {
                if a.is_zero() {
                    Fr::zero()
                } else {
                    let mut x = Fr::MODULUS;
                    x.sub_with_borrow(&a.into_bigint());
                    Fr::from_bigint(x).unwrap()
                }
            }
            _ => unimplemented!("uno operator {:?} not implemented for Montgomery", self),
        }
    }
}

impl From<&UnoOperation> for proto::UnoOp {
    fn from(v: &UnoOperation) -> Self {
        match v {
            UnoOperation::Neg => proto::UnoOp::Neg,
            UnoOperation::Id => proto::UnoOp::Id,
        }
    }
}

#[derive(Hash, PartialEq, Eq, Debug, Clone, Copy, Serialize, Deserialize)]
pub enum TresOperation {
    TernCond,
}

impl TresOperation {
    pub fn eval(&self, a: U256, b: U256, c: U256) -> U256 {
        match self {
            TresOperation::TernCond => {
                if a == U256::ZERO {
                    c
                } else {
                    b
                }
            }
        }
    }

    pub fn eval_fr(&self, a: Fr, b: Fr, c: Fr) -> Fr {
        match self {
            TresOperation::TernCond => {
                if a.is_zero() {
                    c
                } else {
                    b
                }
            }
        }
    }
}

impl From<&TresOperation> for proto::TresOp {
    fn from(v: &TresOperation) -> Self {
        match v {
            TresOperation::TernCond => proto::TresOp::TernCond,
        }
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum Node {
    Input(usize),
    Constant(U256),
    #[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
    MontConstant(Fr),
    UnoOp(UnoOperation, usize),
    Op(Operation, usize, usize),
    TresOp(TresOperation, usize, usize, usize),
}

// TODO remove pub from Vec<Node>
#[derive(Default)]
pub struct Nodes(pub Vec<Node>);

impl Nodes {
    pub fn new() -> Self {
        Nodes(Vec::new())
    }

    pub fn to_const(&self, idx: NodeIdx) -> Result<U256, NodeConstErr> {
        let me = self.0.get(idx.0).ok_or(NodeConstErr::EmptyNode(idx))?;
        match me {
            Node::Constant(v) => Ok(*v),
            Node::UnoOp(op, a) => Ok(op.eval(self.to_const(NodeIdx(*a))?)),
            Node::Op(op, a, b) => {
                Ok(op.eval(self.to_const(NodeIdx(*a))?, self.to_const(NodeIdx(*b))?))
            }
            Node::TresOp(op, a, b, c) => Ok(op.eval(
                self.to_const(NodeIdx(*a))?,
                self.to_const(NodeIdx(*b))?,
                self.to_const(NodeIdx(*c))?,
            )),
            Node::Input(_) => Err(NodeConstErr::InputSignal),
            Node::MontConstant(_) => {
                panic!("MontConstant should not be used here")
            }
        }
    }

    pub fn push(&mut self, n: Node) -> NodeIdx {
        self.0.push(n);
        NodeIdx(self.0.len() - 1)
    }

    pub fn get(&self, idx: NodeIdx) -> Option<&Node> {
        self.0.get(idx.0)
    }
}

impl Deref for Nodes {
    type Target = Vec<Node>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

#[derive(Debug, Copy, Clone)]
pub struct NodeIdx(pub usize);

impl From<usize> for NodeIdx {
    fn from(v: usize) -> Self {
        NodeIdx(v)
    }
}

#[derive(Debug)]
pub enum NodeConstErr {
    EmptyNode(NodeIdx),
    InputSignal,
}

impl std::fmt::Display for NodeConstErr {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            NodeConstErr::EmptyNode(idx) => {
                write!(f, "empty node at index {}", idx.0)
            }
            NodeConstErr::InputSignal => {
                write!(f, "input signal is not a constant")
            }
        }
    }
}

impl Error for NodeConstErr {}

fn compute_shl_uint(a: U256, b: U256) -> U256 {
    debug_assert!(b.lt(&U256::from(256)));
    let ls_limb = b.as_limbs()[0];
    a.shl(ls_limb as usize)
}

fn compute_shr_uint(a: U256, b: U256) -> U256 {
    debug_assert!(b.lt(&U256::from(256)));
    let ls_limb = b.as_limbs()[0];
    a.shr(ls_limb as usize)
}

/// All references must be backwards.
fn assert_valid(nodes: &[Node]) {
    for (i, &node) in nodes.iter().enumerate() {
        if let Node::Op(_, a, b) = node {
            assert!(a < i);
            assert!(b < i);
        } else if let Node::UnoOp(_, a) = node {
            assert!(a < i);
        } else if let Node::TresOp(_, a, b, c) = node {
            assert!(a < i);
            assert!(b < i);
            assert!(c < i);
        }
    }
}

pub fn optimize(nodes: &mut Vec<Node>, outputs: &mut [usize]) {
    tree_shake(nodes, outputs);
    propagate(nodes);
    value_numbering(nodes, outputs);
    constants(nodes);
    tree_shake(nodes, outputs);
    montgomery_form(nodes);
}
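
// Editorial note (added commentary, not in the original source): the pass
// order above matters. The first `tree_shake` shrinks the graph before the
// heavier passes run; `propagate` folds constants so that `value_numbering`
// and `constants` can merge and freeze more nodes; the second `tree_shake`
// removes the nodes those passes orphaned; and `montgomery_form` runs last
// because the earlier passes only understand `Node::Constant`, not
// `Node::MontConstant`.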
pub fn evaluate(nodes: &[Node], inputs: &[U256], outputs: &[usize]) -> Vec<Fr> {
    // assert_valid(nodes);

    // Evaluate the graph.
    let mut values = Vec::with_capacity(nodes.len());
    for &node in nodes.iter() {
        let value = match node {
            Node::Constant(c) => u256_to_fr(&c),
            Node::MontConstant(c) => c,
            Node::Input(i) => u256_to_fr(&inputs[i]),
            Node::Op(op, a, b) => op.eval_fr(values[a], values[b]),
            Node::UnoOp(op, a) => op.eval_fr(values[a]),
            Node::TresOp(op, a, b, c) => op.eval_fr(values[a], values[b], values[c]),
        };
        values.push(value);
    }

    // Convert from Montgomery form and return the outputs.
    let mut out = vec![Fr::from(0); outputs.len()];
    for i in 0..outputs.len() {
        out[i] = values[outputs[i]];
    }

    out
}
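
// Editorial sketch (added, not part of the original file): a minimal
// end-to-end use of `evaluate` on a three-node graph computing `x + 7`.
// The node and operation names are this module's own; the test itself is
// an illustrative assumption.
#[cfg(test)]
#[test]
fn evaluate_example_add_constant() {
    use ruint::uint;
    let nodes = vec![
        Node::Input(0),                 // the signal x
        Node::Constant(uint!(7_U256)),  // the constant 7
        Node::Op(Operation::Add, 0, 1), // x + 7
    ];
    // Evaluate with x = 5 and request node 2 as the only output.
    let out = evaluate(&nodes, &[uint!(5_U256)], &[2]);
    assert_eq!(out[0], Fr::from(12u64));
}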
/// Constant propagation
pub fn propagate(nodes: &mut [Node]) {
    assert_valid(nodes);
    for i in 0..nodes.len() {
        if let Node::Op(op, a, b) = nodes[i] {
            if let (Node::Constant(va), Node::Constant(vb)) = (nodes[a], nodes[b]) {
                nodes[i] = Node::Constant(op.eval(va, vb));
            } else if a == b {
                // Not constant but equal
                use Operation::*;
                if let Some(c) = match op {
                    Eq | Leq | Geq => Some(true),
                    Neq | Lt | Gt => Some(false),
                    _ => None,
                } {
                    nodes[i] = Node::Constant(U256::from(c));
                }
            }
        } else if let Node::UnoOp(op, a) = nodes[i] {
            if let Node::Constant(va) = nodes[a] {
                nodes[i] = Node::Constant(op.eval(va));
            }
        } else if let Node::TresOp(op, a, b, c) = nodes[i] {
            if let (Node::Constant(va), Node::Constant(vb), Node::Constant(vc)) =
                (nodes[a], nodes[b], nodes[c])
            {
                nodes[i] = Node::Constant(op.eval(va, vb, vc));
            }
        }
    }
}

/// Remove unused nodes
pub fn tree_shake(nodes: &mut Vec<Node>, outputs: &mut [usize]) {
    assert_valid(nodes);

    // Mark all nodes that are used.
    let mut used = vec![false; nodes.len()];
    for &i in outputs.iter() {
        used[i] = true;
    }

    // Work backwards from end as all references are backwards.
    for i in (0..nodes.len()).rev() {
        if used[i] {
            if let Node::Op(_, a, b) = nodes[i] {
                used[a] = true;
                used[b] = true;
            }
            if let Node::UnoOp(_, a) = nodes[i] {
                used[a] = true;
            }
            if let Node::TresOp(_, a, b, c) = nodes[i] {
                used[a] = true;
                used[b] = true;
                used[c] = true;
            }
        }
    }

    // Remove unused nodes
    let n = nodes.len();
    let mut retain = used.iter();
    nodes.retain(|_| *retain.next().unwrap());

    // Renumber references.
    let mut renumber = vec![None; n];
    let mut index = 0;
    for (i, &used) in used.iter().enumerate() {
        if used {
            renumber[i] = Some(index);
            index += 1;
        }
    }
    assert_eq!(index, nodes.len());
    for (&used, renumber) in used.iter().zip(renumber.iter()) {
        assert_eq!(used, renumber.is_some());
    }

    // Renumber references.
    for node in nodes.iter_mut() {
        if let Node::Op(_, a, b) = node {
            *a = renumber[*a].unwrap();
            *b = renumber[*b].unwrap();
        }
        if let Node::UnoOp(_, a) = node {
            *a = renumber[*a].unwrap();
        }
        if let Node::TresOp(_, a, b, c) = node {
            *a = renumber[*a].unwrap();
            *b = renumber[*b].unwrap();
            *c = renumber[*c].unwrap();
        }
    }
    for output in outputs.iter_mut() {
        *output = renumber[*output].unwrap();
    }
}

/// Randomly evaluate the graph
fn random_eval(nodes: &mut [Node]) -> Vec<U256> {
    let mut rng = rand::thread_rng();
    let mut values = Vec::with_capacity(nodes.len());
    let mut inputs = HashMap::new();
    let mut prfs = HashMap::new();
    let mut prfs_uno = HashMap::new();
    let mut prfs_tres = HashMap::new();
    for node in nodes.iter() {
        use Operation::*;
        let value = match node {
            // Constants evaluate to themselves
            Node::Constant(c) => *c,

            Node::MontConstant(_) => unimplemented!("should not be used"),

            // Algebraic ops are evaluated directly.
            // Since the field is large, by Schwartz-Zippel, if
            // two values are the same then they are likely algebraically equal.
            Node::Op(op @ (Add | Sub | Mul), a, b) => op.eval(values[*a], values[*b]),

            // Input and non-algebraic ops are random functions
            // TODO: https://github.com/recmo/uint/issues/95 and use .gen_range(..M)
            Node::Input(i) => *inputs.entry(*i).or_insert_with(|| rng.gen::<U256>() % M),
            Node::Op(op, a, b) => *prfs
                .entry((*op, values[*a], values[*b]))
                .or_insert_with(|| rng.gen::<U256>() % M),
            Node::UnoOp(op, a) => *prfs_uno
                .entry((*op, values[*a]))
                .or_insert_with(|| rng.gen::<U256>() % M),
            Node::TresOp(op, a, b, c) => *prfs_tres
                .entry((*op, values[*a], values[*b], values[*c]))
                .or_insert_with(|| rng.gen::<U256>() % M),
        };
        values.push(value);
    }
    values
}

/// Value numbering
pub fn value_numbering(nodes: &mut [Node], outputs: &mut [usize]) {
    assert_valid(nodes);

    // Evaluate the graph in random field elements.
    let values = random_eval(nodes);

    // Find all nodes with the same value.
    let mut value_map = HashMap::new();
    for (i, &value) in values.iter().enumerate() {
        value_map.entry(value).or_insert_with(Vec::new).push(i);
    }

    // For nodes that are the same, pick the first index.
    let renumber: Vec<_> = values.into_iter().map(|v| value_map[&v][0]).collect();

    // Renumber references.
    for node in nodes.iter_mut() {
        if let Node::Op(_, a, b) = node {
            *a = renumber[*a];
            *b = renumber[*b];
        }
        if let Node::UnoOp(_, a) = node {
            *a = renumber[*a];
        }
        if let Node::TresOp(_, a, b, c) = node {
            *a = renumber[*a];
            *b = renumber[*b];
            *c = renumber[*c];
        }
    }
    for output in outputs.iter_mut() {
        *output = renumber[*output];
    }
}

/// Probabilistic constant determination
pub fn constants(nodes: &mut [Node]) {
    assert_valid(nodes);

    // Evaluate the graph in random field elements.
    let values_a = random_eval(nodes);
    let values_b = random_eval(nodes);

    // Find all nodes with the same value.
    for i in 0..nodes.len() {
        if let Node::Constant(_) = nodes[i] {
            continue;
        }
        if values_a[i] == values_b[i] {
            nodes[i] = Node::Constant(values_a[i]);
        }
    }
}
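
// Editorial note (added commentary): `constants` relies on the same
// Schwartz-Zippel style argument as `value_numbering` above: a node whose
// value is not actually constant agrees on two independent random input
// assignments only with probability on the order of 1/|F|, so equal values
// in `values_a` and `values_b` are taken as evidence of constancy.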
/// Convert to Montgomery form
pub fn montgomery_form(nodes: &mut [Node]) {
    for node in nodes.iter_mut() {
        use Node::*;
        use Operation::*;
        match node {
            Constant(c) => *node = MontConstant(u256_to_fr(c)),
            MontConstant(..) => (),
            Input(..) => (),
            Op(
                Mul | Div | Add | Sub | Idiv | Mod | Eq | Neq | Lt | Gt | Leq | Geq | Land | Lor
                | Shl | Shr | Bor | Band | Bxor,
                ..,
            ) => (),
            Op(op @ Pow, ..) => unimplemented!("Operators Montgomery form: {:?}", op),
            UnoOp(UnoOperation::Neg, ..) => (),
            UnoOp(op, ..) => unimplemented!("Uno Operators Montgomery form: {:?}", op),
            TresOp(TresOperation::TernCond, ..) => (),
        }
    }
}

fn shl(a: Fr, b: Fr) -> Fr {
    if b.is_zero() {
        return a;
    }

    if b.cmp(&Fr::from(Fr::MODULUS_BIT_SIZE)).is_ge() {
        return Fr::zero();
    }

    let n = b.into_bigint().0[0] as u32;
    let a = a.into_bigint();
    Fr::from_bigint(a << n).unwrap()
}

fn shr(a: Fr, b: Fr) -> Fr {
    if b.is_zero() {
        return a;
    }

    match b.cmp(&Fr::from(254u64)) {
        Ordering::Equal => return Fr::zero(),
        Ordering::Greater => return Fr::zero(),
        _ => (),
    };

    let mut n = b.into_bigint().to_bytes_le()[0];
    let mut result = a.into_bigint();
    let c = result.as_mut();
    while n >= 64 {
        for i in 0..3 {
            c[i as usize] = c[(i + 1) as usize];
        }
        c[3] = 0;
        n -= 64;
    }

    if n == 0 {
        return Fr::from_bigint(result).unwrap();
    }

    let mask: u64 = (1 << n) - 1;
    let mut carrier: u64 = c[3] & mask;
    c[3] >>= n;
    for i in (0..3).rev() {
        let new_carrier = c[i] & mask;
        c[i] = (c[i] >> n) | (carrier << (64 - n));
        carrier = new_carrier;
    }
    Fr::from_bigint(result).unwrap()
}
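
// Editorial note (added commentary): `shr` works directly on the 4x64-bit
// limb representation: whole limbs are moved down while n >= 64, then one
// bitwise pass shifts each limb right by the remaining n bits, carrying the
// low n bits of each higher limb into the top of the limb below. The input
// is an already-reduced field element, so a right shift cannot leave the
// field and no modular correction is needed.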
fn bit_and(a: Fr, b: Fr) -> Fr {
    let a = a.into_bigint();
    let b = b.into_bigint();
    let c: [u64; 4] = [
        a.0[0] & b.0[0],
        a.0[1] & b.0[1],
        a.0[2] & b.0[2],
        a.0[3] & b.0[3],
    ];
    let mut d: BigInt<4> = BigInt::new(c);
    if d >= Fr::MODULUS {
        d.sub_with_borrow(&Fr::MODULUS);
    }

    Fr::from_bigint(d).unwrap()
}

fn bit_or(a: Fr, b: Fr) -> Fr {
    let a = a.into_bigint();
    let b = b.into_bigint();
    let c: [u64; 4] = [
        a.0[0] | b.0[0],
        a.0[1] | b.0[1],
        a.0[2] | b.0[2],
        a.0[3] | b.0[3],
    ];
    let mut d: BigInt<4> = BigInt::new(c);
    if d >= Fr::MODULUS {
        d.sub_with_borrow(&Fr::MODULUS);
    }

    Fr::from_bigint(d).unwrap()
}

fn bit_xor(a: Fr, b: Fr) -> Fr {
    let a = a.into_bigint();
    let b = b.into_bigint();
    let c: [u64; 4] = [
        a.0[0] ^ b.0[0],
        a.0[1] ^ b.0[1],
        a.0[2] ^ b.0[2],
        a.0[3] ^ b.0[3],
    ];
    let mut d: BigInt<4> = BigInt::new(c);
    if d >= Fr::MODULUS {
        d.sub_with_borrow(&Fr::MODULUS);
    }

    Fr::from_bigint(d).unwrap()
}

// M / 2
const HALF_M: U256 =
    uint!(10944121435919637611123202872628637544274182200208017171849102093287904247808_U256);
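
// Editorial note (added commentary): the comparators below implement
// circom's signed comparison semantics. A field element v above HALF_M is
// interpreted as the negative value v - M, so for example M - 1 encodes -1
// and u_gte(&(M - 1), &3) yields 0, as exercised by `test_u_gte` below.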
fn u_gte(a: &U256, b: &U256) -> U256 {
    let a_neg = &HALF_M < a;
    let b_neg = &HALF_M < b;

    match (a_neg, b_neg) {
        (false, false) => U256::from(a >= b),
        (true, false) => uint!(0_U256),
        (false, true) => uint!(1_U256),
        (true, true) => U256::from(a >= b),
    }
}

fn u_lte(a: &U256, b: &U256) -> U256 {
    let a_neg = &HALF_M < a;
    let b_neg = &HALF_M < b;

    match (a_neg, b_neg) {
        (false, false) => U256::from(a <= b),
        (true, false) => uint!(1_U256),
        (false, true) => uint!(0_U256),
        (true, true) => U256::from(a <= b),
    }
}

fn u_gt(a: &U256, b: &U256) -> U256 {
    let a_neg = &HALF_M < a;
    let b_neg = &HALF_M < b;

    match (a_neg, b_neg) {
        (false, false) => U256::from(a > b),
        (true, false) => uint!(0_U256),
        (false, true) => uint!(1_U256),
        (true, true) => U256::from(a > b),
    }
}

fn u_lt(a: &U256, b: &U256) -> U256 {
    let a_neg = &HALF_M < a;
    let b_neg = &HALF_M < b;

    match (a_neg, b_neg) {
        (false, false) => U256::from(a < b),
        (true, false) => uint!(1_U256),
        (false, true) => uint!(0_U256),
        (true, true) => U256::from(a < b),
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use ruint::uint;
    use std::ops::Div;
    use std::str::FromStr;

    #[test]
    fn test_ok() {
        let a = Fr::from(4u64);
        let b = Fr::from(2u64);
        let c = shl(a, b);
        assert_eq!(c.cmp(&Fr::from(16u64)), Ordering::Equal)
    }

    #[test]
    fn test_div() {
        assert_eq!(
            Operation::Div.eval_fr(Fr::from(2u64), Fr::from(3u64)),
            Fr::from_str(
                "7296080957279758407415468581752425029516121466805344781232734728858602831873"
            )
            .unwrap()
        );

        assert_eq!(
            Operation::Div.eval_fr(Fr::from(6u64), Fr::from(2u64)),
            Fr::from_str("3").unwrap()
        );

        assert_eq!(
            Operation::Div.eval_fr(Fr::from(7u64), Fr::from(2u64)),
            Fr::from_str(
                "10944121435919637611123202872628637544274182200208017171849102093287904247812"
            )
            .unwrap()
        );
    }

    #[test]
    fn test_idiv() {
        assert_eq!(
            Operation::Idiv.eval_fr(Fr::from(2u64), Fr::from(3u64)),
            Fr::from_str("0").unwrap()
        );

        assert_eq!(
            Operation::Idiv.eval_fr(Fr::from(6u64), Fr::from(2u64)),
            Fr::from_str("3").unwrap()
        );

        assert_eq!(
            Operation::Idiv.eval_fr(Fr::from(7u64), Fr::from(2u64)),
            Fr::from_str("3").unwrap()
        );
    }

    #[test]
    fn test_fr_mod() {
        assert_eq!(
            Operation::Mod.eval_fr(Fr::from(7u64), Fr::from(2u64)),
            Fr::from_str("1").unwrap()
        );

        assert_eq!(
            Operation::Mod.eval_fr(Fr::from(7u64), Fr::from(9u64)),
            Fr::from_str("7").unwrap()
        );
    }

    #[test]
    fn test_u_gte() {
        let result = u_gte(&uint!(10_U256), &uint!(3_U256));
        assert_eq!(result, uint!(1_U256));

        let result = u_gte(&uint!(3_U256), &uint!(3_U256));
        assert_eq!(result, uint!(1_U256));

        let result = u_gte(&uint!(2_U256), &uint!(3_U256));
        assert_eq!(result, uint!(0_U256));

        // -1 >= 3 => 0
        let result = u_gte(
            &uint!(
                21888242871839275222246405745257275088548364400416034343698204186575808495616_U256
            ),
            &uint!(3_U256),
        );
        assert_eq!(result, uint!(0_U256));

        // -1 >= -2 => 1
        let result = u_gte(
            &uint!(
                21888242871839275222246405745257275088548364400416034343698204186575808495616_U256
            ),
            &uint!(
                21888242871839275222246405745257275088548364400416034343698204186575808495615_U256
            ),
        );
        assert_eq!(result, uint!(1_U256));

        // -2 >= -1 => 0
        let result = u_gte(
            &uint!(
                21888242871839275222246405745257275088548364400416034343698204186575808495615_U256
            ),
            &uint!(
                21888242871839275222246405745257275088548364400416034343698204186575808495616_U256
            ),
        );
        assert_eq!(result, uint!(0_U256));

        // -2 == -2 => 1
        let result = u_gte(
            &uint!(
                21888242871839275222246405745257275088548364400416034343698204186575808495615_U256
            ),
            &uint!(
                21888242871839275222246405745257275088548364400416034343698204186575808495615_U256
            ),
        );
        assert_eq!(result, uint!(1_U256));
    }

    #[test]
    fn test_x() {
        let x = M.div(uint!(2_U256));

        println!("x: {:?}", x.as_limbs());
        println!("x: {M}");
    }

    #[test]
    fn test_2() {
        let nodes: Vec<Node> = vec![];
        // let node = nodes[0];
        let node = nodes.first();
        println!("{node:?}");
    }
}

117 rln/src/circuit/iden3calc/proto.rs Normal file
@@ -0,0 +1,117 @@
// This file was generated by prost-build during compilation of the iden3 code
// and then modified manually. The *.proto file it was generated from can be found here:
// https://github.com/iden3/circom-witnesscalc/blob/5cb365b6e4d9052ecc69d4567fcf5bc061c20e94/protos/messages.proto

use std::collections::HashMap;

#[derive(Clone, PartialEq, ::prost::Message)]
pub struct BigUInt {
    #[prost(bytes = "vec", tag = "1")]
    pub value_le: Vec<u8>,
}
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
pub struct InputNode {
    #[prost(uint32, tag = "1")]
    pub idx: u32,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ConstantNode {
    #[prost(message, optional, tag = "1")]
    pub value: Option<BigUInt>,
}
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
pub struct UnoOpNode {
    #[prost(enumeration = "UnoOp", tag = "1")]
    pub op: i32,
    #[prost(uint32, tag = "2")]
    pub a_idx: u32,
}
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
pub struct DuoOpNode {
    #[prost(enumeration = "DuoOp", tag = "1")]
    pub op: i32,
    #[prost(uint32, tag = "2")]
    pub a_idx: u32,
    #[prost(uint32, tag = "3")]
    pub b_idx: u32,
}
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
pub struct TresOpNode {
    #[prost(enumeration = "TresOp", tag = "1")]
    pub op: i32,
    #[prost(uint32, tag = "2")]
    pub a_idx: u32,
    #[prost(uint32, tag = "3")]
    pub b_idx: u32,
    #[prost(uint32, tag = "4")]
    pub c_idx: u32,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Node {
    #[prost(oneof = "node::Node", tags = "1, 2, 3, 4, 5")]
    pub node: Option<node::Node>,
}
/// Nested message and enum types in `Node`.
pub mod node {
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Node {
        #[prost(message, tag = "1")]
        Input(super::InputNode),
        #[prost(message, tag = "2")]
        Constant(super::ConstantNode),
        #[prost(message, tag = "3")]
        UnoOp(super::UnoOpNode),
        #[prost(message, tag = "4")]
        DuoOp(super::DuoOpNode),
        #[prost(message, tag = "5")]
        TresOp(super::TresOpNode),
    }
}
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
pub struct SignalDescription {
    #[prost(uint32, tag = "1")]
    pub offset: u32,
    #[prost(uint32, tag = "2")]
    pub len: u32,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GraphMetadata {
    #[prost(uint32, repeated, tag = "1")]
    pub witness_signals: Vec<u32>,
    #[prost(map = "string, message", tag = "2")]
    pub inputs: HashMap<String, SignalDescription>,
}
#[derive(Clone, Copy, Debug, PartialEq, ::prost::Enumeration)]
pub enum DuoOp {
    Mul = 0,
    Div = 1,
    Add = 2,
    Sub = 3,
    Pow = 4,
    Idiv = 5,
    Mod = 6,
    Eq = 7,
    Neq = 8,
    Lt = 9,
    Gt = 10,
    Leq = 11,
    Geq = 12,
    Land = 13,
    Lor = 14,
    Shl = 15,
    Shr = 16,
    Bor = 17,
    Band = 18,
    Bxor = 19,
}

#[derive(Clone, Copy, Debug, PartialEq, ::prost::Enumeration)]
pub enum UnoOp {
    Neg = 0,
    Id = 1,
}

#[derive(Clone, Copy, Debug, PartialEq, ::prost::Enumeration)]
pub enum TresOp {
    TernCond = 0,
}

497 rln/src/circuit/iden3calc/storage.rs Normal file
@@ -0,0 +1,497 @@
// This file is based on the code by iden3. The original file can be found here:
// https://github.com/iden3/circom-witnesscalc/blob/5cb365b6e4d9052ecc69d4567fcf5bc061c20e94/src/storage.rs

use ark_bn254::Fr;
use ark_ff::PrimeField;
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use prost::Message;
use std::io::{Read, Write};

use crate::circuit::iden3calc::{
    graph,
    graph::{Operation, TresOperation, UnoOperation},
    proto, InputSignalsInfo,
};

// format of the wtns.graph file:
// + magic line: wtns.graph.001
// + 8 bytes unsigned LE 64-bit integer: number of nodes
// + series of protobuf serialized nodes, each prefixed by its varint length
// + protobuf serialized GraphMetadata
// + 8 bytes unsigned LE 64-bit integer: offset of the GraphMetadata message

const WITNESSCALC_GRAPH_MAGIC: &[u8] = b"wtns.graph.001";

const MAX_VARINT_LENGTH: usize = 10;
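
// Editorial sketch (added commentary) of the resulting byte layout, given
// the format description above:
//
//   offset 0        : b"wtns.graph.001" (14-byte magic)
//   offset 14       : u64 LE node count
//   offset 22       : node 0, node 1, ... (each: varint length + protobuf body)
//   metadata_start  : GraphMetadata (varint length + protobuf body)
//   last 8 bytes    : u64 LE = metadata_start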
impl From<proto::Node> for graph::Node {
    fn from(value: proto::Node) -> Self {
        match value.node.unwrap() {
            proto::node::Node::Input(input_node) => graph::Node::Input(input_node.idx as usize),
            proto::node::Node::Constant(constant_node) => {
                let i = constant_node.value.unwrap();
                graph::Node::MontConstant(Fr::from_le_bytes_mod_order(i.value_le.as_slice()))
            }
            proto::node::Node::UnoOp(uno_op_node) => {
                let op = proto::UnoOp::try_from(uno_op_node.op).unwrap();
                graph::Node::UnoOp(op.into(), uno_op_node.a_idx as usize)
            }
            proto::node::Node::DuoOp(duo_op_node) => {
                let op = proto::DuoOp::try_from(duo_op_node.op).unwrap();
                graph::Node::Op(
                    op.into(),
                    duo_op_node.a_idx as usize,
                    duo_op_node.b_idx as usize,
                )
            }
            proto::node::Node::TresOp(tres_op_node) => {
                let op = proto::TresOp::try_from(tres_op_node.op).unwrap();
                graph::Node::TresOp(
                    op.into(),
                    tres_op_node.a_idx as usize,
                    tres_op_node.b_idx as usize,
                    tres_op_node.c_idx as usize,
                )
            }
        }
    }
}

impl From<&graph::Node> for proto::node::Node {
    fn from(node: &graph::Node) -> Self {
        match node {
            graph::Node::Input(i) => proto::node::Node::Input(proto::InputNode { idx: *i as u32 }),
            graph::Node::Constant(_) => {
                panic!("We are not supposed to write Constant to the witnesscalc graph. All Constants should have been converted to MontConstant.");
            }
            graph::Node::UnoOp(op, a) => {
                let op = proto::UnoOp::from(op);
                proto::node::Node::UnoOp(proto::UnoOpNode {
                    op: op as i32,
                    a_idx: *a as u32,
                })
            }
            graph::Node::Op(op, a, b) => proto::node::Node::DuoOp(proto::DuoOpNode {
                op: proto::DuoOp::from(op) as i32,
                a_idx: *a as u32,
                b_idx: *b as u32,
            }),
            graph::Node::TresOp(op, a, b, c) => proto::node::Node::TresOp(proto::TresOpNode {
                op: proto::TresOp::from(op) as i32,
                a_idx: *a as u32,
                b_idx: *b as u32,
                c_idx: *c as u32,
            }),
            graph::Node::MontConstant(c) => {
                let bi = Into::<num_bigint::BigUint>::into(*c);
                let i = proto::BigUInt {
                    value_le: bi.to_bytes_le(),
                };
                proto::node::Node::Constant(proto::ConstantNode { value: Some(i) })
            }
        }
    }
}

impl From<proto::UnoOp> for UnoOperation {
    fn from(value: proto::UnoOp) -> Self {
        match value {
            proto::UnoOp::Neg => UnoOperation::Neg,
            proto::UnoOp::Id => UnoOperation::Id,
        }
    }
}

impl From<proto::DuoOp> for Operation {
    fn from(value: proto::DuoOp) -> Self {
        match value {
            proto::DuoOp::Mul => Operation::Mul,
            proto::DuoOp::Div => Operation::Div,
            proto::DuoOp::Add => Operation::Add,
            proto::DuoOp::Sub => Operation::Sub,
            proto::DuoOp::Pow => Operation::Pow,
            proto::DuoOp::Idiv => Operation::Idiv,
            proto::DuoOp::Mod => Operation::Mod,
            proto::DuoOp::Eq => Operation::Eq,
            proto::DuoOp::Neq => Operation::Neq,
            proto::DuoOp::Lt => Operation::Lt,
            proto::DuoOp::Gt => Operation::Gt,
            proto::DuoOp::Leq => Operation::Leq,
            proto::DuoOp::Geq => Operation::Geq,
            proto::DuoOp::Land => Operation::Land,
            proto::DuoOp::Lor => Operation::Lor,
            proto::DuoOp::Shl => Operation::Shl,
            proto::DuoOp::Shr => Operation::Shr,
            proto::DuoOp::Bor => Operation::Bor,
            proto::DuoOp::Band => Operation::Band,
            proto::DuoOp::Bxor => Operation::Bxor,
        }
    }
}

impl From<proto::TresOp> for graph::TresOperation {
    fn from(value: proto::TresOp) -> Self {
        match value {
            proto::TresOp::TernCond => TresOperation::TernCond,
        }
    }
}

pub fn serialize_witnesscalc_graph<T: Write>(
    mut w: T,
    nodes: &Vec<graph::Node>,
    witness_signals: &[usize],
    input_signals: &InputSignalsInfo,
) -> std::io::Result<()> {
    let mut ptr = 0usize;
    w.write_all(WITNESSCALC_GRAPH_MAGIC).unwrap();
    ptr += WITNESSCALC_GRAPH_MAGIC.len();

    w.write_u64::<LittleEndian>(nodes.len() as u64)?;
    ptr += 8;

    let metadata = proto::GraphMetadata {
        witness_signals: witness_signals
            .iter()
            .map(|x| *x as u32)
            .collect::<Vec<u32>>(),
        inputs: input_signals
            .iter()
            .map(|(k, v)| {
                let sig = proto::SignalDescription {
                    offset: v.0 as u32,
                    len: v.1 as u32,
                };
                (k.clone(), sig)
            })
            .collect(),
    };

    // capacity of buf should be enough to hold the largest message + 10 bytes
    // of varint length
    let mut buf = Vec::with_capacity(metadata.encoded_len() + MAX_VARINT_LENGTH);

    for node in nodes {
        let node_pb = proto::Node {
            node: Some(proto::node::Node::from(node)),
        };

        assert_eq!(buf.len(), 0);
        node_pb.encode_length_delimited(&mut buf)?;
        ptr += buf.len();

        w.write_all(&buf)?;
        buf.clear();
    }

    metadata.encode_length_delimited(&mut buf)?;
    w.write_all(&buf)?;
    buf.clear();

    w.write_u64::<LittleEndian>(ptr as u64)?;

    Ok(())
}

fn read_message_length<R: Read>(rw: &mut WriteBackReader<R>) -> std::io::Result<usize> {
    let mut buf = [0u8; MAX_VARINT_LENGTH];
    let bytes_read = rw.read(&mut buf)?;
    if bytes_read == 0 {
        return Err(std::io::Error::new(
            std::io::ErrorKind::UnexpectedEof,
            "Unexpected EOF",
        ));
    }

    let len_delimiter = prost::decode_length_delimiter(buf.as_ref())?;

    let lnln = prost::length_delimiter_len(len_delimiter);

    if lnln < bytes_read {
        rw.write_all(&buf[lnln..bytes_read])?;
    }

    Ok(len_delimiter)
}

fn read_message<R: Read, M: Message + std::default::Default>(
    rw: &mut WriteBackReader<R>,
) -> std::io::Result<M> {
    let ln = read_message_length(rw)?;
    let mut buf = vec![0u8; ln];
    let bytes_read = rw.read(&mut buf)?;
    if bytes_read != ln {
        return Err(std::io::Error::new(
            std::io::ErrorKind::UnexpectedEof,
            "Unexpected EOF",
        ));
    }

    let msg = prost::Message::decode(&buf[..])?;

    Ok(msg)
}

pub fn deserialize_witnesscalc_graph(
    r: impl Read,
) -> std::io::Result<(Vec<graph::Node>, Vec<usize>, InputSignalsInfo)> {
    let mut br = WriteBackReader::new(r);
    let mut magic = [0u8; WITNESSCALC_GRAPH_MAGIC.len()];

    br.read_exact(&mut magic)?;

    if !magic.eq(WITNESSCALC_GRAPH_MAGIC) {
        return Err(std::io::Error::new(
            std::io::ErrorKind::InvalidData,
            "Invalid magic",
        ));
    }

    let nodes_num = br.read_u64::<LittleEndian>()?;
    let mut nodes = Vec::with_capacity(nodes_num as usize);
    for _ in 0..nodes_num {
        let n: proto::Node = read_message(&mut br)?;
        let n2: graph::Node = n.into();
        nodes.push(n2);
    }

    let md: proto::GraphMetadata = read_message(&mut br)?;

    let witness_signals = md
        .witness_signals
        .iter()
        .map(|x| *x as usize)
        .collect::<Vec<usize>>();

    let input_signals = md
        .inputs
        .iter()
        .map(|(k, v)| (k.clone(), (v.offset as usize, v.len as usize)))
        .collect::<InputSignalsInfo>();

    Ok((nodes, witness_signals, input_signals))
}

struct WriteBackReader<R: Read> {
    reader: R,
    buffer: Vec<u8>,
}
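
// Editorial note (added commentary): `buffer` holds pushed-back bytes in
// *reverse* order. `write` appends the written slice reversed and `read`
// drains from the tail, so the bytes written back most recently are read
// first and a write-back followed by a read round-trips in the original
// order (see `test_write_back_reader` below).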
impl<R: Read> WriteBackReader<R> {
    fn new(reader: R) -> Self {
        WriteBackReader {
            reader,
            buffer: Vec::new(),
        }
    }
}

impl<R: Read> Read for WriteBackReader<R> {
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        if buf.is_empty() {
            return Ok(0);
        }

        let mut n = 0usize;

        if !self.buffer.is_empty() {
            n = std::cmp::min(buf.len(), self.buffer.len());
            self.buffer[self.buffer.len() - n..]
                .iter()
                .rev()
                .enumerate()
                .for_each(|(i, x)| {
                    buf[i] = *x;
                });
            self.buffer.truncate(self.buffer.len() - n);
        }

        while n < buf.len() {
            let m = self.reader.read(&mut buf[n..])?;
            if m == 0 {
                break;
            }
            n += m;
        }

        Ok(n)
    }
}

impl<R: Read> Write for WriteBackReader<R> {
    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
        self.buffer.reserve(buf.len());
        self.buffer.extend(buf.iter().rev());
        Ok(buf.len())
    }

    fn flush(&mut self) -> std::io::Result<()> {
        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use byteorder::ByteOrder;
    use core::str::FromStr;
    use graph::{Operation, TresOperation, UnoOperation};
    use std::collections::HashMap;

    #[test]
    fn test_read_message() {
        let mut buf = Vec::new();
        let n1 = proto::Node {
            node: Some(proto::node::Node::Input(proto::InputNode { idx: 1 })),
        };
        n1.encode_length_delimited(&mut buf).unwrap();

        let n2 = proto::Node {
            node: Some(proto::node::Node::Input(proto::InputNode { idx: 2 })),
        };
        n2.encode_length_delimited(&mut buf).unwrap();

        let mut reader = std::io::Cursor::new(&buf);

        let mut rw = WriteBackReader::new(&mut reader);

        let got_n1: proto::Node = read_message(&mut rw).unwrap();
        assert!(n1.eq(&got_n1));

        let got_n2: proto::Node = read_message(&mut rw).unwrap();
        assert!(n2.eq(&got_n2));

        assert_eq!(reader.position(), buf.len() as u64);
    }

    #[test]
    fn test_read_message_variant() {
        let nodes = vec![
            proto::Node {
                node: Some(proto::node::Node::from(&graph::Node::Input(0))),
            },
            proto::Node {
                node: Some(proto::node::Node::from(&graph::Node::MontConstant(
                    Fr::from_str("1").unwrap(),
                ))),
            },
            proto::Node {
                node: Some(proto::node::Node::from(&graph::Node::UnoOp(
                    UnoOperation::Id,
                    4,
                ))),
            },
            proto::Node {
                node: Some(proto::node::Node::from(&graph::Node::Op(
                    Operation::Mul,
                    5,
                    6,
                ))),
            },
            proto::Node {
                node: Some(proto::node::Node::from(&graph::Node::TresOp(
                    TresOperation::TernCond,
                    7,
                    8,
                    9,
                ))),
            },
        ];

        let mut buf = Vec::new();
        for n in &nodes {
            n.encode_length_delimited(&mut buf).unwrap();
        }

        let mut nodes_got: Vec<proto::Node> = Vec::new();
        let mut reader = std::io::Cursor::new(&buf);
        let mut rw = WriteBackReader::new(&mut reader);
        for _ in 0..nodes.len() {
            nodes_got.push(read_message(&mut rw).unwrap());
        }

        assert_eq!(nodes, nodes_got);
    }

    #[test]
    fn test_write_back_reader() {
        let data = [1u8, 2, 3, 4, 5, 6];
        let mut r = WriteBackReader::new(std::io::Cursor::new(&data));

        let buf = &mut [0u8; 5];
        r.read_exact(buf).unwrap();
        assert_eq!(buf, &[1, 2, 3, 4, 5]);

        // return [4, 5] to reader
        r.write_all(&buf[3..]).unwrap();
        // return [2, 3] to reader
        r.write_all(&buf[1..3]).unwrap();

        buf.fill(0);

        // read 3 bytes, expect [2, 3, 4] after returns
        let mut n = r.read(&mut buf[..3]).unwrap();
        assert_eq!(n, 3);
        assert_eq!(buf, &[2, 3, 4, 0, 0]);

        buf.fill(0);

        // read everything left in reader
        n = r.read(buf).unwrap();
        assert_eq!(n, 2);
        assert_eq!(buf, &[5, 6, 0, 0, 0]);
    }

    #[test]
    fn test_deserialize_inputs() {
        let nodes = vec![
            graph::Node::Input(0),
            graph::Node::MontConstant(Fr::from_str("1").unwrap()),
            graph::Node::UnoOp(UnoOperation::Id, 4),
            graph::Node::Op(Operation::Mul, 5, 6),
            graph::Node::TresOp(TresOperation::TernCond, 7, 8, 9),
        ];

        let witness_signals = vec![4, 1];

        let mut input_signals: InputSignalsInfo = HashMap::new();
        input_signals.insert("sig1".to_string(), (1, 3));
        input_signals.insert("sig2".to_string(), (5, 1));

        let mut tmp = Vec::new();
        serialize_witnesscalc_graph(&mut tmp, &nodes, &witness_signals, &input_signals).unwrap();

        let mut reader = std::io::Cursor::new(&tmp);

        let (nodes_res, witness_signals_res, input_signals_res) =
            deserialize_witnesscalc_graph(&mut reader).unwrap();

        assert_eq!(nodes, nodes_res);
        assert_eq!(input_signals, input_signals_res);
        assert_eq!(witness_signals, witness_signals_res);

        let metadata_start = LittleEndian::read_u64(&tmp[tmp.len() - 8..]);

        let mt_reader = std::io::Cursor::new(&tmp[metadata_start as usize..]);
        let mut rw = WriteBackReader::new(mt_reader);
        let metadata: proto::GraphMetadata = read_message(&mut rw).unwrap();

        let metadata_want = proto::GraphMetadata {
            witness_signals: vec![4, 1],
            inputs: input_signals
                .iter()
                .map(|(k, v)| {
                    (
                        k.clone(),
                        proto::SignalDescription {
                            offset: v.0 as u32,
                            len: v.1 as u32,
                        },
                    )
                })
                .collect(),
        };

        assert_eq!(metadata, metadata_want);
    }
}

132 rln/src/circuit/mod.rs Normal file
@@ -0,0 +1,132 @@
// This module provides interfaces for the zero-knowledge circuit and keys

pub mod error;
pub mod iden3calc;
pub mod qap;

use ::lazy_static::lazy_static;
use ark_bn254::{
    Bn254, Fq as ArkFq, Fq2 as ArkFq2, Fr as ArkFr, G1Affine as ArkG1Affine,
    G1Projective as ArkG1Projective, G2Affine as ArkG2Affine, G2Projective as ArkG2Projective,
};
use ark_groth16::ProvingKey;
use ark_relations::r1cs::ConstraintMatrices;

use crate::circuit::error::ZKeyReadError;
use crate::circuit::iden3calc::calc_witness;

use {ark_ff::Field, ark_serialize::CanonicalDeserialize, ark_serialize::CanonicalSerialize};

use crate::utils::FrOrSecret;

pub const ARKZKEY_BYTES: &[u8] = include_bytes!("../../resources/tree_depth_30/rln_final.arkzkey");

#[cfg(not(target_arch = "wasm32"))]
const GRAPH_BYTES: &[u8] = include_bytes!("../../resources/tree_depth_30/graph.bin");

lazy_static! {
    static ref ARKZKEY: (ProvingKey<Curve>, ConstraintMatrices<Fr>) =
        read_arkzkey_from_bytes_uncompressed(ARKZKEY_BYTES).expect("Failed to read arkzkey");
}

pub const TEST_TREE_DEPTH: usize = 30;

// The following types define the pairing-friendly elliptic curve and the underlying finite fields and groups this module defaults to.
// Note that proofs are serialized assuming Fr to be 4x8 = 32 bytes in size. Hence, changing to a curve with a different encoding will make proof verification fail.
pub type Curve = Bn254;
pub type Fr = ArkFr;
pub type Fq = ArkFq;
pub type Fq2 = ArkFq2;
pub type G1Affine = ArkG1Affine;
pub type G1Projective = ArkG1Projective;
pub type G2Affine = ArkG2Affine;
pub type G2Projective = ArkG2Projective;

// Loads the proving key using a bytes vector
pub fn zkey_from_raw(
    zkey_data: &[u8],
) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>), ZKeyReadError> {
    if zkey_data.is_empty() {
        return Err(ZKeyReadError::EmptyBytes);
    }

    let proving_key_and_matrices = read_arkzkey_from_bytes_uncompressed(zkey_data)?;

    Ok(proving_key_and_matrices)
}

// Loads the proving key
#[cfg(not(target_arch = "wasm32"))]
pub fn zkey_from_folder() -> &'static (ProvingKey<Curve>, ConstraintMatrices<Fr>) {
    &ARKZKEY
}

pub fn calculate_rln_witness<I: IntoIterator<Item = (String, Vec<FrOrSecret>)>>(
    inputs: I,
    graph_data: &[u8],
) -> Vec<Fr> {
    calc_witness(inputs, graph_data)
}

#[cfg(not(target_arch = "wasm32"))]
pub fn graph_from_folder() -> &'static [u8] {
    GRAPH_BYTES
}

////////////////////////////////////////////////////////
// Functions and structs from [ark-zkey](https://github.com/zkmopro/ark-zkey/blob/main/src/lib.rs#L106),
// without printing, and allowing a choice between compressed and uncompressed arkzkey
////////////////////////////////////////////////////////

#[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug, PartialEq)]
pub struct SerializableProvingKey(pub ProvingKey<Bn254>);

#[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug, PartialEq)]
pub struct SerializableConstraintMatrices<F: Field> {
    pub num_instance_variables: usize,
    pub num_witness_variables: usize,
    pub num_constraints: usize,
    pub a_num_non_zero: usize,
    pub b_num_non_zero: usize,
    pub c_num_non_zero: usize,
    pub a: SerializableMatrix<F>,
    pub b: SerializableMatrix<F>,
    pub c: SerializableMatrix<F>,
}

#[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug, PartialEq)]
pub struct SerializableMatrix<F: Field> {
    pub data: Vec<Vec<(F, usize)>>,
}

pub fn read_arkzkey_from_bytes_uncompressed(
    arkzkey_data: &[u8],
) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>), ZKeyReadError> {
    if arkzkey_data.is_empty() {
        return Err(ZKeyReadError::EmptyBytes);
    }

    let mut cursor = std::io::Cursor::new(arkzkey_data);

    let serialized_proving_key =
        SerializableProvingKey::deserialize_uncompressed_unchecked(&mut cursor)?;

    let serialized_constraint_matrices =
        SerializableConstraintMatrices::deserialize_uncompressed_unchecked(&mut cursor)?;

    // Convert to the right form for the API
    let proving_key: ProvingKey<Bn254> = serialized_proving_key.0;
    let constraint_matrices: ConstraintMatrices<ark_bn254::Fr> = ConstraintMatrices {
        num_instance_variables: serialized_constraint_matrices.num_instance_variables,
        num_witness_variables: serialized_constraint_matrices.num_witness_variables,
        num_constraints: serialized_constraint_matrices.num_constraints,
        a_num_non_zero: serialized_constraint_matrices.a_num_non_zero,
        b_num_non_zero: serialized_constraint_matrices.b_num_non_zero,
        c_num_non_zero: serialized_constraint_matrices.c_num_non_zero,
        a: serialized_constraint_matrices.a.data,
        b: serialized_constraint_matrices.b.data,
        c: serialized_constraint_matrices.c.data,
    };

    Ok((proving_key, constraint_matrices))
}

120 rln/src/circuit/qap.rs Normal file
@@ -0,0 +1,120 @@
// This file is based on the code by arkworks. The original file can be found here:
// https://github.com/arkworks-rs/circom-compat/blob/3c95ed98e23a408b4d99a53e483a9bba39685a4e/src/circom/qap.rs

use ark_ff::PrimeField;
use ark_groth16::r1cs_to_qap::{evaluate_constraint, LibsnarkReduction, R1CSToQAP};
use ark_poly::EvaluationDomain;
use ark_relations::r1cs::{ConstraintMatrices, ConstraintSystemRef, SynthesisError};
use ark_std::{cfg_into_iter, cfg_iter, cfg_iter_mut, vec};

#[cfg(feature = "parallel")]
use rayon::iter::{
    IndexedParallelIterator, IntoParallelIterator, IntoParallelRefIterator,
    IntoParallelRefMutIterator, ParallelIterator,
};

/// Implements the witness map used by snarkjs. The arkworks witness map calculates the
/// coefficients of H through computing (AB-C)/Z in the evaluation domain and going back to the
/// coefficients domain. snarkjs instead precomputes the Lagrange form of the powers of tau bases
/// in a domain twice as large, and the witness map is computed as the odd coefficients of (AB-C)
/// in that domain. This serves as HZ when computing the C proof element.
pub struct CircomReduction;
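
// Editorial note (added commentary): concretely, `witness_map_from_matrices`
// below evaluates A, B and C on the coset w*D, where w generates the
// double-size domain, via ifft -> distribute_powers(w) -> fft, and returns
// the pointwise values of (A*B - C) there; `h_query_scalars` keeps the odd
// Lagrange coefficients of the powers of tau, matching the snarkjs scheme
// described above.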
impl R1CSToQAP for CircomReduction {
    #[allow(clippy::type_complexity)]
    fn instance_map_with_evaluation<F: PrimeField, D: EvaluationDomain<F>>(
        cs: ConstraintSystemRef<F>,
        t: &F,
    ) -> Result<(Vec<F>, Vec<F>, Vec<F>, F, usize, usize), SynthesisError> {
        LibsnarkReduction::instance_map_with_evaluation::<F, D>(cs, t)
    }

    fn witness_map_from_matrices<F: PrimeField, D: EvaluationDomain<F>>(
        matrices: &ConstraintMatrices<F>,
        num_inputs: usize,
        num_constraints: usize,
        full_assignment: &[F],
    ) -> Result<Vec<F>, SynthesisError> {
        let zero = F::zero();
        let domain =
            D::new(num_constraints + num_inputs).ok_or(SynthesisError::PolynomialDegreeTooLarge)?;
        let domain_size = domain.size();

        let mut a = vec![zero; domain_size];
        let mut b = vec![zero; domain_size];

        #[allow(unexpected_cfgs)]
        cfg_iter_mut!(a[..num_constraints])
            .zip(cfg_iter_mut!(b[..num_constraints]))
            .zip(cfg_iter!(&matrices.a))
            .zip(cfg_iter!(&matrices.b))
            .for_each(|(((a, b), at_i), bt_i)| {
                *a = evaluate_constraint(at_i, full_assignment);
                *b = evaluate_constraint(bt_i, full_assignment);
            });

        {
            let start = num_constraints;
            let end = start + num_inputs;
            a[start..end].clone_from_slice(&full_assignment[..num_inputs]);
        }

        let mut c = vec![zero; domain_size];
        #[allow(unexpected_cfgs)]
        cfg_iter_mut!(c[..num_constraints])
            .zip(&a)
            .zip(&b)
            .for_each(|((c_i, &a), &b)| {
                *c_i = a * b;
            });

        domain.ifft_in_place(&mut a);
        domain.ifft_in_place(&mut b);

        let root_of_unity = {
            let domain_size_double = 2 * domain_size;
            let domain_double =
                D::new(domain_size_double).ok_or(SynthesisError::PolynomialDegreeTooLarge)?;
            domain_double.element(1)
        };
        D::distribute_powers_and_mul_by_const(&mut a, root_of_unity, F::one());
        D::distribute_powers_and_mul_by_const(&mut b, root_of_unity, F::one());

        domain.fft_in_place(&mut a);
        domain.fft_in_place(&mut b);

        let mut ab = domain.mul_polynomials_in_evaluation_domain(&a, &b);
        drop(a);
        drop(b);

        domain.ifft_in_place(&mut c);
        D::distribute_powers_and_mul_by_const(&mut c, root_of_unity, F::one());
        domain.fft_in_place(&mut c);

        #[allow(unexpected_cfgs)]
        cfg_iter_mut!(ab)
            .zip(c)
            .for_each(|(ab_i, c_i)| *ab_i -= &c_i);

        Ok(ab)
    }

    fn h_query_scalars<F: PrimeField, D: EvaluationDomain<F>>(
        max_power: usize,
        t: F,
        _: F,
        delta_inverse: F,
    ) -> Result<Vec<F>, SynthesisError> {
        // the usual H query has domain-1 powers. Z has domain powers. So HZ has 2*domain-1 powers.
        #[allow(unexpected_cfgs)]
        let mut scalars = cfg_into_iter!(0..2 * max_power + 1)
            .map(|i| delta_inverse * t.pow([i as u64]))
            .collect::<Vec<_>>();
        let domain_size = scalars.len();
        let domain = D::new(domain_size).ok_or(SynthesisError::PolynomialDegreeTooLarge)?;
        // generate the lagrange coefficients
        domain.ifft_in_place(&mut scalars);
        #[allow(unexpected_cfgs)]
        Ok(cfg_into_iter!(scalars).skip(1).step_by(2).collect())
    }
}

79 rln/src/error.rs Normal file
@@ -0,0 +1,79 @@
use crate::circuit::error::ZKeyReadError;
use ark_bn254::Fr;
use ark_relations::r1cs::SynthesisError;
use ark_serialize::SerializationError;
use num_bigint::{BigInt, ParseBigIntError};
use std::array::TryFromSliceError;
use std::num::TryFromIntError;
use std::string::FromUtf8Error;
use thiserror::Error;
use utils::error::{FromConfigError, ZerokitMerkleTreeError};

#[derive(Debug, thiserror::Error)]
pub enum ConversionError {
    #[error("Expected radix 10 or 16")]
    WrongRadix,
    #[error("{0}")]
    ParseBigInt(#[from] ParseBigIntError),
    #[error("{0}")]
    ToUsize(#[from] TryFromIntError),
    #[error("{0}")]
    FromSlice(#[from] TryFromSliceError),
    #[error("Input data too short: expected at least {expected} bytes, got {actual} bytes")]
    InsufficientData { expected: usize, actual: usize },
}

#[derive(Error, Debug)]
pub enum ProofError {
    #[error("{0}")]
    ProtocolError(#[from] ProtocolError),
    #[error("Error producing proof: {0}")]
    SynthesisError(#[from] SynthesisError),
}

#[derive(Debug, thiserror::Error)]
pub enum ProtocolError {
    #[error("{0}")]
    Conversion(#[from] ConversionError),
    #[error("Expected to read {0} bytes but read only {1} bytes")]
    InvalidReadLen(usize, usize),
    #[error("Cannot convert bigint {0:?} to biguint")]
    BigUintConversion(BigInt),
    #[error("{0}")]
    JsonError(#[from] serde_json::Error),
    #[error("Message id ({0}) is not within user_message_limit ({1})")]
    InvalidMessageId(Fr, Fr),
}

#[derive(Debug, thiserror::Error)]
pub enum ComputeIdSecretError {
    /// Usually this means the same signal was used twice when recovering the user secret hash
    #[error("Cannot recover secret: division by zero")]
    DivisionByZero,
}

#[derive(Debug, thiserror::Error)]
pub enum RLNError {
    #[error("I/O error: {0}")]
    IO(#[from] std::io::Error),
    #[error("Utf8 error: {0}")]
    Utf8(#[from] FromUtf8Error),
    #[error("Serde json error: {0}")]
    JSON(#[from] serde_json::Error),
    #[error("Config error: {0}")]
    Config(#[from] FromConfigError),
    #[error("Serialization error: {0}")]
    Serialization(#[from] SerializationError),
    #[error("Merkle tree error: {0}")]
    MerkleTree(#[from] ZerokitMerkleTreeError),
    #[error("ZKey error: {0}")]
    ZKey(#[from] ZKeyReadError),
    #[error("Conversion error: {0}")]
    Conversion(#[from] ConversionError),
    #[error("Protocol error: {0}")]
    Protocol(#[from] ProtocolError),
    #[error("Proof error: {0}")]
    Proof(#[from] ProofError),
    #[error("Unable to extract secret")]
    RecoverSecret(#[from] ComputeIdSecretError),
}

1127 rln/src/ffi.rs (diff suppressed because it is too large)

77 rln/src/hashers.rs Normal file
@@ -0,0 +1,77 @@
/// This module instantiates the Poseidon hash algorithm.
use crate::{
    circuit::Fr,
    utils::{bytes_be_to_fr, bytes_le_to_fr},
};
use once_cell::sync::Lazy;
use tiny_keccak::{Hasher, Keccak};
use utils::poseidon::Poseidon;

/// These indexed constants hardcode the supported round parameter tuples (t, RF, RN, SKIP_MATRICES) for the Bn254 scalar field.
/// SKIP_MATRICES is the index of the randomly generated secure MDS matrix.
/// TODO: generate these parameters
pub const ROUND_PARAMS: [(usize, usize, usize, usize); 8] = [
    (2, 8, 56, 0),
    (3, 8, 57, 0),
    (4, 8, 56, 0),
    (5, 8, 60, 0),
    (6, 8, 60, 0),
    (7, 8, 63, 0),
    (8, 8, 64, 0),
    (9, 8, 63, 0),
];

/// Poseidon hash wrapper over the implementation above.
static POSEIDON: Lazy<Poseidon<Fr>> = Lazy::new(|| Poseidon::<Fr>::from(&ROUND_PARAMS));

pub fn poseidon_hash(input: &[Fr]) -> Fr {
    POSEIDON
        .hash(input)
        .expect("hash with fixed input size can't fail")
}

/// The zerokit RLN Merkle tree Hasher.
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct PoseidonHash;

/// Implements the default Hasher trait used by the Merkle tree implementation in utils.
impl utils::merkle_tree::Hasher for PoseidonHash {
    type Fr = Fr;

    fn default_leaf() -> Self::Fr {
        Self::Fr::from(0)
    }

    fn hash(inputs: &[Self::Fr]) -> Self::Fr {
        poseidon_hash(inputs)
    }
}

/// Hashes an arbitrary signal to the underlying prime field (little-endian byte order).
pub fn hash_to_field_le(signal: &[u8]) -> Fr {
    // We hash the input signal using Keccak256
    let mut hash = [0; 32];
    let mut hasher = Keccak::v256();
    hasher.update(signal);
    hasher.finalize(&mut hash);

    // We export the hash as a field element
    let (el, _) = bytes_le_to_fr(hash.as_ref());
    el
}

/// Hashes an arbitrary signal to the underlying prime field (big-endian byte order).
pub fn hash_to_field_be(signal: &[u8]) -> Fr {
    // We hash the input signal using Keccak256
    let mut hash = [0; 32];
    let mut hasher = Keccak::v256();
    hasher.update(signal);
    hasher.finalize(&mut hash);

    // Reverse the bytes to get big endian representation
    hash.reverse();

    // We export the hash as a field element
    let (el, _) = bytes_be_to_fr(hash.as_ref());
    el
}
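
// Editorial sketch (added, not part of the original file): both
// hash_to_field variants map an arbitrary byte string deterministically to
// a field element via Keccak256, differing only in the byte order handed to
// the bytes-to-Fr conversion.
#[cfg(test)]
#[test]
fn hash_to_field_usage_example() {
    let el = hash_to_field_le(b"zerokit");
    // The mapping is a pure function of the input signal.
    assert_eq!(el, hash_to_field_le(b"zerokit"));
}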

451 rln/src/lib.rs
@@ -1,432 +1,35 @@
#![allow(dead_code)]

pub mod circuit;
pub mod error;
#[cfg(not(target_arch = "wasm32"))]
pub mod ffi;
pub mod merkle_tree;
pub mod poseidon_constants;
pub mod poseidon_hash;
pub mod hashers;
#[cfg(feature = "pmtree-ft")]
pub mod pm_tree_adapter;
#[cfg(not(feature = "stateless"))]
pub mod poseidon_tree;
pub mod protocol;
pub mod public;
#[cfg(test)]
pub mod public_api_tests;
pub mod utils;

#[cfg(test)]
mod test {
    // Ensure that only one Merkle tree feature is enabled at a time
    #[cfg(any(
        all(feature = "fullmerkletree", feature = "optimalmerkletree"),
        all(feature = "fullmerkletree", feature = "pmtree-ft"),
        all(feature = "optimalmerkletree", feature = "pmtree-ft"),
    ))]
    compile_error!(
        "Only one of `fullmerkletree`, `optimalmerkletree`, or `pmtree-ft` can be enabled at a time."
    );

    use crate::circuit::{
        circom_from_folder, vk_from_folder, zkey_from_folder, Fr, TEST_RESOURCES_FOLDER,
        TEST_TREE_HEIGHT,
    };
    use crate::poseidon_hash::poseidon_hash;
    use crate::poseidon_tree::PoseidonTree;
    use crate::protocol::*;
    use crate::utils::str_to_fr;

    // Input generated with https://github.com/oskarth/zk-kit/commit/b6a872f7160c7c14e10a0ea40acab99cbb23c9a8
    const WITNESS_JSON_15: &str = r#"
    {
        "identity_secret": "12825549237505733615964533204745049909430608936689388901883576945030025938736",
        "path_elements": [
            "18622655742232062119094611065896226799484910997537830749762961454045300666333",
            "20590447254980891299813706518821659736846425329007960381537122689749540452732",
            "7423237065226347324353380772367382631490014989348495481811164164159255474657",
            "11286972368698509976183087595462810875513684078608517520839298933882497716792",
            "3607627140608796879659380071776844901612302623152076817094415224584923813162",
            "19712377064642672829441595136074946683621277828620209496774504837737984048981",
            "20775607673010627194014556968476266066927294572720319469184847051418138353016",
            "3396914609616007258851405644437304192397291162432396347162513310381425243293",
            "21551820661461729022865262380882070649935529853313286572328683688269863701601",
            "6573136701248752079028194407151022595060682063033565181951145966236778420039",
            "12413880268183407374852357075976609371175688755676981206018884971008854919922",
            "14271763308400718165336499097156975241954733520325982997864342600795471836726",
            "20066985985293572387227381049700832219069292839614107140851619262827735677018",
            "9394776414966240069580838672673694685292165040808226440647796406499139370960",
            "11331146992410411304059858900317123658895005918277453009197229807340014528524"
        ],
        "identity_path_index": [
            1,
            1,
            0,
            0,
            0,
            0,
            0,
            0,
            0,
            0,
            0,
            0,
            0,
            0,
            0
        ],
        "x": "8143228284048792769012135629627737459844825626241842423967352803501040982",
        "epoch": "0x0000005b612540fc986b42322f8cb91c2273afad58ed006fdba0c97b4b16b12f",
        "rln_identifier": "11412926387081627876309792396682864042420635853496105400039841573530884328439"
    }
    "#;

    // Input generated with protocol::random_rln_witness
    const WITNESS_JSON_19: &str = r#"
    {
        "identity_secret": "922538810348594125658702672067738675294669207539999802857585668079702330450",
        "path_elements": [
            "16059714054680148404543504061485737353203416489071538960876865983954285286166",
            "3041470753871943901334053763207316028823782848445723460227667780327106380356",
            "2557297527793326315072058421057853700096944625924483912548759909801348042183",
            "6677578602456189582427063963562590713054668181987223110955234085327917303436",
            "2250827150965576973906150764756422151438812678308727218463995574869267980301",
            "1895457427602709606993445561553433669787657053834360973759981803464906070980",
            "11033689991077061346803816826729204895841441316315304395980565540264104346466",
            "18588752216879570844240300406954267039026327526134910835334500497981810174976",
            "19346480964028499661277403659363466542857230928032088490855656809181891953123",
            "21460193770370072688835316363068413651465631481105148051902686770759127189327",
            "20906347653364838502964722817589315918082261023317339146393355650507243340078",
            "13466599592974387800162739317046838825289754472645703919149409009404541432954",
            "9617165663598957201253074168824246164494443748556931540348223968573884172285",
            "6936463137584425684797785981770877165377386163416057257854261010817156666898",
            "369902028235468424790098825415813437044876310542601948037281422841675126849",
            "13510969869821080499683463562609720931680005714401083864659516045615497273644",
            "2567921390740781421487331055530491683313154421589525170472201828596388395736",
            "14360870889466292805403568662660511177232987619663547772298178013674025998478",
            "4735344599616284973799984501493858013178071155960162022656706545116168334293"
        ],
        "identity_path_index": [
            1,
            0,
            1,
            0,
            1,
            1,
            0,
            0,
            1,
            1,
            1,
            0,
            0,
            0,
            1,
            0,
            1,
            1,
            0
        ],
        "x": "6427050788896290028100534859169645070970780055911091444144195464808120686416",
        "epoch": "0x2bd155d9f85c741044da6909d144f9cc5ce8e0d545a9ed4921b156e8b8569bab",
        "rln_identifier": "2193983000213424579594329476781986065965849144986973472766961413131458022566"
    }
    "#;

    const WITNESS_JSON_20: &str = r#"
    {
        "identity_secret": "13732353453861280511150022598793312186188599006979552959297495195757997428306",
        "path_elements": [
            "20463525608687844300981085488128968694844212760055234622292326942405619575964",
            "8040856403709217901175408904825741112286158901303127670929462145501210871313",
            "3776499751255585163563840252112871568402966629435152937692711318702338789837",
            "19415813252626942110541463414404411443562242499365750694284604341271149125679",
            "19414720788761208006634240390286942738242262010168559813148115573784354129237",
            "17680594732844291740094158892269696200077963275550625226493856898849422516043",
            "16009199741350632715210088346611798597033333293348807000623441780059543674510",
            "18743496911007535170857676824393811326863602477260615792503039058813338644738",
            "1029572792321380246989475723806770724699749375691788486434716005338938722216",
            "21713138150151063186050010182615713685603650963220209951496401043119768920892",
            "6713732504049401389983008178456811894856018247924860823028704114266363984580",
            "2746686888799473963221285145390361693256731812094259845879519459924507786594",
            "18620748467731297359505500266677881218553438497271819903304075323783392031715",
            "2446201221122671119406471414204229600430018713181038717206670749886932158104",
            "12037171942017611311954851302868199608036334625783560875426350283156617524597",
            "21798743392351780927808323348278035105395367759688979232116905142049921734349",
            "17450230289417496971557215666910229260621413088991137405744457922069827319039",
            "20936854099128086256353520300046664152516566958630447858438908748907198510485",
            "13513344965831154386658059617477268600255664386844920822248038939666265737046",
            "15546319496880899251450021422131511560001766832580480193115646510655765306630
|
||||
|
||||
],
|
||||
"identity_path_index": [
|
||||
0,
|
||||
1,
|
||||
0,
|
||||
0,
|
||||
1,
|
||||
1,
|
||||
0,
|
||||
0,
|
||||
1,
|
||||
1,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
1,
|
||||
0,
|
||||
1,
|
||||
1,
|
||||
0,
|
||||
0,
|
||||
0
|
||||
],
|
||||
"x": "18073935665561339809445069958310044423750771681863480888589546877024349720547",
|
||||
"epoch": "0x147e4c23a43a1ddca78d94bcd28147f62ca74b3dc7e56bb0a314a954b9f0e567",
|
||||
"rln_identifier": "2193983000213424579594329476781986065965849144986973472766961413131458022566"
|
||||
}
|
||||
"#;
|
||||
|
||||
#[test]
|
||||
// We test Merkle tree generation, proofs and verification
|
||||
fn test_merkle_proof() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let leaf_index = 3;
|
||||
|
||||
// generate identity
|
||||
let identity_secret = hash_to_field(b"test-merkle-proof");
|
||||
let id_commitment = poseidon_hash(&[identity_secret]);
|
||||
|
||||
// generate merkle tree
|
||||
let default_leaf = Fr::from(0);
|
||||
let mut tree = PoseidonTree::new(tree_height, default_leaf);
|
||||
tree.set(leaf_index, id_commitment.into()).unwrap();
|
||||
|
||||
// We check correct computation of the root
|
||||
let root = tree.root();
|
||||
|
||||
if TEST_TREE_HEIGHT == 15 {
|
||||
assert_eq!(
|
||||
root,
|
||||
str_to_fr(
|
||||
"0x1984f2e01184aef5cb974640898a5f5c25556554e2b06d99d4841badb8b198cd",
|
||||
16
|
||||
)
|
||||
);
|
||||
} else if TEST_TREE_HEIGHT == 19 {
|
||||
assert_eq!(
|
||||
root,
|
||||
str_to_fr(
|
||||
"0x219ceb53f2b1b7a6cf74e80d50d44d68ecb4a53c6cc65b25593c8d56343fb1fe",
|
||||
16
|
||||
)
|
||||
);
|
||||
} else if TEST_TREE_HEIGHT == 20 {
|
||||
assert_eq!(
|
||||
root,
|
||||
str_to_fr(
|
||||
"0x21947ffd0bce0c385f876e7c97d6a42eec5b1fe935aab2f01c1f8a8cbcc356d2",
|
||||
16
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
|
||||
let path_elements = merkle_proof.get_path_elements();
|
||||
let identity_path_index = merkle_proof.get_path_index();
|
||||
|
||||
// We check correct computation of the path and indexes
|
||||
// These values refer to TEST_TREE_HEIGHT == 15
|
||||
let mut expected_path_elements = vec![
|
||||
str_to_fr(
|
||||
"0x0000000000000000000000000000000000000000000000000000000000000000",
|
||||
16,
|
||||
),
|
||||
str_to_fr(
|
||||
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
|
||||
16,
|
||||
),
|
||||
str_to_fr(
|
||||
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
|
||||
16,
|
||||
),
|
||||
str_to_fr(
|
||||
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
|
||||
16,
|
||||
),
|
||||
str_to_fr(
|
||||
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
|
||||
16,
|
||||
),
|
||||
str_to_fr(
|
||||
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
|
||||
16,
|
||||
),
|
||||
str_to_fr(
|
||||
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
|
||||
16,
|
||||
),
|
||||
str_to_fr(
|
||||
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
|
||||
16,
|
||||
),
|
||||
str_to_fr(
|
||||
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
|
||||
16,
|
||||
),
|
||||
str_to_fr(
|
||||
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
|
||||
16,
|
||||
),
|
||||
str_to_fr(
|
||||
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
|
||||
16,
|
||||
),
|
||||
str_to_fr(
|
||||
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
|
||||
16,
|
||||
),
|
||||
str_to_fr(
|
||||
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
|
||||
16,
|
||||
),
|
||||
str_to_fr(
|
||||
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
|
||||
16,
|
||||
),
|
||||
str_to_fr(
|
||||
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
|
||||
16,
|
||||
),
|
||||
];
|
||||
|
||||
let mut expected_identity_path_index: Vec<u8> =
|
||||
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
|
||||
|
||||
// We add the remaining elements for the cases TEST_TREE_HEIGHT == 19 and TEST_TREE_HEIGHT == 20
|
||||
if TEST_TREE_HEIGHT == 19 || TEST_TREE_HEIGHT == 20 {
|
||||
expected_path_elements.append(&mut vec![
|
||||
str_to_fr(
|
||||
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
|
||||
16,
|
||||
),
|
||||
str_to_fr(
|
||||
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
|
||||
16,
|
||||
),
|
||||
str_to_fr(
|
||||
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
|
||||
16,
|
||||
),
|
||||
str_to_fr(
|
||||
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
|
||||
16,
|
||||
),
|
||||
]);
|
||||
expected_identity_path_index.append(&mut vec![0, 0, 0, 0]);
|
||||
}
|
||||
|
||||
if TEST_TREE_HEIGHT == 20 {
|
||||
expected_path_elements.append(&mut vec![str_to_fr(
|
||||
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
|
||||
16,
|
||||
)]);
|
||||
expected_identity_path_index.append(&mut vec![0]);
|
||||
}
|
||||
|
||||
assert_eq!(path_elements, expected_path_elements);
|
||||
assert_eq!(identity_path_index, expected_identity_path_index);
|
||||
|
||||
// We check correct verification of the proof
|
||||
assert!(tree.verify(&id_commitment, &merkle_proof).unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
// We test RLN proof generation and verification starting from a JSON witness
|
||||
fn test_witness_from_json() {
|
||||
// We generate all relevant keys
|
||||
let proving_key = zkey_from_folder(TEST_RESOURCES_FOLDER).unwrap();
|
||||
let verification_key = vk_from_folder(TEST_RESOURCES_FOLDER).unwrap();
|
||||
let builder = circom_from_folder(TEST_RESOURCES_FOLDER);
|
||||
|
||||
// We compute witness from the json input example
|
||||
let mut witness_json: &str = "";
|
||||
|
||||
if TEST_TREE_HEIGHT == 15 {
|
||||
witness_json = WITNESS_JSON_15;
|
||||
} else if TEST_TREE_HEIGHT == 19 {
|
||||
witness_json = WITNESS_JSON_19;
|
||||
} else if TEST_TREE_HEIGHT == 20 {
|
||||
witness_json = WITNESS_JSON_20;
|
||||
}
|
||||
|
||||
let rln_witness = rln_witness_from_json(witness_json);
|
||||
|
||||
// Let's generate a zkSNARK proof
|
||||
let proof = generate_proof(builder, &proving_key, &rln_witness).unwrap();
|
||||
|
||||
let proof_values = proof_values_from_witness(&rln_witness);
|
||||
|
||||
// Let's verify the proof
|
||||
let verified = verify_proof(&verification_key, &proof, &proof_values);
|
||||
|
||||
assert!(verified.unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
// We test RLN proof generation and verification end to end
|
||||
fn test_end_to_end() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let leaf_index = 3;
|
||||
|
||||
// Generate identity pair
|
||||
let (identity_secret, id_commitment) = keygen();
|
||||
|
||||
// Generate merkle tree
|
||||
let default_leaf = Fr::from(0);
|
||||
let mut tree = PoseidonTree::new(tree_height, default_leaf);
|
||||
tree.set(leaf_index, id_commitment.into()).unwrap();
|
||||
|
||||
let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
|
||||
|
||||
let signal = b"hey hey";
|
||||
let x = hash_to_field(signal);
|
||||
|
||||
// We set the remaining values to random ones
|
||||
let epoch = hash_to_field(b"test-epoch");
|
||||
//let rln_identifier = hash_to_field(b"test-rln-identifier");
|
||||
|
||||
let rln_witness: RLNWitnessInput = rln_witness_from_values(
|
||||
identity_secret,
|
||||
&merkle_proof,
|
||||
x,
|
||||
epoch, /*, rln_identifier*/
|
||||
);
|
||||
|
||||
// We generate all relevant keys
|
||||
let proving_key = zkey_from_folder(TEST_RESOURCES_FOLDER).unwrap();
|
||||
let verification_key = vk_from_folder(TEST_RESOURCES_FOLDER).unwrap();
|
||||
let builder = circom_from_folder(TEST_RESOURCES_FOLDER);
|
||||
|
||||
// Let's generate a zkSNARK proof
|
||||
let proof = generate_proof(builder, &proving_key, &rln_witness).unwrap();
|
||||
|
||||
let proof_values = proof_values_from_witness(&rln_witness);
|
||||
|
||||
// Let's verify the proof
|
||||
let success = verify_proof(&verification_key, &proof, &proof_values).unwrap();
|
||||
|
||||
assert!(success);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_witness_serialization() {
|
||||
// We test witness serialization
|
||||
let mut witness_json: &str = "";
|
||||
|
||||
if TEST_TREE_HEIGHT == 15 {
|
||||
witness_json = WITNESS_JSON_15;
|
||||
} else if TEST_TREE_HEIGHT == 19 {
|
||||
witness_json = WITNESS_JSON_19;
|
||||
} else if TEST_TREE_HEIGHT == 20 {
|
||||
witness_json = WITNESS_JSON_20;
|
||||
}
|
||||
|
||||
let rln_witness = rln_witness_from_json(witness_json);
|
||||
|
||||
let ser = serialize_witness(&rln_witness);
|
||||
let (deser, _) = deserialize_witness(&ser);
|
||||
assert_eq!(rln_witness, deser);
|
||||
|
||||
// We test Proof values serialization
|
||||
let proof_values = proof_values_from_witness(&rln_witness);
|
||||
let ser = serialize_proof_values(&proof_values);
|
||||
let (deser, _) = deserialize_proof_values(&ser);
|
||||
assert_eq!(proof_values, deser);
|
||||
}
|
||||
}
|
||||
// Ensure that the `stateless` feature is not enabled with any Merkle tree features
|
||||
#[cfg(all(
|
||||
feature = "stateless",
|
||||
any(
|
||||
feature = "fullmerkletree",
|
||||
feature = "optimalmerkletree",
|
||||
feature = "pmtree-ft"
|
||||
)
|
||||
))]
|
||||
compile_error!("Cannot enable any Merkle tree features with stateless");
|
||||
|
||||
@@ -1,666 +0,0 @@
|
||||
// This crate provides different implementations of Merkle trees
|
||||
// Currently two interchangeable implementations are supported:
|
||||
// - FullMerkleTree: each tree node is stored
|
||||
// - OptimalMerkleTree: only nodes used to prove accumulation of set leaves are stored
|
||||
// Library defaults are set in the poseidon_tree crate
|
||||
//
|
||||
// Merkle tree implementations are adapted from https://github.com/kilic/rln/blob/master/src/merkle.rs
|
||||
// and https://github.com/worldcoin/semaphore-rs/blob/d462a4372f1fd9c27610f2acfe4841fab1d396aa/src/merkle_tree.rs
|
||||
|
||||
//!
|
||||
//! # To do
|
||||
//!
|
||||
//! * Disk based storage backend (using mmaped files should be easy)
|
||||
//! * Implement serialization for tree and Merkle proof
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::io;
|
||||
use std::{
|
||||
cmp::max,
|
||||
fmt::Debug,
|
||||
iter::{once, repeat, successors},
|
||||
};
|
||||
|
||||
/// The Hasher trait defines the node type, the default leaf,
/// and the hash function used by a Merkle tree implementation
|
||||
pub trait Hasher {
|
||||
/// Type of the leaf and tree node
|
||||
type Fr: Copy + Clone + Eq;
|
||||
|
||||
/// Returns the default tree leaf
|
||||
fn default_leaf() -> Self::Fr;
|
||||
|
||||
/// Utility to compute the hash of an intermediate node
|
||||
fn hash(input: &[Self::Fr]) -> Self::Fr;
|
||||
}
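// As a quick illustration of the trait (a hypothetical toy hasher, not part
// of this crate; the test module at the bottom of this file shows a real
// Keccak256 implementation):
//
// struct ToyHasher;
//
// impl Hasher for ToyHasher {
//     type Fr = u64;
//
//     fn default_leaf() -> Self::Fr {
//         0
//     }
//
//     fn hash(input: &[Self::Fr]) -> Self::Fr {
//         // A deterministic (non-cryptographic) wrapping mix.
//         input
//             .iter()
//             .fold(0u64, |acc, x| acc.wrapping_mul(31).wrapping_add(*x))
//     }
// }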
|
||||
|
||||
////////////////////////////////////////////////////////////
|
||||
/// Optimal Merkle Tree Implementation
|
||||
////////////////////////////////////////////////////////////
|
||||
|
||||
/// The Merkle tree structure
|
||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||
pub struct OptimalMerkleTree<H>
|
||||
where
|
||||
H: Hasher,
|
||||
{
|
||||
/// The depth of the tree, i.e. the number of levels from leaf to root
|
||||
depth: usize,
|
||||
|
||||
/// The nodes cached from the empty part of the tree (where leaves are set to default).
/// Since the rightmost part of the tree is usually changed much later than its creation,
/// we can prove accumulation of elements in the leftmost part without initializing the full tree,
/// by caching the few intermediate nodes on the path to the root computed from default leaves
|
||||
cached_nodes: Vec<H::Fr>,
|
||||
|
||||
/// The tree nodes
|
||||
nodes: HashMap<(usize, usize), H::Fr>,
|
||||
|
||||
// The next available (i.e., never used) tree index. Equivalently, the number of leaves added to the tree
|
||||
// (deletions leave next_index unchanged)
|
||||
next_index: usize,
|
||||
}
|
||||
|
||||
/// The Merkle proof
|
||||
/// Contains a vector of (node, branch_index) that defines the proof path elements and branch direction (1 or 0)
|
||||
#[derive(Clone, PartialEq, Eq)]
|
||||
pub struct OptimalMerkleProof<H: Hasher>(pub Vec<(H::Fr, u8)>);
|
||||
|
||||
/// Implementations
|
||||
|
||||
impl<H: Hasher> OptimalMerkleTree<H> {
|
||||
pub fn default(depth: usize) -> Self {
|
||||
OptimalMerkleTree::<H>::new(depth, H::default_leaf())
|
||||
}
|
||||
|
||||
/// Creates a new `MerkleTree`
/// depth - the height of the tree; 2^depth is the maximum number of leaves
|
||||
pub fn new(depth: usize, default_leaf: H::Fr) -> Self {
|
||||
let mut cached_nodes: Vec<H::Fr> = Vec::with_capacity(depth + 1);
|
||||
cached_nodes.push(default_leaf);
|
||||
for i in 0..depth {
|
||||
cached_nodes.push(H::hash(&[cached_nodes[i]; 2]));
|
||||
}
|
||||
cached_nodes.reverse();
|
||||
OptimalMerkleTree {
|
||||
cached_nodes,
depth,
|
||||
nodes: HashMap::new(),
|
||||
next_index: 0,
|
||||
}
|
||||
}
|
||||
|
||||
// Returns the depth of the tree
|
||||
pub fn depth(&self) -> usize {
|
||||
self.depth
|
||||
}
|
||||
|
||||
// Returns the capacity of the tree, i.e. the maximum number of accumulatable leaves
|
||||
pub fn capacity(&self) -> usize {
|
||||
1 << self.depth
|
||||
}
|
||||
|
||||
// Returns the total number of leaves set
|
||||
pub fn leaves_set(&mut self) -> usize {
|
||||
self.next_index
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
// Returns the root of the tree
|
||||
pub fn root(&self) -> H::Fr {
|
||||
self.get_node(0, 0)
|
||||
}
|
||||
|
||||
// Sets a leaf at the specified tree index
|
||||
pub fn set(&mut self, index: usize, leaf: H::Fr) -> io::Result<()> {
|
||||
if index >= self.capacity() {
|
||||
return Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"index exceeds set size",
|
||||
));
|
||||
}
|
||||
self.nodes.insert((self.depth, index), leaf);
|
||||
self.recalculate_from(index);
|
||||
self.next_index = max(self.next_index, index + 1);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Sets a leaf at the next available index
|
||||
pub fn update_next(&mut self, leaf: H::Fr) -> io::Result<()> {
|
||||
self.set(self.next_index, leaf)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Deletes a leaf at a certain index by setting it to its default value (next_index is not updated)
|
||||
pub fn delete(&mut self, index: usize) -> io::Result<()> {
|
||||
// We reset the leaf only if we previously set a leaf at that index
|
||||
if index < self.next_index {
|
||||
self.set(index, H::default_leaf())?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Computes a merkle proof for the leaf at the specified index
|
||||
pub fn proof(&self, index: usize) -> io::Result<OptimalMerkleProof<H>> {
|
||||
if index >= self.capacity() {
|
||||
return Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"index exceeds set size",
|
||||
));
|
||||
}
|
||||
let mut witness = Vec::<(H::Fr, u8)>::with_capacity(self.depth);
|
||||
let mut i = index;
|
||||
let mut depth = self.depth;
|
||||
loop {
|
||||
i ^= 1;
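// `i` now points at the sibling, so `1 - (i & 1)` below recovers the bit
// of the original index at this level: 0 when the path node is a left
// child, 1 when it is a right child.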
|
||||
witness.push((self.get_node(depth, i), (1 - (i & 1)).try_into().unwrap()));
|
||||
i >>= 1;
|
||||
depth -= 1;
|
||||
if depth == 0 {
|
||||
break;
|
||||
}
|
||||
}
|
||||
assert_eq!(i, 0);
|
||||
Ok(OptimalMerkleProof(witness))
|
||||
}
|
||||
|
||||
// Verifies a Merkle proof with respect to the input leaf and the tree root
|
||||
pub fn verify(&self, leaf: &H::Fr, witness: &OptimalMerkleProof<H>) -> io::Result<bool> {
|
||||
if witness.length() != self.depth {
|
||||
return Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"witness length doesn't match tree depth",
|
||||
));
|
||||
}
|
||||
let expected_root = witness.compute_root_from(leaf);
|
||||
Ok(expected_root.eq(&self.root()))
|
||||
}
|
||||
|
||||
// Utilities for updating the tree nodes
|
||||
|
||||
fn get_node(&self, depth: usize, index: usize) -> H::Fr {
|
||||
*self
.nodes
.get(&(depth, index))
.unwrap_or(&self.cached_nodes[depth])
|
||||
}
|
||||
|
||||
fn get_leaf(&self, index: usize) -> H::Fr {
|
||||
self.get_node(self.depth, index)
|
||||
}
|
||||
|
||||
fn hash_couple(&mut self, depth: usize, index: usize) -> H::Fr {
|
||||
let b = index & !1;
|
||||
H::hash(&[self.get_node(depth, b), self.get_node(depth, b + 1)])
|
||||
}
|
||||
|
||||
fn recalculate_from(&mut self, index: usize) {
|
||||
let mut i = index;
|
||||
let mut depth = self.depth;
|
||||
loop {
|
||||
let h = self.hash_couple(depth, i);
|
||||
i >>= 1;
|
||||
depth -= 1;
|
||||
self.nodes.insert((depth, i), h);
|
||||
if depth == 0 {
|
||||
break;
|
||||
}
|
||||
}
|
||||
assert_eq!(depth, 0);
|
||||
assert_eq!(i, 0);
|
||||
}
|
||||
}
|
||||
|
||||
impl<H: Hasher> OptimalMerkleProof<H> {
|
||||
#[must_use]
|
||||
// Returns the length of a Merkle proof
|
||||
pub fn length(&self) -> usize {
|
||||
self.0.len()
|
||||
}
|
||||
|
||||
/// Computes the leaf index corresponding to a Merkle proof
|
||||
#[must_use]
|
||||
pub fn leaf_index(&self) -> usize {
|
||||
// In current implementation the path indexes in a proof correspond to the binary representation of the leaf index
|
||||
let mut binary_repr = self.get_path_index();
|
||||
binary_repr.reverse();
|
||||
binary_repr
|
||||
.into_iter()
|
||||
.fold(0, |acc, digit| (acc << 1) + usize::from(digit))
|
||||
}
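// Example: a proof for leaf index 3 yields get_path_index() == [1, 1, 0, ...]
// (leaf to root); reversing gives the big-endian bits [..., 0, 1, 1], and the
// fold reconstructs 0b11 == 3.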
|
||||
|
||||
#[must_use]
|
||||
/// Returns the path elements forming a Merkle proof
|
||||
pub fn get_path_elements(&self) -> Vec<H::Fr> {
|
||||
self.0.iter().map(|x| x.0).collect()
|
||||
}
|
||||
|
||||
/// Returns the path indexes forming a Merkle proof
|
||||
#[must_use]
|
||||
pub fn get_path_index(&self) -> Vec<u8> {
|
||||
self.0.iter().map(|x| x.1).collect()
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
/// Computes the Merkle root by iteratively hashing the proof path elements with a given input leaf
|
||||
pub fn compute_root_from(&self, leaf: &H::Fr) -> H::Fr {
|
||||
let mut acc: H::Fr = *leaf;
|
||||
for w in self.0.iter() {
|
||||
if w.1 == 0 {
|
||||
acc = H::hash(&[acc, w.0]);
|
||||
} else {
|
||||
acc = H::hash(&[w.0, acc]);
|
||||
}
|
||||
}
|
||||
acc
|
||||
}
|
||||
}
|
||||
|
||||
// Debug formatting for printing a (Optimal) Merkle Proof
|
||||
impl<H> Debug for OptimalMerkleProof<H>
|
||||
where
|
||||
H: Hasher,
|
||||
H::Fr: Debug,
|
||||
{
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_tuple("Proof").field(&self.0).finish()
|
||||
}
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////
|
||||
/// Full Merkle Tree Implementation
|
||||
////////////////////////////////////////////////////////////
|
||||
|
||||
/// Merkle tree with all leaf and intermediate hashes stored
|
||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||
pub struct FullMerkleTree<H: Hasher> {
|
||||
/// The depth of the tree, i.e. the number of levels from leaf to root
|
||||
depth: usize,
|
||||
|
||||
/// The nodes cached from the empty part of the tree (where leaves are set to default).
/// Since the rightmost part of the tree is usually changed much later than its creation,
/// we can prove accumulation of elements in the leftmost part without initializing the full tree,
/// by caching the few intermediate nodes on the path to the root computed from default leaves
|
||||
cached_nodes: Vec<H::Fr>,
|
||||
|
||||
/// The tree nodes
|
||||
nodes: Vec<H::Fr>,
|
||||
|
||||
// The next available (i.e., never used) tree index. Equivalently, the number of leaves added to the tree
|
||||
// (deletions leave next_index unchanged)
|
||||
next_index: usize,
|
||||
}
|
||||
|
||||
/// Element of a Merkle proof
|
||||
#[derive(Clone, Copy, PartialEq, Eq)]
|
||||
pub enum FullMerkleBranch<H: Hasher> {
|
||||
/// Left branch taken, value is the right sibling hash.
|
||||
Left(H::Fr),
|
||||
|
||||
/// Right branch taken, value is the left sibling hash.
|
||||
Right(H::Fr),
|
||||
}
|
||||
|
||||
/// Merkle proof path, bottom to top.
|
||||
#[derive(Clone, PartialEq, Eq)]
|
||||
pub struct FullMerkleProof<H: Hasher>(pub Vec<FullMerkleBranch<H>>);
|
||||
|
||||
/// Implementations
|
||||
|
||||
impl<H: Hasher> FullMerkleTree<H> {
|
||||
pub fn default(depth: usize) -> Self {
|
||||
FullMerkleTree::<H>::new(depth, H::default_leaf())
|
||||
}
|
||||
|
||||
/// Creates a new `MerkleTree`
/// depth - the height of the tree; 2^depth is the maximum number of leaves
|
||||
pub fn new(depth: usize, initial_leaf: H::Fr) -> Self {
|
||||
// Compute cache node values, leaf to root
|
||||
let cached_nodes = successors(Some(initial_leaf), |prev| Some(H::hash(&[*prev, *prev])))
|
||||
.take(depth + 1)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
// Compute node values
|
||||
let nodes = cached_nodes
|
||||
.iter()
|
||||
.rev()
|
||||
.enumerate()
|
||||
.flat_map(|(levels, hash)| repeat(hash).take(1 << levels))
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
debug_assert!(nodes.len() == (1 << (depth + 1)) - 1);
|
||||
|
||||
let next_index = 0;
|
||||
|
||||
Self {
|
||||
depth,
|
||||
cached_nodes,
|
||||
nodes,
|
||||
next_index,
|
||||
}
|
||||
}
|
||||
|
||||
// Returns the depth of the tree
|
||||
pub fn depth(&self) -> usize {
|
||||
self.depth
|
||||
}
|
||||
|
||||
// Returns the capacity of the tree, i.e. the maximum number of accumulatable leaves
|
||||
pub fn capacity(&self) -> usize {
|
||||
1 << self.depth
|
||||
}
|
||||
|
||||
// Returns the total number of leaves set
|
||||
pub fn leaves_set(&mut self) -> usize {
|
||||
self.next_index
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
// Returns the root of the tree
|
||||
pub fn root(&self) -> H::Fr {
|
||||
self.nodes[0]
|
||||
}
|
||||
|
||||
// Sets a leaf at the specified tree index
|
||||
pub fn set(&mut self, leaf: usize, hash: H::Fr) -> io::Result<()> {
|
||||
self.set_range(leaf, once(hash))?;
|
||||
self.next_index = max(self.next_index, leaf + 1);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Sets tree nodes, starting from start index
|
||||
// Function specific to the FullMerkleTree implementation
|
||||
fn set_range<I: IntoIterator<Item = H::Fr>>(
|
||||
&mut self,
|
||||
start: usize,
|
||||
hashes: I,
|
||||
) -> io::Result<()> {
|
||||
let index = self.capacity() + start - 1;
|
||||
let mut count = 0;
|
||||
// TODO: Error/panic when `hashes` is longer than the available leaves
|
||||
for (leaf, hash) in self.nodes[index..].iter_mut().zip(hashes) {
|
||||
*leaf = hash;
|
||||
count += 1;
|
||||
}
|
||||
if count != 0 {
|
||||
self.update_nodes(index, index + (count - 1));
|
||||
self.next_index = max(self.next_index, start + count);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Sets a leaf at the next available index
|
||||
pub fn update_next(&mut self, leaf: H::Fr) -> io::Result<()> {
|
||||
self.set(self.next_index, leaf)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Deletes a leaf at a certain index by setting it to its default value (next_index is not updated)
|
||||
pub fn delete(&mut self, index: usize) -> io::Result<()> {
|
||||
// We reset the leaf only if we previously set a leaf at that index
|
||||
if index < self.next_index {
|
||||
self.set(index, H::default_leaf())?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Computes a merkle proof for the leaf at the specified index
|
||||
pub fn proof(&self, leaf: usize) -> io::Result<FullMerkleProof<H>> {
|
||||
if leaf >= self.capacity() {
|
||||
return Err(io::Error::new(
|
||||
io::ErrorKind::InvalidInput,
|
||||
"index exceeds set size",
|
||||
));
|
||||
}
|
||||
let mut index = self.capacity() + leaf - 1;
|
||||
let mut path = Vec::with_capacity(self.depth + 1);
|
||||
while let Some(parent) = self.parent(index) {
|
||||
// Add proof for node at index to parent
|
||||
path.push(match index & 1 {
|
||||
1 => FullMerkleBranch::Left(self.nodes[index + 1]),
|
||||
0 => FullMerkleBranch::Right(self.nodes[index - 1]),
|
||||
_ => unreachable!(),
|
||||
});
|
||||
index = parent;
|
||||
}
|
||||
Ok(FullMerkleProof(path))
|
||||
}
|
||||
|
||||
// Verifies a Merkle proof with respect to the input leaf and the tree root
|
||||
pub fn verify(&self, hash: &H::Fr, proof: &FullMerkleProof<H>) -> io::Result<bool> {
|
||||
Ok(proof.compute_root_from(hash) == self.root())
|
||||
}
|
||||
|
||||
// Utilities for updating the tree nodes
|
||||
|
||||
/// For a given node index, return the parent node index
|
||||
/// Returns None if there is no parent (root node)
|
||||
fn parent(&self, index: usize) -> Option<usize> {
|
||||
if index == 0 {
|
||||
None
|
||||
} else {
|
||||
Some(((index + 1) >> 1) - 1)
|
||||
}
|
||||
}
|
||||
|
||||
/// For a given node index, return index of the first (left) child.
|
||||
fn first_child(&self, index: usize) -> usize {
|
||||
(index << 1) + 1
|
||||
}
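// The tree is stored as an implicit binary heap in `nodes`: the root sits
// at index 0 and the children of node i are at 2*i + 1 and 2*i + 2, so
// level d occupies the index range [2^d - 1, 2^(d+1) - 1).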
|
||||
|
||||
fn levels(&self, index: usize) -> usize {
|
||||
// `n.next_power_of_two()` will return `n` iff `n` is a power of two.
|
||||
// The extra offset corrects this.
|
||||
(index + 2).next_power_of_two().trailing_zeros() as usize - 1
|
||||
}
|
||||
|
||||
fn update_nodes(&mut self, start: usize, end: usize) {
|
||||
debug_assert_eq!(self.levels(start), self.levels(end));
|
||||
if let (Some(start), Some(end)) = (self.parent(start), self.parent(end)) {
|
||||
for parent in start..=end {
|
||||
let child = self.first_child(parent);
|
||||
self.nodes[parent] = H::hash(&[self.nodes[child], self.nodes[child + 1]]);
|
||||
}
|
||||
self.update_nodes(start, end);
|
||||
}
|
||||
}
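// Each call rehashes the parents of the dirty range and then recurses on
// the parent range, walking up level by level until `parent` returns None
// at the root.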
|
||||
}
|
||||
|
||||
impl<H: Hasher> FullMerkleProof<H> {
|
||||
#[must_use]
|
||||
// Returns the length of a Merkle proof
|
||||
pub fn length(&self) -> usize {
|
||||
self.0.len()
|
||||
}
|
||||
|
||||
/// Computes the leaf index corresponding to a Merkle proof
|
||||
#[must_use]
|
||||
pub fn leaf_index(&self) -> usize {
|
||||
self.0.iter().rev().fold(0, |index, branch| match branch {
|
||||
FullMerkleBranch::Left(_) => index << 1,
|
||||
FullMerkleBranch::Right(_) => (index << 1) + 1,
|
||||
})
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
/// Returns the path elements forming a Merkle proof
|
||||
pub fn get_path_elements(&self) -> Vec<H::Fr> {
|
||||
self.0
|
||||
.iter()
|
||||
.map(|x| match x {
|
||||
FullMerkleBranch::Left(value) | FullMerkleBranch::Right(value) => *value,
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Returns the path indexes forming a Merkle proof
|
||||
#[must_use]
|
||||
pub fn get_path_index(&self) -> Vec<u8> {
|
||||
self.0
|
||||
.iter()
|
||||
.map(|branch| match branch {
|
||||
FullMerkleBranch::Left(_) => 0,
|
||||
FullMerkleBranch::Right(_) => 1,
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Computes the Merkle root by iteratively hashing the proof path elements with a given input leaf
|
||||
#[must_use]
|
||||
pub fn compute_root_from(&self, hash: &H::Fr) -> H::Fr {
|
||||
self.0.iter().fold(*hash, |hash, branch| match branch {
|
||||
FullMerkleBranch::Left(sibling) => H::hash(&[hash, *sibling]),
|
||||
FullMerkleBranch::Right(sibling) => H::hash(&[*sibling, hash]),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Debug formatting for printing a (Full) Merkle Proof Branch
|
||||
impl<H> Debug for FullMerkleBranch<H>
|
||||
where
|
||||
H: Hasher,
|
||||
H::Fr: Debug,
|
||||
{
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Left(arg0) => f.debug_tuple("Left").field(arg0).finish(),
|
||||
Self::Right(arg0) => f.debug_tuple("Right").field(arg0).finish(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Debug formatting for printing a (Full) Merkle Proof
|
||||
impl<H> Debug for FullMerkleProof<H>
|
||||
where
|
||||
H: Hasher,
|
||||
H::Fr: Debug,
|
||||
{
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_tuple("Proof").field(&self.0).finish()
|
||||
}
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////
|
||||
/// Tests
|
||||
////////////////////////////////////////////////////////////
|
||||
|
||||
// Tests adapted from https://github.com/worldcoin/semaphore-rs/blob/d462a4372f1fd9c27610f2acfe4841fab1d396aa/src/merkle_tree.rs
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use hex_literal::hex;
|
||||
use tiny_keccak::{Hasher as _, Keccak};
|
||||
|
||||
struct Keccak256;
|
||||
|
||||
impl Hasher for Keccak256 {
|
||||
type Fr = [u8; 32];
|
||||
|
||||
fn default_leaf() -> Self::Fr {
|
||||
[0; 32]
|
||||
}
|
||||
|
||||
fn hash(inputs: &[Self::Fr]) -> Self::Fr {
|
||||
let mut output = [0; 32];
|
||||
let mut hasher = Keccak::v256();
|
||||
for element in inputs {
|
||||
hasher.update(element);
|
||||
}
|
||||
hasher.finalize(&mut output);
|
||||
output
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_root() {
|
||||
let leaves = [
|
||||
hex!("0000000000000000000000000000000000000000000000000000000000000001"),
|
||||
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
|
||||
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
|
||||
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
|
||||
];
|
||||
|
||||
let default_tree_root =
|
||||
hex!("b4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30");
|
||||
|
||||
let roots = [
|
||||
hex!("c1ba1812ff680ce84c1d5b4f1087eeb08147a4d510f3496b2849df3a73f5af95"),
|
||||
hex!("893760ec5b5bee236f29e85aef64f17139c3c1b7ff24ce64eb6315fca0f2485b"),
|
||||
hex!("222ff5e0b5877792c2bc1670e2ccd0c2c97cd7bb1672a57d598db05092d3d72c"),
|
||||
hex!("a9bb8c3f1f12e9aa903a50c47f314b57610a3ab32f2d463293f58836def38d36"),
|
||||
];
|
||||
|
||||
let mut tree = FullMerkleTree::<Keccak256>::new(2, [0; 32]);
|
||||
assert_eq!(tree.root(), default_tree_root);
|
||||
for i in 0..leaves.len() {
|
||||
tree.set(i, leaves[i]).unwrap();
|
||||
assert_eq!(tree.root(), roots[i]);
|
||||
}
|
||||
|
||||
let mut tree = OptimalMerkleTree::<Keccak256>::new(2, [0; 32]);
|
||||
assert_eq!(tree.root(), default_tree_root);
|
||||
for i in 0..leaves.len() {
|
||||
tree.set(i, leaves[i]).unwrap();
|
||||
assert_eq!(tree.root(), roots[i]);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_proof() {
|
||||
let leaves = [
|
||||
hex!("0000000000000000000000000000000000000000000000000000000000000001"),
|
||||
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
|
||||
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
|
||||
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
|
||||
];
|
||||
|
||||
// We test the FullMerkleTree implementation
|
||||
let mut tree = FullMerkleTree::<Keccak256>::new(2, [0; 32]);
|
||||
for i in 0..leaves.len() {
|
||||
// We set the leaves
|
||||
tree.set(i, leaves[i]).unwrap();
|
||||
|
||||
// We compute a merkle proof
|
||||
let proof = tree.proof(i).expect("index should be set");
|
||||
|
||||
// We verify that the merkle proof corresponds to the right leaf index
|
||||
assert_eq!(proof.leaf_index(), i);
|
||||
|
||||
// We verify the proof
|
||||
assert!(tree.verify(&leaves[i], &proof).unwrap());
|
||||
|
||||
// We ensure that the Merkle proof and the leaf generate the same root as the tree
|
||||
assert_eq!(proof.compute_root_from(&leaves[i]), tree.root());
|
||||
|
||||
// We check that the proof is not valid for another leaf
|
||||
assert!(!tree
|
||||
.verify(&leaves[(i + 1) % leaves.len()], &proof)
|
||||
.unwrap());
|
||||
}
|
||||
|
||||
// We test the OptimalMerkleTree implementation
|
||||
let mut tree = OptimalMerkleTree::<Keccak256>::new(2, [0; 32]);
|
||||
for i in 0..leaves.len() {
|
||||
// We set the leaves
|
||||
tree.set(i, leaves[i]).unwrap();
|
||||
|
||||
// We compute a merkle proof
|
||||
let proof = tree.proof(i).expect("index should be set");
|
||||
|
||||
// We verify that the merkle proof corresponds to the right leaf index
|
||||
assert_eq!(proof.leaf_index(), i);
|
||||
|
||||
// We verify the proof
|
||||
assert!(tree.verify(&leaves[i], &proof).unwrap());
|
||||
|
||||
// We ensure that the Merkle proof and the leaf generate the same root as the tree
|
||||
assert_eq!(proof.compute_root_from(&leaves[i]), tree.root());
|
||||
|
||||
// We check that the proof is not valid for another leaf
|
||||
assert!(!tree
|
||||
.verify(&leaves[(i + 1) % leaves.len()], &proof)
|
||||
.unwrap());
|
||||
}
|
||||
}
|
||||
}
|
||||
505
rln/src/pm_tree_adapter.rs
Normal file
@@ -0,0 +1,505 @@
|
||||
use serde_json::Value;
|
||||
use std::fmt::Debug;
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
use tempfile::Builder;
|
||||
|
||||
use crate::circuit::Fr;
|
||||
use crate::hashers::{poseidon_hash, PoseidonHash};
|
||||
use crate::utils::{bytes_le_to_fr, fr_to_bytes_le};
|
||||
use utils::error::{FromConfigError, ZerokitMerkleTreeError};
|
||||
use utils::pmtree::tree::Key;
|
||||
use utils::pmtree::{Database, Hasher, PmtreeErrorKind};
|
||||
use utils::{pmtree, Config, Mode, SledDB, ZerokitMerkleProof, ZerokitMerkleTree};
|
||||
|
||||
const METADATA_KEY: [u8; 8] = *b"metadata";
|
||||
|
||||
pub struct PmTree {
|
||||
tree: pmtree::MerkleTree<SledDB, PoseidonHash>,
|
||||
/// Tracks, up to next_index, which leaves are currently set:
/// 0 if the leaf is empty, 1 otherwise.
|
||||
cached_leaves_indices: Vec<u8>,
|
||||
// metadata that an application may use to store additional information
|
||||
metadata: Vec<u8>,
|
||||
}
|
||||
|
||||
pub struct PmTreeProof {
|
||||
proof: pmtree::tree::MerkleProof<PoseidonHash>,
|
||||
}
|
||||
|
||||
pub type FrOf<H> = <H as Hasher>::Fr;
|
||||
|
||||
// Implementation of the pmtree Hasher trait used by the pmtree Merkle tree
|
||||
impl Hasher for PoseidonHash {
|
||||
type Fr = Fr;
|
||||
|
||||
fn serialize(value: Self::Fr) -> pmtree::Value {
|
||||
fr_to_bytes_le(&value)
|
||||
}
|
||||
|
||||
fn deserialize(value: pmtree::Value) -> Self::Fr {
|
||||
let (fr, _) = bytes_le_to_fr(&value);
|
||||
fr
|
||||
}
|
||||
|
||||
fn default_leaf() -> Self::Fr {
|
||||
Fr::from(0)
|
||||
}
|
||||
|
||||
fn hash(inputs: &[Self::Fr]) -> Self::Fr {
|
||||
poseidon_hash(inputs)
|
||||
}
|
||||
}
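// serialize/deserialize round-trip through the same little-endian byte
// encoding used across the crate, e.g. (illustrative):
//
// let x = Fr::from(42u64);
// assert_eq!(PoseidonHash::deserialize(PoseidonHash::serialize(x)), x);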
|
||||
|
||||
fn default_tmp_path() -> PathBuf {
|
||||
Builder::new()
|
||||
.prefix("pmtree-")
|
||||
.tempfile()
|
||||
.expect("Failed to create temp file")
|
||||
.into_temp_path()
|
||||
.to_path_buf()
|
||||
}
|
||||
|
||||
const DEFAULT_TEMPORARY: bool = true;
|
||||
const DEFAULT_CACHE_CAPACITY: u64 = 1073741824; // 1 Gigabyte
|
||||
const DEFAULT_FLUSH_EVERY_MS: u64 = 500; // 500 Milliseconds
|
||||
const DEFAULT_MODE: Mode = Mode::HighThroughput;
|
||||
const DEFAULT_USE_COMPRESSION: bool = false;
|
||||
|
||||
pub struct PmtreeConfigBuilder {
|
||||
path: Option<PathBuf>,
|
||||
temporary: bool,
|
||||
cache_capacity: u64,
|
||||
flush_every_ms: u64,
|
||||
mode: Mode,
|
||||
use_compression: bool,
|
||||
}
|
||||
|
||||
impl PmtreeConfigBuilder {
|
||||
fn new() -> Self {
|
||||
PmtreeConfigBuilder {
|
||||
path: None,
|
||||
temporary: DEFAULT_TEMPORARY,
|
||||
cache_capacity: DEFAULT_CACHE_CAPACITY,
|
||||
flush_every_ms: DEFAULT_FLUSH_EVERY_MS,
|
||||
mode: DEFAULT_MODE,
|
||||
use_compression: DEFAULT_USE_COMPRESSION,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn path<P: Into<PathBuf>>(mut self, path: P) -> Self {
|
||||
self.path = Some(path.into());
|
||||
self
|
||||
}
|
||||
|
||||
pub fn temporary(mut self, temporary: bool) -> Self {
|
||||
self.temporary = temporary;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn cache_capacity(mut self, capacity: u64) -> Self {
|
||||
self.cache_capacity = capacity;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn flush_every_ms(mut self, ms: u64) -> Self {
|
||||
self.flush_every_ms = ms;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn mode(mut self, mode: Mode) -> Self {
|
||||
self.mode = mode;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn use_compression(mut self, compression: bool) -> Self {
|
||||
self.use_compression = compression;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn build(self) -> Result<PmtreeConfig, FromConfigError> {
|
||||
let path = match (self.temporary, self.path) {
|
||||
(true, None) => default_tmp_path(),
|
||||
(false, None) => return Err(FromConfigError::MissingPath),
|
||||
(true, Some(path)) if path.exists() => return Err(FromConfigError::PathExists),
|
||||
(_, Some(path)) => path,
|
||||
};
|
||||
|
||||
let config = Config::new()
|
||||
.temporary(self.temporary)
|
||||
.path(path)
|
||||
.cache_capacity(self.cache_capacity)
|
||||
.flush_every_ms(Some(self.flush_every_ms))
|
||||
.mode(self.mode)
|
||||
.use_compression(self.use_compression);
|
||||
|
||||
Ok(PmtreeConfig(config))
|
||||
}
|
||||
}
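// A minimal usage sketch (the path and values are illustrative):
//
// let config = PmtreeConfig::builder()
//     .path("/tmp/my-rln-tree")
//     .temporary(false)
//     .cache_capacity(1 << 30)
//     .build()?;
//
// `build` validates the (temporary, path) combination before assembling
// the underlying sled `Config`.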
|
||||
|
||||
pub struct PmtreeConfig(Config);
|
||||
|
||||
impl PmtreeConfig {
|
||||
pub fn builder() -> PmtreeConfigBuilder {
|
||||
PmtreeConfigBuilder::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for PmtreeConfig {
|
||||
type Err = FromConfigError;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
let config: Value = serde_json::from_str(s)?;
|
||||
|
||||
let path = config["path"].as_str();
|
||||
let path = path.map(PathBuf::from);
|
||||
let temporary = config["temporary"].as_bool();
|
||||
let cache_capacity = config["cache_capacity"].as_u64();
|
||||
let flush_every_ms = config["flush_every_ms"].as_u64();
|
||||
let mode = match config["mode"].as_str() {
|
||||
Some("HighThroughput") => Mode::HighThroughput,
|
||||
Some("LowSpace") => Mode::LowSpace,
|
||||
_ => Mode::HighThroughput,
|
||||
};
|
||||
let use_compression = config["use_compression"].as_bool();
|
||||
|
||||
if let (Some(true), Some(path)) = (temporary, path.as_ref()) {
|
||||
if path.exists() {
|
||||
return Err(FromConfigError::PathExists);
|
||||
}
|
||||
}
|
||||
|
||||
let config = Config::new()
|
||||
.temporary(temporary.unwrap_or(DEFAULT_TEMPORARY))
|
||||
.path(path.unwrap_or(default_tmp_path()))
|
||||
.cache_capacity(cache_capacity.unwrap_or(DEFAULT_CACHE_CAPACITY))
|
||||
.flush_every_ms(flush_every_ms)
|
||||
.mode(mode)
|
||||
.use_compression(use_compression.unwrap_or(false));
|
||||
Ok(PmtreeConfig(config))
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for PmtreeConfig {
|
||||
fn default() -> Self {
|
||||
Self::builder()
|
||||
.build()
|
||||
.expect("Default configuration should never fail")
|
||||
}
|
||||
}
|
||||
impl Debug for PmtreeConfig {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
self.0.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl Clone for PmtreeConfig {
|
||||
fn clone(&self) -> Self {
|
||||
PmtreeConfig(self.0.clone())
|
||||
}
|
||||
}
|
||||
|
||||
impl ZerokitMerkleTree for PmTree {
|
||||
type Proof = PmTreeProof;
|
||||
type Hasher = PoseidonHash;
|
||||
type Config = PmtreeConfig;
|
||||
|
||||
fn default(depth: usize) -> Result<Self, ZerokitMerkleTreeError> {
|
||||
let default_config = PmtreeConfig::default();
|
||||
PmTree::new(depth, Self::Hasher::default_leaf(), default_config)
|
||||
}
|
||||
|
||||
fn new(
|
||||
depth: usize,
|
||||
_default_leaf: FrOf<Self::Hasher>,
|
||||
config: Self::Config,
|
||||
) -> Result<Self, ZerokitMerkleTreeError> {
|
||||
let tree_loaded = pmtree::MerkleTree::load(config.clone().0);
|
||||
let tree = match tree_loaded {
|
||||
Ok(tree) => tree,
|
||||
Err(_) => pmtree::MerkleTree::new(depth, config.0)?,
|
||||
};
|
||||
|
||||
Ok(PmTree {
|
||||
tree,
|
||||
cached_leaves_indices: vec![0; 1 << depth],
|
||||
metadata: Vec::new(),
|
||||
})
|
||||
}
|
||||
|
||||
fn depth(&self) -> usize {
|
||||
self.tree.depth()
|
||||
}
|
||||
|
||||
fn capacity(&self) -> usize {
|
||||
self.tree.capacity()
|
||||
}
|
||||
|
||||
fn leaves_set(&self) -> usize {
|
||||
self.tree.leaves_set()
|
||||
}
|
||||
|
||||
fn root(&self) -> FrOf<Self::Hasher> {
|
||||
self.tree.root()
|
||||
}
|
||||
|
||||
fn set(
|
||||
&mut self,
|
||||
index: usize,
|
||||
leaf: FrOf<Self::Hasher>,
|
||||
) -> Result<(), ZerokitMerkleTreeError> {
|
||||
self.tree.set(index, leaf)?;
|
||||
self.cached_leaves_indices[index] = 1;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn set_range<I: IntoIterator<Item = FrOf<Self::Hasher>>>(
|
||||
&mut self,
|
||||
start: usize,
|
||||
values: I,
|
||||
) -> Result<(), ZerokitMerkleTreeError> {
|
||||
let v = values.into_iter().collect::<Vec<_>>();
|
||||
self.tree.set_range(start, v.clone().into_iter())?;
for i in start..start + v.len() {
self.cached_leaves_indices[i] = 1;
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get(&self, index: usize) -> Result<FrOf<Self::Hasher>, ZerokitMerkleTreeError> {
|
||||
self.tree
|
||||
.get(index)
|
||||
.map_err(ZerokitMerkleTreeError::PmtreeErrorKind)
|
||||
}
|
||||
|
||||
fn get_subtree_root(
|
||||
&self,
|
||||
n: usize,
|
||||
index: usize,
|
||||
) -> Result<FrOf<Self::Hasher>, ZerokitMerkleTreeError> {
|
||||
if n > self.depth() {
|
||||
return Err(ZerokitMerkleTreeError::InvalidLevel);
|
||||
}
|
||||
if index >= self.capacity() {
|
||||
return Err(ZerokitMerkleTreeError::InvalidLeaf);
|
||||
}
|
||||
if n == 0 {
|
||||
Ok(self.root())
|
||||
} else if n == self.depth() {
|
||||
self.get(index)
|
||||
} else {
|
||||
let node = self
|
||||
.tree
|
||||
.get_elem(Key::new(n, index >> (self.depth() - n)))
|
||||
.unwrap();
|
||||
Ok(node)
|
||||
}
|
||||
}
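// Example: in a tree of depth 20, get_subtree_root(1, index) returns the
// child of the root on the side selected by the top bit of `index`
// (index >> 19), since the ancestor at level n is located at
// Key::new(n, index >> (depth - n)).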
|
||||
|
||||
fn get_empty_leaves_indices(&self) -> Vec<usize> {
|
||||
let next_idx = self.leaves_set();
|
||||
self.cached_leaves_indices
|
||||
.iter()
|
||||
.take(next_idx)
|
||||
.enumerate()
|
||||
.filter(|&(_, &v)| v == 0u8)
|
||||
.map(|(idx, _)| idx)
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn override_range<I: IntoIterator<Item = FrOf<Self::Hasher>>, J: IntoIterator<Item = usize>>(
|
||||
&mut self,
|
||||
start: usize,
|
||||
leaves: I,
|
||||
indices: J,
|
||||
) -> Result<(), ZerokitMerkleTreeError> {
|
||||
let leaves = leaves.into_iter().collect::<Vec<_>>();
|
||||
let mut indices = indices.into_iter().collect::<Vec<_>>();
|
||||
indices.sort();
|
||||
|
||||
match (leaves.len(), indices.len()) {
|
||||
(0, 0) => Err(ZerokitMerkleTreeError::InvalidLeaf),
|
||||
(1, 0) => self.set(start, leaves[0]),
|
||||
(0, 1) => self.delete(indices[0]),
|
||||
(_, 0) => self.set_range(start, leaves.into_iter()),
|
||||
(0, _) => self
|
||||
.remove_indices(&indices)
|
||||
.map_err(ZerokitMerkleTreeError::PmtreeErrorKind),
|
||||
(_, _) => self
|
||||
.remove_indices_and_set_leaves(start, leaves, &indices)
|
||||
.map_err(ZerokitMerkleTreeError::PmtreeErrorKind),
|
||||
}
|
||||
}
|
||||
|
||||
fn update_next(&mut self, leaf: FrOf<Self::Hasher>) -> Result<(), ZerokitMerkleTreeError> {
|
||||
self.tree
|
||||
.update_next(leaf)
|
||||
.map_err(ZerokitMerkleTreeError::PmtreeErrorKind)
|
||||
}
|
||||
|
||||
/// Delete a leaf in the merkle tree given its index
|
||||
///
|
||||
/// Deleting a leaf is done by resetting it to its default value. Note that the next_index field
/// is not changed (i.e., a previously used index cannot be reused; this avoids replay
/// attacks and other unexpected, hard-to-debug issues)
|
||||
fn delete(&mut self, index: usize) -> Result<(), ZerokitMerkleTreeError> {
|
||||
self.tree
|
||||
.delete(index)
|
||||
.map_err(ZerokitMerkleTreeError::PmtreeErrorKind)?;
|
||||
self.cached_leaves_indices[index] = 0;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn proof(&self, index: usize) -> Result<Self::Proof, ZerokitMerkleTreeError> {
|
||||
let proof = self.tree.proof(index)?;
|
||||
Ok(PmTreeProof { proof })
|
||||
}
|
||||
|
||||
fn verify(
|
||||
&self,
|
||||
leaf: &FrOf<Self::Hasher>,
|
||||
witness: &Self::Proof,
|
||||
) -> Result<bool, ZerokitMerkleTreeError> {
|
||||
if self.tree.verify(leaf, &witness.proof) {
|
||||
Ok(true)
|
||||
} else {
|
||||
Err(ZerokitMerkleTreeError::InvalidWitness)
|
||||
}
|
||||
}
|
||||
|
||||
fn set_metadata(&mut self, metadata: &[u8]) -> Result<(), ZerokitMerkleTreeError> {
|
||||
self.tree
|
||||
.db
|
||||
.put(METADATA_KEY, metadata.to_vec())
|
||||
.map_err(ZerokitMerkleTreeError::PmtreeErrorKind)?;
|
||||
self.metadata = metadata.to_vec();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn metadata(&self) -> Result<Vec<u8>, ZerokitMerkleTreeError> {
|
||||
if !self.metadata.is_empty() {
|
||||
return Ok(self.metadata.clone());
|
||||
}
|
||||
// if empty, try searching the db
|
||||
let data = self.tree.db.get(METADATA_KEY)?;
|
||||
|
||||
// If the db has no metadata either, return empty metadata
Ok(data.unwrap_or_default())
|
||||
}
|
||||
|
||||
fn close_db_connection(&mut self) -> Result<(), ZerokitMerkleTreeError> {
|
||||
self.tree
|
||||
.db
|
||||
.close()
|
||||
.map_err(ZerokitMerkleTreeError::PmtreeErrorKind)
|
||||
}
|
||||
}
|
||||
|
||||
type PmTreeHasher = <PmTree as ZerokitMerkleTree>::Hasher;
|
||||
type FrOfPmTreeHasher = FrOf<PmTreeHasher>;
|
||||
|
||||
impl PmTree {
|
||||
fn remove_indices(&mut self, indices: &[usize]) -> Result<(), PmtreeErrorKind> {
|
||||
let start = indices[0];
|
||||
let end = indices.last().unwrap() + 1;
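// Note: this resets the entire contiguous range [start, end), including
// any in-between indices that were not listed in `indices`.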
|
||||
|
||||
let new_leaves = (start..end).map(|_| PmTreeHasher::default_leaf());
|
||||
|
||||
self.tree.set_range(start, new_leaves)?;
|
||||
|
||||
for i in start..end {
|
||||
self.cached_leaves_indices[i] = 0
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn remove_indices_and_set_leaves(
|
||||
&mut self,
|
||||
start: usize,
|
||||
leaves: Vec<FrOfPmTreeHasher>,
|
||||
indices: &[usize],
|
||||
) -> Result<(), PmtreeErrorKind> {
|
||||
let min_index = *indices.first().unwrap();
|
||||
let max_index = start + leaves.len();
|
||||
|
||||
let mut set_values = vec![PmTreeHasher::default_leaf(); max_index - min_index];
|
||||
|
||||
for i in min_index..start {
|
||||
if !indices.contains(&i) {
|
||||
let value = self.tree.get(i)?;
|
||||
set_values[i - min_index] = value;
|
||||
}
|
||||
}
|
||||
|
||||
for (i, &leaf) in leaves.iter().enumerate() {
|
||||
set_values[start - min_index + i] = leaf;
|
||||
}
|
||||
|
||||
self.tree.set_range(min_index, set_values)?;
|
||||
|
||||
for i in indices {
|
||||
self.cached_leaves_indices[*i] = 0;
|
||||
}
|
||||
|
||||
for i in start..max_index {
self.cached_leaves_indices[i] = 1;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl ZerokitMerkleProof for PmTreeProof {
|
||||
type Index = u8;
|
||||
type Hasher = PoseidonHash;
|
||||
|
||||
fn length(&self) -> usize {
|
||||
self.proof.length()
|
||||
}
|
||||
|
||||
fn leaf_index(&self) -> usize {
|
||||
self.proof.leaf_index()
|
||||
}
|
||||
|
||||
fn get_path_elements(&self) -> Vec<FrOf<Self::Hasher>> {
|
||||
self.proof.get_path_elements()
|
||||
}
|
||||
|
||||
fn get_path_index(&self) -> Vec<Self::Index> {
|
||||
self.proof.get_path_index()
|
||||
}
|
||||
fn compute_root_from(&self, leaf: &FrOf<Self::Hasher>) -> FrOf<Self::Hasher> {
|
||||
self.proof.compute_root_from(leaf)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_pmtree_json_config() {
|
||||
let json = r#"
|
||||
{
|
||||
"path": "pmtree-123456",
|
||||
"temporary": false,
|
||||
"cache_capacity": 1073741824,
|
||||
"flush_every_ms": 500,
|
||||
"mode": "HighThroughput",
|
||||
"use_compression": false
|
||||
}"#;
|
||||
|
||||
let _: PmtreeConfig = json.parse().expect("Failed to parse JSON config");
|
||||
|
||||
let _ = PmtreeConfig::builder()
|
||||
.path(default_tmp_path())
|
||||
.temporary(DEFAULT_TEMPORARY)
|
||||
.cache_capacity(DEFAULT_CACHE_CAPACITY)
|
||||
.mode(DEFAULT_MODE)
|
||||
.use_compression(DEFAULT_USE_COMPRESSION)
|
||||
.build()
|
||||
.expect("Failed to build config");
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
@@ -1,129 +0,0 @@
|
||||
// This crate implements the Poseidon hash algorithm https://eprint.iacr.org/2019/458.pdf
|
||||
|
||||
// The implementation is taken from https://github.com/arnaucube/poseidon-rs/blob/233027d6075a637c29ad84a8a44f5653b81f0410/src/lib.rs
|
||||
// and slightly adapted to work over arkworks field data type
|
||||
|
||||
use crate::circuit::Fr;
|
||||
use crate::poseidon_constants::constants;
|
||||
use crate::utils::*;
|
||||
use ark_std::Zero;
|
||||
use once_cell::sync::Lazy;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Constants {
|
||||
pub c: Vec<Vec<Fr>>,
|
||||
pub m: Vec<Vec<Vec<Fr>>>,
|
||||
pub n_rounds_f: usize,
|
||||
pub n_rounds_p: Vec<usize>,
|
||||
}
|
||||
pub fn load_constants() -> Constants {
|
||||
let (c_str, m_str) = constants();
|
||||
let mut c: Vec<Vec<Fr>> = Vec::new();
|
||||
for i in 0..c_str.len() {
|
||||
let mut cci: Vec<Fr> = Vec::new();
|
||||
for j in 0..c_str[i].len() {
|
||||
let b: Fr = str_to_fr(c_str[i][j], 10);
|
||||
cci.push(b);
|
||||
}
|
||||
c.push(cci);
|
||||
}
|
||||
let mut m: Vec<Vec<Vec<Fr>>> = Vec::new();
|
||||
for i in 0..m_str.len() {
|
||||
let mut mi: Vec<Vec<Fr>> = Vec::new();
|
||||
for j in 0..m_str[i].len() {
|
||||
let mut mij: Vec<Fr> = Vec::new();
|
||||
for k in 0..m_str[i][j].len() {
|
||||
let b: Fr = str_to_fr(m_str[i][j][k], 10);
|
||||
mij.push(b);
|
||||
}
|
||||
mi.push(mij);
|
||||
}
|
||||
m.push(mi);
|
||||
}
|
||||
Constants {
|
||||
c,
m,
|
||||
n_rounds_f: 8,
|
||||
n_rounds_p: vec![56, 57, 56, 60, 60, 63, 64, 63],
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Poseidon {
|
||||
constants: Constants,
|
||||
}
|
||||
impl Poseidon {
|
||||
pub fn new() -> Poseidon {
|
||||
Poseidon {
|
||||
constants: load_constants(),
|
||||
}
|
||||
}
|
||||
pub fn ark(&self, state: &mut [Fr], c: &[Fr], it: usize) {
|
||||
for i in 0..state.len() {
|
||||
state[i] += c[it + i];
|
||||
}
|
||||
}
|
||||
|
||||
pub fn sbox(&self, n_rounds_f: usize, n_rounds_p: usize, state: &mut [Fr], i: usize) {
|
||||
if (i < n_rounds_f / 2) || (i >= n_rounds_f / 2 + n_rounds_p) {
|
||||
for j in 0..state.len() {
|
||||
let aux = state[j];
|
||||
state[j] *= state[j];
|
||||
state[j] *= state[j];
|
||||
state[j] *= aux;
|
||||
}
|
||||
} else {
|
||||
let aux = state[0];
|
||||
state[0] *= state[0];
|
||||
state[0] *= state[0];
|
||||
state[0] *= aux;
|
||||
}
|
||||
}
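// The S-box is x -> x^5 (x^2, then x^4, then x^5 via the saved `aux`):
// full rounds apply it to the whole state, partial rounds only to state[0].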
|
||||
|
||||
pub fn mix(&self, state: &[Fr], m: &[Vec<Fr>]) -> Vec<Fr> {
|
||||
let mut new_state: Vec<Fr> = Vec::new();
|
||||
for i in 0..state.len() {
|
||||
new_state.push(Fr::zero());
|
||||
for j in 0..state.len() {
|
||||
let mut mij = m[i][j];
|
||||
mij *= state[j];
|
||||
new_state[i] += mij;
|
||||
}
|
||||
}
|
||||
new_state.clone()
|
||||
}
|
||||
|
||||
pub fn hash(&self, inp: Vec<Fr>) -> Result<Fr, String> {
|
||||
let t = inp.len() + 1;
|
||||
if inp.is_empty() || (inp.len() >= self.constants.n_rounds_p.len() - 1) {
|
||||
return Err("Wrong inputs length".to_string());
|
||||
}
|
||||
let n_rounds_f = self.constants.n_rounds_f;
|
||||
let n_rounds_p = self.constants.n_rounds_p[t - 2];
|
||||
|
||||
let mut state = vec![Fr::zero(); t];
|
||||
state[1..].clone_from_slice(&inp);
|
||||
|
||||
for i in 0..(n_rounds_f + n_rounds_p) {
|
||||
self.ark(&mut state, &self.constants.c[t - 2], i * t);
|
||||
self.sbox(n_rounds_f, n_rounds_p, &mut state, i);
|
||||
state = self.mix(&state, &self.constants.m[t - 2]);
|
||||
}
|
||||
|
||||
Ok(state[0])
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Poseidon {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
// Poseidon Hash wrapper over above implementation. Adapted from semaphore-rs poseidon hash wrapper.
|
||||
static POSEIDON: Lazy<Poseidon> = Lazy::new(Poseidon::new);
|
||||
|
||||
pub fn poseidon_hash(input: &[Fr]) -> Fr {
|
||||
POSEIDON
|
||||
.hash(input.to_vec())
|
||||
.expect("hash with fixed input size can't fail")
|
||||
}
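// Usage sketch, e.g. hashing two field elements as done when computing a
// Merkle tree node elsewhere in this crate:
//
// let node = poseidon_hash(&[Fr::from(1u64), Fr::from(2u64)]);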
|
||||
@@ -1,109 +1,32 @@
// This crate defines the RLN module default Merkle tree implementation and its Hasher
// Implementation inspired by https://github.com/worldcoin/semaphore-rs/blob/d462a4372f1fd9c27610f2acfe4841fab1d396aa/src/poseidon_tree.rs

// Implementation inspired by https://github.com/worldcoin/semaphore-rs/blob/d462a4372f1fd9c27610f2acfe4841fab1d396aa/src/poseidon_tree.rs (no differences)
#![cfg(not(feature = "stateless"))]

use crate::circuit::Fr;
use crate::merkle_tree::*;
use crate::poseidon_hash::poseidon_hash;
use cfg_if::cfg_if;

// The zerokit RLN default Merkle tree implementation is the OptimalMerkleTree.
// To switch to the FullMerkleTree implementation, it is enough to enable the fullmerkletree feature
// The zerokit RLN default Merkle tree implementation is the PMTree from the vacp2p_pmtree crate
// To switch to FullMerkleTree or OptimalMerkleTree, enable the corresponding feature in the Cargo.toml file

cfg_if! {
    if #[cfg(feature = "fullmerkletree")] {
        use utils::{FullMerkleTree, FullMerkleProof};
        use crate::hashers::PoseidonHash;

        pub type PoseidonTree = FullMerkleTree<PoseidonHash>;
        pub type MerkleProof = FullMerkleProof<PoseidonHash>;
    } else {
    } else if #[cfg(feature = "optimalmerkletree")] {
        use utils::{OptimalMerkleTree, OptimalMerkleProof};
        use crate::hashers::PoseidonHash;

        pub type PoseidonTree = OptimalMerkleTree<PoseidonHash>;
        pub type MerkleProof = OptimalMerkleProof<PoseidonHash>;
    }
}

// The zerokit RLN default Hasher
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct PoseidonHash;

impl Hasher for PoseidonHash {
    type Fr = Fr;

    fn default_leaf() -> Self::Fr {
        Self::Fr::from(0)
    }

    fn hash(inputs: &[Self::Fr]) -> Self::Fr {
        poseidon_hash(inputs)
    }
}

////////////////////////////////////////////////////////////
/// Tests
////////////////////////////////////////////////////////////

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    /// A basic performance comparison between the two supported Merkle Tree implementations
    fn test_merkle_implementations_performances() {
        use std::time::{Duration, Instant};

        let tree_height = 20;
        let sample_size = 100;

        let leaves: Vec<Fr> = (0..sample_size).map(|s| Fr::from(s)).collect();

        let mut gen_time_full: u128 = 0;
        let mut upd_time_full: u128 = 0;
        let mut gen_time_opt: u128 = 0;
        let mut upd_time_opt: u128 = 0;

        for _ in 0..sample_size.try_into().unwrap() {
            let now = Instant::now();
            FullMerkleTree::<PoseidonHash>::default(tree_height);
            gen_time_full += now.elapsed().as_nanos();

            let now = Instant::now();
            OptimalMerkleTree::<PoseidonHash>::default(tree_height);
            gen_time_opt += now.elapsed().as_nanos();
        }

        let mut tree_full = FullMerkleTree::<PoseidonHash>::default(tree_height);
        let mut tree_opt = OptimalMerkleTree::<PoseidonHash>::default(tree_height);
        for i in 0..sample_size.try_into().unwrap() {
            let now = Instant::now();
            tree_full.set(i, leaves[i]).unwrap();
            upd_time_full += now.elapsed().as_nanos();
            let proof = tree_full.proof(i).expect("index should be set");
            assert_eq!(proof.leaf_index(), i);

            let now = Instant::now();
            tree_opt.set(i, leaves[i]).unwrap();
            upd_time_opt += now.elapsed().as_nanos();
            let proof = tree_opt.proof(i).expect("index should be set");
            assert_eq!(proof.leaf_index(), i);
        }

        println!("Average tree generation time:");
        println!(
            " - Full Merkle Tree: {:?}",
            Duration::from_nanos((gen_time_full / sample_size).try_into().unwrap())
        );
        println!(
            " - Optimal Merkle Tree: {:?}",
            Duration::from_nanos((gen_time_opt / sample_size).try_into().unwrap())
        );

        println!("Average update_next execution time:");
        println!(
            " - Full Merkle Tree: {:?}",
            Duration::from_nanos((upd_time_full / sample_size).try_into().unwrap())
        );

        println!(
            " - Optimal Merkle Tree: {:?}",
            Duration::from_nanos((upd_time_opt / sample_size).try_into().unwrap())
        );
    } else if #[cfg(feature = "pmtree-ft")] {
        use crate::pm_tree_adapter::{PmTree, PmTreeProof};

        pub type PoseidonTree = PmTree;
        pub type MerkleProof = PmTreeProof;
    } else {
        compile_error!("One of the features `fullmerkletree`, `optimalmerkletree`, or `pmtree-ft` must be enabled.");
    }
}
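To make the feature-gated aliases concrete, here is a minimal sketch of driving whichever backend is selected, using the trait methods exercised by the test files later in this diff (the exact Cargo feature line is an assumption):

// Sketch: build the default PoseidonTree, set a leaf, and fetch a membership proof.
// Enable exactly one backend feature, e.g. features = ["pmtree-ft"] in Cargo.toml (assumed syntax).
use rln::circuit::Fr;
use rln::poseidon_tree::PoseidonTree;
use utils::{ZerokitMerkleProof, ZerokitMerkleTree};

fn main() {
    let mut tree = PoseidonTree::default(20).unwrap();
    tree.set(0, Fr::from(42u64)).unwrap();
    let proof = tree.proof(0).expect("leaf is set");
    assert_eq!(proof.leaf_index(), 0);
}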
rln/src/public.rs: 1947 lines changed (file diff suppressed because it is too large)
rln/src/public_api_tests.rs: 1286 lines, new file (file diff suppressed because it is too large)
rln/src/utils.rs: 479 lines changed
@@ -1,30 +1,35 @@
// This crate provides cross-module useful utilities (mainly type conversions) not necessarily specific to RLN

use crate::circuit::Fr;
use ark_ff::{BigInteger, FpParameters, PrimeField};
use crate::error::ConversionError;
use ark_ff::PrimeField;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_std::UniformRand;
use num_bigint::{BigInt, BigUint};
use num_traits::Num;
use std::iter::Extend;

pub fn modulus_bit_size() -> usize {
    <Fr as PrimeField>::Params::MODULUS
        .num_bits()
        .try_into()
        .unwrap()
}
use rand::Rng;
use ruint::aliases::U256;
use serde_json::json;
use std::io::Cursor;
use std::ops::Deref;
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};

#[inline(always)]
pub fn to_bigint(el: &Fr) -> BigInt {
    let res: BigUint = (*el).try_into().unwrap();
    res.try_into().unwrap()
    BigUint::from(*el).into()
}

pub fn fr_byte_size() -> usize {
    let mbs = modulus_bit_size();
    (mbs + 64 - (mbs % 64)) / 8
#[inline(always)]
pub const fn fr_byte_size() -> usize {
    let mbs = <Fr as PrimeField>::MODULUS_BIT_SIZE;
    ((mbs + 64 - (mbs % 64)) / 8) as usize
}
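// Sanity check of the formula above for Bn254: the Fr modulus has 254 bits, so
// mbs = 254 and (254 + 64 - (254 % 64)) / 8 = (254 + 64 - 62) / 8 = 256 / 8 = 32,
// i.e. every serialized Fr element occupies 32 bytes (four 64-bit limbs).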

pub fn str_to_fr(input: &str, radix: u32) -> Fr {
    assert!((radix == 10) || (radix == 16));
#[inline(always)]
pub fn str_to_fr(input: &str, radix: u32) -> Result<Fr, ConversionError> {
    if !(radix == 10 || radix == 16) {
        return Err(ConversionError::WrongRadix);
    }

    // We remove any quote present and we trim
    let single_quote: char = '\"';
@@ -32,19 +37,14 @@ pub fn str_to_fr(input: &str, radix: u32) -> Fr {
    input_clean = input_clean.trim().to_string();

    if radix == 10 {
        BigUint::from_str_radix(&input_clean, radix)
            .unwrap()
            .try_into()
            .unwrap()
        Ok(BigUint::from_str_radix(&input_clean, radix)?.into())
    } else {
        input_clean = input_clean.replace("0x", "");
        BigUint::from_str_radix(&input_clean, radix)
            .unwrap()
            .try_into()
            .unwrap()
        Ok(BigUint::from_str_radix(&input_clean, radix)?.into())
    }
}
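// Illustrative usage, with a hex constant borrowed from the tests later in this diff:
//     let fe = str_to_fr(
//         "0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
//         16,
//     )?;
// Radix-10 strings and (optionally 0x-prefixed) radix-16 strings are accepted;
// any other radix now yields ConversionError::WrongRadix instead of panicking.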

#[inline(always)]
pub fn bytes_le_to_fr(input: &[u8]) -> (Fr, usize) {
    let el_size = fr_byte_size();
    (
@@ -53,6 +53,7 @@ pub fn bytes_le_to_fr(input: &[u8]) -> (Fr, usize) {
    )
}

#[inline(always)]
pub fn bytes_be_to_fr(input: &[u8]) -> (Fr, usize) {
    let el_size = fr_byte_size();
    (
@@ -61,252 +62,360 @@ pub fn bytes_be_to_fr(input: &[u8]) -> (Fr, usize) {
    )
}

#[inline(always)]
pub fn fr_to_bytes_le(input: &Fr) -> Vec<u8> {
    let input_biguint: BigUint = (*input).into();
    let mut res = input_biguint.to_bytes_le();
    // BigUint conversion ignores most significant zero bytes. We restore them, otherwise serialization will fail (length % 8 != 0)
    while res.len() != fr_byte_size() {
        res.push(0);
    }
    res.resize(fr_byte_size(), 0);
    res
}

#[inline(always)]
pub fn fr_to_bytes_be(input: &Fr) -> Vec<u8> {
    let input_biguint: BigUint = (*input).into();
    let mut res = input_biguint.to_bytes_be();
    // BigUint conversion ignores most significant zero bytes. We restore them, otherwise serialization might fail
    // Fr elements are stored using 64-bit limbs
    while res.len() != fr_byte_size() {
        res.insert(0, 0);
    // For BE, insert 0 at the start of the Vec (see also the fr_to_bytes_le comments)
    let to_insert_count = fr_byte_size().saturating_sub(res.len());
    if to_insert_count > 0 {
        // Insert multiple 0 bytes at index 0
        res.splice(0..0, std::iter::repeat_n(0, to_insert_count));
    }
    res
}
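// Illustrative round trip: on Bn254, fr_to_bytes_le always emits fr_byte_size() == 32
// bytes, and bytes_le_to_fr reports how many bytes it consumed:
//     let bytes = fr_to_bytes_le(&Fr::from(123456789u64));
//     let (back, read) = bytes_le_to_fr(&bytes);
//     assert_eq!(back, Fr::from(123456789u64));
//     assert_eq!(read, 32);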

#[inline(always)]
pub fn vec_fr_to_bytes_le(input: &[Fr]) -> Vec<u8> {
    let mut bytes: Vec<u8> = Vec::new();
    // We store the vector length
    bytes.extend(input.len().to_le_bytes().to_vec());
    // Calculate capacity for Vec:
    // - 8 bytes for normalized vector length (usize)
    // - each Fr element requires fr_byte_size() bytes (typically 32 bytes)
    let mut bytes = Vec::with_capacity(8 + input.len() * fr_byte_size());

    // We store the vector length
    bytes.extend_from_slice(&normalize_usize_le(input.len()));

    // We store each element
    input.iter().for_each(|el| bytes.extend(fr_to_bytes_le(el)));
    for el in input {
        bytes.extend_from_slice(&fr_to_bytes_le(el));
    }

    bytes
}

#[inline(always)]
pub fn vec_fr_to_bytes_be(input: &[Fr]) -> Vec<u8> {
    let mut bytes: Vec<u8> = Vec::new();
    // We store the vector length
    bytes.extend(input.len().to_be_bytes().to_vec());
    // Calculate capacity for Vec:
    // - 8 bytes for normalized vector length (usize)
    // - each Fr element requires fr_byte_size() bytes (typically 32 bytes)
    let mut bytes = Vec::with_capacity(8 + input.len() * fr_byte_size());

    // We store the vector length
    bytes.extend_from_slice(&normalize_usize_be(input.len()));

    // We store each element
    input.iter().for_each(|el| bytes.extend(fr_to_bytes_be(el)));
    for el in input {
        bytes.extend_from_slice(&fr_to_bytes_be(el));
    }

    bytes
}

#[inline(always)]
pub fn vec_u8_to_bytes_le(input: &[u8]) -> Vec<u8> {
    let mut bytes: Vec<u8> = Vec::new();
    // We store the vector length
    bytes.extend(u64::try_from(input.len()).unwrap().to_le_bytes().to_vec());
    bytes.extend(input);
    // Calculate capacity for Vec:
    // - 8 bytes for normalized vector length (usize)
    // - variable length input data
    let mut bytes = Vec::with_capacity(8 + input.len());

    // We store the vector length
    bytes.extend_from_slice(&normalize_usize_le(input.len()));

    // We store the input
    bytes.extend_from_slice(input);

    bytes
}

pub fn vec_u8_to_bytes_be(input: Vec<u8>) -> Vec<u8> {
    let mut bytes: Vec<u8> = Vec::new();
    // We store the vector length
    bytes.extend(u64::try_from(input.len()).unwrap().to_be_bytes().to_vec());
    bytes.extend(input);
#[inline(always)]
pub fn vec_u8_to_bytes_be(input: &[u8]) -> Vec<u8> {
    // Calculate capacity for Vec:
    // - 8 bytes for normalized vector length (usize)
    // - variable length input data
    let mut bytes = Vec::with_capacity(8 + input.len());

    // We store the vector length
    bytes.extend_from_slice(&normalize_usize_be(input.len()));

    // We store the input
    bytes.extend_from_slice(input);

    bytes
}
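// All four encoders above share one wire layout: an 8-byte length prefix
// (normalize_usize_le / normalize_usize_be) followed by the payload, so a slice of
// n field elements occupies exactly 8 + n * fr_byte_size() bytes. For example:
//     let bytes = vec_fr_to_bytes_le(&[Fr::from(1u64), Fr::from(2u64)]);
//     assert_eq!(bytes.len(), 8 + 2 * 32);
//     assert_eq!(&bytes[0..8], &[2, 0, 0, 0, 0, 0, 0, 0]); // little-endian length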

pub fn bytes_le_to_vec_u8(input: &[u8]) -> (Vec<u8>, usize) {
#[inline(always)]
pub fn bytes_le_to_vec_u8(input: &[u8]) -> Result<(Vec<u8>, usize), ConversionError> {
    let mut read: usize = 0;

    let len = usize::try_from(u64::from_le_bytes(input[0..8].try_into().unwrap())).unwrap();
    if input.len() < 8 {
        return Err(ConversionError::InsufficientData {
            expected: 8,
            actual: input.len(),
        });
    }
    let len = usize::try_from(u64::from_le_bytes(input[0..8].try_into()?))?;
    read += 8;

    if input.len() < 8 + len {
        return Err(ConversionError::InsufficientData {
            expected: 8 + len,
            actual: input.len(),
        });
    }
    let res = input[8..8 + len].to_vec();
    read += res.len();

    (res, read)
    Ok((res, read))
}

pub fn bytes_be_to_vec_u8(input: &[u8]) -> (Vec<u8>, usize) {
#[inline(always)]
pub fn bytes_be_to_vec_u8(input: &[u8]) -> Result<(Vec<u8>, usize), ConversionError> {
    let mut read: usize = 0;

    let len = usize::try_from(u64::from_be_bytes(input[0..8].try_into().unwrap())).unwrap();
    if input.len() < 8 {
        return Err(ConversionError::InsufficientData {
            expected: 8,
            actual: input.len(),
        });
    }
    let len = usize::try_from(u64::from_be_bytes(input[0..8].try_into()?))?;
    read += 8;

    if input.len() < 8 + len {
        return Err(ConversionError::InsufficientData {
            expected: 8 + len,
            actual: input.len(),
        });
    }
    let res = input[8..8 + len].to_vec();

    read += res.len();

    (res, read)
    Ok((res, read))
}

pub fn bytes_le_to_vec_fr(input: &[u8]) -> (Vec<Fr>, usize) {
#[inline(always)]
pub fn bytes_le_to_vec_fr(input: &[u8]) -> Result<(Vec<Fr>, usize), ConversionError> {
    let mut read: usize = 0;
    let mut res: Vec<Fr> = Vec::new();

    let len = usize::try_from(u64::from_le_bytes(input[0..8].try_into().unwrap())).unwrap();
    if input.len() < 8 {
        return Err(ConversionError::InsufficientData {
            expected: 8,
            actual: input.len(),
        });
    }
    let len = usize::try_from(u64::from_le_bytes(input[0..8].try_into()?))?;
    read += 8;

    let el_size = fr_byte_size();
    if input.len() < 8 + len * el_size {
        return Err(ConversionError::InsufficientData {
            expected: 8 + len * el_size,
            actual: input.len(),
        });
    }
    let mut res: Vec<Fr> = Vec::with_capacity(len);
    for i in 0..len {
        let (curr_el, _) = bytes_le_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)].to_vec());
        let (curr_el, _) = bytes_le_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)]);
        res.push(curr_el);
        read += el_size;
    }

    (res, read)
    Ok((res, read))
}

pub fn bytes_be_to_vec_fr(input: &[u8]) -> (Vec<Fr>, usize) {
#[inline(always)]
pub fn bytes_be_to_vec_fr(input: &[u8]) -> Result<(Vec<Fr>, usize), ConversionError> {
    let mut read: usize = 0;
    let mut res: Vec<Fr> = Vec::new();

    let len = usize::try_from(u64::from_be_bytes(input[0..8].try_into().unwrap())).unwrap();
    if input.len() < 8 {
        return Err(ConversionError::InsufficientData {
            expected: 8,
            actual: input.len(),
        });
    }
    let len = usize::try_from(u64::from_be_bytes(input[0..8].try_into()?))?;
    read += 8;

    let el_size = fr_byte_size();
    if input.len() < 8 + len * el_size {
        return Err(ConversionError::InsufficientData {
            expected: 8 + len * el_size,
            actual: input.len(),
        });
    }
    let mut res: Vec<Fr> = Vec::with_capacity(len);
    for i in 0..len {
        let (curr_el, _) = bytes_be_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)].to_vec());
        let (curr_el, _) = bytes_be_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)]);
        res.push(curr_el);
        read += el_size;
    }

    (res, read)
    Ok((res, read))
}
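// With the Result-based decoders above, a truncated buffer surfaces as
// ConversionError::InsufficientData instead of an out-of-bounds panic:
//     assert!(bytes_le_to_vec_u8(&[1u8, 2, 3]).is_err()); // shorter than the 8-byte prefix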

/* Old conversion utilities between different libraries' data types

// Conversion Utilities between poseidon-rs Field and arkworks Fr (in order to call directly poseidon-rs' poseidon_hash)

use ff::{PrimeField as _, PrimeFieldRepr as _};
use poseidon_rs::Fr as PosFr;

pub fn fr_to_posfr(value: Fr) -> PosFr {
    let mut bytes = [0_u8; 32];
    let byte_vec = value.into_repr().to_bytes_be();
    bytes.copy_from_slice(&byte_vec[..]);
    let mut repr = <PosFr as ff::PrimeField>::Repr::default();
    repr.read_be(&bytes[..])
        .expect("read from correctly sized slice always succeeds");
    PosFr::from_repr(repr).expect("value is always in range")
}

pub fn posfr_to_fr(value: PosFr) -> Fr {
    let mut bytes = [0u8; 32];
    value
        .into_repr()
        .write_be(&mut bytes[..])
        .expect("write to correctly sized slice always succeeds");
    Fr::from_be_bytes_mod_order(&bytes)
}

// Conversion Utilities between semaphore-rs Field and arkworks Fr

use semaphore::Field;

pub fn to_fr(el: &Field) -> Fr {
    Fr::try_from(*el).unwrap()
}

pub fn to_field(el: &Fr) -> Field {
    (*el).try_into().unwrap()
}

pub fn vec_to_fr(v: &[Field]) -> Vec<Fr> {
    v.iter().map(|el| to_fr(el)).collect()
}

pub fn vec_to_field(v: &[Fr]) -> Vec<Field> {
    v.iter().map(|el| to_field(el)).collect()
}

pub fn vec_fr_to_field(input: &[Fr]) -> Vec<Field> {
    input.iter().map(|el| to_field(el)).collect()
}

pub fn vec_field_to_fr(input: &[Field]) -> Vec<Fr> {
    input.iter().map(|el| to_fr(el)).collect()
}

pub fn str_to_field(input: String, radix: i32) -> Field {
    assert!((radix == 10) || (radix == 16));

    // We remove any quote present and we trim
    let single_quote: char = '\"';
    let input_clean = input.replace(single_quote, "");
    let input_clean = input_clean.trim();

    if radix == 10 {
        Field::from_str(&format!(
            "{:01$x}",
            BigUint::from_str(input_clean).unwrap(),
            64
        ))
        .unwrap()
#[inline(always)]
pub fn bytes_le_to_vec_usize(input: &[u8]) -> Result<Vec<usize>, ConversionError> {
    if input.len() < 8 {
        return Err(ConversionError::InsufficientData {
            expected: 8,
            actual: input.len(),
        });
    }
    let nof_elem = usize::try_from(u64::from_le_bytes(input[0..8].try_into()?))?;
    if nof_elem == 0 {
        Ok(vec![])
    } else {
        let input_clean = input_clean.replace("0x", "");
        Field::from_str(&format!("{:0>64}", &input_clean)).unwrap()
        if input.len() < 8 + nof_elem * 8 {
            return Err(ConversionError::InsufficientData {
                expected: 8 + nof_elem * 8,
                actual: input.len(),
            });
        }
        let elements: Vec<usize> = input[8..]
            .chunks(8)
            .take(nof_elem)
            .map(|ch| usize::from_le_bytes(ch[0..8].try_into().unwrap()))
            .collect();
        Ok(elements)
    }
}

pub fn bytes_le_to_field(input: &[u8]) -> (Field, usize) {
    let (fr_el, read) = bytes_le_to_fr(input);
    (to_field(&fr_el), read)
#[inline(always)]
pub fn bytes_be_to_vec_usize(input: &[u8]) -> Result<Vec<usize>, ConversionError> {
    if input.len() < 8 {
        return Err(ConversionError::InsufficientData {
            expected: 8,
            actual: input.len(),
        });
    }
    let nof_elem = usize::try_from(u64::from_be_bytes(input[0..8].try_into()?))?;
    if nof_elem == 0 {
        Ok(vec![])
    } else {
        if input.len() < 8 + nof_elem * 8 {
            return Err(ConversionError::InsufficientData {
                expected: 8 + nof_elem * 8,
                actual: input.len(),
            });
        }
        let elements: Vec<usize> = input[8..]
            .chunks(8)
            .take(nof_elem)
            .map(|ch| usize::from_be_bytes(ch[0..8].try_into().unwrap()))
            .collect();
        Ok(elements)
    }
}

pub fn bytes_be_to_field(input: &[u8]) -> (Field, usize) {
    let (fr_el, read) = bytes_be_to_fr(input);
    (to_field(&fr_el), read)
/// Normalizes a `usize` into an 8-byte array, ensuring consistency across architectures.
/// On 32-bit systems, the result is zero-padded to 8 bytes.
/// On 64-bit systems, it directly represents the `usize` value.
#[inline(always)]
pub fn normalize_usize_le(input: usize) -> [u8; 8] {
    let mut bytes = [0u8; 8];
    let input_bytes = input.to_le_bytes();
    bytes[..input_bytes.len()].copy_from_slice(&input_bytes);
    bytes
}

pub fn field_to_bytes_le(input: &Field) -> Vec<u8> {
    fr_to_bytes_le(&to_fr(input))
/// Normalizes a `usize` into an 8-byte array, ensuring consistency across architectures.
/// On 32-bit systems, the result is zero-padded to 8 bytes.
/// On 64-bit systems, it directly represents the `usize` value.
#[inline(always)]
pub fn normalize_usize_be(input: usize) -> [u8; 8] {
    let mut bytes = [0u8; 8];
    let input_bytes = input.to_be_bytes();
    bytes[..input_bytes.len()].copy_from_slice(&input_bytes);
    bytes
}

pub fn field_to_bytes_be(input: &Field) -> Vec<u8> {
    fr_to_bytes_be(&to_fr(input))
#[inline(always)] // used in tests
pub fn generate_input_buffer() -> Cursor<String> {
    Cursor::new(json!({}).to_string())
}

#[derive(
    Debug, Zeroize, ZeroizeOnDrop, Clone, PartialEq, CanonicalSerialize, CanonicalDeserialize,
)]
pub struct IdSecret(ark_bn254::Fr);

pub fn vec_field_to_bytes_le(input: &[Field]) -> Vec<u8> {
    vec_fr_to_bytes_le(&vec_field_to_fr(input))
impl IdSecret {
    pub fn rand<R: Rng + ?Sized>(rng: &mut R) -> Self {
        let mut fr = Fr::rand(rng);
        let res = Self::from(&mut fr);
        // No need to zeroize fr (it is already zeroized in the From implementation)
        #[allow(clippy::let_and_return)]
        res
    }

    pub fn from_bytes_le(input: &[u8]) -> (Self, usize) {
        let el_size = fr_byte_size();
        let b_uint = BigUint::from_bytes_le(&input[0..el_size]);
        let mut fr = Fr::from(b_uint);
        let res = IdSecret::from(&mut fr);
        // Note: no zeroize on b_uint as it has been moved
        (res, el_size)
    }

    pub(crate) fn to_bytes_le(&self) -> Zeroizing<Vec<u8>> {
        let input_biguint: BigUint = self.0.into();
        let mut res = input_biguint.to_bytes_le();
        res.resize(fr_byte_size(), 0);
        Zeroizing::new(res)
    }

    pub(crate) fn to_bytes_be(&self) -> Zeroizing<Vec<u8>> {
        let input_biguint: BigUint = self.0.into();
        let mut res = input_biguint.to_bytes_be();
        let to_insert_count = fr_byte_size().saturating_sub(res.len());
        if to_insert_count > 0 {
            // Insert multiple 0 bytes at index 0
            res.splice(0..0, std::iter::repeat_n(0, to_insert_count));
        }
        Zeroizing::new(res)
    }

    /// Warning: this can leak the secret value
    /// Warning: the leaked value is of type `U256`, which implements Copy (copies will not be zeroized)
    pub(crate) fn to_u256(&self) -> U256 {
        let mut big_int = self.0.into_bigint();
        let res = U256::from_limbs(big_int.0);
        big_int.zeroize();
        res
    }
}
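// Illustrative use of the zeroization flow above (note that to_bytes_le, to_bytes_be
// and to_u256 are pub(crate); external callers only see the constructors and Deref):
//     let mut rng = ark_std::rand::thread_rng();
//     let secret = IdSecret::rand(&mut rng);            // the source Fr is wiped inside rand()
//     let (from_buf, read) = IdSecret::from_bytes_le(&[0u8; 32]);
//     assert_eq!(read, 32);                             // consumes exactly fr_byte_size() bytes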

pub fn vec_field_to_bytes_be(input: &[Field]) -> Vec<u8> {
    vec_fr_to_bytes_be(&vec_field_to_fr(input))
impl From<&mut Fr> for IdSecret {
    fn from(value: &mut Fr) -> Self {
        let id_secret = Self(*value);
        value.zeroize();
        id_secret
    }
}

impl Deref for IdSecret {
    type Target = Fr;

pub fn bytes_le_to_vec_field(input: &[u8]) -> (Vec<Field>, usize) {
    let (vec_fr, read) = bytes_le_to_vec_fr(input);
    (vec_fr_to_field(&vec_fr), read)
    /// Deref to &Fr
    ///
    /// Warning: this can leak the secret value
    /// Warning: the leaked value is of type `Fr`, which implements Copy (copies will not be zeroized)
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

pub fn bytes_be_to_vec_field(input: &[u8]) -> (Vec<Field>, usize) {
    let (vec_fr, read) = bytes_be_to_vec_fr(input);
    (vec_fr_to_field(&vec_fr), read)
#[derive(Debug, Zeroize, ZeroizeOnDrop)]
pub enum FrOrSecret {
    IdSecret(IdSecret),
    Fr(Fr),
}

// Arithmetic over Field elements (wrapped over the arkworks algebra crate)

pub fn add(a: &Field, b: &Field) -> Field {
    to_field(&(to_fr(a) + to_fr(b)))
impl From<Fr> for FrOrSecret {
    fn from(value: Fr) -> Self {
        FrOrSecret::Fr(value)
    }
}

pub fn mul(a: &Field, b: &Field) -> Field {
    to_field(&(to_fr(a) * to_fr(b)))
impl From<IdSecret> for FrOrSecret {
    fn from(value: IdSecret) -> Self {
        FrOrSecret::IdSecret(value)
    }
}

pub fn div(a: &Field, b: &Field) -> Field {
    to_field(&(to_fr(a) / to_fr(b)))
}

pub fn inv(a: &Field) -> Field {
    to_field(&(Fr::from(1) / to_fr(a)))
}
*/
rln/tests/ffi.rs: 1444 lines, new file (file diff suppressed because it is too large)
rln/tests/poseidon_tree.rs: 151 lines, new file
@@ -0,0 +1,151 @@
////////////////////////////////////////////////////////////
// Tests
////////////////////////////////////////////////////////////

#![cfg(not(feature = "stateless"))]

#[cfg(test)]
mod test {
    use rln::hashers::{poseidon_hash, PoseidonHash};
    use rln::{
        circuit::{Fr, TEST_TREE_DEPTH},
        poseidon_tree::PoseidonTree,
    };
    use utils::{FullMerkleTree, OptimalMerkleTree, ZerokitMerkleProof, ZerokitMerkleTree};

    #[test]
    // This test checks correctness of `FullMerkleTree` and `OptimalMerkleTree` with the Poseidon hash
    fn test_zerokit_merkle_implementations() {
        let sample_size = 100;
        let leaves: Vec<Fr> = (0..sample_size).map(Fr::from).collect();

        let mut tree_full = FullMerkleTree::<PoseidonHash>::default(TEST_TREE_DEPTH).unwrap();
        let mut tree_opt = OptimalMerkleTree::<PoseidonHash>::default(TEST_TREE_DEPTH).unwrap();

        for (i, leave) in leaves
            .into_iter()
            .enumerate()
            .take(sample_size.try_into().unwrap())
        {
            tree_full.set(i, leave).unwrap();
            let proof = tree_full.proof(i).expect("index should be set");
            assert_eq!(proof.leaf_index(), i);

            tree_opt.set(i, leave).unwrap();
            assert_eq!(tree_opt.root(), tree_full.root());
            let proof = tree_opt.proof(i).expect("index should be set");
            assert_eq!(proof.leaf_index(), i);
        }

        // We check all roots are the same
        let tree_full_root = tree_full.root();
        let tree_opt_root = tree_opt.root();

        assert_eq!(tree_full_root, tree_opt_root);
    }

    #[test]
    fn test_subtree_root() {
        const DEPTH: usize = 3;
        const LEAVES_LEN: usize = 8;

        let mut tree = PoseidonTree::default(DEPTH).unwrap();
        let leaves: Vec<Fr> = (0..LEAVES_LEN).map(|s| Fr::from(s as i32)).collect();
        let _ = tree.set_range(0, leaves.into_iter());

        for i in 0..LEAVES_LEN {
            // check leaves
            assert_eq!(
                tree.get(i).unwrap(),
                tree.get_subtree_root(DEPTH, i).unwrap()
            );
            // check root
            assert_eq!(tree.root(), tree.get_subtree_root(0, i).unwrap());
        }

        // check intermediate nodes
        for n in (1..=DEPTH).rev() {
            for i in (0..(1 << n)).step_by(2) {
                let idx_l = i * (1 << (DEPTH - n));
                let idx_r = (i + 1) * (1 << (DEPTH - n));
                let idx_sr = idx_l;

                let prev_l = tree.get_subtree_root(n, idx_l).unwrap();
                let prev_r = tree.get_subtree_root(n, idx_r).unwrap();
                let subroot = tree.get_subtree_root(n - 1, idx_sr).unwrap();

                assert_eq!(poseidon_hash(&[prev_l, prev_r]), subroot);
            }
        }
    }
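    // Note on the intermediate-node check above: a subtree root at level n covers
    // 2^(DEPTH - n) leaves, so the two sibling subtrees hashed together start at leaf
    // indices i * 2^(DEPTH - n) and (i + 1) * 2^(DEPTH - n). With DEPTH = 3, n = 2,
    // i = 2: idx_l = 4, idx_r = 6, and poseidon_hash([root(2, 4), root(2, 6)]) must
    // equal the level-1 subtree root at leaf index 4.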

    #[test]
    fn test_get_empty_leaves_indices() {
        let depth = 4;
        let nof_leaves: usize = 1 << (depth - 1);

        let mut tree = PoseidonTree::default(depth).unwrap();
        let leaves: Vec<Fr> = (0..nof_leaves).map(|s| Fr::from(s as i32)).collect();

        // check set_range
        let _ = tree.set_range(0, leaves.clone().into_iter());
        assert!(tree.get_empty_leaves_indices().is_empty());

        let mut vec_idxs = Vec::new();
        // check the delete function
        for i in 0..nof_leaves {
            vec_idxs.push(i);
            let _ = tree.delete(i);
            assert_eq!(tree.get_empty_leaves_indices(), vec_idxs);
        }
        // check the set function
        for i in (0..nof_leaves).rev() {
            vec_idxs.pop();
            let _ = tree.set(i, leaves[i]);
            assert_eq!(tree.get_empty_leaves_indices(), vec_idxs);
        }

        // check remove_indices_and_set_leaves inside the override_range function
        assert!(tree.get_empty_leaves_indices().is_empty());
        let leaves_2: Vec<Fr> = (0..2).map(Fr::from).collect();
        tree.override_range(0, leaves_2.clone().into_iter(), [0, 1, 2, 3].into_iter())
            .unwrap();
        assert_eq!(tree.get_empty_leaves_indices(), vec![2, 3]);

        // check remove_indices inside the override_range function
        tree.override_range(0, [].into_iter(), [0, 1].into_iter())
            .unwrap();
        assert_eq!(tree.get_empty_leaves_indices(), vec![0, 1, 2, 3]);

        // check set_range inside the override_range function
        tree.override_range(0, leaves_2.clone().into_iter(), [].into_iter())
            .unwrap();
        assert_eq!(tree.get_empty_leaves_indices(), vec![2, 3]);

        let leaves_4: Vec<Fr> = (0..4).map(Fr::from).collect();
        // check the case where the indices for writing and deleting are the same
        tree.override_range(0, leaves_4.clone().into_iter(), [0, 1, 2, 3].into_iter())
            .unwrap();
        assert!(tree.get_empty_leaves_indices().is_empty());

        // check the case where the indices for deletion come before the indices for overwriting
        tree.override_range(4, leaves_4.clone().into_iter(), [0, 1, 2, 3].into_iter())
            .unwrap();
        // The result looks like this because, in the set_range function in pmtree,
        // the next_index value is increased not by the number of elements to insert,
        // but by the size of the union of the indices for deleting and inserting.
        assert_eq!(
            tree.get_empty_leaves_indices(),
            vec![0, 1, 2, 3, 8, 9, 10, 11]
        );

        // check the case where the indices for writing and deleting do not overlap completely
        tree.override_range(2, leaves_4.clone().into_iter(), [0, 1, 2, 3].into_iter())
            .unwrap();
        // Same reasoning as above; in addition, leaves 6 and 7 were already set in the previous step.
        assert_eq!(tree.get_empty_leaves_indices(), vec![0, 1, 8, 9, 10, 11]);
    }
}
rln/tests/protocol.rs: 258 lines, new file
@@ -0,0 +1,258 @@
#![cfg(not(feature = "stateless"))]

#[cfg(test)]
mod test {
    use ark_ff::BigInt;
    use rln::circuit::{graph_from_folder, zkey_from_folder};
    use rln::circuit::{Fr, TEST_TREE_DEPTH};
    use rln::hashers::{hash_to_field_le, poseidon_hash};
    use rln::poseidon_tree::PoseidonTree;
    use rln::protocol::{
        deserialize_proof_values, deserialize_witness, generate_proof, keygen,
        proof_values_from_witness, rln_witness_from_json, rln_witness_from_values,
        rln_witness_to_json, seeded_keygen, serialize_proof_values, serialize_witness,
        verify_proof, RLNWitnessInput,
    };
    use rln::utils::str_to_fr;
    use utils::{ZerokitMerkleProof, ZerokitMerkleTree};

    type ConfigOf<T> = <T as ZerokitMerkleTree>::Config;

    #[test]
    // We test Merkle tree generation, proofs and verification
    fn test_merkle_proof() {
        let leaf_index = 3;

        // generate identity
        let identity_secret_hash = hash_to_field_le(b"test-merkle-proof");
        let id_commitment = poseidon_hash(&[identity_secret_hash]);
        let rate_commitment = poseidon_hash(&[id_commitment, 100.into()]);

        // generate merkle tree
        let default_leaf = Fr::from(0);
        let mut tree = PoseidonTree::new(
            TEST_TREE_DEPTH,
            default_leaf,
            ConfigOf::<PoseidonTree>::default(),
        )
        .unwrap();
        tree.set(leaf_index, rate_commitment).unwrap();

        // We check correct computation of the root
        let root = tree.root();

        assert_eq!(
            root,
            BigInt([
                4939322235247991215,
                5110804094006647505,
                4427606543677101242,
                910933464535675827
            ])
            .into()
        );

        let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
        let path_elements = merkle_proof.get_path_elements();
        let identity_path_index = merkle_proof.get_path_index();

        // We check correct computation of the path and indexes
        let expected_path_elements: Vec<Fr> = [
            "0x0000000000000000000000000000000000000000000000000000000000000000",
            "0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
            "0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
            "0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
            "0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
            "0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
            "0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
            "0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
            "0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
            "0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
            "0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
            "0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
            "0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
            "0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
            "0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
            "0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
            "0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
            "0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
            "0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
            "0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
        ]
        .map(|e| str_to_fr(e, 16).unwrap())
        .to_vec();

        let expected_identity_path_index: Vec<u8> =
            vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];

        assert_eq!(path_elements, expected_path_elements);
        assert_eq!(identity_path_index, expected_identity_path_index);

        // We check correct verification of the proof
        assert!(tree.verify(&rate_commitment, &merkle_proof).unwrap());
    }
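    // Note on the hardcoded root above: BigInt([l0, l1, l2, l3]) is arkworks'
    // 256-bit integer with l0 as the least-significant 64-bit limb, and .into()
    // maps that integer into Fr. The expected value is deterministic because the
    // tree is built at TEST_TREE_DEPTH from the fixed seed b"test-merkle-proof".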

    fn get_test_witness() -> RLNWitnessInput {
        let leaf_index = 3;
        // Generate an identity pair
        let (identity_secret_hash, id_commitment) = keygen();
        let user_message_limit = Fr::from(100);
        let rate_commitment = poseidon_hash(&[id_commitment, user_message_limit]);

        // generate merkle tree
        let default_leaf = Fr::from(0);
        let mut tree = PoseidonTree::new(
            TEST_TREE_DEPTH,
            default_leaf,
            ConfigOf::<PoseidonTree>::default(),
        )
        .unwrap();
        tree.set(leaf_index, rate_commitment).unwrap();

        let merkle_proof = tree.proof(leaf_index).expect("proof should exist");

        let signal = b"hey hey";
        let x = hash_to_field_le(signal);

        // We set the remaining values to random ones
        let epoch = hash_to_field_le(b"test-epoch");
        let rln_identifier = hash_to_field_le(b"test-rln-identifier");
        let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);

        rln_witness_from_values(
            identity_secret_hash,
            merkle_proof.get_path_elements(),
            merkle_proof.get_path_index(),
            x,
            external_nullifier,
            user_message_limit,
            Fr::from(1),
        )
        .unwrap()
    }

    #[test]
    // We test RLN proof generation and verification
    fn test_witness_from_json() {
        // We generate all relevant keys
        let proving_key = zkey_from_folder();
        let verification_key = &proving_key.0.vk;
        let graph_data = graph_from_folder();
        // We compute the witness from the JSON input
        let rln_witness = get_test_witness();
        let rln_witness_json = rln_witness_to_json(&rln_witness).unwrap();
        let rln_witness_deser = rln_witness_from_json(rln_witness_json).unwrap();
        assert_eq!(rln_witness_deser, rln_witness);

        // Let's generate a zkSNARK proof
        let proof = generate_proof(proving_key, &rln_witness_deser, graph_data).unwrap();
        let proof_values = proof_values_from_witness(&rln_witness_deser).unwrap();

        // Let's verify the proof
        let verified = verify_proof(verification_key, &proof, &proof_values);

        assert!(verified.unwrap());
    }

    #[test]
    // We test RLN proof generation and verification end to end
    fn test_end_to_end() {
        let rln_witness = get_test_witness();
        let rln_witness_json = rln_witness_to_json(&rln_witness).unwrap();
        let rln_witness_deser = rln_witness_from_json(rln_witness_json).unwrap();
        assert_eq!(rln_witness_deser, rln_witness);

        // We generate all relevant keys
        let proving_key = zkey_from_folder();
        let verification_key = &proving_key.0.vk;
        let graph_data = graph_from_folder();

        // Let's generate a zkSNARK proof
        let proof = generate_proof(proving_key, &rln_witness_deser, graph_data).unwrap();

        let proof_values = proof_values_from_witness(&rln_witness_deser).unwrap();

        // Let's verify the proof
        let success = verify_proof(verification_key, &proof, &proof_values).unwrap();

        assert!(success);
    }

    #[test]
    fn test_witness_serialization() {
        // We test witness JSON serialization
        let rln_witness = get_test_witness();
        let rln_witness_json = rln_witness_to_json(&rln_witness).unwrap();
        let rln_witness_deser = rln_witness_from_json(rln_witness_json).unwrap();
        assert_eq!(rln_witness_deser, rln_witness);

        // We test witness serialization
        let ser = serialize_witness(&rln_witness).unwrap();
        let (deser, _) = deserialize_witness(&ser).unwrap();
        assert_eq!(rln_witness, deser);

        // We test proof values serialization
        let proof_values = proof_values_from_witness(&rln_witness).unwrap();
        let ser = serialize_proof_values(&proof_values);
        let (deser, _) = deserialize_proof_values(&ser);
        assert_eq!(proof_values, deser);
    }

    #[test]
    // Tests seeded keygen
    // Note that the hardcoded values are only valid for Bn254
    fn test_seeded_keygen() {
        // Generate an identity pair using a seed phrase
        let seed_phrase: &str = "A seed phrase example";
        let (identity_secret_hash, id_commitment) = seeded_keygen(seed_phrase.as_bytes());

        // We check against expected values
        let expected_identity_secret_hash_seed_phrase = str_to_fr(
            "0x20df38f3f00496f19fe7c6535492543b21798ed7cb91aebe4af8012db884eda3",
            16,
        )
        .unwrap();
        let expected_id_commitment_seed_phrase = str_to_fr(
            "0x1223a78a5d66043a7f9863e14507dc80720a5602b2a894923e5b5147d5a9c325",
            16,
        )
        .unwrap();

        assert_eq!(
            identity_secret_hash,
            expected_identity_secret_hash_seed_phrase
        );
        assert_eq!(id_commitment, expected_id_commitment_seed_phrase);

        // Generate an identity pair using a byte array
        let seed_bytes: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
        let (identity_secret_hash, id_commitment) = seeded_keygen(seed_bytes);

        // We check against expected values
        let expected_identity_secret_hash_seed_bytes = str_to_fr(
            "0x766ce6c7e7a01bdf5b3f257616f603918c30946fa23480f2859c597817e6716",
            16,
        )
        .unwrap();
        let expected_id_commitment_seed_bytes = str_to_fr(
            "0xbf16d2b5c0d6f9d9d561e05bfca16a81b4b873bb063508fae360d8c74cef51f",
            16,
        )
        .unwrap();

        assert_eq!(
            identity_secret_hash,
            expected_identity_secret_hash_seed_bytes
        );
        assert_eq!(id_commitment, expected_id_commitment_seed_bytes);

        // We check again that the identity pair generated with the same seed phrase corresponds to the previously generated one
        let (identity_secret_hash, id_commitment) = seeded_keygen(seed_phrase.as_bytes());

        assert_eq!(
            identity_secret_hash,
            expected_identity_secret_hash_seed_phrase
        );
        assert_eq!(id_commitment, expected_id_commitment_seed_phrase);
    }
}
rln/tests/public.rs: 384 lines, new file
@@ -0,0 +1,384 @@
#[cfg(test)]
mod test {
    #[cfg(not(feature = "stateless"))]
    use {
        ark_ff::BigInt,
        rln::{
            circuit::TEST_TREE_DEPTH,
            protocol::compute_tree_root,
            public::RLN,
            utils::{
                bytes_le_to_vec_fr, bytes_le_to_vec_u8, bytes_le_to_vec_usize, fr_to_bytes_le,
                generate_input_buffer, IdSecret,
            },
        },
        zeroize::Zeroize,
    };

    use ark_std::{rand::thread_rng, UniformRand};
    use rand::Rng;
    use rln::circuit::Fr;
    use rln::hashers::{
        hash_to_field_be, hash_to_field_le, poseidon_hash as utils_poseidon_hash, ROUND_PARAMS,
    };
    use rln::protocol::{
        deserialize_identity_pair_be, deserialize_identity_pair_le, deserialize_identity_tuple_be,
        deserialize_identity_tuple_le,
    };
    use rln::public::{
        hash as public_hash, poseidon_hash as public_poseidon_hash, seeded_extended_key_gen,
        seeded_key_gen,
    };
    use rln::utils::{
        bytes_be_to_fr, bytes_le_to_fr, str_to_fr, vec_fr_to_bytes_be, vec_fr_to_bytes_le,
    };
    use std::io::Cursor;

    #[test]
    // This test is similar to the one in lib, but it uses only the public API
    #[cfg(not(feature = "stateless"))]
    fn test_merkle_proof() {
        let leaf_index = 3;
        let user_message_limit = 1;

        let mut rln = RLN::new(TEST_TREE_DEPTH, generate_input_buffer()).unwrap();

        // generate identity
        let mut identity_secret_hash_ = hash_to_field_le(b"test-merkle-proof");
        let identity_secret_hash = IdSecret::from(&mut identity_secret_hash_);

        let mut to_hash = [*identity_secret_hash.clone()];
        let id_commitment = utils_poseidon_hash(&to_hash);
        to_hash[0].zeroize();

        let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit.into()]);

        // check that the list of empty leaf indices is empty
        let mut buffer = Cursor::new(Vec::<u8>::new());
        rln.get_empty_leaves_indices(&mut buffer).unwrap();
        let idxs = bytes_le_to_vec_usize(&buffer.into_inner()).unwrap();
        assert!(idxs.is_empty());

        // We pass rate_commitment as a Read buffer to RLN's set_leaf
        let mut buffer = Cursor::new(fr_to_bytes_le(&rate_commitment));
        rln.set_leaf(leaf_index, &mut buffer).unwrap();

        // check that the leaves before leaf_index are set to zero
        let mut buffer = Cursor::new(Vec::<u8>::new());
        rln.get_empty_leaves_indices(&mut buffer).unwrap();
        let idxs = bytes_le_to_vec_usize(&buffer.into_inner()).unwrap();
        assert_eq!(idxs, [0, 1, 2]);

        // We check correct computation of the root
        let mut buffer = Cursor::new(Vec::<u8>::new());
        rln.get_root(&mut buffer).unwrap();
        let (root, _) = bytes_le_to_fr(&buffer.into_inner());

        assert_eq!(
            root,
            Fr::from(BigInt([
                17110646155607829651,
                5040045984242729823,
                6965416728592533086,
                2328960363755461975
            ]))
        );

        // We check correct computation of the merkle proof
        let mut buffer = Cursor::new(Vec::<u8>::new());
        rln.get_proof(leaf_index, &mut buffer).unwrap();

        let buffer_inner = buffer.into_inner();
        let (path_elements, read) = bytes_le_to_vec_fr(&buffer_inner).unwrap();
        let (identity_path_index, _) = bytes_le_to_vec_u8(&buffer_inner[read..]).unwrap();

        // We check correct computation of the path and indexes
        let expected_path_elements: Vec<Fr> = [
            "0x0000000000000000000000000000000000000000000000000000000000000000",
            "0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
            "0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
            "0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
            "0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
            "0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
            "0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
            "0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
            "0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
            "0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
            "0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
            "0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
            "0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
            "0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
            "0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
            "0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
            "0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
            "0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
            "0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
            "0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
        ]
        .map(|e| str_to_fr(e, 16).unwrap())
        .to_vec();

        let expected_identity_path_index: Vec<u8> =
            vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];

        assert_eq!(path_elements, expected_path_elements);
        assert_eq!(identity_path_index, expected_identity_path_index);

        // check subtree root computation for leaf 0 and all corresponding nodes up to the root
        let l_idx = 0;
        for n in (1..=TEST_TREE_DEPTH).rev() {
            let idx_l = l_idx * (1 << (TEST_TREE_DEPTH - n));
            let idx_r = (l_idx + 1) * (1 << (TEST_TREE_DEPTH - n));
            let idx_sr = idx_l;

            let mut buffer = Cursor::new(Vec::<u8>::new());
            rln.get_subtree_root(n, idx_l, &mut buffer).unwrap();
            let (prev_l, _) = bytes_le_to_fr(&buffer.into_inner());

            let mut buffer = Cursor::new(Vec::<u8>::new());
            rln.get_subtree_root(n, idx_r, &mut buffer).unwrap();
            let (prev_r, _) = bytes_le_to_fr(&buffer.into_inner());

            let mut buffer = Cursor::new(Vec::<u8>::new());
            rln.get_subtree_root(n - 1, idx_sr, &mut buffer).unwrap();
            let (subroot, _) = bytes_le_to_fr(&buffer.into_inner());

            let res = utils_poseidon_hash(&[prev_l, prev_r]);
            assert_eq!(res, subroot);
        }

        // We double-check that the proof computed from the public API is correct
        let root_from_proof = compute_tree_root(
            &identity_secret_hash,
            &user_message_limit.into(),
            &path_elements,
            &identity_path_index,
        );

        assert_eq!(root, root_from_proof);
    }

    #[test]
    fn test_seeded_keygen() {
        let seed_bytes: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];

        let mut input_buffer = Cursor::new(&seed_bytes);
        let mut output_buffer = Cursor::new(Vec::<u8>::new());

        seeded_key_gen(&mut input_buffer, &mut output_buffer, true).unwrap();
        let serialized_output = output_buffer.into_inner();

        let (identity_secret_hash, id_commitment) = deserialize_identity_pair_le(serialized_output);

        // We check against expected values
        let expected_identity_secret_hash_seed_bytes = str_to_fr(
            "0x766ce6c7e7a01bdf5b3f257616f603918c30946fa23480f2859c597817e6716",
            16,
        )
        .unwrap();
        let expected_id_commitment_seed_bytes = str_to_fr(
            "0xbf16d2b5c0d6f9d9d561e05bfca16a81b4b873bb063508fae360d8c74cef51f",
            16,
        )
        .unwrap();

        assert_eq!(
            identity_secret_hash,
            expected_identity_secret_hash_seed_bytes
        );
        assert_eq!(id_commitment, expected_id_commitment_seed_bytes);
    }

    #[test]
    fn test_seeded_keygen_big_endian() {
        let seed_bytes: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];

        let mut input_buffer = Cursor::new(&seed_bytes);
        let mut output_buffer = Cursor::new(Vec::<u8>::new());

        seeded_key_gen(&mut input_buffer, &mut output_buffer, false).unwrap();
        let serialized_output = output_buffer.into_inner();

        let (identity_secret_hash, id_commitment) = deserialize_identity_pair_be(serialized_output);

        // We check against expected values
        let expected_identity_secret_hash_seed_bytes = str_to_fr(
            "0x766ce6c7e7a01bdf5b3f257616f603918c30946fa23480f2859c597817e6716",
            16,
        )
        .unwrap();
        let expected_id_commitment_seed_bytes = str_to_fr(
            "0xbf16d2b5c0d6f9d9d561e05bfca16a81b4b873bb063508fae360d8c74cef51f",
            16,
        )
        .unwrap();

        assert_eq!(
            identity_secret_hash,
            expected_identity_secret_hash_seed_bytes
        );
        assert_eq!(id_commitment, expected_id_commitment_seed_bytes);
    }

    #[test]
    fn test_seeded_extended_keygen() {
        let seed_bytes: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];

        let mut input_buffer = Cursor::new(&seed_bytes);
        let mut output_buffer = Cursor::new(Vec::<u8>::new());

        seeded_extended_key_gen(&mut input_buffer, &mut output_buffer, true).unwrap();
        let serialized_output = output_buffer.into_inner();

        let (identity_trapdoor, identity_nullifier, identity_secret_hash, id_commitment) =
            deserialize_identity_tuple_le(serialized_output);

        // We check against expected values
        let expected_identity_trapdoor_seed_bytes = str_to_fr(
            "0x766ce6c7e7a01bdf5b3f257616f603918c30946fa23480f2859c597817e6716",
            16,
        )
        .unwrap();
        let expected_identity_nullifier_seed_bytes = str_to_fr(
            "0x1f18714c7bc83b5bca9e89d404cf6f2f585bc4c0f7ed8b53742b7e2b298f50b4",
            16,
        )
        .unwrap();
        let expected_identity_secret_hash_seed_bytes = str_to_fr(
            "0x2aca62aaa7abaf3686fff2caf00f55ab9462dc12db5b5d4bcf3994e671f8e521",
            16,
        )
        .unwrap();
        let expected_id_commitment_seed_bytes = str_to_fr(
            "0x68b66aa0a8320d2e56842581553285393188714c48f9b17acd198b4f1734c5c",
            16,
        )
        .unwrap();

        assert_eq!(identity_trapdoor, expected_identity_trapdoor_seed_bytes);
        assert_eq!(identity_nullifier, expected_identity_nullifier_seed_bytes);
        assert_eq!(
            identity_secret_hash,
            expected_identity_secret_hash_seed_bytes
        );
        assert_eq!(id_commitment, expected_id_commitment_seed_bytes);
    }

    #[test]
    fn test_seeded_extended_keygen_big_endian() {
        let seed_bytes: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];

        let mut input_buffer = Cursor::new(&seed_bytes);
        let mut output_buffer = Cursor::new(Vec::<u8>::new());

        seeded_extended_key_gen(&mut input_buffer, &mut output_buffer, false).unwrap();
        let serialized_output = output_buffer.into_inner();

        let (identity_trapdoor, identity_nullifier, identity_secret_hash, id_commitment) =
            deserialize_identity_tuple_be(serialized_output);

        // We check against expected values
        let expected_identity_trapdoor_seed_bytes = str_to_fr(
            "0x766ce6c7e7a01bdf5b3f257616f603918c30946fa23480f2859c597817e6716",
            16,
        )
        .unwrap();
        let expected_identity_nullifier_seed_bytes = str_to_fr(
            "0x1f18714c7bc83b5bca9e89d404cf6f2f585bc4c0f7ed8b53742b7e2b298f50b4",
            16,
        )
        .unwrap();
        let expected_identity_secret_hash_seed_bytes = str_to_fr(
            "0x2aca62aaa7abaf3686fff2caf00f55ab9462dc12db5b5d4bcf3994e671f8e521",
            16,
        )
        .unwrap();
        let expected_id_commitment_seed_bytes = str_to_fr(
            "0x68b66aa0a8320d2e56842581553285393188714c48f9b17acd198b4f1734c5c",
            16,
        )
        .unwrap();

        assert_eq!(identity_trapdoor, expected_identity_trapdoor_seed_bytes);
        assert_eq!(identity_nullifier, expected_identity_nullifier_seed_bytes);
        assert_eq!(
            identity_secret_hash,
            expected_identity_secret_hash_seed_bytes
        );
        assert_eq!(id_commitment, expected_id_commitment_seed_bytes);
    }

    #[test]
    fn test_hash_to_field() {
        let mut rng = thread_rng();
        let signal: [u8; 32] = rng.gen();

        let mut input_buffer = Cursor::new(&signal);
        let mut output_buffer = Cursor::new(Vec::<u8>::new());

        public_hash(&mut input_buffer, &mut output_buffer, true).unwrap();
        let serialized_hash = output_buffer.into_inner();
        let (hash1, _) = bytes_le_to_fr(&serialized_hash);

        let hash2 = hash_to_field_le(&signal);

        assert_eq!(hash1, hash2);
    }

    #[test]
    fn test_hash_to_field_big_endian() {
        let mut rng = thread_rng();
        let signal: [u8; 32] = rng.gen();

        let mut input_buffer = Cursor::new(&signal);
        let mut output_buffer = Cursor::new(Vec::<u8>::new());

        public_hash(&mut input_buffer, &mut output_buffer, false).unwrap();
        let serialized_hash = output_buffer.into_inner();
        let (hash1, _) = bytes_be_to_fr(&serialized_hash);

        let hash2 = hash_to_field_be(&signal);

        assert_eq!(hash1, hash2);
    }

    #[test]
    fn test_poseidon_hash() {
        let mut rng = thread_rng();
        let number_of_inputs = rng.gen_range(1..ROUND_PARAMS.len());
        let mut inputs = Vec::with_capacity(number_of_inputs);
        for _ in 0..number_of_inputs {
            inputs.push(Fr::rand(&mut rng));
        }
        let expected_hash = utils_poseidon_hash(&inputs);

        let mut input_buffer = Cursor::new(vec_fr_to_bytes_le(&inputs));
        let mut output_buffer = Cursor::new(Vec::<u8>::new());

        public_poseidon_hash(&mut input_buffer, &mut output_buffer, true).unwrap();
        let serialized_hash = output_buffer.into_inner();
        let (hash, _) = bytes_le_to_fr(&serialized_hash);

        assert_eq!(hash, expected_hash);
    }

    #[test]
    fn test_poseidon_hash_big_endian() {
        let mut rng = thread_rng();
        let number_of_inputs = rng.gen_range(1..ROUND_PARAMS.len());
        let mut inputs = Vec::with_capacity(number_of_inputs);
        for _ in 0..number_of_inputs {
            inputs.push(Fr::rand(&mut rng));
        }
        let expected_hash = utils_poseidon_hash(&inputs);

        let mut input_buffer = Cursor::new(vec_fr_to_bytes_be(&inputs));
        let mut output_buffer = Cursor::new(Vec::<u8>::new());

        public_poseidon_hash(&mut input_buffer, &mut output_buffer, false).unwrap();
        let serialized_hash = output_buffer.into_inner();
        let (hash, _) = bytes_be_to_fr(&serialized_hash);

        assert_eq!(hash, expected_hash);
    }
}
rln/tests/utils.rs: 411 lines, new file
@@ -0,0 +1,411 @@
#[cfg(test)]
mod test {
    use rln::utils::{
        bytes_be_to_fr, bytes_be_to_vec_fr, bytes_be_to_vec_u8, bytes_be_to_vec_usize,
        bytes_le_to_fr, bytes_le_to_vec_fr, bytes_le_to_vec_u8, bytes_le_to_vec_usize,
        fr_to_bytes_be, fr_to_bytes_le, normalize_usize_be, normalize_usize_le, str_to_fr,
        vec_fr_to_bytes_be, vec_fr_to_bytes_le, vec_u8_to_bytes_be, vec_u8_to_bytes_le,
    };

    use ark_std::{rand::thread_rng, UniformRand};
    use rln::circuit::Fr;

    #[test]
    fn test_normalize_usize_le() {
        // Test basic cases
        assert_eq!(normalize_usize_le(0), [0, 0, 0, 0, 0, 0, 0, 0]);
        assert_eq!(normalize_usize_le(1), [1, 0, 0, 0, 0, 0, 0, 0]);
        assert_eq!(normalize_usize_le(255), [255, 0, 0, 0, 0, 0, 0, 0]);
        assert_eq!(normalize_usize_le(256), [0, 1, 0, 0, 0, 0, 0, 0]);
        assert_eq!(normalize_usize_le(65535), [255, 255, 0, 0, 0, 0, 0, 0]);
        assert_eq!(normalize_usize_le(65536), [0, 0, 1, 0, 0, 0, 0, 0]);

        // Test 32-bit boundary
        assert_eq!(
            normalize_usize_le(4294967295),
            [255, 255, 255, 255, 0, 0, 0, 0]
        );
        assert_eq!(normalize_usize_le(4294967296), [0, 0, 0, 0, 1, 0, 0, 0]);

        // Test maximum value
        assert_eq!(
            normalize_usize_le(usize::MAX),
            [255, 255, 255, 255, 255, 255, 255, 255]
        );

        // Test that the result is always 8 bytes
        assert_eq!(normalize_usize_le(0).len(), 8);
        assert_eq!(normalize_usize_le(usize::MAX).len(), 8);
    }

    #[test]
    fn test_normalize_usize_be() {
        // Test basic cases
        assert_eq!(normalize_usize_be(0), [0, 0, 0, 0, 0, 0, 0, 0]);
        assert_eq!(normalize_usize_be(1), [0, 0, 0, 0, 0, 0, 0, 1]);
        assert_eq!(normalize_usize_be(255), [0, 0, 0, 0, 0, 0, 0, 255]);
        assert_eq!(normalize_usize_be(256), [0, 0, 0, 0, 0, 0, 1, 0]);
        assert_eq!(normalize_usize_be(65535), [0, 0, 0, 0, 0, 0, 255, 255]);
        assert_eq!(normalize_usize_be(65536), [0, 0, 0, 0, 0, 1, 0, 0]);

        // Test 32-bit boundary
        assert_eq!(
            normalize_usize_be(4294967295),
            [0, 0, 0, 0, 255, 255, 255, 255]
        );
        assert_eq!(normalize_usize_be(4294967296), [0, 0, 0, 1, 0, 0, 0, 0]);

        // Test maximum value
        assert_eq!(
            normalize_usize_be(usize::MAX),
            [255, 255, 255, 255, 255, 255, 255, 255]
        );

        // Test that the result is always 8 bytes
        assert_eq!(normalize_usize_be(0).len(), 8);
        assert_eq!(normalize_usize_be(usize::MAX).len(), 8);
    }
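The behavior these two tests pin down is simple to restate: widen the usize to 64 bits, then emit its eight bytes in the requested order. The sketch below is a reference implementation consistent with the assertions above; it is an assumption about the shape of the code, and the actual rln::utils implementation may differ in form.

// Widen to u64 first so the output is always 8 bytes, even on 32-bit
// targets where usize is 4 bytes, then serialize in the requested order.
pub fn normalize_usize_le(value: usize) -> [u8; 8] {
    (value as u64).to_le_bytes()
}

pub fn normalize_usize_be(value: usize) -> [u8; 8] {
    (value as u64).to_be_bytes()
}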
    #[test]
    fn test_normalize_usize_endianness() {
        // Test that little-endian and big-endian produce different results for non-zero values
        let test_values = vec![1, 255, 256, 65535, 65536, 4294967295, 4294967296];

        for &value in &test_values {
            let le_result = normalize_usize_le(value);
            let be_result = normalize_usize_be(value);

            // For non-zero values, LE and BE should be different
            assert_ne!(
                le_result, be_result,
                "LE and BE should differ for value {value}"
            );

            // Both should be 8 bytes
            assert_eq!(le_result.len(), 8);
            assert_eq!(be_result.len(), 8);
        }

        // Zero should be the same in both endiannesses
        assert_eq!(normalize_usize_le(0), normalize_usize_be(0));
    }

    #[test]
    fn test_normalize_usize_roundtrip() {
        // Test that we can reconstruct the original value from the normalized bytes
        let test_values = vec![
            0,
            1,
            255,
            256,
            65535,
            65536,
            4294967295,
            4294967296,
            usize::MAX,
        ];

        for &value in &test_values {
            let le_bytes = normalize_usize_le(value);
            let be_bytes = normalize_usize_be(value);

            // Reconstruct from little-endian bytes
            let reconstructed_le = usize::from_le_bytes(le_bytes);
            assert_eq!(
                reconstructed_le, value,
                "LE roundtrip failed for value {value}"
            );

            // Reconstruct from big-endian bytes
            let reconstructed_be = usize::from_be_bytes(be_bytes);
            assert_eq!(
                reconstructed_be, value,
                "BE roundtrip failed for value {value}"
            );
        }
    }

    #[test]
    fn test_normalize_usize_edge_cases() {
        // Test edge cases and boundary values
        let edge_cases = vec![
            0,
            1,
            255,
            256,
            65535,
            65536,
            16777215,           // 2^24 - 1
            16777216,           // 2^24
            4294967295,         // 2^32 - 1
            4294967296,         // 2^32
            1099511627775,      // 2^40 - 1
            1099511627776,      // 2^40
            281474976710655,    // 2^48 - 1
            281474976710656,    // 2^48
            72057594037927935,  // 2^56 - 1
            72057594037927936,  // 2^56
            usize::MAX,
        ];

        for &value in &edge_cases {
            let le_result = normalize_usize_le(value);
            let be_result = normalize_usize_be(value);

            // Both should be 8 bytes
            assert_eq!(le_result.len(), 8);
            assert_eq!(be_result.len(), 8);

            // Roundtrip should work
            assert_eq!(usize::from_le_bytes(le_result), value);
            assert_eq!(usize::from_be_bytes(be_result), value);
        }
    }

    #[test]
    fn test_normalize_usize_architecture_independence() {
        // Test that the functions work consistently regardless of the underlying
        // architecture. This ensures that the functions provide a consistent
        // 8-byte output even on 32-bit systems where usize is 4 bytes.
        let test_values = vec![0, 1, 255, 256, 65535, 65536, 4294967295, 4294967296];

        for &value in &test_values {
            let le_result = normalize_usize_le(value);
            let be_result = normalize_usize_be(value);

            // Always 8 bytes regardless of architecture
            assert_eq!(le_result.len(), 8);
            assert_eq!(be_result.len(), 8);

            // The result should be consistent with the original value
            assert_eq!(usize::from_le_bytes(le_result), value);
            assert_eq!(usize::from_be_bytes(be_result), value);
        }
    }

    #[test]
    fn test_fr_serialization_roundtrip() {
        let mut rng = thread_rng();

        // Test multiple random Fr values
        for _ in 0..10 {
            let fr = Fr::rand(&mut rng);

            // Test little-endian roundtrip
            let le_bytes = fr_to_bytes_le(&fr);
            let (reconstructed_le, _) = bytes_le_to_fr(&le_bytes);
            assert_eq!(fr, reconstructed_le);

            // Test big-endian roundtrip
            let be_bytes = fr_to_bytes_be(&fr);
            let (reconstructed_be, _) = bytes_be_to_fr(&be_bytes);
            assert_eq!(fr, reconstructed_be);
        }
    }

    #[test]
    fn test_vec_fr_serialization_roundtrip() {
        let mut rng = thread_rng();

        // Test with different vector sizes
        for size in [0, 1, 5, 10] {
            let fr_vec: Vec<Fr> = (0..size).map(|_| Fr::rand(&mut rng)).collect();

            // Test little-endian roundtrip
            let le_bytes = vec_fr_to_bytes_le(&fr_vec);
            let (reconstructed_le, _) = bytes_le_to_vec_fr(&le_bytes).unwrap();
            assert_eq!(fr_vec, reconstructed_le);

            // Test big-endian roundtrip
            let be_bytes = vec_fr_to_bytes_be(&fr_vec);
            let (reconstructed_be, _) = bytes_be_to_vec_fr(&be_bytes).unwrap();
            assert_eq!(fr_vec, reconstructed_be);
        }
    }
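The Fr-vector helpers roundtripped above are consistent with a simple length-prefixed layout: an 8-byte element count followed by one fixed-width encoding per element. The encoder below is a hedged sketch of that layout, reusing fr_to_bytes_le from the imports; the count prefix is inferred from the usize test that follows, and the 32-byte element width is an assumption (standard for a BN254 Fr), not taken from the crate source.

// Assumed wire layout: 8-byte LE element count, then each Fr as produced
// by fr_to_bytes_le (assumed 32 bytes each for BN254).
fn encode_vec_fr_le(values: &[Fr]) -> Vec<u8> {
    let mut out = Vec::with_capacity(8 + values.len() * 32);
    out.extend_from_slice(&(values.len() as u64).to_le_bytes());
    for v in values {
        out.extend_from_slice(&fr_to_bytes_le(v));
    }
    out
}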
    #[test]
    fn test_vec_u8_serialization_roundtrip() {
        // Test with different vector sizes and content
        let test_cases = vec![
            vec![],
            vec![0],
            vec![255],
            vec![1, 2, 3, 4, 5],
            vec![0, 255, 128, 64, 32, 16, 8, 4, 2, 1],
            (0..100).collect::<Vec<u8>>(),
        ];

        for test_case in test_cases {
            // Test little-endian roundtrip
            let le_bytes = vec_u8_to_bytes_le(&test_case);
            let (reconstructed_le, _) = bytes_le_to_vec_u8(&le_bytes).unwrap();
            assert_eq!(test_case, reconstructed_le);

            // Test big-endian roundtrip
            let be_bytes = vec_u8_to_bytes_be(&test_case);
            let (reconstructed_be, _) = bytes_be_to_vec_u8(&be_bytes).unwrap();
            assert_eq!(test_case, reconstructed_be);
        }
    }

    #[test]
    fn test_vec_usize_serialization_roundtrip() {
        // Test with different vector sizes and content
        let test_cases = vec![
            vec![],
            vec![0],
            vec![usize::MAX],
            vec![1, 2, 3, 4, 5],
            vec![0, 255, 65535, 4294967295, usize::MAX],
            (0..10).collect::<Vec<usize>>(),
        ];

        for test_case in test_cases {
            // Test little-endian roundtrip
            let le_bytes = {
                let mut bytes = Vec::new();
                bytes.extend_from_slice(&normalize_usize_le(test_case.len()));
                for &value in &test_case {
                    bytes.extend_from_slice(&normalize_usize_le(value));
                }
                bytes
            };
            let reconstructed_le = bytes_le_to_vec_usize(&le_bytes).unwrap();
            assert_eq!(test_case, reconstructed_le);

            // Test big-endian roundtrip
            let be_bytes = {
                let mut bytes = Vec::new();
                bytes.extend_from_slice(&normalize_usize_be(test_case.len()));
                for &value in &test_case {
                    bytes.extend_from_slice(&normalize_usize_be(value));
                }
                bytes
            };
            let reconstructed_be = bytes_be_to_vec_usize(&be_bytes).unwrap();
            assert_eq!(test_case, reconstructed_be);
        }
    }
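Because the test constructs the wire bytes by hand, the decode direction can be sketched just as explicitly. The helper below inverts the layout built above (8-byte count, then one 8-byte word per element); bytes_le_to_vec_usize is assumed to behave equivalently, including rejecting truncated input, as the error-handling test further down asserts.

// Decode an 8-byte LE element count followed by `count` 8-byte LE words.
// Returns None on truncated input.
fn decode_vec_usize_le(bytes: &[u8]) -> Option<Vec<usize>> {
    let count = u64::from_le_bytes(bytes.get(..8)?.try_into().ok()?) as usize;
    let mut out = Vec::with_capacity(count);
    for i in 0..count {
        let start = 8 + i * 8;
        let word = bytes.get(start..start + 8)?;
        out.push(u64::from_le_bytes(word.try_into().ok()?) as usize);
    }
    Some(out)
}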
    #[test]
    fn test_str_to_fr() {
        // Test valid hex strings
        let test_cases = vec![
            ("0x0", 16, Fr::from(0u64)),
            ("0x1", 16, Fr::from(1u64)),
            ("0xff", 16, Fr::from(255u64)),
            ("0x100", 16, Fr::from(256u64)),
        ];

        for (input, radix, expected) in test_cases {
            let result = str_to_fr(input, radix).unwrap();
            assert_eq!(result, expected);
        }

        // Test invalid inputs
        assert!(str_to_fr("invalid", 16).is_err());
        assert!(str_to_fr("0x", 16).is_err());
    }

    #[test]
    fn test_endianness_differences() {
        let mut rng = thread_rng();
        let fr = Fr::rand(&mut rng);

        // LE and BE generally produce different byte representations
        let le_bytes = fr_to_bytes_le(&fr);
        let be_bytes = fr_to_bytes_be(&fr);

        // They should differ, unless the value happens to be byte-symmetric
        if le_bytes != be_bytes {
            // Verify they can both be reconstructed correctly
            let (reconstructed_le, _) = bytes_le_to_fr(&le_bytes);
            let (reconstructed_be, _) = bytes_be_to_fr(&be_bytes);
            assert_eq!(fr, reconstructed_le);
            assert_eq!(fr, reconstructed_be);
        }
    }

    #[test]
    fn test_error_handling() {
        // A length prefix of 0 with no payload is complete and must parse
        let valid_length_invalid_data = vec![0u8; 8]; // Length 0, no data needed
        assert!(bytes_le_to_vec_u8(&valid_length_invalid_data).is_ok());
        assert!(bytes_be_to_vec_u8(&valid_length_invalid_data).is_ok());
        assert!(bytes_le_to_vec_fr(&valid_length_invalid_data).is_ok());
        assert!(bytes_be_to_vec_fr(&valid_length_invalid_data).is_ok());
        assert!(bytes_le_to_vec_usize(&valid_length_invalid_data).is_ok());
        assert!(bytes_be_to_vec_usize(&valid_length_invalid_data).is_ok());

        // A non-zero length prefix with insufficient payload must fail
        let reasonable_length = {
            let mut bytes = vec![0u8; 8];
            bytes[0] = 1; // Length 1 as LE; reads as a huge length as BE
            bytes
        };
        // This should fail because there is not enough data for the vector elements
        assert!(bytes_le_to_vec_u8(&reasonable_length).is_err());
        assert!(bytes_be_to_vec_u8(&reasonable_length).is_err());
        assert!(bytes_le_to_vec_fr(&reasonable_length).is_err());
        assert!(bytes_be_to_vec_fr(&reasonable_length).is_err());
        assert!(bytes_le_to_vec_usize(&reasonable_length).is_err());
        assert!(bytes_be_to_vec_usize(&reasonable_length).is_err());

        // Test with valid data for a u8 vector
        let valid_u8_data_le = {
            let mut bytes = vec![0u8; 9];
            bytes[..8].copy_from_slice(&1u64.to_le_bytes()); // Length 1, little-endian
            bytes[8] = 42; // One byte of data
            bytes
        };
        let valid_u8_data_be = {
            let mut bytes = vec![0u8; 9];
            bytes[..8].copy_from_slice(&1u64.to_be_bytes()); // Length 1, big-endian
            bytes[8] = 42; // One byte of data
            bytes
        };
        assert!(bytes_le_to_vec_u8(&valid_u8_data_le).is_ok());
        assert!(bytes_be_to_vec_u8(&valid_u8_data_be).is_ok());
    }

    #[test]
    fn test_empty_vectors() {
        // Test empty vector serialization/deserialization
        let empty_fr: Vec<Fr> = vec![];
        let empty_u8: Vec<u8> = vec![];
        let empty_usize: Vec<usize> = vec![];

        // Test Fr vectors
        let le_fr_bytes = vec_fr_to_bytes_le(&empty_fr);
        let be_fr_bytes = vec_fr_to_bytes_be(&empty_fr);
        let (reconstructed_le_fr, _) = bytes_le_to_vec_fr(&le_fr_bytes).unwrap();
        let (reconstructed_be_fr, _) = bytes_be_to_vec_fr(&be_fr_bytes).unwrap();
        assert_eq!(empty_fr, reconstructed_le_fr);
        assert_eq!(empty_fr, reconstructed_be_fr);

        // Test u8 vectors
        let le_u8_bytes = vec_u8_to_bytes_le(&empty_u8);
        let be_u8_bytes = vec_u8_to_bytes_be(&empty_u8);
        let (reconstructed_le_u8, _) = bytes_le_to_vec_u8(&le_u8_bytes).unwrap();
        let (reconstructed_be_u8, _) = bytes_be_to_vec_u8(&be_u8_bytes).unwrap();
        assert_eq!(empty_u8, reconstructed_le_u8);
        assert_eq!(empty_u8, reconstructed_be_u8);

        // Test usize vectors
        let le_usize_bytes = {
            let mut bytes = Vec::new();
            bytes.extend_from_slice(&normalize_usize_le(0));
            bytes
        };
        let be_usize_bytes = {
            let mut bytes = Vec::new();
            bytes.extend_from_slice(&normalize_usize_be(0));
            bytes
        };
        let reconstructed_le_usize = bytes_le_to_vec_usize(&le_usize_bytes).unwrap();
        let reconstructed_be_usize = bytes_be_to_vec_usize(&be_usize_bytes).unwrap();
        assert_eq!(empty_usize, reconstructed_le_usize);
        assert_eq!(empty_usize, reconstructed_be_usize);
    }
}
rln/vendor/rln (vendored submodule)
Submodule rln/vendor/rln deleted from 616ee9b0b0
@@ -1,49 +0,0 @@
[package]
name = "semaphore-wrapper"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[features]
default = []
dylib = ["wasmer/dylib", "wasmer-engine-dylib", "wasmer-compiler-cranelift"]

[dependencies]
ark-bn254 = { version = "0.3.0" }
ark-circom = { git = "https://github.com/gakonst/ark-circom", features = ["circom-2"] }
ark-ec = { version = "0.3.0", default-features = false, features = ["parallel"] }
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", features = ["parallel"] }
ark-relations = { version = "0.3.0", default-features = false }
ark-std = { version = "0.3.0", default-features = false, features = ["parallel"] }
color-eyre = "0.5"
num-bigint = { version = "0.4", default-features = false, features = ["rand"] }
once_cell = "1.8"
primitive-types = "0.11.1"
rand = "0.8.4"
semaphore = { git = "https://github.com/worldcoin/semaphore-rs", rev = "d462a43" }
serde = "1.0"
thiserror = "1.0.0"
wasmer = { version = "2.0" }

[dev-dependencies]
rand_chacha = "0.3.1"
serde_json = "1.0.79"

[build-dependencies]
color-eyre = "0.5"
wasmer = { version = "2.0" }
wasmer-engine-dylib = { version = "2.2.1", optional = true }
wasmer-compiler-cranelift = { version = "2.2.1", optional = true }

[profile.release]
codegen-units = 1
lto = true
panic = "abort"
opt-level = 3

# Compilation profile for any non-workspace member.
# Dependencies are optimized, even in a dev build. This improves dev performance
# while having negligible impact on incremental build times.
[profile.dev.package."*"]
opt-level = 3
@@ -1,10 +0,0 @@
# Semaphore example package

This is basically a wrapper around (or copy of)
https://github.com/worldcoin/semaphore-rs, illustrating how a package such as
RLN can be structured.

The goal is also to provide a basic FFI around protocol.rs, which is currently
not in scope for that project.

See that project for more information.
Some files were not shown because too many files have changed in this diff.