Mirror of https://github.com/vacp2p/zerokit.git (synced 2026-01-08 21:28:11 -05:00)

Compare commits — 223 commits
Only the abbreviated SHA-1 hashes survived the mirroring; the author, date, and message columns of the original table are empty. The 223 commits, newest first:

2071346174, c0769395bd, 2fc079d633, 0ebeea50fd, c890bc83ad, 77a8d28965, 5c73af1130, c74ab11c82, a52cf84f46, 3160d9504d,
0b30ba112f, a2f9aaeeee, a198960cf3, 7f6f66bb13, a4bb3feb50, 2386e8732f, 44c6cf3cdd, eb8eedfdb4, 57b694db5d, 0b00c639a0,
7c801a804e, 9da80dd807, bcbd6a97af, 6965cf2852, 578e0507b3, bf1e184da9, 4473688efa, c80569d518, fd99b6af74, 65f53e3da3,
042f8a9739, baf474e747, dc0b31752c, 36013bf4ba, 211b2d4830, 5f4bcb74ce, de5fd36add, 19c0f551c8, 4133f1f8c3, 149096f7a6,
7023e85fce, a4cafa6adc, 4077357e3f, 84d9799d09, c576af8e62, 81470b9678, 9d4198c205, c60e0c33fc, ba467d370c, ffd5851d7d,
759d312680, fb0ffd74a3, 9d8372be39, de9c0d5072, 5c60ec7cce, 8793965650, 1930ca1610, 4b4169d7a7, 8a3e33be41, 7bb2444ba4,
00f8d039a8, e39f156fff, 8b04930583, b9d27039c3, 49e2517e15, 6621efd0bb, 4a74ff0d6c, fc823e7187, 0d5642492a, c4579e1917,
e6238fd722, 5540ddc993, d8f813bc2e, c6493bd10f, dd5edd6818, 85d71a5427, 7790954c4a, 820240d8c0, fe2b224981, d3d85c3e3c,
0005b1d61f, 4931b25237, 652cc3647e, 51939be4a8, cd60af5b52, 8581ac0b78, 5937a67ee6, d96eb59e92, a372053047, b450bfdb37,
0521c7349e, d91a5b3568, cf9dbb419d, aaa12db70d, 30d5f94181, ccd2ead847, 7669d72f9b, b5760697bc, 5c4e3fc13c, a92d6428d6,
e6db05f27c, 25f822e779, 0997d15d33, 8614b2a33a, b903d8d740, f73c83b571, a86b859b75, f8fc455d08, b51896c3a7, 0c5ef6abcf,
a1c292cb2e, c6c1bfde91, bf3d1d3309, 7110e00674, 99966d1a6e, 7d63912ace, ef1da42d94, ecb4d9307f, d1414a44c5, 6d58320077,
be2dccfdd0, 9d4ed68450, 5cf2b2e05e, 36158e8d08, c8cf033f32, 23d2331b78, c6b7a8c0a4, 4ec93c5e1f, c83c9902d7, 131cacab35,
8a365f0c9e, c561741339, 90fdfb9d78, 56b9285fef, be88a432d7, 8cfd83de54, 2793fe0e24, 0d35571215, 9cc86e526e, ecd056884c,
96497db7c5, ba8f011cc1, 9dc92ec1ce, 75d760c179, 72a3ce1770, b841e725a0, 3177e3ae74, 2c4de0484a, fcd4854037, d68dc1ad8e,
8c3d60ed01, c2d386cb74, 8f2c9e3586, 584c2cf4c0, 2c4b399126, c4b699ddff, 33d3732922, 654c77dcf6, 783f875d3b, fd7d7d9318,
4f98fd8028, 9931e901e5, 0fb7e0bbcb, 672287b77b, 2e868d6cbf, 39bea35a6d, 6ff4eeb237, 1f983bb232, 13a2c61355, 2bbb710e83,
8cd4baba8a, 9045e31006, 9e44bb64dc, bb7dfb80ee, c319f32a1e, bf2aa16a71, c423bdea61, 5eb98d4b33, b698153e28, a6c8090c93,
7ee7675d52, 062055dc5e, 55b00fd653, 62018b4eba, 48fa1b9b3d, a6145ab201, e21e9954ac, de5eb2066a, 7aba62ff51, cbf8c541c2,
5bcbc6c22f, 01fdba6d88, 1502315605, 92c431c98f, 005393d696, 89ea87a98a, 32f3202e9d, e69f6a67d8, 9e1355d36a, 3551435d60,
60e3369621, 284e51483c, 3427729f7e, e1c16c9c3f, bc69e25359, 6a7808d911, 25bcb7991b, 3d943bccb6, fba905f45d, 490206aa44,
afa4a09bba, b95b151a1c, b77facc5e9, 5d429ca031, 1df6c53ca0, 878c3c5c5f, a5aa4e8d4f, bbacc9dcce, c42fcfe644, 99a7eb003f,
14f41d5340, c401c0b21d, 4f08818d7a
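The missing metadata can be recovered from any clone of the mirrored repository. A minimal sketch (the example hash is the first one in the list above):

```bash
# Clone the repository and recover the author/date/message columns
# that the mirrored table above does not show.
git clone https://github.com/vacp2p/zerokit.git
cd zerokit

# Summarize a single listed commit:
git show --no-patch --format='%h %an %ad %s' 2071346174

# Or list the whole range at once:
git log --oneline -223
```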
.github/labels.yml (vendored) — 5 lines removed

```diff
@@ -90,11 +90,6 @@
   description: go-waku-productionization track (Waku Product)
   color: 9DEA79
 
-# Tracks within zk-WASM project
-- name: track:kickoff
-  description: Kickoff track (zk-WASM)
-  color: 06B6C8
-
 # Tracks within RAD project
 - name: track:waku-specs
   description: Waku specs track (RAD)
```
.github/workflows/ci.yml (vendored) — 233 lines changed

```diff
@@ -2,44 +2,221 @@ on:
   push:
     branches:
       - master
     paths-ignore:
       - "**.md"
       - "!.github/workflows/*.yml"
       - "!rln-wasm/**"
       - "!rln/src/**"
       - "!rln/resources/**"
       - "!utils/src/**"
   pull_request:
     types: [opened, synchronize, reopened, ready_for_review]
     paths-ignore:
       - "**.md"
       - "!.github/workflows/*.yml"
       - "!rln-wasm/**"
       - "!rln/src/**"
       - "!rln/resources/**"
       - "!utils/src/**"
 
-name: Tests
+name: CI
 
 jobs:
-  tests:
-    runs-on: ubuntu-latest
+  utils-test:
+    # skip tests on draft PRs
+    if: github.event_name == 'push' || (github.event_name == 'pull_request' && !github.event.pull_request.draft)
+    strategy:
+      matrix:
+        platform: [ubuntu-latest, macos-latest]
+        crate: [utils]
+    runs-on: ${{ matrix.platform }}
+    timeout-minutes: 60
+
+    name: Test - ${{ matrix.crate }} - ${{ matrix.platform }}
     steps:
       - name: Checkout sources
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4
       - name: Install stable toolchain
-        uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: stable
-          override: true
-      - name: Update git submodules
-        run: git submodule update --init --recursive
-      - name: cargo test
+        uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2
+      - name: Install dependencies
+        run: make installdeps
+      - name: Test utils
         run: |
-          cargo test
-  lint:
-    runs-on: ubuntu-latest
+          cargo make test --release
+        working-directory: ${{ matrix.crate }}
+
+  rln-test:
+    # skip tests on draft PRs
+    if: github.event_name == 'push' || (github.event_name == 'pull_request' && !github.event.pull_request.draft)
+    strategy:
+      matrix:
+        platform: [ubuntu-latest, macos-latest]
+        crate: [rln]
+        feature: ["default", "stateless"]
+    runs-on: ${{ matrix.platform }}
+    timeout-minutes: 60
+
+    name: Test - ${{ matrix.crate }} - ${{ matrix.platform }} - ${{ matrix.feature }}
     steps:
       - name: Checkout sources
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4
       - name: Install stable toolchain
-        uses: actions-rs/toolchain@v1
+        uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2
+      - name: Install dependencies
+        run: make installdeps
+      - name: Test rln
+        run: |
+          if [ ${{ matrix.feature }} == default ]; then
+            cargo make test --release
+          else
+            cargo make test_${{ matrix.feature }} --release
+          fi
+        working-directory: ${{ matrix.crate }}
+
+  rln-wasm-test:
+    # skip tests on draft PRs
+    if: github.event_name == 'push' || (github.event_name == 'pull_request' && !github.event.pull_request.draft)
+    strategy:
+      matrix:
+        platform: [ubuntu-latest, macos-latest]
+        crate: [rln-wasm]
+        feature: ["default"]
+    runs-on: ${{ matrix.platform }}
+    timeout-minutes: 60
+
+    name: Test - ${{ matrix.crate }} - ${{ matrix.platform }} - ${{ matrix.feature }}
+    steps:
+      - uses: actions/checkout@v4
+      - name: Install stable toolchain
+        uses: dtolnay/rust-toolchain@stable
+      - uses: Swatinem/rust-cache@v2
+      - name: Install dependencies
+        run: make installdeps
+      - name: Build rln-wasm
+        run: cargo make build
+        working-directory: ${{ matrix.crate }}
+      - name: Test rln-wasm on node
+        run: cargo make test --release
+        working-directory: ${{ matrix.crate }}
+      - name: Test rln-wasm on browser
+        run: cargo make test_browser --release
+        working-directory: ${{ matrix.crate }}
+
+  rln-wasm-parallel-test:
+    # skip tests on draft PRs
+    if: github.event_name == 'push' || (github.event_name == 'pull_request' && !github.event.pull_request.draft)
+    strategy:
+      matrix:
+        platform: [ubuntu-latest, macos-latest]
+        crate: [rln-wasm]
+        feature: ["parallel"]
+    runs-on: ${{ matrix.platform }}
+    timeout-minutes: 60
+
+    name: Test - ${{ matrix.crate }} - ${{ matrix.platform }} - ${{ matrix.feature }}
+    steps:
+      - uses: actions/checkout@v4
+      - name: Install nightly toolchain
+        uses: dtolnay/rust-toolchain@nightly
+        with:
+          components: rust-src
+          targets: wasm32-unknown-unknown
+      - uses: Swatinem/rust-cache@v2
+      - name: Install dependencies
+        run: make installdeps
+      - name: Build rln-wasm in parallel mode
+        run: cargo make build_parallel
+        working-directory: ${{ matrix.crate }}
+      - name: Test rln-wasm in parallel mode on browser
+        run: cargo make test_parallel --release
+        working-directory: ${{ matrix.crate }}
+
+  lint:
+    # run on both ready and draft PRs
+    if: github.event_name == 'push' || (github.event_name == 'pull_request' && !github.event.pull_request.draft)
+    strategy:
+      matrix:
+        # run lint tests only on ubuntu
+        platform: [ubuntu-latest]
+        crate: [rln, rln-wasm, utils]
+    runs-on: ${{ matrix.platform }}
+    timeout-minutes: 60
+
+    name: Lint - ${{ matrix.crate }} - ${{ matrix.platform }}
+    steps:
+      - name: Checkout sources
+        uses: actions/checkout@v4
+      - name: Install stable toolchain
+        uses: dtolnay/rust-toolchain@stable
         with:
-          profile: minimal
-          toolchain: stable
-          override: true
           components: rustfmt, clippy
-      - name: Update git submodules
-        run: git submodule update --init --recursive
-      - name: cargo fmt
-        run: cargo fmt --all -- --check
-      - name: cargo clippy
-        run: cargo clippy
-        # Currently not treating warnings as error, too noisy
-        # -- -D warnings
+      - name: Install wasm32 target
+        if: matrix.crate == 'rln-wasm'
+        run: rustup target add wasm32-unknown-unknown
+      - uses: Swatinem/rust-cache@v2
+      - name: Install dependencies
+        run: make installdeps
+      - name: Check formatting
+        if: success() || failure()
+        run: cargo fmt -- --check
+        working-directory: ${{ matrix.crate }}
+      - name: Check clippy wasm target
+        if: (success() || failure()) && (matrix.crate == 'rln-wasm')
+        run: |
+          cargo clippy --target wasm32-unknown-unknown --tests --release -- -D warnings
+        working-directory: ${{ matrix.crate }}
+      - name: Check clippy default feature
+        if: (success() || failure()) && (matrix.crate != 'rln-wasm')
+        run: |
+          cargo clippy --all-targets --tests --release -- -D warnings
+      - name: Check clippy stateless feature
+        if: (success() || failure()) && (matrix.crate == 'rln')
+        run: |
+          cargo clippy --all-targets --tests --release --features=stateless --no-default-features -- -D warnings
+        working-directory: ${{ matrix.crate }}
+
+  benchmark-utils:
+    # run only on ready PRs
+    if: github.event_name == 'pull_request' && !github.event.pull_request.draft
+    strategy:
+      matrix:
+        # run benchmark tests only on ubuntu
+        platform: [ubuntu-latest]
+        crate: [utils]
+    runs-on: ${{ matrix.platform }}
+    timeout-minutes: 60
+
+    name: Benchmark - ${{ matrix.crate }} - ${{ matrix.platform }}
+    steps:
+      - name: Checkout sources
+        uses: actions/checkout@v4
+      - uses: Swatinem/rust-cache@v2
+      - uses: boa-dev/criterion-compare-action@v3
+        with:
+          branchName: ${{ github.base_ref }}
+          cwd: ${{ matrix.crate }}
+
+  benchmark-rln:
+    # run only on ready PRs
+    if: github.event_name == 'pull_request' && !github.event.pull_request.draft
+    strategy:
+      matrix:
+        # run benchmark tests only on ubuntu
+        platform: [ubuntu-latest]
+        crate: [rln]
+        feature: ["default"]
+    runs-on: ${{ matrix.platform }}
+    timeout-minutes: 60
+
+    name: Benchmark - ${{ matrix.crate }} - ${{ matrix.platform }} - ${{ matrix.feature }}
+    steps:
+      - name: Checkout sources
+        uses: actions/checkout@v4
+      - uses: Swatinem/rust-cache@v2
+      - uses: boa-dev/criterion-compare-action@v3
+        with:
+          branchName: ${{ github.base_ref }}
+          cwd: ${{ matrix.crate }}
+          features: ${{ matrix.feature }}
```
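The reworked workflow drives everything through `make installdeps` and `cargo make`, so the same checks can be approximated locally. A sketch, assuming the repository's Makefile and the cargo-make tasks used by the jobs above:

```bash
# Approximate the CI jobs on a developer machine.
make installdeps

# utils-test and rln-test (default + stateless feature)
(cd utils && cargo make test --release)
(cd rln && cargo make test --release)
(cd rln && cargo make test_stateless --release)

# lint, per crate
(cd rln && cargo fmt -- --check)
(cd rln && cargo clippy --all-targets --tests --release -- -D warnings)
```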
.github/workflows/nightly-release.yml (vendored) — new file (179 lines)

```yaml
name: Nightly build
on:
  schedule:
    - cron: "0 0 * * *"
  workflow_dispatch:

jobs:
  linux:
    name: Linux build
    runs-on: ubuntu-latest
    strategy:
      matrix:
        features:
          - ["stateless"]
          - ["stateless", "parallel"]
          - ["pmtree-ft"]
          - ["pmtree-ft", "parallel"]
          - ["fullmerkletree"]
          - ["fullmerkletree", "parallel"]
          - ["optimalmerkletree"]
          - ["optimalmerkletree", "parallel"]
        target: [x86_64-unknown-linux-gnu, aarch64-unknown-linux-gnu]
    env:
      FEATURES_CARGO: ${{ join(matrix.features, ',') }}
      FEATURES_TAG: ${{ join(matrix.features, '-') }}
      TARGET: ${{ matrix.target }}
    steps:
      - name: Checkout sources
        uses: actions/checkout@v4
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
          target: ${{ env.TARGET }}
      - uses: Swatinem/rust-cache@v2
      - name: Install dependencies
        run: make installdeps
      - name: Cross build
        run: |
          cross build --release --target $TARGET --no-default-features --features "$FEATURES_CARGO" --workspace
          mkdir release
          cp target/$TARGET/release/librln* release/
          tar -czvf $TARGET-$FEATURES_TAG-rln.tar.gz release/
      - name: Upload archive artifact
        uses: actions/upload-artifact@v4
        with:
          name: ${{ env.TARGET }}-${{ env.FEATURES_TAG }}-archive
          path: ${{ env.TARGET }}-${{ env.FEATURES_TAG }}-rln.tar.gz
          retention-days: 2

  macos:
    name: MacOS build
    runs-on: macos-latest
    strategy:
      matrix:
        features:
          - ["stateless"]
          - ["stateless", "parallel"]
          - ["pmtree-ft"]
          - ["pmtree-ft", "parallel"]
          - ["fullmerkletree"]
          - ["fullmerkletree", "parallel"]
          - ["optimalmerkletree"]
          - ["optimalmerkletree", "parallel"]
        target: [x86_64-apple-darwin, aarch64-apple-darwin]
    env:
      FEATURES_CARGO: ${{ join(matrix.features, ',') }}
      FEATURES_TAG: ${{ join(matrix.features, '-') }}
      TARGET: ${{ matrix.target }}
    steps:
      - name: Checkout sources
        uses: actions/checkout@v4
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
          target: ${{ env.TARGET }}
      - uses: Swatinem/rust-cache@v2
      - name: Install dependencies
        run: make installdeps
      - name: Cross build
        run: |
          cross build --release --target $TARGET --no-default-features --features "$FEATURES_CARGO" --workspace
          mkdir release
          cp target/$TARGET/release/librln* release/
          tar -czvf $TARGET-$FEATURES_TAG-rln.tar.gz release/
      - name: Upload archive artifact
        uses: actions/upload-artifact@v4
        with:
          name: ${{ env.TARGET }}-${{ env.FEATURES_TAG }}-archive
          path: ${{ env.TARGET }}-${{ env.FEATURES_TAG }}-rln.tar.gz
          retention-days: 2

  rln-wasm:
    name: Build rln-wasm
    runs-on: ubuntu-latest
    strategy:
      matrix:
        feature:
          - "default"
          - "parallel"
          - "utils"
    steps:
      - name: Checkout sources
        uses: actions/checkout@v4
      - name: Install stable toolchain
        uses: dtolnay/rust-toolchain@stable
        with:
          targets: wasm32-unknown-unknown
      - name: Install nightly toolchain
        uses: dtolnay/rust-toolchain@nightly
        with:
          components: rust-src
          targets: wasm32-unknown-unknown
      - uses: Swatinem/rust-cache@v2
      - name: Install dependencies
        run: make installdeps
      - name: Build rln-wasm package
        run: |
          if [[ ${{ matrix.feature }} == "parallel" ]]; then
            env CARGO_TARGET_WASM32_UNKNOWN_UNKNOWN_RUSTFLAGS="-C target-feature=+atomics,+bulk-memory,+mutable-globals -C link-arg=--shared-memory -C link-arg=--max-memory=1073741824 -C link-arg=--import-memory -C link-arg=--export=__wasm_init_tls -C link-arg=--export=__tls_size -C link-arg=--export=__tls_align -C link-arg=--export=__tls_base" \
            rustup run nightly wasm-pack build --release --target web --scope waku \
            --features parallel -Z build-std=panic_abort,std
            sed -i.bak 's/rln-wasm/zerokit-rln-wasm-parallel/g' pkg/package.json && rm pkg/package.json.bak
          elif [[ ${{ matrix.feature }} == "utils" ]]; then
            wasm-pack build --release --target web --scope waku --no-default-features --features utils
            sed -i.bak 's/rln-wasm/zerokit-rln-wasm-utils/g' pkg/package.json && rm pkg/package.json.bak
          else
            wasm-pack build --release --target web --scope waku
            sed -i.bak 's/rln-wasm/zerokit-rln-wasm/g' pkg/package.json && rm pkg/package.json.bak
          fi

          jq '. + {keywords: ["zerokit", "rln", "wasm"]}' pkg/package.json > pkg/package.json.tmp && \
          mv pkg/package.json.tmp pkg/package.json

          mkdir release
          cp -r pkg/* release/
          tar -czvf rln-wasm-${{ matrix.feature }}.tar.gz release/
        working-directory: rln-wasm
      - name: Upload archive artifact
        uses: actions/upload-artifact@v4
        with:
          name: rln-wasm-${{ matrix.feature }}-archive
          path: rln-wasm/rln-wasm-${{ matrix.feature }}.tar.gz
          retention-days: 2

  prepare-prerelease:
    name: Prepare pre-release
    needs: [linux, macos, rln-wasm]
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          ref: master
      - name: Download artifacts
        uses: actions/download-artifact@v4
      - name: Delete tag
        uses: dev-drprasad/delete-tag-and-release@v0.2.1
        with:
          delete_release: true
          tag_name: nightly
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Create prerelease
        run: |
          start_tag=$(gh release list -L 2 --exclude-drafts | grep -v nightly | cut -d$'\t' -f3 | sed -n '1p')
          gh release create nightly --prerelease --target master \
            --title 'Nightly build ("master" branch)' \
            --generate-notes \
            --draft=false \
            --notes-start-tag $start_tag \
            *-archive/*.tar.gz
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Delete artifacts
        uses: geekyeggo/delete-artifact@v5
        with:
          failOnError: false
          name: |
            *-archive
```
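Any single matrix cell of the nightly build can be reproduced locally with the same commands the workflow runs. A sketch for one Linux feature combination (assumes `cross` is installed, as `make installdeps` arranges in CI):

```bash
# Mirror the "Cross build" step above for one target/feature pair.
TARGET=aarch64-unknown-linux-gnu
FEATURES_CARGO=stateless,parallel
FEATURES_TAG=stateless-parallel

cross build --release --target $TARGET --no-default-features \
  --features "$FEATURES_CARGO" --workspace
mkdir release
cp target/$TARGET/release/librln* release/
tar -czvf $TARGET-$FEATURES_TAG-rln.tar.gz release/
```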
.github/workflows/sync-labels.yml (vendored) — 2 lines changed

```diff
@@ -9,7 +9,7 @@ jobs:
   build:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
       - uses: micnncim/action-label-syncer@v1
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```
.gitignore (vendored) — 24 lines changed

```diff
@@ -1,19 +1,29 @@
 # Common files to ignore in Rust projects
 .DS_Store
 .idea
 *.log
 tmp/
 
-# Generated by Cargo
-# will have compiled files and executables
-debug/
-target/
+# Generated by Cargo will have compiled files and executables
+/target
 
-# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
-# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
-Cargo.lock
+# Generated by Nix
+result
 
 # These are backup files generated by rustfmt
 **/*.rs.bk
 
 # MSVC Windows builds of rustc generate these, which store debugging information
 *.pdb
+
+# FFI C examples
+rln/ffi_c_examples/main
+rln/ffi_c_examples/rln.h
+rln/ffi_c_examples/database
+
+# FFI Nim examples
+rln/ffi_nim_examples/main
+rln/ffi_nim_examples/database
+
+# Vscode
+.vscode
```
.gitmodules (vendored) — deleted (8 lines)

```diff
@@ -1,8 +0,0 @@
-[submodule "rln/vendor/rln"]
-	path = rln/vendor/rln
-	ignore = dirty
-	url = https://github.com/Rate-Limiting-Nullifier/rln_circuits
-[submodule "semaphore/vendor/semaphore"]
-	path = semaphore/vendor/semaphore
-	ignore = dirty
-	url = https://github.com/appliedzkp/semaphore.git
```
CONTRIBUTING.md — new file (197 lines)

# Contributing to Zerokit

Thank you for your interest in contributing to Zerokit!
This guide will discuss how the Zerokit team handles [Commits](#commits),
[Pull Requests](#pull-requests) and [Merging](#merging).

**Note:** We won't force external contributors to follow this verbatim,
but following these guidelines makes it much easier for us to accept your contributions.

## Getting Started

1. Fork the repository
2. Create a feature branch: `git checkout -b fix/your-bug-fix` or `git checkout -b feat/your-feature-name`
3. Make your changes following our guidelines
4. Ensure relevant tests pass (see [testing guidelines](#building-and-testing))
5. Commit your changes (signed commits are highly encouraged - see [commit guidelines](#commits))
6. Push and create a Pull Request

## Development Setup

### Prerequisites

Install the required dependencies:

```bash
make installdeps
```

Or use Nix:

```bash
nix develop
```

### Building and Testing

```bash
# Build all crates
make build

# Run standard tests
make test

# Module-specific testing
cd rln && cargo make test_stateless      # Test stateless features
cd rln-wasm && cargo make test_browser   # Test in browser headless mode
cd rln-wasm && cargo make test_parallel  # Test parallel features
```

### Tools

We recommend using the [markdownlint extension](https://marketplace.visualstudio.com/items?itemName=DavidAnson.vscode-markdownlint)
for VS Code to maintain consistent documentation formatting.

## Commits

We want to keep our commits small and focused.
This allows for easily reviewing individual commits and/or
splitting up pull requests when they grow too big.
Additionally, this allows us to merge smaller changes quicker and release more often.

**All commits must be GPG signed.**
This ensures the authenticity and integrity of contributions.

### Conventional Commits

When making the commit, write the commit message
following the [Conventional Commits (v1.0.0)](https://www.conventionalcommits.org/en/v1.0.0/) specification.
Following this convention allows us to provide an automated release process
that also generates a detailed Changelog.

As described by the specification, our commit messages should be written as:

```markdown
<type>[optional scope]: <description>

[optional body]

[optional footer(s)]
```

Some examples of this pattern include:

```markdown
feat(rln): add parallel witness calculation support
```

```markdown
fix(rln-wasm): resolve memory leak in browser threading
```

```markdown
docs: update RLN protocol flow documentation
```

#### Scopes

Use scopes to improve the Changelog:

- `rln` - Core RLN implementation
- `rln-cli` - Command-line interface
- `rln-wasm` - WebAssembly bindings
- `utils` - Cryptographic utilities (Merkle trees, Poseidon hash)
- `ci` - Continuous integration

#### Breaking Changes

Mark breaking changes by adding `!` after the type:

```markdown
feat(rln)!: change proof generation API
```

## Pull Requests

Before creating a pull request, search for related issues.
If none exist, create an issue describing the problem you're solving.

### CI Flow

Our continuous integration automatically runs when you create a Pull Request:

- **Build verification**: All crates compile successfully
- **Test execution**: Comprehensive testing across all modules and feature combinations
- **Code formatting**: `cargo fmt` compliance
- **Linting**: `cargo clippy` checks
- **Cross-platform builds**: Testing on multiple platforms

Ensure the following commands pass before submitting:

```bash
# Format code
cargo fmt --all

# Check for common mistakes
cargo clippy --all-targets

# Run all tests
make test
```

### Adding Tests

Include tests for new functionality:

- **Unit tests** for specific functions
- **Integration tests** for broader functionality
- **WASM tests** for browser compatibility

### Typos and Small Changes

For minor fixes like typos, please report them as issues instead of opening PRs.
This helps us manage resources effectively and ensures meaningful contributions.

## Merging

We use "squash merging" for all pull requests.
This combines all commits into one commit, so keep pull requests small and focused.

### Requirements

- CI checks must pass
- At least one maintainer review and approval
- All review feedback addressed

### Squash Guidelines

When squashing, update the commit title to be a proper Conventional Commit and
include any other relevant commits in the body:

```markdown
feat(rln): implement parallel witness calculation (#123)

fix(tests): resolve memory leak in test suite
chore(ci): update rust toolchain version
```

## Roadmap Alignment

Please refer to our [project roadmap](https://roadmap.vac.dev/) for current development priorities.
Consider how your changes align with these strategic goals when contributing.

## Getting Help

- **Issues**: Create a GitHub issue for bugs or feature requests
- **Discussions**: Use GitHub Discussions for questions
- **Documentation**: Check existing docs and unit tests for examples

## License

By contributing to Zerokit, you agree that your contributions will be licensed under both MIT and
Apache 2.0 licenses, consistent with the project's dual licensing.

## Additional Resources

- [Conventional Commits Guide](https://www.conventionalcommits.org/en/v1.0.0/)
- [Project GitHub Repository](https://github.com/vacp2p/zerokit)
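Putting the guide together, a typical contribution might look like the following sketch (the branch name and commit message are illustrative; `git commit -S` produces the GPG-signed commit the guide requires):

```bash
# Example contribution flow following the guide above.
git checkout -b feat/parallel-witness

# Run the checks CI will enforce.
cargo fmt --all
cargo clippy --all-targets
make test

# Signed, Conventional Commit; then open a PR from the pushed branch.
git commit -S -m "feat(rln): add parallel witness calculation support"
git push origin feat/parallel-witness
```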
Cargo.lock (generated) — new file (1,893 lines). File diff suppressed because it is too large.
Cargo.toml — 14 lines changed

```diff
@@ -1,6 +1,10 @@
 [workspace]
-members = [
-    "multiplier",
-    "semaphore",
-    "rln",
-]
+members = ["rln", "utils"]
+exclude = ["rln-cli", "rln-wasm"]
+resolver = "2"
+
+# Compilation profile for any non-workspace member.
+# Dependencies are optimized, even in a dev build.
+# This improves dev performance while having negligible impact on incremental build times.
+[profile.dev.package."*"]
+opt-level = 3
```
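A note on the new layout: `cargo` commands at the workspace root now touch only `rln` and `utils`, while the excluded crates build from their own directories, e.g.:

```bash
# Workspace members build from the root:
cargo build --workspace           # builds rln and utils

# Excluded crates build on their own (as CI does):
cd rln-wasm && cargo make build
```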
Cross.toml — new file (35 lines)

```toml
[target.x86_64-pc-windows-gnu]
image = "ghcr.io/cross-rs/x86_64-pc-windows-gnu:latest"

[target.aarch64-unknown-linux-gnu]
image = "ghcr.io/cross-rs/aarch64-unknown-linux-gnu:latest"

[target.x86_64-unknown-linux-gnu]
image = "ghcr.io/cross-rs/x86_64-unknown-linux-gnu:latest"

[target.arm-unknown-linux-gnueabi]
image = "ghcr.io/cross-rs/arm-unknown-linux-gnueabi:latest"

[target.i686-pc-windows-gnu]
image = "ghcr.io/cross-rs/i686-pc-windows-gnu:latest"

[target.i686-unknown-linux-gnu]
image = "ghcr.io/cross-rs/i686-unknown-linux-gnu:latest"

[target.arm-unknown-linux-gnueabihf]
image = "ghcr.io/cross-rs/arm-unknown-linux-gnueabihf:latest"

[target.mips-unknown-linux-gnu]
image = "ghcr.io/cross-rs/mips-unknown-linux-gnu:latest"

[target.mips64-unknown-linux-gnuabi64]
image = "ghcr.io/cross-rs/mips64-unknown-linux-gnuabi64:latest"

[target.mips64el-unknown-linux-gnuabi64]
image = "ghcr.io/cross-rs/mips64el-unknown-linux-gnuabi64:latest"

[target.mipsel-unknown-linux-gnu]
image = "ghcr.io/cross-rs/mipsel-unknown-linux-gnu:latest"

[target.aarch64-linux-android]
image = "ghcr.io/cross-rs/aarch64-linux-android:edge"
```
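`Cross.toml` only pins a container image per target; the target itself is still selected on the command line, for example:

```bash
# Cross-compile the rln crate inside the pinned aarch64 image above.
cross build --release --target aarch64-unknown-linux-gnu -p rln
```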
LICENSE-APACHE — new file (203 lines)

```text
Copyright (c) 2022 Vac Research

Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.

"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.

"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.

"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.

"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.

"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).

"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.

"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."

"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.

2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.

3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.

4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:

(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and

(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and

(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and

(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.

You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.

8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work.

To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright [yyyy] [name of copyright owner]

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
```
LICENSE-MIT — new file (25 lines)

```text
Copyright (c) 2022 Vac Research

Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:

The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
```
Makefile — new file (39 lines)

```make
.PHONY: all installdeps build test bench clean

all: installdeps build

.fetch-submodules:
	@git submodule update --init --recursive

.pre-build: .fetch-submodules
	@cargo install cargo-make
ifdef CI
	@cargo install cross --git https://github.com/cross-rs/cross.git --rev 1511a28
endif

installdeps: .pre-build
ifeq ($(shell uname),Darwin)
	@brew install ninja
else ifeq ($(shell uname),Linux)
	@if [ -f /etc/os-release ] && grep -q "ID=nixos" /etc/os-release; then \
		echo "Detected NixOS, skipping apt installation."; \
	else \
		sudo apt update; \
		sudo apt install -y cmake ninja-build; \
	fi
endif
	@which wasm-pack > /dev/null && wasm-pack --version | grep -q "0.13.1" || cargo install wasm-pack --version=0.13.1
	@test -s "$$HOME/.nvm/nvm.sh" || curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.40.2/install.sh | bash
	@bash -c '. "$$HOME/.nvm/nvm.sh"; [ "$$(node -v 2>/dev/null)" = "v22.14.0" ] || nvm install 22.14.0; nvm use 22.14.0; nvm alias default 22.14.0'

build: installdeps
	@cargo make build

test: build
	@cargo make test

bench: build
	@cargo make bench

clean:
	@cargo clean
```
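Typical usage of the targets above (the version pins for wasm-pack and Node come from the Makefile itself):

```bash
make installdeps   # toolchain deps, wasm-pack 0.13.1, Node 22.14.0 via nvm
make test          # builds first, then runs `cargo make test`
make bench         # criterion benchmarks
make clean
```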
Makefile.toml — new file (2 lines)

```toml
[env]
CARGO_MAKE_EXTEND_WORKSPACE_MAKEFILE = true
```
README.md — 86 lines changed

````diff
@@ -1,19 +1,87 @@
 # Zerokit
 
-A set of Zero Knowledge modules, written in Rust and designed to be used in other system programming environments.
+[](https://crates.io/crates/rln)
+[](https://github.com/vacp2p/zerokit/actions)
+[](https://opensource.org/licenses/MIT)
+[](https://opensource.org/licenses/Apache-2.0)
 
-## Initial scope
+A collection of Zero Knowledge modules written in Rust and designed to be used in other system programming environments.
 
-Focus on RLN and being able to use [Circom](https://iden3.io/circom) based
-version through ark-circom, as opposed to the native one that currently exists
-in Rust.
+## Overview
+
+Zerokit provides zero-knowledge cryptographic primitives with a focus on performance, security, and usability.
+The current focus is on the Rate-Limiting Nullifier [RLN](https://github.com/Rate-Limiting-Nullifier) implementation.
+
+The current implementation is based on the following
+[specification](https://rfc.vac.dev/vac/raw/rln-v2)
+and focuses on RLNv2, which allows setting a rate limit on the number of messages a user can send.
+
+## Features
+
+- **RLN Implementation**: Efficient Rate-Limiting Nullifier using zkSNARK
+- **Circom Compatibility**: Uses Circom-based circuits for RLN
+- **Cross-Platform**: Support for multiple architectures with cross-compilation
+- **FFI-Friendly**: Easy to integrate with other languages
+- **WASM Support**: Can be compiled to WebAssembly for web applications
+
+## Architecture
+
+Zerokit currently focuses on the RLN (Rate-Limiting Nullifier) implementation using [Circom](https://iden3.io/circom)
+circuits through ark-circom, providing an alternative to existing native Rust implementations.
+
+## Build and Test
+
+### Install Dependencies
+
+```bash
+make installdeps
+```
+
+#### Use Nix to install dependencies
+
+```bash
+nix develop
+```
+
+### Build and Test All Crates
+
+```bash
+make build
+make test
+```
+
+## Release Assets
+
+We use [`cross-rs`](https://github.com/cross-rs/cross) to cross-compile and generate release assets:
+
+```bash
+# Example: Build for specific target
+cross build --target x86_64-unknown-linux-gnu --release -p rln
+```
+
+## Used By
+
+Zerokit powers zero-knowledge functionality in:
+
+- [**nwaku**](https://github.com/waku-org/nwaku) - Nim implementation of the Waku v2 protocol
+- [**js-rln**](https://github.com/waku-org/js-rln) - JavaScript bindings for RLN
 
 ## Acknowledgements
 
-- Uses [ark-circom](https://github.com/gakonst/ark-circom), Rust wrapper around Circom.
-- Inspired by [Applied ZKP](https://zkp.science/) group work, including [zk-kit](https://github.com/appliedzkp/zk-kit)
+- Uses [ark-circom](https://github.com/gakonst/ark-circom) for zkey and Groth16 proof generation
+- Witness calculation based on [circom-witnesscalc](https://github.com/iden3/circom-witnesscalc) by iden3.
+  The execution graph file used by this code has been generated by means of the same iden3 software.
+- Inspired by Applied ZKP group work, e.g. [zk-kit](https://github.com/appliedzkp/zk-kit).
 
-- [RLN library](https://github.com/kilic/rln) written in Rust based on Bellman.
+> [!IMPORTANT]
+> The circom-witnesscalc code fragments have been borrowed instead of depending on this crate,
+> because its types of input and output data were incompatible with the corresponding zerokit code fragments,
+> and circom-witnesscalc has some dependencies, which are redundant for our purpose.
 
-- [semaphore-rs](https://github.com/worldcoin/semaphore-rs) written in Rust based on ark-circom.
+## Documentation
+
+For detailed documentation on each module:
+
+```bash
+cargo doc --open
+```
````
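`cargo doc` also accepts a package filter, which is handy when only one crate's API is of interest:

```bash
# Document a single crate rather than the whole workspace.
cargo doc -p rln --open
```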
flake.lock (generated) — new file (48 lines)

```json
{
  "nodes": {
    "nixpkgs": {
      "locked": {
        "lastModified": 1757590060,
        "narHash": "sha256-EWwwdKLMZALkgHFyKW7rmyhxECO74+N+ZO5xTDnY/5c=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "0ef228213045d2cdb5a169a95d63ded38670b293",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "0ef228213045d2cdb5a169a95d63ded38670b293",
        "type": "github"
      }
    },
    "root": {
      "inputs": {
        "nixpkgs": "nixpkgs",
        "rust-overlay": "rust-overlay"
      }
    },
    "rust-overlay": {
      "inputs": {
        "nixpkgs": [
          "nixpkgs"
        ]
      },
      "locked": {
        "lastModified": 1748399823,
        "narHash": "sha256-kahD8D5hOXOsGbNdoLLnqCL887cjHkx98Izc37nDjlA=",
        "owner": "oxalica",
        "repo": "rust-overlay",
        "rev": "d68a69dc71bc19beb3479800392112c2f6218159",
        "type": "github"
      },
      "original": {
        "owner": "oxalica",
        "repo": "rust-overlay",
        "type": "github"
      }
    }
  },
  "root": "root",
  "version": 7
}
```
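Both inputs are pinned to exact revisions, which can be inspected without building anything. Note that nixpkgs is locked to a fixed rev in `flake.nix` itself, so only `rust-overlay` floats:

```bash
# Print the pinned inputs recorded in flake.lock.
nix flake metadata
```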
flake.nix — new file (83 lines)

```nix
{
  description = "A flake for building zerokit";

  nixConfig = {
    extra-substituters = [ "https://nix-cache.status.im/" ];
    extra-trusted-public-keys = [ "nix-cache.status.im-1:x/93lOfLU+duPplwMSBR+OlY4+mo+dCN7n0mr4oPwgY=" ];
  };

  inputs = {
    # Version 24.11
    nixpkgs.url = "github:NixOS/nixpkgs?rev=0ef228213045d2cdb5a169a95d63ded38670b293";
    rust-overlay = {
      url = "github:oxalica/rust-overlay";
      inputs.nixpkgs.follows = "nixpkgs";
    };
  };

  outputs = { self, nixpkgs, rust-overlay }:
    let
      stableSystems = [
        "x86_64-linux" "aarch64-linux"
        "x86_64-darwin" "aarch64-darwin"
        "x86_64-windows" "i686-linux"
        "i686-windows"
      ];
      forAllSystems = nixpkgs.lib.genAttrs stableSystems;

      pkgsFor = forAllSystems (
        system: import nixpkgs {
          inherit system;
          config = {
            android_sdk.accept_license = true;
            allowUnfree = true;
          };
          overlays = [
            (import rust-overlay)
            (f: p: { inherit rust-overlay; })
          ];
        }
      );
    in rec
    {
      packages = forAllSystems (system: let
        pkgs = pkgsFor.${system};
        buildPackage = pkgs.callPackage ./nix/default.nix;
        buildRln = (buildPackage { src = self; project = "rln"; }).override;
      in rec {
        rln = buildRln { };

        rln-linux-arm64 = buildRln {
          target-platform = "aarch64-multiplatform";
          rust-target = "aarch64-unknown-linux-gnu";
        };

        rln-android-arm64 = buildRln {
          target-platform = "aarch64-android-prebuilt";
          rust-target = "aarch64-linux-android";
        };

        rln-ios-arm64 = buildRln {
          target-platform = "aarch64-darwin";
          rust-target = "aarch64-apple-ios";
        };

        # TODO: Remove legacy name for RLN android library
        zerokit-android-arm64 = rln-android-arm64;

        default = rln;
      });

      devShells = forAllSystems (system: let
        pkgs = pkgsFor.${system};
      in {
        default = pkgs.mkShell {
          buildInputs = with pkgs; [
            git cmake cargo-make rustup
            binaryen ninja gnuplot
            rust-bin.stable.latest.default
          ];
        };
      });
    };
}
```
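The package names defined above are addressable directly as flake outputs, for example:

```bash
nix build .#rln                # native build, also the default package
nix build .#rln-linux-arm64    # aarch64-unknown-linux-gnu cross build
nix build .#rln-android-arm64  # aarch64-linux-android
nix develop                    # shell with cargo-make, ninja, binaryen, ...
```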
multiplier/Cargo.toml — deleted (35 lines). This and the following deletions remove the `multiplier` example crate; file paths are inferred from the crate name and module structure.

```toml
[package]
name = "multiplier"
version = "0.1.0"
edition = "2018"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]

# WASM operations
# wasmer = { version = "2.0" }
# fnv = { version = "1.0.3", default-features = false }
# num = { version = "0.4.0" }
# num-traits = { version = "0.2.0", default-features = false }
num-bigint = { version = "0.4", default-features = false, features = ["rand"] }

# ZKP Generation
ark-ec = { version = "0.3.0", default-features = false, features = ["parallel"] }
# ark-ff = { version = "0.3.0", default-features = false, features = ["parallel", "asm"] }
ark-std = { version = "0.3.0", default-features = false, features = ["parallel"] }
ark-bn254 = { version = "0.3.0" }
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", features = ["parallel"] }
# ark-poly = { version = "^0.3.0", default-features = false, features = ["parallel"] }
ark-relations = { version = "0.3.0", default-features = false }
ark-serialize = { version = "0.3.0", default-features = false }

ark-circom = { git = "https://github.com/gakonst/ark-circom", features = ["circom-2"] }

# error handling
# thiserror = "1.0.26"
color-eyre = "0.5"

# decoding of data
# hex = "0.4.3"
# byteorder = "1.4.3"
```
multiplier/README.md — deleted (13 lines)

````markdown
# Multiplier example

Example wrapper around a basic Circom circuit to test Circom 2 integration
through ark-circom and FFI.

# FFI

To generate C or Nim bindings from Rust FFI, use `cbindgen` or `nbindgen`:

```
cbindgen . -o target/multiplier.h
nbindgen . -o target/multiplier.nim
```
````
Two binary files not shown.
@@ -1,77 +0,0 @@
use crate::public::Multiplier;
use std::slice;

/// Buffer struct is taken from
/// https://github.com/celo-org/celo-threshold-bls-rs/blob/master/crates/threshold-bls-ffi/src/ffi.rs
///
/// Also heavily inspired by https://github.com/kilic/rln/blob/master/src/ffi.rs

#[repr(C)]
#[derive(Clone, Debug, PartialEq)]
pub struct Buffer {
    pub ptr: *const u8,
    pub len: usize,
}

impl From<&[u8]> for Buffer {
    fn from(src: &[u8]) -> Self {
        Self {
            ptr: &src[0] as *const u8,
            len: src.len(),
        }
    }
}

impl<'a> From<&Buffer> for &'a [u8] {
    fn from(src: &Buffer) -> &'a [u8] {
        unsafe { slice::from_raw_parts(src.ptr, src.len) }
    }
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn new_circuit(ctx: *mut *mut Multiplier) -> bool {
    println!("multiplier ffi: new");
    let mul = Multiplier::new();

    unsafe { *ctx = Box::into_raw(Box::new(mul)) };

    true
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn prove(ctx: *const Multiplier, output_buffer: *mut Buffer) -> bool {
    println!("multiplier ffi: prove");
    let mul = unsafe { &*ctx };
    let mut output_data: Vec<u8> = Vec::new();

    match mul.prove(&mut output_data) {
        Ok(proof_data) => proof_data,
        Err(_) => return false,
    };
    unsafe { *output_buffer = Buffer::from(&output_data[..]) };
    std::mem::forget(output_data);
    true
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn verify(
    ctx: *const Multiplier,
    proof_buffer: *const Buffer,
    result_ptr: *mut u32,
) -> bool {
    println!("multiplier ffi: verify");
    let mul = unsafe { &*ctx };
    let proof_data = <&[u8]>::from(unsafe { &*proof_buffer });
    if match mul.verify(proof_data) {
        Ok(verified) => verified,
        Err(_) => return false,
    } {
        unsafe { *result_ptr = 0 };
    } else {
        unsafe { *result_ptr = 1 };
    };
    true
}
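The FFI above follows an out-parameter convention: each extern function returns a `bool` success flag and writes its real result through a raw pointer, and `prove` hands ownership of the proof bytes to the caller by leaking the backing `Vec` with `mem::forget`. The following is a minimal sketch of a caller-side round trip, written in Rust but mirroring what a C consumer of the cbindgen-generated header would do; it assumes the crate is importable as `multiplier`, with the `ffi` and `public` modules declared in the `lib.rs` below.

```rust
// Illustrative sketch only, not part of the crate.
use multiplier::ffi::{new_circuit, prove, verify, Buffer};
use multiplier::public::Multiplier;
use std::ptr;

fn main() {
    // new_circuit writes a heap-allocated Multiplier through an out-pointer.
    let mut ctx: *mut Multiplier = ptr::null_mut();
    assert!(new_circuit(&mut ctx));

    // prove fills a Buffer whose memory was deliberately leaked with
    // mem::forget, so the caller owns it (no free function is exposed here).
    let mut proof = Buffer { ptr: ptr::null(), len: 0 };
    assert!(prove(ctx, &mut proof));

    // verify reports through a second out-pointer: 0 = verified, 1 = not.
    let mut result: u32 = u32::MAX;
    assert!(verify(ctx, &proof, &mut result));
    assert_eq!(result, 0);
}
```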
@@ -1,2 +0,0 @@
pub mod ffi;
pub mod public;
@@ -1,48 +0,0 @@
use ark_circom::{CircomBuilder, CircomConfig};
use ark_std::rand::thread_rng;
use color_eyre::Result;

use ark_bn254::Bn254;
use ark_groth16::{
    create_random_proof as prove, generate_random_parameters, prepare_verifying_key, verify_proof,
};

fn groth16_proof_example() -> Result<()> {
    let cfg = CircomConfig::<Bn254>::new(
        "./resources/circom2_multiplier2.wasm",
        "./resources/circom2_multiplier2.r1cs",
    )?;

    let mut builder = CircomBuilder::new(cfg);
    builder.push_input("a", 3);
    builder.push_input("b", 11);

    // create an empty instance for setting it up
    let circom = builder.setup();

    let mut rng = thread_rng();
    let params = generate_random_parameters::<Bn254, _, _>(circom, &mut rng)?;

    let circom = builder.build()?;

    let inputs = circom.get_public_inputs().unwrap();

    let proof = prove(circom, &params, &mut rng)?;

    let pvk = prepare_verifying_key(&params.vk);

    let verified = verify_proof(&pvk, &proof, &inputs)?;

    assert!(verified);

    Ok(())
}

fn main() {
    println!("Hello, world!");

    match groth16_proof_example() {
        Ok(_) => println!("Success"),
        Err(_) => println!("Error"),
    }
}
@@ -1,98 +0,0 @@
use ark_circom::{CircomBuilder, CircomCircuit, CircomConfig};
use ark_std::rand::thread_rng;

use ark_bn254::Bn254;
use ark_groth16::{
    create_random_proof as prove, generate_random_parameters, prepare_verifying_key, verify_proof,
    Proof, ProvingKey,
};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
// , SerializationError};

use std::io::{self, Read, Write};

pub struct Multiplier {
    circom: CircomCircuit<Bn254>,
    params: ProvingKey<Bn254>,
}

impl Multiplier {
    // TODO Break this apart here
    pub fn new() -> Multiplier {
        let cfg = CircomConfig::<Bn254>::new(
            "./resources/circom2_multiplier2.wasm",
            "./resources/circom2_multiplier2.r1cs",
        )
        .unwrap();

        let mut builder = CircomBuilder::new(cfg);
        builder.push_input("a", 3);
        builder.push_input("b", 11);

        // create an empty instance for setting it up
        let circom = builder.setup();

        let mut rng = thread_rng();

        let params = generate_random_parameters::<Bn254, _, _>(circom, &mut rng).unwrap();

        let circom = builder.build().unwrap();

        //let inputs = circom.get_public_inputs().unwrap();

        Multiplier { circom, params }
    }

    // TODO Input Read
    pub fn prove<W: Write>(&self, result_data: W) -> io::Result<()> {
        let mut rng = thread_rng();

        // XXX: There's probably a better way to do this
        let circom = self.circom.clone();
        let params = self.params.clone();

        let proof = prove(circom, &params, &mut rng).unwrap();

        // XXX: Unclear if this is different from other serialization(s)
        let _ = proof.serialize(result_data).unwrap();

        Ok(())
    }

    pub fn verify<R: Read>(&self, input_data: R) -> io::Result<bool> {
        let proof = Proof::deserialize(input_data).unwrap();

        let pvk = prepare_verifying_key(&self.params.vk);

        // XXX Part of input data?
        let inputs = self.circom.get_public_inputs().unwrap();

        let verified = verify_proof(&pvk, &proof, &inputs).unwrap();

        Ok(verified)
    }
}

impl Default for Multiplier {
    fn default() -> Self {
        Self::new()
    }
}

#[test]
fn multiplier_proof() {
    let mul = Multiplier::new();
    //let inputs = mul.circom.get_public_inputs().unwrap();

    let mut output_data: Vec<u8> = Vec::new();
    let _ = mul.prove(&mut output_data);

    let proof_data = &output_data[..];

    // XXX Pass as arg?
    //let pvk = prepare_verifying_key(&mul.params.vk);

    let verified = mul.verify(proof_data).unwrap();

    assert!(verified);
}
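For comparison with the FFI path, the same round trip can be driven through the `Multiplier` API directly, as the `multiplier_proof` test above does. A minimal sketch, assuming the circuit resources referenced by `Multiplier::new` exist relative to the working directory:

```rust
// Sketch: prove into any io::Write sink, verify from any io::Read source.
use multiplier::public::Multiplier;

fn main() {
    let mul = Multiplier::new();

    let mut proof_bytes: Vec<u8> = Vec::new();
    mul.prove(&mut proof_bytes).expect("proving failed");

    let verified = mul.verify(&proof_bytes[..]).expect("verification failed");
    assert!(verified);
}
```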
nix/default.nix (Normal file, 64 lines)
@@ -0,0 +1,64 @@
{
  pkgs,
  rust-overlay,
  project,
  src ? ../.,
  release ? true,
  target-platform ? null,
  rust-target ? null,
  features ? null,
}:

let
  # Use cross-compilation if target-platform is specified.
  targetPlatformPkgs = if target-platform != null
    then pkgs.pkgsCross.${target-platform}
    else pkgs;

  rust-bin = rust-overlay.lib.mkRustBin { } targetPlatformPkgs.buildPackages;

  # Use Rust and Cargo versions from rust-overlay.
  rustPlatform = targetPlatformPkgs.makeRustPlatform {
    cargo = rust-bin.stable.latest.minimal;
    rustc = rust-bin.stable.latest.minimal;
  };
in rustPlatform.buildRustPackage {
  pname = "zerokit";
  version = if src ? rev then src.rev else "nightly";

  # Improve caching of sources
  src = builtins.path { path = src; name = "zerokit"; };

  cargoLock = {
    lockFile = src + "/Cargo.lock";
    allowBuiltinFetchGit = true;
  };

  nativeBuildInputs = [ pkgs.rust-cbindgen ];

  doCheck = false;

  CARGO_HOME = "/tmp";

  buildPhase = ''
    cargo build --lib \
      ${if release then "--release" else ""} \
      ${if rust-target != null then "--target=${rust-target}" else ""} \
      ${if features != null then "--features=${features}" else ""} \
      --manifest-path ${project}/Cargo.toml
  '';

  installPhase = ''
    set -eu
    mkdir -p $out/lib
    find target -type f -name 'librln.*' -not -path '*/deps/*' -exec cp -v '{}' "$out/lib/" \;
    mkdir -p $out/include
    cbindgen ${src}/rln -l c > "$out/include/rln.h"
  '';

  meta = with pkgs.lib; {
    description = "Zerokit";
    license = licenses.mit;
  };
}
rln-cli/.gitignore (vendored, Normal file, 20 lines)
@@ -0,0 +1,20 @@
# Common files to ignore in Rust projects
.DS_Store
.idea
*.log
tmp/

# Generated by Cargo will have compiled files and executables
/target

# Generated by rln-cli
/database

# Generated by Nix
result

# These are backup files generated by rustfmt
**/*.rs.bk

# MSVC Windows builds of rustc generate these, which store debugging information
*.pdb
rln-cli/Cargo.lock (generated, Normal file, 1647 lines)
File diff suppressed because it is too large.
rln-cli/Cargo.toml (Normal file, 27 lines)
@@ -0,0 +1,27 @@
[package]
name = "rln-cli"
version = "0.5.0"
edition = "2021"

[[example]]
name = "relay"
path = "src/examples/relay.rs"

[[example]]
name = "stateless"
path = "src/examples/stateless.rs"
required-features = ["stateless"]

[dependencies]
rln = { path = "../rln", version = "1.0.0", default-features = false }
zerokit_utils = { path = "../utils", version = "1.0.0", default-features = false }
clap = { version = "4.5.53", features = ["cargo", "derive", "env"] }
serde_json = "1.0.145"
serde = { version = "1.0.228", features = ["derive"] }

[features]
default = ["rln/pmtree-ft", "rln/parallel"]
stateless = ["rln/stateless", "rln/parallel"]

[package.metadata.docs.rs]
all-features = true
rln-cli/Makefile.toml (Normal file, 9 lines)
@@ -0,0 +1,9 @@
[tasks.build]
command = "cargo"
args = ["build"]

[tasks.test]
disabled = true

[tasks.bench]
disabled = true
rln-cli/README.md (Normal file, 29 lines)
@@ -0,0 +1,29 @@
# Zerokit RLN-CLI

The Zerokit RLN-CLI provides command-line examples of how to use the public API of the [Zerokit RLN Module](../rln/README.md).

## Relay Example

The following [Relay Example](src/examples/relay.rs) demonstrates how RLN enables spam prevention in anonymous environments for multiple users.

You can run the example using the following command:

```bash
cargo run --example relay
```

You can also change **MESSAGE_LIMIT** and **TREE_DEPTH** in the [relay.rs](src/examples/relay.rs) file to see how the RLN instance behaves with different parameters.

To customize the **TREE_DEPTH** constant to a value other than the default of `20`, follow the [Custom Circuit Compilation](../rln/README.md#advanced-custom-circuit-compilation) instructions.

## Stateless Example

The following [Stateless Example](src/examples/stateless.rs) demonstrates how RLN can be used for stateless features by creating the Merkle tree outside of the RLN instance.

This example functions similarly to the [Relay Example](#relay-example) but uses a stateless RLN and a separate Merkle tree.

You can run the example using the following command:

```bash
cargo run --example stateless --no-default-features --features stateless
```
rln-cli/src/examples/relay.rs (Normal file, 302 lines)
@@ -0,0 +1,302 @@
use std::{
    collections::HashMap,
    fs::File,
    io::{stdin, stdout, Read, Write},
    path::{Path, PathBuf},
};

use clap::{Parser, Subcommand};
use rln::prelude::{
    hash_to_field_le, keygen, poseidon_hash, recover_id_secret, Fr, IdSecret, PmtreeConfigBuilder,
    RLNProofValues, RLNWitnessInput, RLN,
};
use zerokit_utils::pm_tree::Mode;

const MESSAGE_LIMIT: u32 = 1;

const TREE_DEPTH: usize = 20;

type Result<T> = std::result::Result<T, Box<dyn std::error::Error>>;

#[derive(Parser)]
#[command(author, version, about, long_about = None)]
struct Cli {
    #[command(subcommand)]
    command: Commands,
}

#[derive(Subcommand)]
enum Commands {
    List,
    Register,
    Send {
        #[arg(short, long)]
        user_index: usize,
        #[arg(short, long)]
        message_id: u32,
        #[arg(short, long)]
        signal: String,
    },
    Clear,
    Exit,
}

#[derive(Debug, Clone)]
struct Identity {
    identity_secret: IdSecret,
    id_commitment: Fr,
}

impl Identity {
    fn new() -> Self {
        let (identity_secret, id_commitment) = keygen().unwrap();
        Identity {
            identity_secret,
            id_commitment,
        }
    }
}

struct RLNSystem {
    rln: RLN,
    used_nullifiers: HashMap<Fr, RLNProofValues>,
    local_identities: HashMap<usize, Identity>,
}

impl RLNSystem {
    fn new() -> Result<Self> {
        let mut resources: Vec<Vec<u8>> = Vec::new();
        let resources_path: PathBuf = format!("../rln/resources/tree_depth_{TREE_DEPTH}").into();
        let filenames = ["rln_final.arkzkey", "graph.bin"];
        for filename in filenames {
            let fullpath = resources_path.join(Path::new(filename));
            let mut file = File::open(&fullpath)?;
            let metadata = std::fs::metadata(&fullpath)?;
            let mut output_buffer = vec![0; metadata.len() as usize];
            file.read_exact(&mut output_buffer)?;
            resources.push(output_buffer);
        }
        let tree_config = PmtreeConfigBuilder::new()
            .path("./database")
            .temporary(false)
            .cache_capacity(1073741824)
            .flush_every_ms(500)
            .mode(Mode::HighThroughput)
            .use_compression(false)
            .build()?;
        let rln = RLN::new_with_params(
            TREE_DEPTH,
            resources[0].clone(),
            resources[1].clone(),
            tree_config,
        )?;
        println!("RLN instance initialized successfully");
        Ok(RLNSystem {
            rln,
            used_nullifiers: HashMap::new(),
            local_identities: HashMap::new(),
        })
    }

    fn list_users(&self) {
        if self.local_identities.is_empty() {
            println!("No users registered yet.");
            return;
        }

        println!("Registered users:");
        for (index, identity) in &self.local_identities {
            println!("User Index: {index}");
            println!("+ Identity secret: {}", *identity.identity_secret);
            println!("+ Identity commitment: {}", identity.id_commitment);
            println!();
        }
    }

    fn register_user(&mut self) -> Result<usize> {
        let index = self.rln.leaves_set();
        let identity = Identity::new();

        let rate_commitment =
            poseidon_hash(&[identity.id_commitment, Fr::from(MESSAGE_LIMIT)]).unwrap();
        match self.rln.set_next_leaf(rate_commitment) {
            Ok(_) => {
                println!("Registered User Index: {index}");
                println!("+ Identity secret: {}", *identity.identity_secret);
                println!("+ Identity commitment: {}", identity.id_commitment);
                self.local_identities.insert(index, identity);
            }
            Err(_) => {
                println!("Maximum user limit reached: 2^{TREE_DEPTH}");
            }
        };

        Ok(index)
    }

    fn generate_and_verify_proof(
        &mut self,
        user_index: usize,
        message_id: u32,
        signal: &str,
        external_nullifier: Fr,
    ) -> Result<RLNProofValues> {
        let identity = match self.local_identities.get(&user_index) {
            Some(identity) => identity,
            None => return Err(format!("user index {user_index} not found").into()),
        };

        let (path_elements, identity_path_index) = self.rln.get_merkle_proof(user_index)?;
        let x = hash_to_field_le(signal.as_bytes())?;

        let witness = RLNWitnessInput::new(
            identity.identity_secret.clone(),
            Fr::from(MESSAGE_LIMIT),
            Fr::from(message_id),
            path_elements,
            identity_path_index,
            x,
            external_nullifier,
        )?;

        let (proof, proof_values) = self.rln.generate_rln_proof(&witness)?;

        println!("Proof generated successfully:");
        println!("+ User Index: {user_index}");
        println!("+ Message ID: {message_id}");
        println!("+ Signal: {signal}");

        let verified = self.rln.verify_rln_proof(&proof, &proof_values, &x)?;
        if verified {
            println!("Proof verified successfully");
        }

        Ok(proof_values)
    }

    fn check_nullifier(&mut self, proof_values: RLNProofValues) -> Result<()> {
        if let Some(&previous_proof_values) = self.used_nullifiers.get(&proof_values.nullifier) {
            self.handle_duplicate_message_id(previous_proof_values, proof_values)?;
            return Ok(());
        }

        self.used_nullifiers
            .insert(proof_values.nullifier, proof_values);
        println!("Message verified and accepted");
        Ok(())
    }

    fn handle_duplicate_message_id(
        &mut self,
        previous_proof_values: RLNProofValues,
        current_proof_values: RLNProofValues,
    ) -> Result<()> {
        if previous_proof_values.x == current_proof_values.x
            && previous_proof_values.y == current_proof_values.y
        {
            return Err("this exact message and signal has already been sent".into());
        }

        match recover_id_secret(&previous_proof_values, &current_proof_values) {
            Ok(leaked_identity_secret) => {
                if let Some((user_index, identity)) = self
                    .local_identities
                    .iter()
                    .find(|(_, identity)| identity.identity_secret == leaked_identity_secret)
                    .map(|(index, identity)| (*index, identity))
                {
                    let real_identity_secret = identity.identity_secret.clone();
                    if leaked_identity_secret != real_identity_secret {
                        Err("Identity secret mismatch: leaked_identity_secret != real_identity_secret".into())
                    } else {
                        println!(
                            "DUPLICATE message ID detected! Reveal identity secret: {}",
                            *leaked_identity_secret
                        );
                        self.local_identities.remove(&user_index);
                        self.rln.delete_leaf(user_index)?;
                        println!("User index {user_index} has been SLASHED");
                        Ok(())
                    }
                } else {
                    Err("user identity secret ******** not found".into())
                }
            }
            Err(err) => Err(format!("Failed to recover identity secret: {err}").into()),
        }
    }
}

fn main() -> Result<()> {
    println!("Initializing RLN instance...");
    print!("\x1B[2J\x1B[1;1H");
    let mut rln_system = RLNSystem::new()?;
    let rln_epoch = hash_to_field_le(b"epoch")?;
    let rln_identifier = hash_to_field_le(b"rln-identifier")?;
    let external_nullifier = poseidon_hash(&[rln_epoch, rln_identifier]).unwrap();
    println!("RLN Relay Example:");
    println!("Message Limit: {MESSAGE_LIMIT}");
    println!("----------------------------------");
    println!();
    show_commands();
    loop {
        print!("\n> ");
        stdout().flush()?;
        let mut input = String::new();
        stdin().read_line(&mut input)?;
        let trimmed = input.trim();
        let args = std::iter::once("").chain(trimmed.split_whitespace());

        match Cli::try_parse_from(args) {
            Ok(cli) => match cli.command {
                Commands::List => {
                    rln_system.list_users();
                }
                Commands::Register => {
                    rln_system.register_user()?;
                }
                Commands::Send {
                    user_index,
                    message_id,
                    signal,
                } => {
                    match rln_system.generate_and_verify_proof(
                        user_index,
                        message_id,
                        &signal,
                        external_nullifier,
                    ) {
                        Ok(proof_values) => {
                            if let Err(err) = rln_system.check_nullifier(proof_values) {
                                println!("Check nullifier error: {err}");
                            };
                        }
                        Err(err) => {
                            println!("Proof generation error: {err}");
                        }
                    }
                }
                Commands::Clear => {
                    print!("\x1B[2J\x1B[1;1H");
                    show_commands();
                }
                Commands::Exit => {
                    break;
                }
            },
            Err(err) => {
                eprintln!("Command error: {err}");
            }
        }
    }
    Ok(())
}

fn show_commands() {
    println!("Available commands:");
    println!(" list - List registered users");
    println!(" register - Register a new user index");
    println!(" send -u <index> -m <message_id> -s <signal> - Send a message with proof");
    println!(" clear - Clear the screen");
    println!(" exit - Exit the program");
}
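The slashing path in `handle_duplicate_message_id` rests on Shamir-style secret sharing: each RLN proof discloses one point `(x, y)` on a line whose constant term is the identity secret, so two proofs that reuse a `message_id` under the same external nullifier determine the line and leak the secret. Below is a sketch of the interpolation that `recover_id_secret` performs, with `a0` the identity secret and `a1` the slope; the helper is illustrative only and assumes `ark_ff` is reachable through `rln`'s arkworks dependency tree for field inversion.

```rust
use ark_ff::Field; // assumed available via rln's arkworks dependencies
use rln::prelude::Fr;

/// Given two shares (x1, y1) and (x2, y2) of the line y = a0 + a1 * x
/// over Fr, recover the intercept a0, i.e. the identity secret.
fn recover_a0(x1: Fr, y1: Fr, x2: Fr, y2: Fr) -> Fr {
    // a0 = (x2 * y1 - x1 * y2) / (x2 - x1); requires x1 != x2
    (x2 * y1 - x1 * y2) * (x2 - x1).inverse().expect("shares must differ in x")
}
```

This is also why `check_nullifier` treats two proofs with identical `x` and `y` as a replay rather than a slashable offense: a single point does not determine the line.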
rln-cli/src/examples/stateless.rs (Normal file, 292 lines)
@@ -0,0 +1,292 @@
#![cfg(feature = "stateless")]

use std::{
    collections::HashMap,
    io::{stdin, stdout, Write},
};

use clap::{Parser, Subcommand};
use rln::prelude::{
    hash_to_field_le, keygen, poseidon_hash, recover_id_secret, Fr, IdSecret, OptimalMerkleTree,
    PoseidonHash, RLNProofValues, RLNWitnessInput, ZerokitMerkleProof, ZerokitMerkleTree,
    DEFAULT_TREE_DEPTH, RLN,
};

const MESSAGE_LIMIT: u32 = 1;

type Result<T> = std::result::Result<T, Box<dyn std::error::Error>>;
type ConfigOf<T> = <T as ZerokitMerkleTree>::Config;

#[derive(Parser)]
#[command(author, version, about, long_about = None)]
struct Cli {
    #[command(subcommand)]
    command: Commands,
}

#[derive(Subcommand)]
enum Commands {
    List,
    Register,
    Send {
        #[arg(short, long)]
        user_index: usize,
        #[arg(short, long)]
        message_id: u32,
        #[arg(short, long)]
        signal: String,
    },
    Clear,
    Exit,
}

#[derive(Debug, Clone)]
struct Identity {
    identity_secret: IdSecret,
    id_commitment: Fr,
}

impl Identity {
    fn new() -> Self {
        let (identity_secret, id_commitment) = keygen().unwrap();
        Identity {
            identity_secret,
            id_commitment,
        }
    }
}

struct RLNSystem {
    rln: RLN,
    tree: OptimalMerkleTree<PoseidonHash>,
    used_nullifiers: HashMap<Fr, RLNProofValues>,
    local_identities: HashMap<usize, Identity>,
}

impl RLNSystem {
    fn new() -> Result<Self> {
        let rln = RLN::new()?;
        let default_leaf = Fr::from(0);
        let tree: OptimalMerkleTree<PoseidonHash> = OptimalMerkleTree::new(
            DEFAULT_TREE_DEPTH,
            default_leaf,
            ConfigOf::<OptimalMerkleTree<PoseidonHash>>::default(),
        )
        .unwrap();

        Ok(RLNSystem {
            rln,
            tree,
            used_nullifiers: HashMap::new(),
            local_identities: HashMap::new(),
        })
    }

    fn list_users(&self) {
        if self.local_identities.is_empty() {
            println!("No users registered yet.");
            return;
        }

        println!("Registered users:");
        for (index, identity) in &self.local_identities {
            println!("User Index: {index}");
            println!("+ Identity secret: {}", *identity.identity_secret);
            println!("+ Identity commitment: {}", identity.id_commitment);
            println!();
        }
    }

    fn register_user(&mut self) -> Result<usize> {
        let index = self.tree.leaves_set();
        let identity = Identity::new();

        let rate_commitment =
            poseidon_hash(&[identity.id_commitment, Fr::from(MESSAGE_LIMIT)]).unwrap();
        self.tree.update_next(rate_commitment)?;

        println!("Registered User Index: {index}");
        println!("+ Identity secret: {}", *identity.identity_secret);
        println!("+ Identity commitment: {}", identity.id_commitment);

        self.local_identities.insert(index, identity);
        Ok(index)
    }

    fn generate_and_verify_proof(
        &mut self,
        user_index: usize,
        message_id: u32,
        signal: &str,
        external_nullifier: Fr,
    ) -> Result<RLNProofValues> {
        let identity = match self.local_identities.get(&user_index) {
            Some(identity) => identity,
            None => return Err(format!("user index {user_index} not found").into()),
        };

        let merkle_proof = self.tree.proof(user_index)?;
        let x = hash_to_field_le(signal.as_bytes())?;

        let witness = RLNWitnessInput::new(
            identity.identity_secret.clone(),
            Fr::from(MESSAGE_LIMIT),
            Fr::from(message_id),
            merkle_proof.get_path_elements(),
            merkle_proof.get_path_index(),
            x,
            external_nullifier,
        )?;

        let (proof, proof_values) = self.rln.generate_rln_proof(&witness)?;

        println!("Proof generated successfully:");
        println!("+ User Index: {user_index}");
        println!("+ Message ID: {message_id}");
        println!("+ Signal: {signal}");

        let tree_root = self.tree.root();

        let verified = self
            .rln
            .verify_with_roots(&proof, &proof_values, &x, &[tree_root])?;
        if verified {
            println!("Proof verified successfully");
        }

        Ok(proof_values)
    }

    fn check_nullifier(&mut self, proof_values: RLNProofValues) -> Result<()> {
        let tree_root = self.tree.root();

        if proof_values.root != tree_root {
            println!("Check nullifier failed: invalid root");
            return Ok(());
        }

        if let Some(&previous_proof_values) = self.used_nullifiers.get(&proof_values.nullifier) {
            self.handle_duplicate_message_id(previous_proof_values, proof_values)?;
            return Ok(());
        }

        self.used_nullifiers
            .insert(proof_values.nullifier, proof_values);
        println!("Message verified and accepted");
        Ok(())
    }

    fn handle_duplicate_message_id(
        &mut self,
        previous_proof_values: RLNProofValues,
        current_proof_values: RLNProofValues,
    ) -> Result<()> {
        if previous_proof_values.x == current_proof_values.x
            && previous_proof_values.y == current_proof_values.y
        {
            return Err("this exact message and signal has already been sent".into());
        }

        match recover_id_secret(&previous_proof_values, &current_proof_values) {
            Ok(leaked_identity_secret) => {
                if let Some((user_index, identity)) = self
                    .local_identities
                    .iter()
                    .find(|(_, identity)| identity.identity_secret == leaked_identity_secret)
                    .map(|(index, identity)| (*index, identity))
                {
                    let real_identity_secret = identity.identity_secret.clone();
                    if leaked_identity_secret != real_identity_secret {
                        Err("Identity secret mismatch: leaked_identity_secret != real_identity_secret".into())
                    } else {
                        println!(
                            "DUPLICATE message ID detected! Reveal identity secret: {}",
                            *leaked_identity_secret
                        );
                        self.local_identities.remove(&user_index);
                        println!("User index {user_index} has been SLASHED");
                        Ok(())
                    }
                } else {
                    Err("user identity secret ******** not found".into())
                }
            }
            Err(err) => Err(format!("Failed to recover identity secret: {err}").into()),
        }
    }
}

fn main() -> Result<()> {
    println!("Initializing RLN instance...");
    print!("\x1B[2J\x1B[1;1H");
    let mut rln_system = RLNSystem::new()?;
    let rln_epoch = hash_to_field_le(b"epoch")?;
    let rln_identifier = hash_to_field_le(b"rln-identifier")?;
    let external_nullifier = poseidon_hash(&[rln_epoch, rln_identifier]).unwrap();
    println!("RLN Stateless Relay Example:");
    println!("Message Limit: {MESSAGE_LIMIT}");
    println!("----------------------------------");
    println!();
    show_commands();

    loop {
        print!("\n> ");
        stdout().flush()?;
        let mut input = String::new();
        stdin().read_line(&mut input)?;
        let trimmed = input.trim();
        let args = std::iter::once("").chain(trimmed.split_whitespace());

        match Cli::try_parse_from(args) {
            Ok(cli) => match cli.command {
                Commands::List => {
                    rln_system.list_users();
                }
                Commands::Register => {
                    rln_system.register_user()?;
                }
                Commands::Send {
                    user_index,
                    message_id,
                    signal,
                } => {
                    match rln_system.generate_and_verify_proof(
                        user_index,
                        message_id,
                        &signal,
                        external_nullifier,
                    ) {
                        Ok(proof_values) => {
                            if let Err(err) = rln_system.check_nullifier(proof_values) {
                                println!("Check nullifier error: {err}");
                            };
                        }
                        Err(err) => {
                            println!("Proof generation error: {err}");
                        }
                    }
                }
                Commands::Clear => {
                    print!("\x1B[2J\x1B[1;1H");
                    show_commands();
                }
                Commands::Exit => {
                    break;
                }
            },
            Err(err) => {
                eprintln!("Command error: {err}");
            }
        }
    }
    Ok(())
}

fn show_commands() {
    println!("Available commands:");
    println!(" list - List registered users");
    println!(" register - Register a new user index");
    println!(" send -u <index> -m <message_id> -s <signal> - Send a message with proof");
    println!(" clear - Clear the screen");
    println!(" exit - Exit the program");
}
rln-wasm/.gitignore (vendored, Normal file, 22 lines)
@@ -0,0 +1,22 @@
# Common files to ignore in Rust projects
.DS_Store
.idea
*.log
tmp/

# Generated by Cargo will have compiled files and executables
/target

# Generated by rln-wasm
/pkg
/examples/node_modules
/examples/package-lock.json

# Generated by Nix
result

# These are backup files generated by rustfmt
**/*.rs.bk

# MSVC Windows builds of rustc generate these, which store debugging information
*.pdb
rln-wasm/Cargo.lock (generated, Normal file, 1792 lines)
File diff suppressed because it is too large.
rln-wasm/Cargo.toml (Normal file, 53 lines)
@@ -0,0 +1,53 @@
[package]
name = "rln-wasm"
version = "1.0.0"
edition = "2021"
license = "MIT OR Apache-2.0"

[lib]
crate-type = ["cdylib", "rlib"]

[dependencies]
rln = { path = "../rln", version = "1.0.0", default-features = false, features = [
    "stateless",
] }
zerokit_utils = { path = "../utils", version = "1.0.0", default-features = false }
num-bigint = { version = "0.4.6", default-features = false }
js-sys = "0.3.83"
wasm-bindgen = "0.2.106"
serde-wasm-bindgen = "0.6.5"
serde = "1.0.228"
wasm-bindgen-rayon = { version = "1.3.0", features = [
    "no-bundler",
], optional = true }
ark-relations = { version = "0.5.1", features = ["std"] }
ark-groth16 = { version = "0.5.0", default-features = false }
rand = "0.8.5"

# The `console_error_panic_hook` crate provides better debugging of panics by
# logging them with `console.error`. This is great for development, but requires
# all the `std::fmt` and `std::panicking` infrastructure, so isn't great for
# code size when deploying.
console_error_panic_hook = { version = "0.1.7", optional = true }

[target.'cfg(target_arch = "wasm32")'.dependencies]
getrandom = { version = "0.2.16", features = ["js"] }

[dev-dependencies]
serde_json = "1.0.145"
wasm-bindgen-test = "0.3.56"
wasm-bindgen-futures = "0.4.56"
ark-std = { version = "0.5.0", default-features = false }

[dev-dependencies.web-sys]
version = "0.3.83"
features = ["Window", "Navigator"]

[features]
default = []
utils = []
panic_hook = ["console_error_panic_hook"]
parallel = ["rln/parallel", "wasm-bindgen-rayon", "ark-groth16/parallel"]

[package.metadata.docs.rs]
all-features = true
rln-wasm/Makefile.toml (Normal file, 137 lines)
@@ -0,0 +1,137 @@
[tasks.build]
clear = true
dependencies = ["pack_build", "pack_rename", "pack_add_keywords"]

[tasks.build_parallel]
clear = true
dependencies = [
    "pack_build_parallel",
    "pack_rename_parallel",
    "pack_add_keywords",
]

[tasks.build_utils]
clear = true
dependencies = ["pack_build_utils", "pack_rename_utils", "pack_add_keywords"]

[tasks.pack_build]
command = "wasm-pack"
args = ["build", "--release", "--target", "web", "--scope", "waku"]

[tasks.pack_rename]
script = "sed -i.bak 's/rln-wasm/zerokit-rln-wasm/g' pkg/package.json && rm pkg/package.json.bak"

[tasks.pack_build_parallel]
command = "env"
args = [
    "CARGO_TARGET_WASM32_UNKNOWN_UNKNOWN_RUSTFLAGS=-C target-feature=+atomics,+bulk-memory,+mutable-globals -C link-arg=--shared-memory -C link-arg=--max-memory=1073741824 -C link-arg=--import-memory -C link-arg=--export=__wasm_init_tls -C link-arg=--export=__tls_size -C link-arg=--export=__tls_align -C link-arg=--export=__tls_base",
    "rustup",
    "run",
    "nightly",
    "wasm-pack",
    "build",
    "--release",
    "--target",
    "web",
    "--scope",
    "waku",
    "--features",
    "parallel",
    "-Z",
    "build-std=panic_abort,std",
]

[tasks.pack_rename_parallel]
script = "sed -i.bak 's/rln-wasm/zerokit-rln-wasm-parallel/g' pkg/package.json && rm pkg/package.json.bak"

[tasks.pack_build_utils]
command = "wasm-pack"
args = [
    "build",
    "--release",
    "--target",
    "web",
    "--scope",
    "waku",
    "--no-default-features",
    "--features",
    "utils",
]

[tasks.pack_rename_utils]
script = "sed -i.bak 's/rln-wasm/zerokit-rln-wasm-utils/g' pkg/package.json && rm pkg/package.json.bak"

[tasks.pack_add_keywords]
script = """
jq '. + {keywords: ["zerokit", "rln", "wasm"]}' pkg/package.json > pkg/package.json.tmp && \
mv pkg/package.json.tmp pkg/package.json
"""

[tasks.test]
command = "wasm-pack"
args = [
    "test",
    "--release",
    "--node",
    "--target",
    "wasm32-unknown-unknown",
    "--",
    "--nocapture",
]
dependencies = ["build"]

[tasks.test_browser]
command = "wasm-pack"
args = [
    "test",
    "--release",
    "--chrome",
    "--headless",
    "--target",
    "wasm32-unknown-unknown",
    "--",
    "--nocapture",
]
dependencies = ["build"]

[tasks.test_parallel]
command = "env"
args = [
    "CARGO_TARGET_WASM32_UNKNOWN_UNKNOWN_RUSTFLAGS=-C target-feature=+atomics,+bulk-memory,+mutable-globals -C link-arg=--shared-memory -C link-arg=--max-memory=1073741824 -C link-arg=--import-memory -C link-arg=--export=__wasm_init_tls -C link-arg=--export=__tls_size -C link-arg=--export=__tls_align -C link-arg=--export=__tls_base",
    "rustup",
    "run",
    "nightly",
    "wasm-pack",
    "test",
    "--release",
    "--chrome",
    "--headless",
    "--target",
    "wasm32-unknown-unknown",
    "--features",
    "parallel",
    "-Z",
    "build-std=panic_abort,std",
    "--",
    "--nocapture",
]
dependencies = ["build_parallel"]

[tasks.test_utils]
command = "wasm-pack"
args = [
    "test",
    "--release",
    "--node",
    "--target",
    "wasm32-unknown-unknown",
    "--no-default-features",
    "--features",
    "utils",
    "--",
    "--nocapture",
]
dependencies = ["build_utils"]

[tasks.bench]
disabled = true
rln-wasm/README.md (Normal file, 119 lines)
@@ -0,0 +1,119 @@
# RLN for WASM

[npm version](https://badge.fury.io/js/@waku%2Fzerokit-rln-wasm)
[MIT license](https://opensource.org/licenses/MIT)
[Apache 2.0 license](https://opensource.org/licenses/Apache-2.0)

The Zerokit RLN WASM Module provides WebAssembly bindings for working with
Rate-Limiting Nullifier [RLN](https://rfc.vac.dev/vac/raw/rln-v2) zkSNARK proofs and primitives.
This module is used by [waku-org/js-rln](https://github.com/waku-org/js-rln/) to enable
RLN functionality in JavaScript/TypeScript applications.

## Install Dependencies

> [!NOTE]
> This project requires the following tools:
>
> - `wasm-pack` (v0.13.1) - for compiling Rust to WebAssembly
> - `cargo-make` - for running build commands
> - `nvm` - to install and manage Node.js (v22.14.0+)

### Quick Install

```bash
make installdeps
```

### Manual Installation

```bash
# Install wasm-pack
cargo install wasm-pack --version=0.13.1

# Install cargo-make
cargo install cargo-make

# Install Node.js via nvm
nvm install 22.14.0
nvm use 22.14.0
nvm alias default 22.14.0
```

## Building the Library

Navigate to the rln-wasm directory:

```bash
cd rln-wasm
```

Build commands:

```bash
cargo make build          # Default → @waku/zerokit-rln-wasm
cargo make build_parallel # Parallel → @waku/zerokit-rln-wasm-parallel (requires nightly Rust)
cargo make build_utils    # Utils only → @waku/zerokit-rln-wasm-utils
```

All packages are output to the `pkg/` directory.

## Running Tests and Benchmarks

```bash
cargo make test          # Standard tests
cargo make test_browser  # Browser headless mode
cargo make test_utils    # Utils-only tests
cargo make test_parallel # Parallel tests
```

## Examples

See the [Node example](./examples/index.js) and its [README](./examples/Readme.md) for proof generation, verification, and slashing.

## Parallel Computation

Enables multi-threaded browser execution using `wasm-bindgen-rayon`.

> [!NOTE]
>
> - Parallel support is not enabled by default due to WebAssembly and browser limitations.
> - Requires `nightly` Rust: `rustup install nightly`
> - Browser-only (not compatible with Node.js)
> - Requires HTTP headers for `SharedArrayBuffer`:
>   - `Cross-Origin-Opener-Policy: same-origin`
>   - `Cross-Origin-Embedder-Policy: require-corp`

### Usage

Direct usage (modern browsers with WebAssembly threads support):

```js
import * as wasmPkg from '@waku/zerokit-rln-wasm-parallel';

await wasmPkg.default();
await wasmPkg.initThreadPool(navigator.hardwareConcurrency);
wasmPkg.nowCallAnyExportedFuncs();
```

### Feature Detection for Older Browsers

If you're targeting [older browser versions that don't support WebAssembly threads](https://webassembly.org/roadmap/), you'll want to ship both builds: the parallel version for modern browsers and the default version as a fallback. Use feature detection on the JavaScript side to choose the appropriate build.

You can use the [wasm-feature-detect](https://github.com/GoogleChromeLabs/wasm-feature-detect) library for this purpose:

```js
import { threads } from 'wasm-feature-detect';

let wasmPkg;

if (await threads()) {
  wasmPkg = await import('@waku/zerokit-rln-wasm-parallel');
  await wasmPkg.default();
  await wasmPkg.initThreadPool(navigator.hardwareConcurrency);
} else {
  wasmPkg = await import('@waku/zerokit-rln-wasm');
  await wasmPkg.default();
}

wasmPkg.nowCallAnyExportedFuncs();
```
rln-wasm/examples/README.md (Normal file, 22 lines)
@@ -0,0 +1,22 @@
# RLN WASM Node Examples

This example demonstrates how to use the RLN WASM package in a Node.js environment.

## Build the @waku/zerokit-rln-wasm package at the root of the rln-wasm module

```bash
cargo make build
```

## Move into this directory and install dependencies

```bash
cd examples
npm install
```

## Run

```bash
npm start
```
484
rln-wasm/examples/index.js
Normal file
484
rln-wasm/examples/index.js
Normal file
@@ -0,0 +1,484 @@
|
||||
import { readFileSync } from "fs";
|
||||
import { fileURLToPath } from "url";
|
||||
import { dirname, join } from "path";
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = dirname(__filename);
|
||||
|
||||
function debugUint8Array(uint8Array) {
|
||||
return Array.from(uint8Array, (byte) =>
|
||||
byte.toString(16).padStart(2, "0")
|
||||
).join(", ");
|
||||
}
|
||||
|
||||
async function calculateWitness(circomPath, inputs, witnessCalculatorFile) {
|
||||
const wasmFile = readFileSync(circomPath);
|
||||
const wasmFileBuffer = wasmFile.buffer.slice(
|
||||
wasmFile.byteOffset,
|
||||
wasmFile.byteOffset + wasmFile.byteLength
|
||||
);
|
||||
const witnessCalculator = await witnessCalculatorFile(wasmFileBuffer);
|
||||
const calculatedWitness = await witnessCalculator.calculateWitness(
|
||||
inputs,
|
||||
false
|
||||
);
|
||||
return calculatedWitness;
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const rlnWasm = await import("../pkg/rln_wasm.js");
|
||||
const wasmPath = join(__dirname, "../pkg/rln_wasm_bg.wasm");
|
||||
const wasmBytes = readFileSync(wasmPath);
|
||||
rlnWasm.initSync({ module: wasmBytes });
|
||||
|
||||
const zkeyPath = join(
|
||||
__dirname,
|
||||
"../../rln/resources/tree_depth_20/rln_final.arkzkey"
|
||||
);
|
||||
const circomPath = join(
|
||||
__dirname,
|
||||
"../../rln/resources/tree_depth_20/rln.wasm"
|
||||
);
|
||||
const witnessCalculatorPath = join(
|
||||
__dirname,
|
||||
"../resources/witness_calculator.js"
|
||||
);
|
||||
const { builder: witnessCalculatorFile } = await import(
|
||||
witnessCalculatorPath
|
||||
);
|
||||
|
||||
console.log("Creating RLN instance");
|
||||
const zkeyData = readFileSync(zkeyPath);
|
||||
let rlnInstance;
|
||||
try {
|
||||
rlnInstance = new rlnWasm.WasmRLN(new Uint8Array(zkeyData));
|
||||
} catch (error) {
|
||||
console.error("Initial RLN instance creation error:", error);
|
||||
return;
|
||||
}
|
||||
console.log("RLN instance created successfully");
|
||||
|
||||
console.log("\nGenerating identity keys");
|
||||
let identity;
|
||||
try {
|
||||
identity = rlnWasm.Identity.generate();
|
||||
} catch (error) {
|
||||
console.error("Key generation error:", error);
|
||||
return;
|
||||
}
|
||||
const identitySecret = identity.getSecretHash();
|
||||
const idCommitment = identity.getCommitment();
|
||||
console.log("Identity generated");
|
||||
console.log(" - identity_secret = " + identitySecret.debug());
|
||||
console.log(" - id_commitment = " + idCommitment.debug());
|
||||
|
||||
console.log("\nCreating message limit");
|
||||
const userMessageLimit = rlnWasm.WasmFr.fromUint(1);
|
||||
console.log(" - user_message_limit = " + userMessageLimit.debug());
|
||||
|
||||
console.log("\nComputing rate commitment");
|
||||
let rateCommitment;
|
||||
try {
|
||||
rateCommitment = rlnWasm.Hasher.poseidonHashPair(
|
||||
idCommitment,
|
||||
userMessageLimit
|
||||
);
|
||||
} catch (error) {
|
||||
console.error("Rate commitment hash error:", error);
|
||||
return;
|
||||
}
|
||||
console.log(" - rate_commitment = " + rateCommitment.debug());
|
||||
|
||||
console.log("\nWasmFr serialization: WasmFr <-> bytes");
|
||||
const serRateCommitment = rateCommitment.toBytesLE();
|
||||
console.log(
|
||||
" - serialized rate_commitment = [" +
|
||||
debugUint8Array(serRateCommitment) +
|
||||
"]"
|
||||
);
|
||||
|
||||
let deserRateCommitment;
|
||||
try {
|
||||
deserRateCommitment = rlnWasm.WasmFr.fromBytesLE(serRateCommitment);
|
||||
} catch (error) {
|
||||
console.error("Rate commitment deserialization error:", error);
|
||||
return;
|
||||
}
|
||||
console.log(
|
||||
" - deserialized rate_commitment = " + deserRateCommitment.debug()
|
||||
);
|
||||
|
||||
console.log("\nIdentity serialization: Identity <-> bytes");
|
||||
const serIdentity = identity.toBytesLE();
|
||||
console.log(
|
||||
" - serialized identity = [" + debugUint8Array(serIdentity) + "]"
|
||||
);
|
||||
|
||||
let deserIdentity;
|
||||
try {
|
||||
deserIdentity = rlnWasm.Identity.fromBytesLE(serIdentity);
|
||||
} catch (error) {
|
||||
console.error("Identity deserialization error:", error);
|
||||
return;
|
||||
}
|
||||
const deserIdentitySecret = deserIdentity.getSecretHash();
|
||||
const deserIdCommitment = deserIdentity.getCommitment();
|
||||
console.log(
|
||||
" - deserialized identity = [" +
|
||||
deserIdentitySecret.debug() +
|
||||
", " +
|
||||
deserIdCommitment.debug() +
|
||||
"]"
|
||||
);
|
||||
|
||||
console.log("\nBuilding Merkle path for stateless mode");
|
||||
const treeDepth = 20;
|
||||
const defaultLeaf = rlnWasm.WasmFr.zero();
|
||||
|
||||
const defaultHashes = [];
|
||||
try {
|
||||
defaultHashes[0] = rlnWasm.Hasher.poseidonHashPair(
|
||||
defaultLeaf,
|
||||
defaultLeaf
|
||||
);
|
||||
for (let i = 1; i < treeDepth - 1; i++) {
|
||||
defaultHashes[i] = rlnWasm.Hasher.poseidonHashPair(
|
||||
defaultHashes[i - 1],
|
||||
defaultHashes[i - 1]
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Poseidon hash error:", error);
|
||||
return;
|
||||
}
|
||||
|
||||
const pathElements = new rlnWasm.VecWasmFr();
|
||||
pathElements.push(defaultLeaf);
|
||||
for (let i = 1; i < treeDepth; i++) {
|
||||
pathElements.push(defaultHashes[i - 1]);
|
||||
}
|
||||
const identityPathIndex = new Uint8Array(treeDepth);
|
||||
|
||||
console.log("\nVecWasmFr serialization: VecWasmFr <-> bytes");
|
||||
const serPathElements = pathElements.toBytesLE();
|
||||
console.log(
|
||||
" - serialized path_elements = [" + debugUint8Array(serPathElements) + "]"
|
||||
);
|
||||
|
||||
let deserPathElements;
|
||||
try {
|
||||
deserPathElements = rlnWasm.VecWasmFr.fromBytesLE(serPathElements);
|
||||
} catch (error) {
|
||||
console.error("Path elements deserialization error:", error);
|
||||
return;
|
||||
}
|
||||
console.log(" - deserialized path_elements = ", deserPathElements.debug());
|
||||
|
||||
console.log("\nUint8Array serialization: Uint8Array <-> bytes");
|
||||
const serPathIndex = rlnWasm.Uint8ArrayUtils.toBytesLE(identityPathIndex);
|
||||
console.log(
|
||||
" - serialized path_index = [" + debugUint8Array(serPathIndex) + "]"
|
||||
);
|
||||
|
||||
let deserPathIndex;
|
||||
try {
|
||||
deserPathIndex = rlnWasm.Uint8ArrayUtils.fromBytesLE(serPathIndex);
|
||||
} catch (error) {
|
||||
console.error("Path index deserialization error:", error);
|
||||
return;
|
||||
}
|
||||
console.log(" - deserialized path_index =", deserPathIndex);
|
||||
|
||||
console.log("\nComputing Merkle root for stateless mode");
|
||||
console.log(" - computing root for index 0 with rate_commitment");
|
||||
|
||||
let computedRoot;
|
||||
try {
|
||||
computedRoot = rlnWasm.Hasher.poseidonHashPair(rateCommitment, defaultLeaf);
|
||||
for (let i = 1; i < treeDepth; i++) {
|
||||
computedRoot = rlnWasm.Hasher.poseidonHashPair(
|
||||
computedRoot,
|
||||
defaultHashes[i - 1]
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Poseidon hash error:", error);
|
||||
return;
|
||||
}
|
||||
console.log(" - computed_root = " + computedRoot.debug());
|
||||
|
||||
console.log("\nHashing signal");
|
||||
const signal = new Uint8Array([
|
||||
1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0,
|
||||
]);
|
||||
let x;
|
||||
try {
|
||||
x = rlnWasm.Hasher.hashToFieldLE(signal);
|
||||
} catch (error) {
|
||||
console.error("Hash signal error:", error);
|
||||
return;
|
||||
}
|
||||
console.log(" - x = " + x.debug());
|
||||
|
||||
console.log("\nHashing epoch");
|
||||
const epochStr = "test-epoch";
|
||||
let epoch;
|
||||
try {
|
||||
epoch = rlnWasm.Hasher.hashToFieldLE(new TextEncoder().encode(epochStr));
|
||||
} catch (error) {
|
||||
console.error("Hash epoch error:", error);
|
||||
return;
|
||||
}
|
||||
console.log(" - epoch = " + epoch.debug());
|
||||
|
||||
console.log("\nHashing RLN identifier");
|
||||
const rlnIdStr = "test-rln-identifier";
|
||||
let rlnIdentifier;
|
||||
try {
|
||||
rlnIdentifier = rlnWasm.Hasher.hashToFieldLE(
|
||||
new TextEncoder().encode(rlnIdStr)
|
||||
);
|
||||
} catch (error) {
|
||||
console.error("Hash RLN identifier error:", error);
|
||||
return;
|
||||
}
|
||||
console.log(" - rln_identifier = " + rlnIdentifier.debug());
|
||||
|
||||
console.log("\nComputing Poseidon hash for external nullifier");
|
||||
let externalNullifier;
|
||||
try {
|
||||
externalNullifier = rlnWasm.Hasher.poseidonHashPair(epoch, rlnIdentifier);
|
||||
} catch (error) {
|
||||
console.error("External nullifier hash error:", error);
|
||||
return;
|
||||
}
|
||||
console.log(" - external_nullifier = " + externalNullifier.debug());
|
||||
|
||||
console.log("\nCreating message_id");
|
||||
const messageId = rlnWasm.WasmFr.fromUint(0);
|
||||
console.log(" - message_id = " + messageId.debug());
|
||||
|
||||
console.log("\nCreating RLN Witness");
|
||||
const witness = new rlnWasm.WasmRLNWitnessInput(
|
||||
identitySecret,
|
||||
userMessageLimit,
|
||||
messageId,
|
||||
pathElements,
|
||||
identityPathIndex,
|
||||
x,
|
||||
externalNullifier
|
||||
);
|
||||
console.log("RLN Witness created successfully");
|
||||
|
||||
console.log(
|
||||
"\nWasmRLNWitnessInput serialization: WasmRLNWitnessInput <-> bytes"
|
||||
);
|
||||
let serWitness;
|
||||
try {
|
||||
serWitness = witness.toBytesLE();
|
||||
} catch (error) {
|
||||
console.error("Witness serialization error:", error);
|
||||
return;
|
||||
}
|
||||
console.log(
|
||||
" - serialized witness = [" + debugUint8Array(serWitness) + " ]"
|
||||
);
|
||||
|
||||
let deserWitness;
|
||||
try {
|
||||
deserWitness = rlnWasm.WasmRLNWitnessInput.fromBytesLE(serWitness);
|
||||
} catch (error) {
|
||||
console.error("Witness deserialization error:", error);
|
||||
return;
|
||||
}
|
||||
console.log(" - witness deserialized successfully");
|
||||
|
||||
console.log("\nCalculating witness");
|
||||
let witnessJson;
|
||||
try {
|
||||
witnessJson = witness.toBigIntJson();
|
||||
} catch (error) {
|
||||
console.error("Witness to BigInt JSON error:", error);
|
||||
return;
|
||||
}
|
||||
const calculatedWitness = await calculateWitness(
|
||||
circomPath,
|
||||
witnessJson,
|
||||
witnessCalculatorFile
|
||||
);
|
||||
console.log("Witness calculated successfully");
|
||||
|
||||
console.log("\nGenerating RLN Proof");
|
||||
let rln_proof;
|
||||
try {
|
||||
rln_proof = rlnInstance.generateRLNProofWithWitness(
|
||||
calculatedWitness,
|
||||
witness
|
||||
);
|
||||
} catch (error) {
|
||||
console.error("Proof generation error:", error);
|
||||
return;
|
||||
}
|
||||
console.log("Proof generated successfully");
|
||||
|
||||
console.log("\nGetting proof values");
|
||||
const proofValues = rln_proof.getValues();
|
||||
console.log(" - y = " + proofValues.y.debug());
|
||||
console.log(" - nullifier = " + proofValues.nullifier.debug());
|
||||
console.log(" - root = " + proofValues.root.debug());
|
||||
console.log(" - x = " + proofValues.x.debug());
|
||||
console.log(
|
||||
" - external_nullifier = " + proofValues.externalNullifier.debug()
|
||||
);
|
||||
|
||||
console.log("\nRLNProof serialization: RLNProof <-> bytes");
|
||||
let serProof;
|
||||
try {
|
||||
serProof = rln_proof.toBytesLE();
|
||||
} catch (error) {
|
||||
console.error("Proof serialization error:", error);
|
||||
return;
|
||||
}
|
||||
console.log(" - serialized proof = [" + debugUint8Array(serProof) + " ]");
|
||||
|
||||
let deserProof;
|
||||
try {
|
||||
deserProof = rlnWasm.WasmRLNProof.fromBytesLE(serProof);
|
||||
} catch (error) {
|
||||
console.error("Proof deserialization error:", error);
|
||||
return;
|
||||
}
|
||||
console.log(" - proof deserialized successfully");
|
||||
|
||||
console.log("\nRLNProofValues serialization: RLNProofValues <-> bytes");
|
||||
const serProofValues = proofValues.toBytesLE();
|
||||
console.log(
|
||||
" - serialized proof_values = [" + debugUint8Array(serProofValues) + " ]"
|
||||
);
|
||||
|
||||
let deserProofValues2;
|
||||
try {
|
||||
deserProofValues2 = rlnWasm.WasmRLNProofValues.fromBytesLE(serProofValues);
|
||||
} catch (error) {
|
||||
console.error("Proof values deserialization error:", error);
|
||||
return;
|
||||
}
|
||||
console.log(" - proof_values deserialized successfully");
|
||||
console.log(
|
||||
" - deserialized external_nullifier = " +
      deserProofValues2.externalNullifier.debug()
  );

  console.log("\nVerifying Proof");
  const roots = new rlnWasm.VecWasmFr();
  roots.push(computedRoot);
  let isValid;
  try {
    isValid = rlnInstance.verifyWithRoots(rln_proof, roots, x);
  } catch (error) {
    console.error("Proof verification error:", error);
    return;
  }
  if (isValid) {
    console.log("Proof verified successfully");
  } else {
    console.log("Proof verification failed");
    return;
  }

  console.log(
    "\nSimulating double-signaling attack (same epoch, different message)"
  );

  console.log("\nHashing second signal");
  const signal2 = new Uint8Array([
    11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  ]);
  let x2;
  try {
    x2 = rlnWasm.Hasher.hashToFieldLE(signal2);
  } catch (error) {
    console.error("Hash second signal error:", error);
    return;
  }
  console.log(" - x2 = " + x2.debug());

  console.log("\nCreating second message with the same id");
  const messageId2 = rlnWasm.WasmFr.fromUint(0);
  console.log(" - message_id2 = " + messageId2.debug());

  console.log("\nCreating second RLN Witness");
  const witness2 = new rlnWasm.WasmRLNWitnessInput(
    identitySecret,
    userMessageLimit,
    messageId2,
    pathElements,
    identityPathIndex,
    x2,
    externalNullifier
  );
  console.log("Second RLN Witness created successfully");

  console.log("\nCalculating second witness");
  let witnessJson2;
  try {
    witnessJson2 = witness2.toBigIntJson();
  } catch (error) {
    console.error("Second witness to BigInt JSON error:", error);
    return;
  }
  const calculatedWitness2 = await calculateWitness(
    circomPath,
    witnessJson2,
    witnessCalculatorFile
  );
  console.log("Second witness calculated successfully");

  console.log("\nGenerating second RLN Proof");
  let rln_proof2;
  try {
    rln_proof2 = rlnInstance.generateRLNProofWithWitness(
      calculatedWitness2,
      witness2
    );
  } catch (error) {
    console.error("Second proof generation error:", error);
    return;
  }
  console.log("Second proof generated successfully");

  console.log("\nVerifying second proof");
  let isValid2;
  try {
    isValid2 = rlnInstance.verifyWithRoots(rln_proof2, roots, x2);
  } catch (error) {
    console.error("Proof verification error:", error);
    return;
  }
  if (isValid2) {
    console.log("Second proof verified successfully");

    console.log("\nRecovering identity secret");
    const proofValues1 = rln_proof.getValues();
    const proofValues2 = rln_proof2.getValues();
    let recoveredSecret;
    try {
      recoveredSecret = rlnWasm.WasmRLNProofValues.recoverIdSecret(
        proofValues1,
        proofValues2
      );
    } catch (error) {
      console.error("Identity recovery error:", error);
      return;
    }
    console.log(" - recovered_secret = " + recoveredSecret.debug());
    console.log(" - original_secret = " + identitySecret.debug());
    console.log("Slashing successful: Identity is recovered!");
  } else {
    console.log("Second proof verification failed");
  }
}

main().catch(console.error);
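The recoverIdSecret call at the end of the example works because every RLN signal discloses one share of the identity secret: the proof values expose a point (x, y) on a line whose constant term is the secret, and whose slope is fixed for a given external nullifier and message id. A sketch of the arithmetic over the BN254 scalar field (notation assumed from the RLN construction, not spelled out in this diff):

    y_1 = a_0 + a_1 \cdot x_1, \qquad y_2 = a_0 + a_1 \cdot x_2
    a_1 = \frac{y_1 - y_2}{x_1 - x_2}, \qquad a_0 = y_1 - a_1 \cdot x_1 = \frac{x_1 y_2 - x_2 y_1}{x_1 - x_2}

This is why the example reuses messageId2 = 0 and the same externalNullifier: a different message id would change a_1, and the two disclosed points would no longer lie on one line.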
13
rln-wasm/examples/package.json
Normal file
@@ -0,0 +1,13 @@
{
  "name": "rln-wasm-node-example",
  "version": "1.0.0",
  "description": "Node.js example for RLN WASM",
  "type": "module",
  "main": "index.js",
  "scripts": {
    "start": "node index.js"
  },
  "dependencies": {
    "@waku/zerokit-rln-wasm": "file:../../pkg"
  }
}
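Assuming the `../../pkg` directory has already been produced by a prior build of the `rln-wasm` crate (the build step itself is not part of this manifest), the example should be runnable with `npm install` followed by `npm start`.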
328
rln-wasm/resources/witness_calculator.js
Normal file
@@ -0,0 +1,328 @@
// File generated with https://github.com/iden3/circom
// following the instructions from:
// https://github.com/vacp2p/zerokit/tree/master/rln#advanced-custom-circuit-compilation

export async function builder(code, options) {
  options = options || {};

  let wasmModule;
  try {
    wasmModule = await WebAssembly.compile(code);
  } catch (err) {
    console.log(err);
    console.log(
      "\nTry to run circom --c in order to generate c++ code instead\n"
    );
    throw new Error(err);
  }

  let wc;

  let errStr = "";
  let msgStr = "";

  const instance = await WebAssembly.instantiate(wasmModule, {
    runtime: {
      exceptionHandler: function (code) {
        let err;
        if (code == 1) {
          err = "Signal not found.\n";
        } else if (code == 2) {
          err = "Too many signals set.\n";
        } else if (code == 3) {
          err = "Signal already set.\n";
        } else if (code == 4) {
          err = "Assert Failed.\n";
        } else if (code == 5) {
          err = "Not enough memory.\n";
        } else if (code == 6) {
          err = "Input signal array access exceeds the size.\n";
        } else {
          err = "Unknown error.\n";
        }
        throw new Error(err + errStr);
      },
      printErrorMessage: function () {
        errStr += getMessage() + "\n";
        // console.error(getMessage());
      },
      writeBufferMessage: function () {
        const msg = getMessage();
        // Any calls to `log()` will always end with a `\n`, so that's when we print and reset
        if (msg === "\n") {
          console.log(msgStr);
          msgStr = "";
        } else {
          // If we've buffered other content, put a space in between the items
          if (msgStr !== "") {
            msgStr += " ";
          }
          // Then append the message to the message we are creating
          msgStr += msg;
        }
      },
      showSharedRWMemory: function () {
        printSharedRWMemory();
      },
    },
  });

  const sanityCheck = options;
  // options &&
  // (
  //   options.sanityCheck ||
  //   options.logGetSignal ||
  //   options.logSetSignal ||
  //   options.logStartComponent ||
  //   options.logFinishComponent
  // );

  wc = new WitnessCalculator(instance, sanityCheck);
  return wc;

  function getMessage() {
    var message = "";
    var c = instance.exports.getMessageChar();
    while (c != 0) {
      message += String.fromCharCode(c);
      c = instance.exports.getMessageChar();
    }
    return message;
  }

  function printSharedRWMemory() {
    const shared_rw_memory_size = instance.exports.getFieldNumLen32();
    const arr = new Uint32Array(shared_rw_memory_size);
    for (let j = 0; j < shared_rw_memory_size; j++) {
      arr[shared_rw_memory_size - 1 - j] =
        instance.exports.readSharedRWMemory(j);
    }

    // If we've buffered other content, put a space in between the items
    if (msgStr !== "") {
      msgStr += " ";
    }
    // Then append the value to the message we are creating
    msgStr += fromArray32(arr).toString();
  }
}

class WitnessCalculator {
  constructor(instance, sanityCheck) {
    this.instance = instance;

    this.version = this.instance.exports.getVersion();
    this.n32 = this.instance.exports.getFieldNumLen32();

    this.instance.exports.getRawPrime();
    const arr = new Uint32Array(this.n32);
    for (let i = 0; i < this.n32; i++) {
      arr[this.n32 - 1 - i] = this.instance.exports.readSharedRWMemory(i);
    }
    this.prime = fromArray32(arr);

    this.witnessSize = this.instance.exports.getWitnessSize();

    this.sanityCheck = sanityCheck;
  }

  circom_version() {
    return this.instance.exports.getVersion();
  }

  async _doCalculateWitness(input, sanityCheck) {
    //input is assumed to be a map from signals to arrays of bigints
    this.instance.exports.init(this.sanityCheck || sanityCheck ? 1 : 0);
    const keys = Object.keys(input);
    var input_counter = 0;
    keys.forEach((k) => {
      const h = fnvHash(k);
      const hMSB = parseInt(h.slice(0, 8), 16);
      const hLSB = parseInt(h.slice(8, 16), 16);
      const fArr = flatArray(input[k]);
      let signalSize = this.instance.exports.getInputSignalSize(hMSB, hLSB);
      if (signalSize < 0) {
        throw new Error(`Signal ${k} not found\n`);
      }
      if (fArr.length < signalSize) {
        throw new Error(`Not enough values for input signal ${k}\n`);
      }
      if (fArr.length > signalSize) {
        throw new Error(`Too many values for input signal ${k}\n`);
      }
      for (let i = 0; i < fArr.length; i++) {
        const arrFr = toArray32(BigInt(fArr[i]) % this.prime, this.n32);
        for (let j = 0; j < this.n32; j++) {
          this.instance.exports.writeSharedRWMemory(j, arrFr[this.n32 - 1 - j]);
        }
        try {
          this.instance.exports.setInputSignal(hMSB, hLSB, i);
          input_counter++;
        } catch (err) {
          // console.log(`After adding signal ${i} of ${k}`)
          throw new Error(err);
        }
      }
    });
    if (input_counter < this.instance.exports.getInputSize()) {
      throw new Error(
        `Not all inputs have been set. Only ${input_counter} out of ${this.instance.exports.getInputSize()}`
      );
    }
  }

  async calculateWitness(input, sanityCheck) {
    const w = [];

    await this._doCalculateWitness(input, sanityCheck);

    for (let i = 0; i < this.witnessSize; i++) {
      this.instance.exports.getWitness(i);
      const arr = new Uint32Array(this.n32);
      for (let j = 0; j < this.n32; j++) {
        arr[this.n32 - 1 - j] = this.instance.exports.readSharedRWMemory(j);
      }
      w.push(fromArray32(arr));
    }

    return w;
  }

  async calculateBinWitness(input, sanityCheck) {
    const buff32 = new Uint32Array(this.witnessSize * this.n32);
    const buff = new Uint8Array(buff32.buffer);
    await this._doCalculateWitness(input, sanityCheck);

    for (let i = 0; i < this.witnessSize; i++) {
      this.instance.exports.getWitness(i);
      const pos = i * this.n32;
      for (let j = 0; j < this.n32; j++) {
        buff32[pos + j] = this.instance.exports.readSharedRWMemory(j);
      }
    }

    return buff;
  }

  async calculateWTNSBin(input, sanityCheck) {
    const buff32 = new Uint32Array(this.witnessSize * this.n32 + this.n32 + 11);
    const buff = new Uint8Array(buff32.buffer);
    await this._doCalculateWitness(input, sanityCheck);

    //"wtns"
    buff[0] = "w".charCodeAt(0);
    buff[1] = "t".charCodeAt(0);
    buff[2] = "n".charCodeAt(0);
    buff[3] = "s".charCodeAt(0);

    //version 2
    buff32[1] = 2;

    //number of sections: 2
    buff32[2] = 2;

    //id section 1
    buff32[3] = 1;

    const n8 = this.n32 * 4;
    //id section 1 length in 64bytes
    const idSection1length = 8 + n8;
    const idSection1lengthHex = idSection1length.toString(16);
    buff32[4] = parseInt(idSection1lengthHex.slice(0, 8), 16);
    buff32[5] = parseInt(idSection1lengthHex.slice(8, 16), 16);

    //this.n32
    buff32[6] = n8;

    //prime number
    this.instance.exports.getRawPrime();

    var pos = 7;
    for (let j = 0; j < this.n32; j++) {
      buff32[pos + j] = this.instance.exports.readSharedRWMemory(j);
    }
    pos += this.n32;

    // witness size
    buff32[pos] = this.witnessSize;
    pos++;

    //id section 2
    buff32[pos] = 2;
    pos++;

    // section 2 length
    const idSection2length = n8 * this.witnessSize;
    const idSection2lengthHex = idSection2length.toString(16);
    buff32[pos] = parseInt(idSection2lengthHex.slice(0, 8), 16);
    buff32[pos + 1] = parseInt(idSection2lengthHex.slice(8, 16), 16);

    pos += 2;
    for (let i = 0; i < this.witnessSize; i++) {
      this.instance.exports.getWitness(i);
      for (let j = 0; j < this.n32; j++) {
        buff32[pos + j] = this.instance.exports.readSharedRWMemory(j);
      }
      pos += this.n32;
    }

    return buff;
  }
}

function toArray32(rem, size) {
  const res = []; //new Uint32Array(size); //has no unshift
  const radix = BigInt(0x100000000);
  while (rem) {
    res.unshift(Number(rem % radix));
    rem = rem / radix;
  }
  if (size) {
    var i = size - res.length;
    while (i > 0) {
      res.unshift(0);
      i--;
    }
  }
  return res;
}

function fromArray32(arr) {
  //returns a BigInt
  var res = BigInt(0);
  const radix = BigInt(0x100000000);
  for (let i = 0; i < arr.length; i++) {
    res = res * radix + BigInt(arr[i]);
  }
  return res;
}

function flatArray(a) {
  var res = [];
  fillArray(res, a);
  return res;

  function fillArray(res, a) {
    if (Array.isArray(a)) {
      for (let i = 0; i < a.length; i++) {
        fillArray(res, a[i]);
      }
    } else {
      res.push(a);
    }
  }
}

function fnvHash(str) {
  const uint64_max = BigInt(2) ** BigInt(64);
  let hash = BigInt("0xCBF29CE484222325");
  for (var i = 0; i < str.length; i++) {
    hash ^= BigInt(str[i].charCodeAt());
    hash *= BigInt(0x100000001b3);
    hash %= uint64_max;
  }
  let shash = hash.toString(16);
  let n = 16 - shash.length;
  shash = "0".repeat(n).concat(shash);
  return shash;
}
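For orientation, a minimal usage sketch of the builder API defined above. The file names are illustrative, and the input object is assumed to be the signal map produced by WasmRLNWitnessInput.toBigIntJson(), as in the Node.js example:

import { readFile } from "node:fs/promises";
import { builder } from "./witness_calculator.js";

// Signal map: circuit input names -> (arrays of) decimal strings or bigints.
const inputs = JSON.parse(await readFile("witness_input.json", "utf8"));

// Compile and instantiate the circom-generated rln.wasm, then compute the witness.
const wc = await builder(await readFile("rln.wasm"));
const calculatedWitness = await wc.calculateWitness(inputs, false);
console.log(`witness has ${calculatedWitness.length} field elements`);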
16
rln-wasm/src/lib.rs
Normal file
@@ -0,0 +1,16 @@
#![cfg(target_arch = "wasm32")]

pub mod wasm_rln;
pub mod wasm_utils;

#[cfg(all(feature = "parallel", not(feature = "utils")))]
pub use wasm_bindgen_rayon::init_thread_pool;
#[cfg(not(feature = "utils"))]
pub use wasm_rln::{WasmRLN, WasmRLNProof, WasmRLNProofValues, WasmRLNWitnessInput};
pub use wasm_utils::{ExtendedIdentity, Hasher, Identity, VecWasmFr, WasmFr};

// Import needed so the `#[wasm_bindgen]` attribute below resolves; gated like
// the function so it only exists when the `panic_hook` feature is enabled.
#[cfg(feature = "panic_hook")]
use wasm_bindgen::prelude::wasm_bindgen;

#[cfg(feature = "panic_hook")]
#[wasm_bindgen(js_name = initPanicHook)]
pub fn init_panic_hook() {
    console_error_panic_hook::set_once();
}
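From the JS side, the two optional exports above would be initialized roughly as follows. A hedged sketch: initPanicHook only exists when the crate is built with the panic_hook feature, and initThreadPool is the wasm-bindgen-rayon entry point re-exported under the parallel feature:

import * as rlnWasm from "@waku/zerokit-rln-wasm";

// Surface Rust panics as readable console errors (panic_hook feature).
rlnWasm.initPanicHook();

// Spin up the rayon worker pool before any parallel proving work
// (parallel feature; requires a cross-origin-isolated page in browsers).
await rlnWasm.initThreadPool(navigator.hardwareConcurrency);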
253
rln-wasm/src/wasm_rln.rs
Normal file
@@ -0,0 +1,253 @@
#![cfg(target_arch = "wasm32")]
#![cfg(not(feature = "utils"))]

use js_sys::{BigInt as JsBigInt, Object, Uint8Array};
use num_bigint::BigInt;
use rln::prelude::*;
use serde::Serialize;
use wasm_bindgen::prelude::*;

use crate::wasm_utils::{VecWasmFr, WasmFr};

#[wasm_bindgen]
pub struct WasmRLN(RLN);

#[wasm_bindgen]
impl WasmRLN {
    #[wasm_bindgen(constructor)]
    pub fn new(zkey_data: &Uint8Array) -> Result<WasmRLN, String> {
        let rln = RLN::new_with_params(zkey_data.to_vec()).map_err(|err| err.to_string())?;
        Ok(WasmRLN(rln))
    }

    #[wasm_bindgen(js_name = generateRLNProofWithWitness)]
    pub fn generate_rln_proof_with_witness(
        &self,
        calculated_witness: Vec<JsBigInt>,
        witness: &WasmRLNWitnessInput,
    ) -> Result<WasmRLNProof, String> {
        let calculated_witness_bigint: Vec<BigInt> = calculated_witness
            .iter()
            .map(|js_bigint| {
                js_bigint
                    .to_string(10)
                    .ok()
                    .and_then(|js_str| js_str.as_string())
                    .ok_or_else(|| "Failed to convert JsBigInt to string".to_string())
                    .and_then(|str_val| {
                        str_val
                            .parse::<BigInt>()
                            .map_err(|err| format!("Failed to parse BigInt: {}", err))
                    })
            })
            .collect::<Result<Vec<_>, _>>()?;

        let (proof, proof_values) = self
            .0
            .generate_rln_proof_with_witness(calculated_witness_bigint, &witness.0)
            .map_err(|err| err.to_string())?;

        let rln_proof = RLNProof {
            proof_values,
            proof,
        };

        Ok(WasmRLNProof(rln_proof))
    }

    #[wasm_bindgen(js_name = verifyWithRoots)]
    pub fn verify_with_roots(
        &self,
        rln_proof: &WasmRLNProof,
        roots: &VecWasmFr,
        x: &WasmFr,
    ) -> Result<bool, String> {
        let roots_fr: Vec<Fr> = (0..roots.length())
            .filter_map(|i| roots.get(i))
            .map(|root| *root)
            .collect();

        self.0
            .verify_with_roots(&rln_proof.0.proof, &rln_proof.0.proof_values, x, &roots_fr)
            .map_err(|err| err.to_string())
    }
}

#[wasm_bindgen]
pub struct WasmRLNProof(RLNProof);

#[wasm_bindgen]
impl WasmRLNProof {
    #[wasm_bindgen(js_name = getValues)]
    pub fn get_values(&self) -> WasmRLNProofValues {
        WasmRLNProofValues(self.0.proof_values)
    }

    #[wasm_bindgen(js_name = toBytesLE)]
    pub fn to_bytes_le(&self) -> Result<Uint8Array, String> {
        let bytes = rln_proof_to_bytes_le(&self.0).map_err(|err| err.to_string())?;
        Ok(Uint8Array::from(&bytes[..]))
    }

    #[wasm_bindgen(js_name = toBytesBE)]
    pub fn to_bytes_be(&self) -> Result<Uint8Array, String> {
        let bytes = rln_proof_to_bytes_be(&self.0).map_err(|err| err.to_string())?;
        Ok(Uint8Array::from(&bytes[..]))
    }

    #[wasm_bindgen(js_name = fromBytesLE)]
    pub fn from_bytes_le(bytes: &Uint8Array) -> Result<WasmRLNProof, String> {
        let bytes_vec = bytes.to_vec();
        let (proof, _) = bytes_le_to_rln_proof(&bytes_vec).map_err(|err| err.to_string())?;
        Ok(WasmRLNProof(proof))
    }

    #[wasm_bindgen(js_name = fromBytesBE)]
    pub fn from_bytes_be(bytes: &Uint8Array) -> Result<WasmRLNProof, String> {
        let bytes_vec = bytes.to_vec();
        let (proof, _) = bytes_be_to_rln_proof(&bytes_vec).map_err(|err| err.to_string())?;
        Ok(WasmRLNProof(proof))
    }
}

#[wasm_bindgen]
pub struct WasmRLNProofValues(RLNProofValues);

#[wasm_bindgen]
impl WasmRLNProofValues {
    #[wasm_bindgen(getter)]
    pub fn y(&self) -> WasmFr {
        WasmFr::from(self.0.y)
    }

    #[wasm_bindgen(getter)]
    pub fn nullifier(&self) -> WasmFr {
        WasmFr::from(self.0.nullifier)
    }

    #[wasm_bindgen(getter)]
    pub fn root(&self) -> WasmFr {
        WasmFr::from(self.0.root)
    }

    #[wasm_bindgen(getter)]
    pub fn x(&self) -> WasmFr {
        WasmFr::from(self.0.x)
    }

    #[wasm_bindgen(getter, js_name = externalNullifier)]
    pub fn external_nullifier(&self) -> WasmFr {
        WasmFr::from(self.0.external_nullifier)
    }

    #[wasm_bindgen(js_name = toBytesLE)]
    pub fn to_bytes_le(&self) -> Uint8Array {
        Uint8Array::from(&rln_proof_values_to_bytes_le(&self.0)[..])
    }

    #[wasm_bindgen(js_name = toBytesBE)]
    pub fn to_bytes_be(&self) -> Uint8Array {
        Uint8Array::from(&rln_proof_values_to_bytes_be(&self.0)[..])
    }

    #[wasm_bindgen(js_name = fromBytesLE)]
    pub fn from_bytes_le(bytes: &Uint8Array) -> Result<WasmRLNProofValues, String> {
        let bytes_vec = bytes.to_vec();
        let (proof_values, _) =
            bytes_le_to_rln_proof_values(&bytes_vec).map_err(|err| err.to_string())?;
        Ok(WasmRLNProofValues(proof_values))
    }

    #[wasm_bindgen(js_name = fromBytesBE)]
    pub fn from_bytes_be(bytes: &Uint8Array) -> Result<WasmRLNProofValues, String> {
        let bytes_vec = bytes.to_vec();
        let (proof_values, _) =
            bytes_be_to_rln_proof_values(&bytes_vec).map_err(|err| err.to_string())?;
        Ok(WasmRLNProofValues(proof_values))
    }

    #[wasm_bindgen(js_name = recoverIdSecret)]
    pub fn recover_id_secret(
        proof_values_1: &WasmRLNProofValues,
        proof_values_2: &WasmRLNProofValues,
    ) -> Result<WasmFr, String> {
        let recovered_identity_secret = recover_id_secret(&proof_values_1.0, &proof_values_2.0)
            .map_err(|err| err.to_string())?;

        Ok(WasmFr::from(*recovered_identity_secret))
    }
}

#[wasm_bindgen]
pub struct WasmRLNWitnessInput(RLNWitnessInput);

#[wasm_bindgen]
impl WasmRLNWitnessInput {
    #[wasm_bindgen(constructor)]
    pub fn new(
        identity_secret: &WasmFr,
        user_message_limit: &WasmFr,
        message_id: &WasmFr,
        path_elements: &VecWasmFr,
        identity_path_index: &Uint8Array,
        x: &WasmFr,
        external_nullifier: &WasmFr,
    ) -> Result<WasmRLNWitnessInput, String> {
        let mut identity_secret_fr = identity_secret.inner();
        let path_elements: Vec<Fr> = path_elements.inner();
        let identity_path_index: Vec<u8> = identity_path_index.to_vec();

        let witness = RLNWitnessInput::new(
            IdSecret::from(&mut identity_secret_fr),
            user_message_limit.inner(),
            message_id.inner(),
            path_elements,
            identity_path_index,
            x.inner(),
            external_nullifier.inner(),
        )
        .map_err(|err| err.to_string())?;

        Ok(WasmRLNWitnessInput(witness))
    }

    #[wasm_bindgen(js_name = toBigIntJson)]
    pub fn to_bigint_json(&self) -> Result<Object, String> {
        let bigint_json = rln_witness_to_bigint_json(&self.0).map_err(|err| err.to_string())?;

        let serializer = serde_wasm_bindgen::Serializer::json_compatible();
        let js_value = bigint_json
            .serialize(&serializer)
            .map_err(|err| err.to_string())?;

        js_value
            .dyn_into::<Object>()
            .map_err(|err| format!("{:#?}", err))
    }

    #[wasm_bindgen(js_name = toBytesLE)]
    pub fn to_bytes_le(&self) -> Result<Uint8Array, String> {
        let bytes = rln_witness_to_bytes_le(&self.0).map_err(|err| err.to_string())?;
        Ok(Uint8Array::from(&bytes[..]))
    }

    #[wasm_bindgen(js_name = toBytesBE)]
    pub fn to_bytes_be(&self) -> Result<Uint8Array, String> {
        let bytes = rln_witness_to_bytes_be(&self.0).map_err(|err| err.to_string())?;
        Ok(Uint8Array::from(&bytes[..]))
    }

    #[wasm_bindgen(js_name = fromBytesLE)]
    pub fn from_bytes_le(bytes: &Uint8Array) -> Result<WasmRLNWitnessInput, String> {
        let bytes_vec = bytes.to_vec();
        let (witness, _) = bytes_le_to_rln_witness(&bytes_vec).map_err(|err| err.to_string())?;
        Ok(WasmRLNWitnessInput(witness))
    }

    #[wasm_bindgen(js_name = fromBytesBE)]
    pub fn from_bytes_be(bytes: &Uint8Array) -> Result<WasmRLNWitnessInput, String> {
        let bytes_vec = bytes.to_vec();
        let (witness, _) = bytes_be_to_rln_witness(&bytes_vec).map_err(|err| err.to_string())?;
        Ok(WasmRLNWitnessInput(witness))
    }
}
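A short round-trip sketch over the serialization surface above, reusing the variables from the Node.js example (rlnWasm, rlnInstance, rln_proof, roots, x); the re-verification at the end is illustrative:

// Serialize the proof for transport, restore it, and check it still verifies.
const proofBytes = rln_proof.toBytesLE();
const restoredProof = rlnWasm.WasmRLNProof.fromBytesLE(proofBytes);
console.log("restored proof valid:", rlnInstance.verifyWithRoots(restoredProof, roots, x));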
420
rln-wasm/src/wasm_utils.rs
Normal file
@@ -0,0 +1,420 @@
#![cfg(target_arch = "wasm32")]

use std::ops::Deref;

use js_sys::Uint8Array;
use rln::prelude::*;
use wasm_bindgen::prelude::*;

// WasmFr

#[wasm_bindgen]
#[derive(Debug, Clone, Copy, PartialEq, Default)]
pub struct WasmFr(Fr);

impl From<Fr> for WasmFr {
    fn from(fr: Fr) -> Self {
        Self(fr)
    }
}

impl Deref for WasmFr {
    type Target = Fr;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

#[wasm_bindgen]
impl WasmFr {
    #[wasm_bindgen(js_name = zero)]
    pub fn zero() -> Self {
        Self(Fr::from(0u32))
    }

    #[wasm_bindgen(js_name = one)]
    pub fn one() -> Self {
        Self(Fr::from(1u32))
    }

    #[wasm_bindgen(js_name = fromUint)]
    pub fn from_uint(value: u32) -> Self {
        Self(Fr::from(value))
    }

    #[wasm_bindgen(js_name = fromBytesLE)]
    pub fn from_bytes_le(bytes: &Uint8Array) -> Result<Self, String> {
        let bytes_vec = bytes.to_vec();
        let (fr, _) = bytes_le_to_fr(&bytes_vec).map_err(|err| err.to_string())?;
        Ok(Self(fr))
    }

    #[wasm_bindgen(js_name = fromBytesBE)]
    pub fn from_bytes_be(bytes: &Uint8Array) -> Result<Self, String> {
        let bytes_vec = bytes.to_vec();
        let (fr, _) = bytes_be_to_fr(&bytes_vec).map_err(|err| err.to_string())?;
        Ok(Self(fr))
    }

    #[wasm_bindgen(js_name = toBytesLE)]
    pub fn to_bytes_le(&self) -> Uint8Array {
        let bytes = fr_to_bytes_le(&self.0);
        Uint8Array::from(&bytes[..])
    }

    #[wasm_bindgen(js_name = toBytesBE)]
    pub fn to_bytes_be(&self) -> Uint8Array {
        let bytes = fr_to_bytes_be(&self.0);
        Uint8Array::from(&bytes[..])
    }

    #[wasm_bindgen(js_name = debug)]
    pub fn debug(&self) -> String {
        format!("{:?}", self.0)
    }
}

impl WasmFr {
    pub fn inner(&self) -> Fr {
        self.0
    }
}

// VecWasmFr

#[wasm_bindgen]
#[derive(Debug, Clone, PartialEq, Default)]
pub struct VecWasmFr(Vec<Fr>);

#[wasm_bindgen]
impl VecWasmFr {
    #[wasm_bindgen(constructor)]
    pub fn new() -> Self {
        Self(Vec::new())
    }

    #[wasm_bindgen(js_name = fromBytesLE)]
    pub fn from_bytes_le(bytes: &Uint8Array) -> Result<VecWasmFr, String> {
        let bytes_vec = bytes.to_vec();
        bytes_le_to_vec_fr(&bytes_vec)
            .map(|(vec_fr, _)| VecWasmFr(vec_fr))
            .map_err(|err| err.to_string())
    }

    #[wasm_bindgen(js_name = fromBytesBE)]
    pub fn from_bytes_be(bytes: &Uint8Array) -> Result<VecWasmFr, String> {
        let bytes_vec = bytes.to_vec();
        bytes_be_to_vec_fr(&bytes_vec)
            .map(|(vec_fr, _)| VecWasmFr(vec_fr))
            .map_err(|err| err.to_string())
    }

    #[wasm_bindgen(js_name = toBytesLE)]
    pub fn to_bytes_le(&self) -> Uint8Array {
        let bytes = vec_fr_to_bytes_le(&self.0);
        Uint8Array::from(&bytes[..])
    }

    #[wasm_bindgen(js_name = toBytesBE)]
    pub fn to_bytes_be(&self) -> Uint8Array {
        let bytes = vec_fr_to_bytes_be(&self.0);
        Uint8Array::from(&bytes[..])
    }

    #[wasm_bindgen(js_name = get)]
    pub fn get(&self, index: usize) -> Option<WasmFr> {
        self.0.get(index).map(|&fr| WasmFr(fr))
    }

    #[wasm_bindgen(js_name = length)]
    pub fn length(&self) -> usize {
        self.0.len()
    }

    #[wasm_bindgen(js_name = push)]
    pub fn push(&mut self, element: &WasmFr) {
        self.0.push(element.0);
    }

    #[wasm_bindgen(js_name = debug)]
    pub fn debug(&self) -> String {
        format!("{:?}", self.0)
    }
}

impl VecWasmFr {
    pub fn inner(&self) -> Vec<Fr> {
        self.0.clone()
    }
}

// Uint8Array

#[wasm_bindgen]
pub struct Uint8ArrayUtils;

#[wasm_bindgen]
impl Uint8ArrayUtils {
    #[wasm_bindgen(js_name = toBytesLE)]
    pub fn to_bytes_le(input: &Uint8Array) -> Uint8Array {
        let input_vec = input.to_vec();
        let bytes = vec_u8_to_bytes_le(&input_vec);
        Uint8Array::from(&bytes[..])
    }

    #[wasm_bindgen(js_name = toBytesBE)]
    pub fn to_bytes_be(input: &Uint8Array) -> Uint8Array {
        let input_vec = input.to_vec();
        let bytes = vec_u8_to_bytes_be(&input_vec);
        Uint8Array::from(&bytes[..])
    }

    #[wasm_bindgen(js_name = fromBytesLE)]
    pub fn from_bytes_le(bytes: &Uint8Array) -> Result<Uint8Array, String> {
        let bytes_vec = bytes.to_vec();
        bytes_le_to_vec_u8(&bytes_vec)
            .map(|(vec_u8, _)| Uint8Array::from(&vec_u8[..]))
            .map_err(|err| err.to_string())
    }

    #[wasm_bindgen(js_name = fromBytesBE)]
    pub fn from_bytes_be(bytes: &Uint8Array) -> Result<Uint8Array, String> {
        let bytes_vec = bytes.to_vec();
        bytes_be_to_vec_u8(&bytes_vec)
            .map(|(vec_u8, _)| Uint8Array::from(&vec_u8[..]))
            .map_err(|err| err.to_string())
    }
}

// Utility APIs

#[wasm_bindgen]
pub struct Hasher;

#[wasm_bindgen]
impl Hasher {
    #[wasm_bindgen(js_name = hashToFieldLE)]
    pub fn hash_to_field_le(input: &Uint8Array) -> Result<WasmFr, String> {
        hash_to_field_le(&input.to_vec())
            .map(WasmFr)
            .map_err(|err| err.to_string())
    }

    #[wasm_bindgen(js_name = hashToFieldBE)]
    pub fn hash_to_field_be(input: &Uint8Array) -> Result<WasmFr, String> {
        hash_to_field_be(&input.to_vec())
            .map(WasmFr)
            .map_err(|err| err.to_string())
    }

    #[wasm_bindgen(js_name = poseidonHashPair)]
    pub fn poseidon_hash_pair(a: &WasmFr, b: &WasmFr) -> Result<WasmFr, String> {
        poseidon_hash(&[a.0, b.0])
            .map(WasmFr)
            .map_err(|err| err.to_string())
    }
}

#[wasm_bindgen]
pub struct Identity {
    identity_secret: Fr,
    id_commitment: Fr,
}

#[wasm_bindgen]
impl Identity {
    #[wasm_bindgen(js_name = generate)]
    pub fn generate() -> Result<Identity, String> {
        let (identity_secret, id_commitment) = keygen().map_err(|err| err.to_string())?;
        Ok(Identity {
            identity_secret: *identity_secret,
            id_commitment,
        })
    }

    #[wasm_bindgen(js_name = generateSeeded)]
    pub fn generate_seeded(seed: &Uint8Array) -> Result<Identity, String> {
        let seed_vec = seed.to_vec();
        let (identity_secret, id_commitment) =
            seeded_keygen(&seed_vec).map_err(|err| err.to_string())?;
        Ok(Identity {
            identity_secret,
            id_commitment,
        })
    }

    #[wasm_bindgen(js_name = getSecretHash)]
    pub fn get_secret_hash(&self) -> WasmFr {
        WasmFr(self.identity_secret)
    }

    #[wasm_bindgen(js_name = getCommitment)]
    pub fn get_commitment(&self) -> WasmFr {
        WasmFr(self.id_commitment)
    }

    #[wasm_bindgen(js_name = toArray)]
    pub fn to_array(&self) -> VecWasmFr {
        VecWasmFr(vec![self.identity_secret, self.id_commitment])
    }

    #[wasm_bindgen(js_name = toBytesLE)]
    pub fn to_bytes_le(&self) -> Uint8Array {
        let vec_fr = vec![self.identity_secret, self.id_commitment];
        let bytes = vec_fr_to_bytes_le(&vec_fr);
        Uint8Array::from(&bytes[..])
    }

    #[wasm_bindgen(js_name = toBytesBE)]
    pub fn to_bytes_be(&self) -> Uint8Array {
        let vec_fr = vec![self.identity_secret, self.id_commitment];
        let bytes = vec_fr_to_bytes_be(&vec_fr);
        Uint8Array::from(&bytes[..])
    }

    #[wasm_bindgen(js_name = fromBytesLE)]
    pub fn from_bytes_le(bytes: &Uint8Array) -> Result<Identity, String> {
        let bytes_vec = bytes.to_vec();
        let (vec_fr, _) = bytes_le_to_vec_fr(&bytes_vec).map_err(|err| err.to_string())?;
        if vec_fr.len() != 2 {
            return Err(format!("Expected 2 elements, got {}", vec_fr.len()));
        }
        Ok(Identity {
            identity_secret: vec_fr[0],
            id_commitment: vec_fr[1],
        })
    }

    #[wasm_bindgen(js_name = fromBytesBE)]
    pub fn from_bytes_be(bytes: &Uint8Array) -> Result<Identity, String> {
        let bytes_vec = bytes.to_vec();
        let (vec_fr, _) = bytes_be_to_vec_fr(&bytes_vec).map_err(|err| err.to_string())?;
        if vec_fr.len() != 2 {
            return Err(format!("Expected 2 elements, got {}", vec_fr.len()));
        }
        Ok(Identity {
            identity_secret: vec_fr[0],
            id_commitment: vec_fr[1],
        })
    }
}

#[wasm_bindgen]
pub struct ExtendedIdentity {
    identity_trapdoor: Fr,
    identity_nullifier: Fr,
    identity_secret: Fr,
    id_commitment: Fr,
}

#[wasm_bindgen]
impl ExtendedIdentity {
    #[wasm_bindgen(js_name = generate)]
    pub fn generate() -> Result<ExtendedIdentity, String> {
        let (identity_trapdoor, identity_nullifier, identity_secret, id_commitment) =
            extended_keygen().map_err(|err| err.to_string())?;
        Ok(ExtendedIdentity {
            identity_trapdoor,
            identity_nullifier,
            identity_secret,
            id_commitment,
        })
    }

    #[wasm_bindgen(js_name = generateSeeded)]
    pub fn generate_seeded(seed: &Uint8Array) -> Result<ExtendedIdentity, String> {
        let seed_vec = seed.to_vec();
        let (identity_trapdoor, identity_nullifier, identity_secret, id_commitment) =
            extended_seeded_keygen(&seed_vec).map_err(|err| err.to_string())?;
        Ok(ExtendedIdentity {
            identity_trapdoor,
            identity_nullifier,
            identity_secret,
            id_commitment,
        })
    }

    #[wasm_bindgen(js_name = getTrapdoor)]
    pub fn get_trapdoor(&self) -> WasmFr {
        WasmFr(self.identity_trapdoor)
    }

    #[wasm_bindgen(js_name = getNullifier)]
    pub fn get_nullifier(&self) -> WasmFr {
        WasmFr(self.identity_nullifier)
    }

    #[wasm_bindgen(js_name = getSecretHash)]
    pub fn get_secret_hash(&self) -> WasmFr {
        WasmFr(self.identity_secret)
    }

    #[wasm_bindgen(js_name = getCommitment)]
    pub fn get_commitment(&self) -> WasmFr {
        WasmFr(self.id_commitment)
    }

    #[wasm_bindgen(js_name = toArray)]
    pub fn to_array(&self) -> VecWasmFr {
        VecWasmFr(vec![
            self.identity_trapdoor,
            self.identity_nullifier,
            self.identity_secret,
            self.id_commitment,
        ])
    }

    #[wasm_bindgen(js_name = toBytesLE)]
    pub fn to_bytes_le(&self) -> Uint8Array {
        let vec_fr = vec![
            self.identity_trapdoor,
            self.identity_nullifier,
            self.identity_secret,
            self.id_commitment,
        ];
        let bytes = vec_fr_to_bytes_le(&vec_fr);
        Uint8Array::from(&bytes[..])
    }

    #[wasm_bindgen(js_name = toBytesBE)]
    pub fn to_bytes_be(&self) -> Uint8Array {
        let vec_fr = vec![
            self.identity_trapdoor,
            self.identity_nullifier,
            self.identity_secret,
            self.id_commitment,
        ];
        let bytes = vec_fr_to_bytes_be(&vec_fr);
        Uint8Array::from(&bytes[..])
    }

    #[wasm_bindgen(js_name = fromBytesLE)]
    pub fn from_bytes_le(bytes: &Uint8Array) -> Result<ExtendedIdentity, String> {
        let bytes_vec = bytes.to_vec();
        let (vec_fr, _) = bytes_le_to_vec_fr(&bytes_vec).map_err(|err| err.to_string())?;
        if vec_fr.len() != 4 {
            return Err(format!("Expected 4 elements, got {}", vec_fr.len()));
        }
        Ok(ExtendedIdentity {
            identity_trapdoor: vec_fr[0],
            identity_nullifier: vec_fr[1],
            identity_secret: vec_fr[2],
            id_commitment: vec_fr[3],
        })
    }

    #[wasm_bindgen(js_name = fromBytesBE)]
    pub fn from_bytes_be(bytes: &Uint8Array) -> Result<ExtendedIdentity, String> {
        let bytes_vec = bytes.to_vec();
        let (vec_fr, _) = bytes_be_to_vec_fr(&bytes_vec).map_err(|err| err.to_string())?;
        if vec_fr.len() != 4 {
            return Err(format!("Expected 4 elements, got {}", vec_fr.len()));
        }
        Ok(ExtendedIdentity {
            identity_trapdoor: vec_fr[0],
            identity_nullifier: vec_fr[1],
            identity_secret: vec_fr[2],
            id_commitment: vec_fr[3],
        })
    }
}
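A small persistence sketch for the identity helpers above, from the JS side (variable names are illustrative; the byte layout is the vec_fr encoding of [identity_secret, id_commitment]):

// Generate an identity, save it as little-endian bytes, and restore it later.
const identity = rlnWasm.Identity.generate();
const savedBytes = identity.toBytesLE();
const restored = rlnWasm.Identity.fromBytesLE(savedBytes);
console.log("commitment:", restored.getCommitment().debug());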
247
rln-wasm/tests/browser.rs
Normal file
@@ -0,0 +1,247 @@
#![cfg(target_arch = "wasm32")]
#![cfg(not(feature = "utils"))]

#[cfg(test)]
mod test {
    use js_sys::{BigInt as JsBigInt, Date, Object, Uint8Array};
    use rln::prelude::*;
    use rln_wasm::{
        Hasher, Identity, VecWasmFr, WasmFr, WasmRLN, WasmRLNProof, WasmRLNWitnessInput,
    };
    use wasm_bindgen::{prelude::wasm_bindgen, JsValue};
    use wasm_bindgen_test::{console_log, wasm_bindgen_test, wasm_bindgen_test_configure};
    use zerokit_utils::merkle_tree::{
        OptimalMerkleProof, OptimalMerkleTree, ZerokitMerkleProof, ZerokitMerkleTree,
    };
    #[cfg(feature = "parallel")]
    use {rln_wasm::init_thread_pool, wasm_bindgen_futures::JsFuture, web_sys::window};

    #[wasm_bindgen(inline_js = r#"
    export function isThreadpoolSupported() {
        return typeof SharedArrayBuffer !== 'undefined' &&
            typeof Atomics !== 'undefined' &&
            typeof crossOriginIsolated !== 'undefined' &&
            crossOriginIsolated;
    }

    export function initWitnessCalculator(jsCode) {
        const processedCode = jsCode
            .replace(/export\s+async\s+function\s+builder/, 'async function builder')
            .replace(/export\s*\{\s*builder\s*\};?/g, '');

        const moduleFunc = new Function(processedCode + '\nreturn { builder };');
        const witnessCalculatorModule = moduleFunc();

        window.witnessCalculatorBuilder = witnessCalculatorModule.builder;

        if (typeof window.witnessCalculatorBuilder !== 'function') {
            return false;
        }
        return true;
    }

    export function readFile(data) {
        return new Uint8Array(data);
    }

    export async function calculateWitness(circom_data, inputs) {
        const wasmBuffer = circom_data instanceof Uint8Array ? circom_data : new Uint8Array(circom_data);
        const witnessCalculator = await window.witnessCalculatorBuilder(wasmBuffer);
        const calculatedWitness = await witnessCalculator.calculateWitness(inputs, false);
        return JSON.stringify(calculatedWitness, (key, value) =>
            typeof value === "bigint" ? value.toString() : value
        );
    }
    "#)]
    extern "C" {
        #[wasm_bindgen(catch)]
        fn isThreadpoolSupported() -> Result<bool, JsValue>;

        #[wasm_bindgen(catch)]
        fn initWitnessCalculator(js: &str) -> Result<bool, JsValue>;

        #[wasm_bindgen(catch)]
        fn readFile(data: &[u8]) -> Result<Uint8Array, JsValue>;

        #[wasm_bindgen(catch)]
        async fn calculateWitness(circom_data: &[u8], inputs: Object) -> Result<JsValue, JsValue>;
    }

    const WITNESS_CALCULATOR_JS: &str = include_str!("../resources/witness_calculator.js");

    const ARKZKEY_BYTES: &[u8] =
        include_bytes!("../../rln/resources/tree_depth_20/rln_final.arkzkey");

    const CIRCOM_BYTES: &[u8] = include_bytes!("../../rln/resources/tree_depth_20/rln.wasm");

    wasm_bindgen_test_configure!(run_in_browser);

    #[wasm_bindgen_test]
    pub async fn rln_wasm_benchmark() {
        // Check if thread pool is supported
        #[cfg(feature = "parallel")]
        if !isThreadpoolSupported().unwrap() {
            panic!("Thread pool is NOT supported");
        } else {
            // Initialize thread pool
            let cpu_count = window().unwrap().navigator().hardware_concurrency() as usize;
            JsFuture::from(init_thread_pool(cpu_count)).await.unwrap();
        }

        // Initialize witness calculator
        initWitnessCalculator(WITNESS_CALCULATOR_JS).unwrap();

        let mut results = String::from("\nbenchmarks:\n");
        let iterations = 10;

        let zkey = readFile(ARKZKEY_BYTES).unwrap();

        // Benchmark RLN instance creation
        let start_rln_new = Date::now();
        for _ in 0..iterations {
            let _ = WasmRLN::new(&zkey).unwrap();
        }
        let rln_new_result = Date::now() - start_rln_new;

        // Create RLN instance for other benchmarks
        let rln_instance = WasmRLN::new(&zkey).unwrap();
        let mut tree: OptimalMerkleTree<PoseidonHash> =
            OptimalMerkleTree::default(DEFAULT_TREE_DEPTH).unwrap();

        // Benchmark generate identity
        let start_identity_gen = Date::now();
        for _ in 0..iterations {
            let _ = Identity::generate().unwrap();
        }
        let identity_gen_result = Date::now() - start_identity_gen;

        // Generate identity for other benchmarks
        let identity_pair = Identity::generate().unwrap();
        let identity_secret = identity_pair.get_secret_hash();
        let id_commitment = identity_pair.get_commitment();

        let epoch = Hasher::hash_to_field_le(&Uint8Array::from(b"test-epoch" as &[u8])).unwrap();
        let rln_identifier =
            Hasher::hash_to_field_le(&Uint8Array::from(b"test-rln-identifier" as &[u8])).unwrap();
        let external_nullifier = Hasher::poseidon_hash_pair(&epoch, &rln_identifier).unwrap();

        let identity_index = tree.leaves_set();

        let user_message_limit = WasmFr::from_uint(100);

        let rate_commitment =
            Hasher::poseidon_hash_pair(&id_commitment, &user_message_limit).unwrap();
        tree.update_next(*rate_commitment).unwrap();

        let message_id = WasmFr::from_uint(0);
        let signal: [u8; 32] = [0; 32];
        let x = Hasher::hash_to_field_le(&Uint8Array::from(&signal[..])).unwrap();

        let merkle_proof: OptimalMerkleProof<PoseidonHash> = tree.proof(identity_index).unwrap();

        let mut path_elements = VecWasmFr::new();
        for path_element in merkle_proof.get_path_elements() {
            path_elements.push(&WasmFr::from(path_element));
        }
        let path_index = Uint8Array::from(&merkle_proof.get_path_index()[..]);

        let witness = WasmRLNWitnessInput::new(
            &identity_secret,
            &user_message_limit,
            &message_id,
            &path_elements,
            &path_index,
            &x,
            &external_nullifier,
        )
        .unwrap();

        let bigint_json = witness.to_bigint_json().unwrap();

        // Benchmark witness calculation
        let start_calculate_witness = Date::now();
        for _ in 0..iterations {
            let _ = calculateWitness(CIRCOM_BYTES, bigint_json.clone())
                .await
                .unwrap();
        }
        let calculate_witness_result = Date::now() - start_calculate_witness;

        // Calculate witness for other benchmarks
        let calculated_witness_str = calculateWitness(CIRCOM_BYTES, bigint_json.clone())
            .await
            .unwrap()
            .as_string()
            .unwrap();
        let calculated_witness_vec_str: Vec<String> =
            serde_json::from_str(&calculated_witness_str).unwrap();
        let calculated_witness: Vec<JsBigInt> = calculated_witness_vec_str
            .iter()
            .map(|x| JsBigInt::new(&x.into()).unwrap())
            .collect();

        // Benchmark proof generation with witness
        let start_generate_rln_proof_with_witness = Date::now();
        for _ in 0..iterations {
            let _ = rln_instance
                .generate_rln_proof_with_witness(calculated_witness.clone(), &witness)
                .unwrap();
        }
        let generate_rln_proof_with_witness_result =
            Date::now() - start_generate_rln_proof_with_witness;

        // Generate proof with witness for other benchmarks
        let proof: WasmRLNProof = rln_instance
            .generate_rln_proof_with_witness(calculated_witness, &witness)
            .unwrap();

        let root = WasmFr::from(tree.root());
        let mut roots = VecWasmFr::new();
        roots.push(&root);

        // Benchmark proof verification with the root
        let start_verify_with_roots = Date::now();
        for _ in 0..iterations {
            let _ = rln_instance.verify_with_roots(&proof, &roots, &x).unwrap();
        }
        let verify_with_roots_result = Date::now() - start_verify_with_roots;

        // Verify proof with the root for other benchmarks
        let is_proof_valid = rln_instance.verify_with_roots(&proof, &roots, &x).unwrap();
        assert!(is_proof_valid, "verification failed");

        // Format and display the benchmark results
        let format_duration = |duration_ms: f64| -> String {
            let avg_ms = duration_ms / (iterations as f64);
            if avg_ms >= 1000.0 {
                format!("{:.3} s", avg_ms / 1000.0)
            } else {
                format!("{:.3} ms", avg_ms)
            }
        };

        results.push_str(&format!(
            "RLN instance creation: {}\n",
            format_duration(rln_new_result)
        ));
        results.push_str(&format!(
            "Identity generation: {}\n",
            format_duration(identity_gen_result)
        ));
        results.push_str(&format!(
            "Witness calculation: {}\n",
            format_duration(calculate_witness_result)
        ));
        results.push_str(&format!(
            "Proof generation with witness: {}\n",
            format_duration(generate_rln_proof_with_witness_result)
        ));
        results.push_str(&format!(
            "Proof verification with roots: {}\n",
            format_duration(verify_with_roots_result)
        ));

        // Log the results
        console_log!("{results}");
    }
}
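A practical note on the isThreadpoolSupported() check above: browsers only set crossOriginIsolated (and hence expose SharedArrayBuffer and Atomics) when the page is served with the Cross-Origin-Opener-Policy: same-origin and Cross-Origin-Embedder-Policy: require-corp response headers, so whatever server drives this browser test must send both for the parallel path to run.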
233
rln-wasm/tests/node.rs
Normal file
@@ -0,0 +1,233 @@
#![cfg(target_arch = "wasm32")]
#![cfg(not(feature = "utils"))]

#[cfg(test)]
mod test {
    use js_sys::{BigInt as JsBigInt, Date, Object, Uint8Array};
    use rln::prelude::*;
    use rln_wasm::{
        Hasher, Identity, VecWasmFr, WasmFr, WasmRLN, WasmRLNProof, WasmRLNWitnessInput,
    };
    use wasm_bindgen::{prelude::wasm_bindgen, JsValue};
    use wasm_bindgen_test::{console_log, wasm_bindgen_test};
    use zerokit_utils::merkle_tree::{
        OptimalMerkleProof, OptimalMerkleTree, ZerokitMerkleProof, ZerokitMerkleTree,
    };

    #[wasm_bindgen(inline_js = r#"
    const fs = require("fs");

    let witnessCalculatorModule = null;

    module.exports = {
        initWitnessCalculator: function(code) {
            const processedCode = code
                .replace(/export\s+async\s+function\s+builder/, 'async function builder')
                .replace(/export\s*\{\s*builder\s*\};?/g, '');

            const moduleFunc = new Function(processedCode + '\nreturn { builder };');
            witnessCalculatorModule = moduleFunc();

            if (typeof witnessCalculatorModule.builder !== 'function') {
                return false;
            }
            return true;
        },

        readFile: function (path) {
            return fs.readFileSync(path);
        },

        calculateWitness: async function (circom_path, inputs) {
            const wasmFile = fs.readFileSync(circom_path);
            const wasmFileBuffer = wasmFile.buffer.slice(
                wasmFile.byteOffset,
                wasmFile.byteOffset + wasmFile.byteLength
            );
            const witnessCalculator = await witnessCalculatorModule.builder(wasmFileBuffer);
            const calculatedWitness = await witnessCalculator.calculateWitness(
                inputs,
                false
            );
            return JSON.stringify(calculatedWitness, (key, value) =>
                typeof value === "bigint" ? value.toString() : value
            );
        },
    };
    "#)]
    extern "C" {
        #[wasm_bindgen(catch)]
        fn initWitnessCalculator(code: &str) -> Result<bool, JsValue>;

        #[wasm_bindgen(catch)]
        fn readFile(path: &str) -> Result<Uint8Array, JsValue>;

        #[wasm_bindgen(catch)]
        async fn calculateWitness(circom_path: &str, input: Object) -> Result<JsValue, JsValue>;
    }

    const WITNESS_CALCULATOR_JS: &str = include_str!("../resources/witness_calculator.js");

    const ARKZKEY_PATH: &str = "../rln/resources/tree_depth_20/rln_final.arkzkey";

    const CIRCOM_PATH: &str = "../rln/resources/tree_depth_20/rln.wasm";

    #[wasm_bindgen_test]
    pub async fn rln_wasm_benchmark() {
        // Initialize witness calculator
        initWitnessCalculator(WITNESS_CALCULATOR_JS).unwrap();

        let mut results = String::from("\nbenchmarks:\n");
        let iterations = 10;

        let zkey = readFile(ARKZKEY_PATH).unwrap();

        // Benchmark RLN instance creation
        let start_rln_new = Date::now();
        for _ in 0..iterations {
            let _ = WasmRLN::new(&zkey).unwrap();
        }
        let rln_new_result = Date::now() - start_rln_new;

        // Create RLN instance for other benchmarks
        let rln_instance = WasmRLN::new(&zkey).unwrap();
        let mut tree: OptimalMerkleTree<PoseidonHash> =
            OptimalMerkleTree::default(DEFAULT_TREE_DEPTH).unwrap();

        // Benchmark generate identity
        let start_identity_gen = Date::now();
        for _ in 0..iterations {
            let _ = Identity::generate().unwrap();
        }
        let identity_gen_result = Date::now() - start_identity_gen;

        // Generate identity for other benchmarks
        let identity_pair = Identity::generate().unwrap();
        let identity_secret = identity_pair.get_secret_hash();
        let id_commitment = identity_pair.get_commitment();

        let epoch = Hasher::hash_to_field_le(&Uint8Array::from(b"test-epoch" as &[u8])).unwrap();
        let rln_identifier =
            Hasher::hash_to_field_le(&Uint8Array::from(b"test-rln-identifier" as &[u8])).unwrap();
        let external_nullifier = Hasher::poseidon_hash_pair(&epoch, &rln_identifier).unwrap();

        let identity_index = tree.leaves_set();

        let user_message_limit = WasmFr::from_uint(100);

        let rate_commitment =
            Hasher::poseidon_hash_pair(&id_commitment, &user_message_limit).unwrap();
        tree.update_next(*rate_commitment).unwrap();

        let message_id = WasmFr::from_uint(0);
        let signal: [u8; 32] = [0; 32];
        let x = Hasher::hash_to_field_le(&Uint8Array::from(&signal[..])).unwrap();

        let merkle_proof: OptimalMerkleProof<PoseidonHash> = tree.proof(identity_index).unwrap();

        let mut path_elements = VecWasmFr::new();
        for path_element in merkle_proof.get_path_elements() {
            path_elements.push(&WasmFr::from(path_element));
        }
        let path_index = Uint8Array::from(&merkle_proof.get_path_index()[..]);

        let witness = WasmRLNWitnessInput::new(
            &identity_secret,
            &user_message_limit,
            &message_id,
            &path_elements,
            &path_index,
            &x,
            &external_nullifier,
        )
        .unwrap();

        let bigint_json = witness.to_bigint_json().unwrap();

        // Benchmark witness calculation
        let start_calculate_witness = Date::now();
        for _ in 0..iterations {
            let _ = calculateWitness(CIRCOM_PATH, bigint_json.clone())
                .await
                .unwrap();
        }
        let calculate_witness_result = Date::now() - start_calculate_witness;

        // Calculate witness for other benchmarks
        let calculated_witness_str = calculateWitness(CIRCOM_PATH, bigint_json.clone())
            .await
            .unwrap()
            .as_string()
            .unwrap();
        let calculated_witness_vec_str: Vec<String> =
            serde_json::from_str(&calculated_witness_str).unwrap();
        let calculated_witness: Vec<JsBigInt> = calculated_witness_vec_str
            .iter()
            .map(|x| JsBigInt::new(&x.into()).unwrap())
            .collect();

        // Benchmark proof generation with witness
        let start_generate_rln_proof_with_witness = Date::now();
        for _ in 0..iterations {
            let _ = rln_instance
                .generate_rln_proof_with_witness(calculated_witness.clone(), &witness)
                .unwrap();
        }
        let generate_rln_proof_with_witness_result =
            Date::now() - start_generate_rln_proof_with_witness;

        // Generate proof with witness for other benchmarks
        let proof: WasmRLNProof = rln_instance
            .generate_rln_proof_with_witness(calculated_witness, &witness)
            .unwrap();

        let root = WasmFr::from(tree.root());
        let mut roots = VecWasmFr::new();
        roots.push(&root);

        // Benchmark proof verification with the root
        let start_verify_with_roots = Date::now();
        for _ in 0..iterations {
            let _ = rln_instance.verify_with_roots(&proof, &roots, &x).unwrap();
        }
        let verify_with_roots_result = Date::now() - start_verify_with_roots;

        // Verify proof with the root for other benchmarks
        let is_proof_valid = rln_instance.verify_with_roots(&proof, &roots, &x).unwrap();
        assert!(is_proof_valid, "verification failed");

        // Format and display the benchmark results
        let format_duration = |duration_ms: f64| -> String {
            let avg_ms = duration_ms / (iterations as f64);
            if avg_ms >= 1000.0 {
                format!("{:.3} s", avg_ms / 1000.0)
            } else {
                format!("{:.3} ms", avg_ms)
            }
        };

        results.push_str(&format!(
            "RLN instance creation: {}\n",
            format_duration(rln_new_result)
        ));
        results.push_str(&format!(
            "Identity generation: {}\n",
            format_duration(identity_gen_result)
        ));
        results.push_str(&format!(
            "Witness calculation: {}\n",
            format_duration(calculate_witness_result)
        ));
        results.push_str(&format!(
            "Proof generation with witness: {}\n",
            format_duration(generate_rln_proof_with_witness_result)
        ));
        results.push_str(&format!(
            "Proof verification with roots: {}\n",
            format_duration(verify_with_roots_result)
        ));

        // Log the results
        console_log!("{results}");
    }
}
222
rln-wasm/tests/utils.rs
Normal file
@@ -0,0 +1,222 @@
#![cfg(target_arch = "wasm32")]

#[cfg(test)]
mod test {
    use std::assert_eq;

    use ark_std::rand::thread_rng;
    use js_sys::Uint8Array;
    use rand::Rng;
    use rln::prelude::*;
    use rln_wasm::{ExtendedIdentity, Hasher, Identity, VecWasmFr, WasmFr};
    use wasm_bindgen_test::wasm_bindgen_test;

    #[wasm_bindgen_test]
    fn test_keygen_wasm() {
        let identity = Identity::generate().unwrap();
        let identity_secret = *identity.get_secret_hash();
        let id_commitment = *identity.get_commitment();

        assert_ne!(identity_secret, Fr::from(0u8));
        assert_ne!(id_commitment, Fr::from(0u8));

        let arr = identity.to_array();
        assert_eq!(arr.length(), 2);
        assert_eq!(*arr.get(0).unwrap(), identity_secret);
        assert_eq!(*arr.get(1).unwrap(), id_commitment);
    }

    #[wasm_bindgen_test]
    fn test_extended_keygen_wasm() {
        let identity = ExtendedIdentity::generate().unwrap();

        let identity_trapdoor = *identity.get_trapdoor();
        let identity_nullifier = *identity.get_nullifier();
        let identity_secret = *identity.get_secret_hash();
        let id_commitment = *identity.get_commitment();

        assert_ne!(identity_trapdoor, Fr::from(0u8));
        assert_ne!(identity_nullifier, Fr::from(0u8));
        assert_ne!(identity_secret, Fr::from(0u8));
        assert_ne!(id_commitment, Fr::from(0u8));

        let arr = identity.to_array();
        assert_eq!(arr.length(), 4);
        assert_eq!(*arr.get(0).unwrap(), identity_trapdoor);
        assert_eq!(*arr.get(1).unwrap(), identity_nullifier);
        assert_eq!(*arr.get(2).unwrap(), identity_secret);
        assert_eq!(*arr.get(3).unwrap(), id_commitment);
    }

    #[wasm_bindgen_test]
    fn test_seeded_keygen_wasm() {
        let seed_bytes: Vec<u8> = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
        let seed = Uint8Array::from(&seed_bytes[..]);

        let identity = Identity::generate_seeded(&seed).unwrap();
        let identity_secret = *identity.get_secret_hash();
        let id_commitment = *identity.get_commitment();

        let expected_identity_secret_seed_bytes = str_to_fr(
            "0x766ce6c7e7a01bdf5b3f257616f603918c30946fa23480f2859c597817e6716",
            16,
        )
        .unwrap();
        let expected_id_commitment_seed_bytes = str_to_fr(
            "0xbf16d2b5c0d6f9d9d561e05bfca16a81b4b873bb063508fae360d8c74cef51f",
            16,
        )
        .unwrap();

        assert_eq!(identity_secret, expected_identity_secret_seed_bytes);
        assert_eq!(id_commitment, expected_id_commitment_seed_bytes);
    }

    #[wasm_bindgen_test]
    fn test_seeded_extended_keygen_wasm() {
        let seed_bytes: Vec<u8> = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
        let seed = Uint8Array::from(&seed_bytes[..]);

        let identity = ExtendedIdentity::generate_seeded(&seed).unwrap();

        let identity_trapdoor = *identity.get_trapdoor();
        let identity_nullifier = *identity.get_nullifier();
        let identity_secret = *identity.get_secret_hash();
        let id_commitment = *identity.get_commitment();

        let expected_identity_trapdoor_seed_bytes = str_to_fr(
            "0x766ce6c7e7a01bdf5b3f257616f603918c30946fa23480f2859c597817e6716",
            16,
        )
        .unwrap();
        let expected_identity_nullifier_seed_bytes = str_to_fr(
            "0x1f18714c7bc83b5bca9e89d404cf6f2f585bc4c0f7ed8b53742b7e2b298f50b4",
            16,
        )
        .unwrap();
        let expected_identity_secret_seed_bytes = str_to_fr(
            "0x2aca62aaa7abaf3686fff2caf00f55ab9462dc12db5b5d4bcf3994e671f8e521",
            16,
        )
        .unwrap();
        let expected_id_commitment_seed_bytes = str_to_fr(
            "0x68b66aa0a8320d2e56842581553285393188714c48f9b17acd198b4f1734c5c",
            16,
        )
        .unwrap();

        assert_eq!(identity_trapdoor, expected_identity_trapdoor_seed_bytes);
        assert_eq!(identity_nullifier, expected_identity_nullifier_seed_bytes);
        assert_eq!(identity_secret, expected_identity_secret_seed_bytes);
        assert_eq!(id_commitment, expected_id_commitment_seed_bytes);
    }

    #[wasm_bindgen_test]
    fn test_wasmfr() {
        let wasmfr_zero = WasmFr::zero();
        let fr_zero = Fr::from(0u8);
        assert_eq!(*wasmfr_zero, fr_zero);

        let wasmfr_one = WasmFr::one();
        let fr_one = Fr::from(1u8);
        assert_eq!(*wasmfr_one, fr_one);

        let wasmfr_int = WasmFr::from_uint(42);
        let fr_int = Fr::from(42u8);
        assert_eq!(*wasmfr_int, fr_int);

        let wasmfr_debug_str = wasmfr_int.debug();
        assert_eq!(wasmfr_debug_str.to_string(), "42");

        let identity = Identity::generate().unwrap();
        let mut id_secret_fr = *identity.get_secret_hash();
        let id_secret_hash = IdSecret::from(&mut id_secret_fr);
        let id_commitment = *identity.get_commitment();
        let wasmfr_id_secret_hash = *identity.get_secret_hash();
        assert_eq!(wasmfr_id_secret_hash, *id_secret_hash);
        let wasmfr_id_commitment = *identity.get_commitment();
        assert_eq!(wasmfr_id_commitment, id_commitment);
    }

    #[wasm_bindgen_test]
    fn test_vec_wasmfr() {
        let vec_fr = vec![Fr::from(1u8), Fr::from(2u8), Fr::from(3u8), Fr::from(4u8)];
        let mut vec_wasmfr = VecWasmFr::new();
        for fr in &vec_fr {
            vec_wasmfr.push(&WasmFr::from(*fr));
        }

        let bytes_le = vec_wasmfr.to_bytes_le();
        let expected_le = rln::utils::vec_fr_to_bytes_le(&vec_fr);
        assert_eq!(bytes_le.to_vec(), expected_le);

        let bytes_be = vec_wasmfr.to_bytes_be();
        let expected_be = rln::utils::vec_fr_to_bytes_be(&vec_fr);
        assert_eq!(bytes_be.to_vec(), expected_be);

        let vec_wasmfr_from_le = match VecWasmFr::from_bytes_le(&bytes_le) {
            Ok(v) => v,
            Err(err) => panic!("VecWasmFr::from_bytes_le call failed: {}", err),
        };
        assert_eq!(vec_wasmfr_from_le.length(), vec_wasmfr.length());
        for i in 0..vec_wasmfr.length() {
            assert_eq!(
                *vec_wasmfr_from_le.get(i).unwrap(),
                *vec_wasmfr.get(i).unwrap()
            );
        }

        let vec_wasmfr_from_be = match VecWasmFr::from_bytes_be(&bytes_be) {
            Ok(v) => v,
            Err(err) => panic!("VecWasmFr::from_bytes_be call failed: {}", err),
        };
        for i in 0..vec_wasmfr.length() {
            assert_eq!(
                *vec_wasmfr_from_be.get(i).unwrap(),
                *vec_wasmfr.get(i).unwrap()
            );
        }
    }

    #[wasm_bindgen_test]
    fn test_hash_to_field_wasm() {
        let mut rng = thread_rng();
        let signal_gen: [u8; 32] = rng.gen();
        let signal = Uint8Array::from(&signal_gen[..]);

        let wasmfr_le_1 = Hasher::hash_to_field_le(&signal).unwrap();
        let fr_le_2 = hash_to_field_le(&signal_gen).unwrap();
        assert_eq!(*wasmfr_le_1, fr_le_2);

        let wasmfr_be_1 = Hasher::hash_to_field_be(&signal).unwrap();
        let fr_be_2 = hash_to_field_be(&signal_gen).unwrap();
        assert_eq!(*wasmfr_be_1, fr_be_2);

        assert_eq!(*wasmfr_le_1, *wasmfr_be_1);
        assert_eq!(fr_le_2, fr_be_2);

        let hash_wasmfr_le_1 = wasmfr_le_1.to_bytes_le();
        let hash_fr_le_2 = fr_to_bytes_le(&fr_le_2);
        assert_eq!(hash_wasmfr_le_1.to_vec(), hash_fr_le_2);

        let hash_wasmfr_be_1 = wasmfr_be_1.to_bytes_be();
        let hash_fr_be_2 = fr_to_bytes_be(&fr_be_2);
        assert_eq!(hash_wasmfr_be_1.to_vec(), hash_fr_be_2);

        assert_ne!(hash_wasmfr_le_1.to_vec(), hash_wasmfr_be_1.to_vec());
        assert_ne!(hash_fr_le_2, hash_fr_be_2);
    }

    #[wasm_bindgen_test]
    fn test_poseidon_hash_pair_wasm() {
        let input_1 = Fr::from(42u8);
        let input_2 = Fr::from(99u8);

        let expected_hash = poseidon_hash(&[input_1, input_2]).unwrap();
        let wasmfr_1 = WasmFr::from_uint(42);
        let wasmfr_2 = WasmFr::from_uint(99);
        let received_hash = Hasher::poseidon_hash_pair(&wasmfr_1, &wasmfr_2).unwrap();

        assert_eq!(*received_hash, expected_hash);
    }
}
107 rln/Cargo.toml
@@ -1,42 +1,97 @@
[package]
name = "rln"
version = "0.1.0"
version = "1.0.0"
edition = "2021"
license = "MIT OR Apache-2.0"
description = "APIs to manage, compute and verify zkSNARK proofs and RLN primitives"
documentation = "https://github.com/vacp2p/zerokit"
homepage = "https://vac.dev"
repository = "https://github.com/vacp2p/zerokit"

[lib]
crate-type = ["cdylib", "rlib", "staticlib"]
crate-type = ["rlib", "staticlib", "cdylib"]
bench = false

# This flag disables cargo doctests, i.e. testing example code-snippets in documentation
doctest = false

[dependencies]

# ZKP Generation
ark-ff = { version = "0.3.0", default-features = false, features = ["parallel", "asm"] }
ark-std = { version = "0.3.0", default-features = false, features = ["parallel"] }
ark-bn254 = { version = "0.3.0" }
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", features = ["parallel"] }
ark-relations = { version = "0.3.0", default-features = false, features = [ "std" ] }
ark-serialize = { version = "0.3.0", default-features = false }
ark-circom = { git = "https://github.com/gakonst/ark-circom", rev = "06eb075", features = ["circom-2"] }
#ark-circom = { git = "https://github.com/vacp2p/ark-circom", branch = "no-ethers-core", features = ["circom-2"] }
wasmer = "2.3.0"
ark-bn254 = { version = "0.5.0", features = ["std"] }
ark-relations = { version = "0.5.1", features = ["std"] }
ark-ff = { version = "0.5.0", default-features = false }
ark-ec = { version = "0.5.0", default-features = false }
ark-std = { version = "0.5.0", default-features = false }
ark-poly = { version = "0.5.0", default-features = false }
ark-groth16 = { version = "0.5.0", default-features = false }
ark-serialize = { version = "0.5.0", default-features = false }

# error handling
color-eyre = "0.5.11"
thiserror = "1.0.0"
# Error Handling
thiserror = "2.0.17"

# utilities
cfg-if = "1.0"
num-bigint = { version = "0.4.3", default-features = false, features = ["rand"] }
num-traits = "0.2.11"
once_cell = "1.14.0"
rand = "0.8"
# Utilities
rayon = { version = "1.11.0", optional = true }
byteorder = "1.5.0"
cfg-if = "1.0.4"
num-bigint = { version = "0.4.6", default-features = false, features = ["std"] }
num-traits = "0.2.19"
once_cell = "1.21.3"
rand = "0.8.5"
rand_chacha = "0.3.1"
ruint = { version = "1.17.0", default-features = false, features = [
    "rand",
    "serde",
    "ark-ff-05",
] }
tiny-keccak = { version = "2.0.2", features = ["keccak"] }
zeroize = "1.8.2"
tempfile = "3.23.0"
zerokit_utils = { version = "1.0.0", path = "../utils", default-features = false }

# serialization
serde_json = "1.0.48"
# FFI
safer-ffi.version = "0.1"

# Serialization
prost = "0.14.1"
serde_json = "1.0.145"
serde = { version = "1.0.228", features = ["derive"] }

# Documentation
document-features = { version = "0.2.12", optional = true }

[dev-dependencies]

hex-literal = "0.3.4"
criterion = { version = "0.8.0", features = ["html_reports"] }

[features]
fullmerkletree = []
default = ["parallel", "pmtree-ft"]
stateless = []
parallel = [
    "rayon",
    "ark-ff/parallel",
    "ark-ec/parallel",
    "ark-std/parallel",
    "ark-poly/parallel",
    "ark-groth16/parallel",
    "ark-serialize/parallel",
    "zerokit_utils/parallel",
]
fullmerkletree = [] # Pre-allocated tree, fastest access
optimalmerkletree = [] # Sparse storage, memory efficient
pmtree-ft = ["zerokit_utils/pmtree-ft"] # Persistent storage, disk-based
headers = ["safer-ffi/headers"] # Generate C header file with safer-ffi

[[bench]]
name = "pmtree_benchmark"
harness = false
required-features = ["pmtree-ft"]

[[bench]]
name = "poseidon_tree_benchmark"
harness = false

[package.metadata.docs.rs]
all-features = true

[[bin]]
name = "generate-headers"
required-features = ["headers"] # Do not build unless generating headers.
23 rln/Makefile.toml Normal file
@@ -0,0 +1,23 @@
[tasks.build]
command = "cargo"
args = ["build", "--release"]

[tasks.test]
command = "cargo"
args = ["test", "--release", "--", "--nocapture"]

[tasks.test_stateless]
command = "cargo"
args = [
    "test",
    "--release",
    "--no-default-features",
    "--features",
    "stateless",
    "--",
    "--nocapture",
]

[tasks.bench]
command = "cargo"
args = ["bench"]
332 rln/README.md
@@ -1,43 +1,331 @@
# Zerokit RLN Module

This module provides APIs to manage, compute and verify [RLN](https://rfc.vac.dev/spec/32/) zkSNARK proofs and RLN primitives.
[](https://crates.io/crates/rln)
[](https://opensource.org/licenses/MIT)
[](https://opensource.org/licenses/Apache-2.0)

Currently, this module comes with three [pre-compiled](https://github.com/vacp2p/zerokit/tree/master/rln/resources) RLN circuits having Merkle tree of height `15`, `19` and `20`, respectively.
The Zerokit RLN Module provides a Rust implementation for working with
Rate-Limiting Nullifier [RLN](https://rfc.vac.dev/vac/raw/rln-v2) zkSNARK proofs and primitives.
This module allows you to:

Implemented tests can be executed by running within the module folder
- Generate and verify RLN proofs
- Work with Merkle trees for commitment storage
- Implement rate-limiting mechanisms for distributed systems

`cargo test --release`
## Quick Start

## Compiling circuits
> [!IMPORTANT]
> Version 0.7.0 is the only version that does not support WASM and x32 architecture.
> WASM support is available in version 0.8.0 and above.

`rln` (https://github.com/privacy-scaling-explorations/rln) repo with Circuits is contained as a submodule.
### Add RLN as dependency

``` sh
# Update submodules
git submodule update --init --recursive
We start by adding zerokit RLN to our `Cargo.toml`

# Install rln dependencies
cd vendor/rln/ && npm install
```toml
[dependencies]
rln = "1.0.0"
```

## Basic Usage Example

The RLN object constructor requires the following files:

- `rln_final.arkzkey`: The proving key in arkzkey format.
- `graph.bin`: The graph file built for the input tree size.

Additionally, `rln.wasm` is used for testing in the rln-wasm module.

```rust
use rln::prelude::{keygen, poseidon_hash, hash_to_field_le, RLN, RLNWitnessInput, Fr, IdSecret};

fn main() {
    // 1. Initialize RLN with parameters:
    // - the tree depth;
    // - the tree config; if it is not defined, the default value will be set
    let tree_depth = 20;
    let mut rln = RLN::new(tree_depth, "").unwrap();

    // 2. Generate an identity keypair
    let (identity_secret, id_commitment) = keygen();

    // 3. Add a rate commitment to the Merkle tree
    let leaf_index = 10;
    let user_message_limit = Fr::from(10);
    let rate_commitment = poseidon_hash(&[id_commitment, user_message_limit]).unwrap();
    rln.set_leaf(leaf_index, rate_commitment).unwrap();

    // 4. Get the Merkle proof for the added commitment
    let (path_elements, identity_path_index) = rln.get_merkle_proof(leaf_index).unwrap();

    // 5. Set up the external nullifier (epoch + app identifier)
    // We generate the epoch from a date seed and ensure it is
    // mapped to a field element by hashing-to-field its content
    let epoch = hash_to_field_le(b"Today at noon, this year").unwrap();
    // We generate rln_identifier from an application identifier and
    // ensure it is mapped to a field element by hashing-to-field its content
    let rln_identifier = hash_to_field_le(b"test-rln-identifier").unwrap();
    // We generate an external nullifier
    let external_nullifier = poseidon_hash(&[epoch, rln_identifier]).unwrap();
    // We choose a message_id satisfying 0 <= message_id < user_message_limit
    let message_id = Fr::from(1);

    // 6. Define the message signal
    let signal = b"RLN is awesome";

    // 7. Compute x from the signal
    let x = hash_to_field_le(signal).unwrap();

    // 8. Create witness input for RLN proof generation
    let witness = RLNWitnessInput::new(
        identity_secret,
        user_message_limit,
        message_id,
        path_elements,
        identity_path_index,
        x,
        external_nullifier,
    )
    .unwrap();

    // 9. Generate an RLN proof
    // We generate the proof and proof values from the witness
    let (proof, proof_values) = rln.generate_rln_proof(&witness).unwrap();

    // 10. Verify the RLN proof
    // We verify the proof using the proof, the proof values and the hashed signal x
    let verified = rln.verify_rln_proof(&proof, &proof_values, &x).unwrap();
    assert!(verified);
}
```

### Comments on point 5 of the code above

The `external_nullifier` is derived from two parameters.

The first one is `epoch`, and it's used to identify messages received in a certain time frame.
It usually corresponds to the current UNIX time but can also be set to a random value or generated from a seed,
provided that it corresponds to a field element.

The second one is `rln_identifier`, and it's used to prevent an RLN ZK proof generated
for one application from being re-used in another one.
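To make the domain-separation role of `rln_identifier` concrete, here is a minimal sketch (our illustration, not part of the crate's examples; it assumes the same `rln::prelude` functions used above) showing that one epoch combined with two different application identifiers yields two different external nullifiers:

```rust
use rln::prelude::{hash_to_field_le, poseidon_hash};

fn main() {
    // One shared epoch, two different applications.
    let epoch = hash_to_field_le(b"2025-01-01 12:00").unwrap();
    let app_a = hash_to_field_le(b"rln-identifier-app-a").unwrap();
    let app_b = hash_to_field_le(b"rln-identifier-app-b").unwrap();

    // external_nullifier = Poseidon(epoch, rln_identifier)
    let nullifier_a = poseidon_hash(&[epoch, app_a]).unwrap();
    let nullifier_b = poseidon_hash(&[epoch, app_b]).unwrap();

    // Different identifiers give different external nullifiers, so a proof
    // bound to app A's nullifier cannot be replayed against app B.
    assert_ne!(nullifier_a, nullifier_b);
}
```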
### Features

- **Stateless Mode**: Allows the use of RLN without maintaining the state of the Merkle tree.
- **Pre-compiled Circuits**: Ready-to-use circuits with a Merkle tree depth of 20
- **Wasm Support**: WebAssembly bindings via the rln-wasm crate with features like:
  - Browser and Node.js compatibility
  - Optional parallel feature support using [wasm-bindgen-rayon](https://github.com/RReverser/wasm-bindgen-rayon)
  - Headless browser testing capabilities
- **Merkle Tree Implementations**: Multiple tree variants optimized for different use cases (see the sketch after this list):
  - **Full Merkle Tree**: Fastest access with complete pre-allocated tree in memory. Best for frequent random access (enable with `fullmerkletree` feature).
  - **Optimal Merkle Tree**: Memory-efficient sparse storage using HashMap. Ideal for partially populated trees (enable with `optimalmerkletree` feature).
  - **Persistent Merkle Tree**: Disk-based storage with [sled](https://github.com/spacejam/sled) for persistence across application restarts and large datasets (enable with `pmtree-ft` feature).
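As a rough illustration of how the variants interchange, here is a sketch under the assumption that the types below are exported as in this PR's benchmarks, and that `root()` is part of the `ZerokitMerkleTree` trait:

```rust
use rln::prelude::{Fr, PoseidonHash, DEFAULT_TREE_DEPTH};
use zerokit_utils::merkle_tree::{FullMerkleTree, OptimalMerkleTree, ZerokitMerkleTree};

fn main() {
    // Pre-allocated variant: every node exists up front, fastest random access.
    let mut full = FullMerkleTree::<PoseidonHash>::default(DEFAULT_TREE_DEPTH).unwrap();
    // Sparse variant: only touched nodes are stored, lower memory footprint.
    let mut optimal = OptimalMerkleTree::<PoseidonHash>::default(DEFAULT_TREE_DEPTH).unwrap();

    full.set(0, Fr::from(1u8)).unwrap();
    optimal.set(0, Fr::from(1u8)).unwrap();

    // Both implement the same trait, so the resulting roots must agree
    // (root() assumed here as the trait's root accessor).
    assert_eq!(full.root(), optimal.root());
}
```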
## Building and Testing

### Prerequisites

```sh
git clone https://github.com/vacp2p/zerokit.git
make installdeps
cd zerokit/rln
```

### Build Commands

```sh
# Build with default features
cargo make build

# Test with default features
cargo make test

# Test with stateless features
cargo make test_stateless
```

## Advanced: Custom Circuit Compilation

The `circom-rln` (<https://github.com/rate-limiting-nullifier/circom-rln>) repository
contains the RLN circuit implementation used for the pre-compiled RLN circuits in zerokit RLN.
If you want to compile your own RLN circuit, you can follow the instructions below.

### 1. Compile ZK Circuits to get the zkey file

The `./scripts/build-circuits.sh` script generates not only the zkey file for the RLN circuit,
but also the execution wasm file used for witness calculation.
However, the wasm file is not needed for the `rln` module,
because the current implementation uses the iden3 graph file for witness calculation.
This graph file is generated by the `circom-witnesscalc` tool in [step 2](#2-generate-witness-calculation-graph).

To customize the circuit parameters, modify `circom-rln/circuits/rln.circom`:

```circom
pragma circom 2.1.0;
include "./rln.circom";
component main { public [x, externalNullifier] } = RLN(N, M);
```

Where:

- `N`: Merkle tree depth, determining the maximum membership capacity (2^N members).

- `M`: Bit size for range checks, setting an upper bound for the number of messages per epoch (2^M messages).

> [!NOTE]
> However, if `N` is too big, this might require a larger Powers of Tau ceremony
> than the one hardcoded in `./scripts/build-circuits.sh`, which is `2^14`.
> In such case, we refer to the official
> [Circom documentation](https://docs.circom.io/getting-started/proving-circuits/#powers-of-tau)
> for instructions on how to run an appropriate Powers of Tau ceremony and Phase 2 in order to compile the desired circuit. \
> Additionally, while `M` sets an upper bound on the number of messages per epoch (`2^M`),
> you can configure a lower message limit for your use case, as long as it satisfies `user_message_limit ≤ 2^M`. \
> Currently, the `rln` module comes with a [pre-compiled](https://github.com/vacp2p/zerokit/tree/master/rln/resources/tree_depth_20)
> RLN circuit with a Merkle tree of depth `20` and a bit size of `16`,
> allowing up to `2^20` registered members and a `2^16` message limit per epoch.

#### Install circom compiler

You can follow the instructions below or refer to the
[installing Circom](https://docs.circom.io/getting-started/installation/#installing-circom) guide for more details.
Make sure to use the specific version `v2.1.0`.

```sh
# Clone the circom repository
git clone https://github.com/iden3/circom.git

# Checkout the specific version
cd circom && git checkout v2.1.0

# Build the circom compiler
cargo build --release

# Install the circom binary globally
cargo install --path circom

# Check the circom version to ensure it's v2.1.0
circom --version
```

#### Example: generate the zkey and verification key files

```sh
# Clone the circom-rln repository
git clone https://github.com/rate-limiting-nullifier/circom-rln

# Install dependencies
cd circom-rln && npm install

# Build circuits
./scripts/build-circuits.sh rln

# Copy over assets
cp build/zkeyFiles/rln-final.zkey ../../resources/tree_height_15
cp build/zkeyFiles/rln.wasm ../../resources/tree_height_15
# Use the generated zkey file in subsequent steps
cp zkeyFiles/rln/final.zkey <path_to_rln_final.zkey>
```

Note that the above code snippet will compile an RLN circuit with a Merkle tree of height equal to `15`, based on the default value set in `rln/circuit/rln.circom`.
### 2. Generate Witness Calculation Graph

To compile an RLN circuit with Merkle tree height `N`, it suffices to change `rln/circuit/rln.circom` to
The execution graph file used for witness calculation can be compiled following the instructions
in the [circom-witnesscalc](https://github.com/iden3/circom-witnesscalc) repository.
As mentioned in step 1, we use the `rln.circom` file from the `circom-rln` repository.

```
pragma circom 2.0.0;
```sh
# Clone the circom-witnesscalc repository
git clone https://github.com/iden3/circom-witnesscalc

include "./rln-base.circom";
# Load the submodules
cd circom-witnesscalc && git submodule update --init --recursive

component main {public [x, epoch, rln_identifier ]} = RLN(N);
# Build the circom-witnesscalc tool
cargo build

# Generate the witness calculation graph
cargo run --package circom_witnesscalc --bin build-circuit ../circom-rln/circuits/rln.circom <path_to_graph.bin>
```

However, if `N` is too big, this might require a bigger Powers of Tau ceremony than the one hardcoded in `./scripts/build-circuits.sh`, which is `2^14`.
In such case we refer to the official [Circom documentation](https://docs.circom.io/getting-started/proving-circuits/#powers-of-tau) for instructions on how to run an appropriate Powers of Tau ceremony and Phase 2 in order to compile the desired circuit.
The `rln` module comes with [pre-compiled](https://github.com/vacp2p/zerokit/tree/master/rln/resources/tree_depth_20)
execution graph files for the RLN circuit.

### 3. Generate the Arkzkey Representation of the zkey file

For faster loading, compile the zkey file into the arkzkey format using
[ark-zkey](https://github.com/seemenkina/ark-zkey).
This is a fork of the [original](https://github.com/zkmopro/ark-zkey) repository with uncompressed arkzkey support.

```sh
# Clone the ark-zkey repository
git clone https://github.com/seemenkina/ark-zkey.git

# Build the ark-zkey tool
cd ark-zkey && cargo build

# Generate the arkzkey representation for the zkey file
cargo run --bin arkzkey-util <path_to_rln_final.zkey>
```

This will generate the `rln_final.arkzkey` file, which is used by the `rln` module.

Currently, the `rln` module comes with
[pre-compiled](https://github.com/vacp2p/zerokit/tree/master/rln/resources/tree_depth_20) arkzkey keys for the RLN circuit.

> [!NOTE]
> You can use this [convert_zkey.sh](./convert_zkey.sh) script
> to automate the process of generating the arkzkey file from any zkey file.

Run the script as follows:

```sh
chmod +x ./convert_zkey.sh
./convert_zkey.sh <path_to_rln_final.zkey>
```

## FFI Interface

RLN provides C-compatible bindings for integration with C, C++, Nim, and other languages through [safer_ffi](https://getditto.github.io/safer_ffi/).

The FFI layer is organized into several modules:

- [`ffi_rln.rs`](./src/ffi/ffi_rln.rs) – Implements core RLN functionality, including initialization functions, proof generation, and proof verification.
- [`ffi_tree.rs`](./src/ffi/ffi_tree.rs) – Provides all tree-related operations and helper functions for Merkle tree management.
- [`ffi_utils.rs`](./src/ffi/ffi_utils.rs) – Contains all utility functions and structure definitions used across the FFI layer.

### Examples

Working examples demonstrating proof generation, proof verification and slashing in C and Nim:

- [C example](./ffi_c_examples/main.c) and [README](./ffi_c_examples/Readme.md)
- [Nim example](./ffi_nim_examples/main.nim) and [README](./ffi_nim_examples/Readme.md)

### Memory Management

- All **heap-allocated** objects returned from the Rust FFI **must** be freed using their corresponding FFI `_free` functions.

## Detailed Protocol Flow

1. **Identity Creation**: Generate a secret key and commitment
2. **Rate Commitment**: Add the commitment to a Merkle tree
3. **External Nullifier Setup**: Combine epoch and application identifier
4. **Proof Generation**: Create a zkSNARK proof that:
   - Proves membership in the Merkle tree
   - Ensures rate-limiting constraints are satisfied
   - Generates a nullifier to prevent double-usage
5. **Proof Verification**: Verify the proof without revealing the prover's identity
6. **Slashing Mechanism**: Detect and penalize double-usage attempts (see the sketch below)
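The slashing step relies on simple share algebra: each proof exposes a point `(x, y)` on the line `y = a_0 + a_1 * x`, where `a_0` is derived from the identity secret, so two proofs published under the same external nullifier pin the line down. The crate ships its own recovery helper (the C example in this repository calls it through `ffi_recover_id_secret`); the hand-rolled sketch below only illustrates the underlying algebra using `Fr` arithmetic:

```rust
use rln::prelude::Fr;

/// Given two shares (x1, y1), (x2, y2) on the same line y = a0 + a1 * x,
/// recover the intercept a0 (the secret) by interpolating at x = 0:
/// a0 = (x2 * y1 - x1 * y2) / (x2 - x1).
fn recover_secret(x1: Fr, y1: Fr, x2: Fr, y2: Fr) -> Fr {
    (x2 * y1 - x1 * y2) / (x2 - x1)
}

fn main() {
    // Synthetic line: a0 plays the role of the identity secret.
    let (a0, a1) = (Fr::from(7u8), Fr::from(3u8));
    let (x1, x2) = (Fr::from(11u8), Fr::from(20u8));
    let (y1, y2) = (a0 + a1 * x1, a0 + a1 * x2);

    // Two messages in the same epoch leak two shares; the secret falls out.
    assert_eq!(recover_secret(x1, y1, x2, y2), a0);
}
```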
## Getting Involved

The Zerokit RLN public and FFI APIs allow interaction with many more features than what is briefly showcased above.

We invite you to check our API documentation by running

```bash
cargo doc --no-deps
```

and to look at the unit tests for hints on how to interface with and use them.

- Check the [unit tests](https://github.com/vacp2p/zerokit/tree/master/rln/tests) for more usage examples
- [RFC specification](https://rfc.vac.dev/vac/raw/rln-v2) for the Rate-Limiting Nullifier protocol
- [GitHub repository](https://github.com/vacp2p/zerokit) for the latest updates
50 rln/benches/pmtree_benchmark.rs Normal file
@@ -0,0 +1,50 @@
use criterion::{criterion_group, criterion_main, Criterion};
use rln::prelude::*;
use zerokit_utils::merkle_tree::ZerokitMerkleTree;

pub fn pmtree_benchmark(c: &mut Criterion) {
    let mut tree = PmTree::default(2).unwrap();

    let leaves: Vec<Fr> = (0..4).map(Fr::from).collect();

    c.bench_function("Pmtree::set", |b| {
        b.iter(|| {
            tree.set(0, leaves[0]).unwrap();
        })
    });

    c.bench_function("Pmtree::delete", |b| {
        b.iter(|| {
            tree.delete(0).unwrap();
        })
    });

    c.bench_function("Pmtree::override_range", |b| {
        b.iter(|| {
            tree.override_range(0, leaves.clone().into_iter(), [0, 1, 2, 3].into_iter())
                .unwrap();
        })
    });

    c.bench_function("Pmtree::get", |b| {
        b.iter(|| {
            tree.get(0).unwrap();
        })
    });

    // Check the intermediate node getter, which requires additional
    // computation of the subtree root index
    c.bench_function("Pmtree::get_subtree_root", |b| {
        b.iter(|| {
            tree.get_subtree_root(1, 0).unwrap();
        })
    });

    c.bench_function("Pmtree::get_empty_leaves_indices", |b| {
        b.iter(|| {
            tree.get_empty_leaves_indices();
        })
    });
}

criterion_group!(benches, pmtree_benchmark);
criterion_main!(benches);
76 rln/benches/poseidon_tree_benchmark.rs Normal file
@@ -0,0 +1,76 @@
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
use rln::prelude::*;
use zerokit_utils::merkle_tree::{FullMerkleTree, OptimalMerkleTree, ZerokitMerkleTree};

pub fn get_leaves(n: u32) -> Vec<Fr> {
    (0..n).map(Fr::from).collect()
}

pub fn optimal_merkle_tree_poseidon_benchmark(c: &mut Criterion) {
    c.bench_function("OptimalMerkleTree::<Poseidon>::full_depth_gen", |b| {
        b.iter(|| {
            OptimalMerkleTree::<PoseidonHash>::default(DEFAULT_TREE_DEPTH).unwrap();
        })
    });

    let mut group = c.benchmark_group("Set");
    for &n in [1u32, 10, 100].iter() {
        let leaves = get_leaves(n);

        let mut tree = OptimalMerkleTree::<PoseidonHash>::default(DEFAULT_TREE_DEPTH).unwrap();
        group.bench_function(
            BenchmarkId::new("OptimalMerkleTree::<Poseidon>::set", n),
            |b| {
                b.iter(|| {
                    for (i, l) in leaves.iter().enumerate() {
                        let _ = tree.set(i, *l);
                    }
                })
            },
        );

        group.bench_function(
            BenchmarkId::new("OptimalMerkleTree::<Poseidon>::set_range", n),
            |b| b.iter(|| tree.set_range(0, leaves.iter().cloned())),
        );
    }
    group.finish();
}

pub fn full_merkle_tree_poseidon_benchmark(c: &mut Criterion) {
    c.bench_function("FullMerkleTree::<Poseidon>::full_depth_gen", |b| {
        b.iter(|| {
            FullMerkleTree::<PoseidonHash>::default(DEFAULT_TREE_DEPTH).unwrap();
        })
    });

    let mut group = c.benchmark_group("Set");
    for &n in [1u32, 10, 100].iter() {
        let leaves = get_leaves(n);

        let mut tree = FullMerkleTree::<PoseidonHash>::default(DEFAULT_TREE_DEPTH).unwrap();
        group.bench_function(
            BenchmarkId::new("FullMerkleTree::<Poseidon>::set", n),
            |b| {
                b.iter(|| {
                    for (i, l) in leaves.iter().enumerate() {
                        let _ = tree.set(i, *l);
                    }
                })
            },
        );

        group.bench_function(
            BenchmarkId::new("FullMerkleTree::<Poseidon>::set_range", n),
            |b| b.iter(|| tree.set_range(0, leaves.iter().cloned())),
        );
    }
    group.finish();
}

criterion_group!(
    benches,
    optimal_merkle_tree_poseidon_benchmark,
    full_merkle_tree_poseidon_benchmark
);
criterion_main!(benches);
56 rln/convert_zkey.sh Executable file
@@ -0,0 +1,56 @@
#!/bin/bash

# Convert zkey to arkzkey using /tmp directory
# Usage: ./convert_zkey.sh <path_to_zkey_file>

set -e

# Check input
if [ $# -eq 0 ]; then
    echo "Usage: $0 <path_to_zkey_file>"
    exit 1
fi

ZKEY_FILE="$1"

if [ ! -f "$ZKEY_FILE" ]; then
    echo "Error: File '$ZKEY_FILE' does not exist"
    exit 1
fi

# Get absolute path before changing directories
ZKEY_ABSOLUTE_PATH=$(realpath "$ZKEY_FILE")

# Create temp directory in /tmp
TEMP_DIR="/tmp/ark-zkey-$$"
echo "Using temp directory: $TEMP_DIR"

# Cleanup function
cleanup() {
    echo "Cleaning up temp directory: $TEMP_DIR"
    rm -rf "$TEMP_DIR"
}

# Setup cleanup trap
trap cleanup EXIT

# Create temp directory and clone ark-zkey
mkdir -p "$TEMP_DIR"
cd "$TEMP_DIR"
git clone https://github.com/seemenkina/ark-zkey.git
cd ark-zkey
cargo build

# Convert
cargo run --bin arkzkey-util "$ZKEY_ABSOLUTE_PATH"

# Check if arkzkey file was created (tool creates it in same directory as input)
ARKZKEY_FILE="${ZKEY_ABSOLUTE_PATH%.zkey}.arkzkey"

if [ ! -f "$ARKZKEY_FILE" ]; then
    echo "Could not find generated .arkzkey file at $ARKZKEY_FILE"
    exit 1
fi

echo "Conversion successful!"
echo "Output file: $ARKZKEY_FILE"
47 rln/ffi_c_examples/Readme.md Normal file
@@ -0,0 +1,47 @@
# RLN FFI C example

This example demonstrates how to use the RLN C FFI in both stateless and non-stateless modes.

## Non-stateless mode

### Compile lib non-stateless

```bash
cargo build -p rln
cargo run --features headers --bin generate-headers
mv -v rln.h rln/ffi_c_examples/
```

### Compile and run example non-stateless

```bash
cd rln/ffi_c_examples/
gcc -Wall main.c -o main -lrln -L../../target/debug
./main
```

## Stateless mode

### Compile lib stateless

```bash
cargo build -p rln --no-default-features --features stateless
cargo run --no-default-features --features stateless,headers --bin generate-headers
mv -v rln.h rln/ffi_c_examples/
```

### Compile example stateless

```bash
cd rln/ffi_c_examples/
gcc -Wall -DSTATELESS main.c -o main -lrln -L../../target/debug
./main
```

## Note

### Find C lib used by Rust

```bash
cargo +nightly rustc --release -p rln -- -Z unstable-options --print native-static-libs
```
668 rln/ffi_c_examples/main.c Normal file
@@ -0,0 +1,668 @@
#include <stdlib.h>
#include <stdio.h>
#include <string.h>

#include "rln.h"

int main(int argc, char const *const argv[])
{
    printf("Creating RLN instance\n");

#ifdef STATELESS
    CResult_FFI_RLN_ptr_Vec_uint8_t ffi_rln_new_result = ffi_rln_new();
#else
    const char *config_path = "../resources/tree_depth_20/config.json";
    CResult_FFI_RLN_ptr_Vec_uint8_t ffi_rln_new_result = ffi_rln_new(20, config_path);
#endif

    if (!ffi_rln_new_result.ok)
    {
        fprintf(stderr, "Initial RLN instance creation error: %s\n", ffi_rln_new_result.err.ptr);
        ffi_c_string_free(ffi_rln_new_result.err);
        return EXIT_FAILURE;
    }

    FFI_RLN_t *rln = ffi_rln_new_result.ok;
    printf("RLN instance created successfully\n");

    printf("\nGenerating identity keys\n");
    CResult_Vec_CFr_Vec_uint8_t keys_result = ffi_key_gen();
    if (keys_result.err.ptr)
    {
        fprintf(stderr, "Key generation error: %s\n", keys_result.err.ptr);
        ffi_c_string_free(keys_result.err);
        return EXIT_FAILURE;
    }
    Vec_CFr_t keys = keys_result.ok;
    const CFr_t *identity_secret = ffi_vec_cfr_get(&keys, 0);
    const CFr_t *id_commitment = ffi_vec_cfr_get(&keys, 1);
    printf("Identity generated\n");

    Vec_uint8_t debug = ffi_cfr_debug(identity_secret);
    printf(" - identity_secret = %s\n", debug.ptr);
    ffi_c_string_free(debug);

    debug = ffi_cfr_debug(id_commitment);
    printf(" - id_commitment = %s\n", debug.ptr);
    ffi_c_string_free(debug);

    printf("\nCreating message limit\n");
    CFr_t *user_message_limit = ffi_uint_to_cfr(1);

    debug = ffi_cfr_debug(user_message_limit);
    printf(" - user_message_limit = %s\n", debug.ptr);
    ffi_c_string_free(debug);

    printf("\nComputing rate commitment\n");
    CResult_CFr_ptr_Vec_uint8_t rate_commitment_result = ffi_poseidon_hash_pair(id_commitment, user_message_limit);
    if (!rate_commitment_result.ok)
    {
        fprintf(stderr, "Rate commitment hash error: %s\n", rate_commitment_result.err.ptr);
        ffi_c_string_free(rate_commitment_result.err);
        return EXIT_FAILURE;
    }
    CFr_t *rate_commitment = rate_commitment_result.ok;

    debug = ffi_cfr_debug(rate_commitment);
    printf(" - rate_commitment = %s\n", debug.ptr);
    ffi_c_string_free(debug);

    printf("\nCFr serialization: CFr <-> bytes\n");
    Vec_uint8_t ser_rate_commitment = ffi_cfr_to_bytes_le(rate_commitment);

    debug = ffi_vec_u8_debug(&ser_rate_commitment);
    printf(" - serialized rate_commitment = %s\n", debug.ptr);
    ffi_c_string_free(debug);

    CResult_CFr_ptr_Vec_uint8_t deser_rate_commitment_result = ffi_bytes_le_to_cfr(&ser_rate_commitment);
    if (!deser_rate_commitment_result.ok)
    {
        fprintf(stderr, "Rate commitment deserialization error: %s\n", deser_rate_commitment_result.err.ptr);
        ffi_c_string_free(deser_rate_commitment_result.err);
        return EXIT_FAILURE;
    }
    CFr_t *deser_rate_commitment = deser_rate_commitment_result.ok;

    debug = ffi_cfr_debug(deser_rate_commitment);
    printf(" - deserialized rate_commitment = %s\n", debug.ptr);
    ffi_c_string_free(debug);

    ffi_vec_u8_free(ser_rate_commitment);
    ffi_cfr_free(deser_rate_commitment);

    printf("\nVec<CFr> serialization: Vec<CFr> <-> bytes\n");
    Vec_uint8_t ser_keys = ffi_vec_cfr_to_bytes_le(&keys);

    debug = ffi_vec_u8_debug(&ser_keys);
    printf(" - serialized keys = %s\n", debug.ptr);
    ffi_c_string_free(debug);

    CResult_Vec_CFr_Vec_uint8_t deser_keys_result = ffi_bytes_le_to_vec_cfr(&ser_keys);
    if (deser_keys_result.err.ptr)
    {
        fprintf(stderr, "Keys deserialization error: %s\n", deser_keys_result.err.ptr);
        ffi_c_string_free(deser_keys_result.err);
        return EXIT_FAILURE;
    }

    debug = ffi_vec_cfr_debug(&deser_keys_result.ok);
    printf(" - deserialized keys = %s\n", debug.ptr);
    ffi_c_string_free(debug);

    Vec_CFr_t deser_keys = deser_keys_result.ok;
    ffi_vec_cfr_free(deser_keys);

    ffi_vec_u8_free(ser_keys);

#ifdef STATELESS
#define TREE_DEPTH 20
#define CFR_SIZE 32

    printf("\nBuilding Merkle path for stateless mode\n");
    CFr_t *default_leaf = ffi_cfr_zero();

    CFr_t *default_hashes[TREE_DEPTH - 1];
    CResult_CFr_ptr_Vec_uint8_t hash_result = ffi_poseidon_hash_pair(default_leaf, default_leaf);
    if (!hash_result.ok)
    {
        fprintf(stderr, "Poseidon hash error: %s\n", hash_result.err.ptr);
        ffi_c_string_free(hash_result.err);
        return EXIT_FAILURE;
    }
    default_hashes[0] = hash_result.ok;
    for (size_t i = 1; i < TREE_DEPTH - 1; i++)
    {
        hash_result = ffi_poseidon_hash_pair(default_hashes[i - 1], default_hashes[i - 1]);
        if (!hash_result.ok)
        {
            fprintf(stderr, "Poseidon hash error: %s\n", hash_result.err.ptr);
            ffi_c_string_free(hash_result.err);
            return EXIT_FAILURE;
        }
        default_hashes[i] = hash_result.ok;
    }

    Vec_CFr_t path_elements = ffi_vec_cfr_new(TREE_DEPTH);
    ffi_vec_cfr_push(&path_elements, default_leaf);
    for (size_t i = 0; i < TREE_DEPTH - 1; i++)
    {
        ffi_vec_cfr_push(&path_elements, default_hashes[i]);
    }

    printf("\nVec<CFr> serialization: Vec<CFr> <-> bytes\n");
    Vec_uint8_t ser_path_elements = ffi_vec_cfr_to_bytes_le(&path_elements);

    debug = ffi_vec_u8_debug(&ser_path_elements);
    printf(" - serialized path_elements = %s\n", debug.ptr);
    ffi_c_string_free(debug);

    CResult_Vec_CFr_Vec_uint8_t deser_path_elements_result = ffi_bytes_le_to_vec_cfr(&ser_path_elements);
    if (deser_path_elements_result.err.ptr)
    {
        fprintf(stderr, "Path elements deserialization error: %s\n", deser_path_elements_result.err.ptr);
        ffi_c_string_free(deser_path_elements_result.err);
        return EXIT_FAILURE;
    }

    debug = ffi_vec_cfr_debug(&deser_path_elements_result.ok);
    printf(" - deserialized path_elements = %s\n", debug.ptr);
    ffi_c_string_free(debug);

    Vec_CFr_t deser_path_elements = deser_path_elements_result.ok;
    ffi_vec_cfr_free(deser_path_elements);

    ffi_vec_u8_free(ser_path_elements);

    uint8_t path_index_arr[TREE_DEPTH] = {0};
    Vec_uint8_t identity_path_index = {
        .ptr = path_index_arr,
        .len = TREE_DEPTH,
        .cap = TREE_DEPTH};

    printf("\nVec<uint8> serialization: Vec<uint8> <-> bytes\n");
    Vec_uint8_t ser_path_index = ffi_vec_u8_to_bytes_le(&identity_path_index);

    debug = ffi_vec_u8_debug(&ser_path_index);
    printf(" - serialized path_index = %s\n", debug.ptr);
    ffi_c_string_free(debug);

    CResult_Vec_uint8_Vec_uint8_t deser_path_index_result = ffi_bytes_le_to_vec_u8(&ser_path_index);
    if (deser_path_index_result.err.ptr)
    {
        fprintf(stderr, "Path index deserialization error: %s\n", deser_path_index_result.err.ptr);
        ffi_c_string_free(deser_path_index_result.err);
        return EXIT_FAILURE;
    }

    debug = ffi_vec_u8_debug(&deser_path_index_result.ok);
    printf(" - deserialized path_index = %s\n", debug.ptr);
    ffi_c_string_free(debug);

    Vec_uint8_t deser_path_index = deser_path_index_result.ok;
    ffi_vec_u8_free(deser_path_index);

    ffi_vec_u8_free(ser_path_index);

    printf("\nComputing Merkle root for stateless mode\n");
    printf(" - computing root for index 0 with rate_commitment\n");
    CResult_CFr_ptr_Vec_uint8_t root_result = ffi_poseidon_hash_pair(rate_commitment, default_leaf);
    if (!root_result.ok)
    {
        fprintf(stderr, "Poseidon hash error: %s\n", root_result.err.ptr);
        ffi_c_string_free(root_result.err);
        return EXIT_FAILURE;
    }
    CFr_t *computed_root = root_result.ok;
    for (size_t i = 1; i < TREE_DEPTH; i++)
    {
        root_result = ffi_poseidon_hash_pair(computed_root, default_hashes[i - 1]);
        if (!root_result.ok)
        {
            fprintf(stderr, "Poseidon hash error: %s\n", root_result.err.ptr);
            ffi_c_string_free(root_result.err);
            return EXIT_FAILURE;
        }
        CFr_t *next_root = root_result.ok;
        ffi_cfr_free(computed_root);
        computed_root = next_root;
    }

    debug = ffi_cfr_debug(computed_root);
    printf(" - computed_root = %s\n", debug.ptr);
    ffi_c_string_free(debug);
#else
    printf("\nAdding rate_commitment to tree\n");
    CBoolResult_t set_err = ffi_set_next_leaf(&rln, rate_commitment);
    if (!set_err.ok)
    {
        fprintf(stderr, "Set next leaf error: %s\n", set_err.err.ptr);
        ffi_c_string_free(set_err.err);
        return EXIT_FAILURE;
    }

    size_t leaf_index = ffi_leaves_set(&rln) - 1;
    printf(" - added to tree at index %zu\n", leaf_index);

    printf("\nGetting Merkle proof\n");
    CResult_FFI_MerkleProof_ptr_Vec_uint8_t proof_result = ffi_get_merkle_proof(&rln, leaf_index);
    if (!proof_result.ok)
    {
        fprintf(stderr, "Get proof error: %s\n", proof_result.err.ptr);
        ffi_c_string_free(proof_result.err);
        return EXIT_FAILURE;
    }
    FFI_MerkleProof_t *merkle_proof = proof_result.ok;
    printf(" - proof obtained (depth: %zu)\n", merkle_proof->path_elements.len);
#endif

    printf("\nHashing signal\n");
    uint8_t signal[32] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10};
    Vec_uint8_t signal_vec = {signal, 32, 32};
    CResult_CFr_ptr_Vec_uint8_t x_result = ffi_hash_to_field_le(&signal_vec);
    if (!x_result.ok)
    {
        fprintf(stderr, "Hash signal error: %s\n", x_result.err.ptr);
        ffi_c_string_free(x_result.err);
        return EXIT_FAILURE;
    }
    CFr_t *x = x_result.ok;

    debug = ffi_cfr_debug(x);
    printf(" - x = %s\n", debug.ptr);
    ffi_c_string_free(debug);

    printf("\nHashing epoch\n");
    const char *epoch_str = "test-epoch";
    Vec_uint8_t epoch_vec = {(uint8_t *)epoch_str, strlen(epoch_str), strlen(epoch_str)};
    CResult_CFr_ptr_Vec_uint8_t epoch_result = ffi_hash_to_field_le(&epoch_vec);
    if (!epoch_result.ok)
    {
        fprintf(stderr, "Hash epoch error: %s\n", epoch_result.err.ptr);
        ffi_c_string_free(epoch_result.err);
        return EXIT_FAILURE;
    }
    CFr_t *epoch = epoch_result.ok;

    debug = ffi_cfr_debug(epoch);
    printf(" - epoch = %s\n", debug.ptr);
    ffi_c_string_free(debug);

    printf("\nHashing RLN identifier\n");
    const char *rln_id_str = "test-rln-identifier";
    Vec_uint8_t rln_id_vec = {(uint8_t *)rln_id_str, strlen(rln_id_str), strlen(rln_id_str)};
    CResult_CFr_ptr_Vec_uint8_t rln_identifier_result = ffi_hash_to_field_le(&rln_id_vec);
    if (!rln_identifier_result.ok)
    {
        fprintf(stderr, "Hash RLN identifier error: %s\n", rln_identifier_result.err.ptr);
        ffi_c_string_free(rln_identifier_result.err);
        return EXIT_FAILURE;
    }
    CFr_t *rln_identifier = rln_identifier_result.ok;

    debug = ffi_cfr_debug(rln_identifier);
    printf(" - rln_identifier = %s\n", debug.ptr);
    ffi_c_string_free(debug);

    printf("\nComputing Poseidon hash for external nullifier\n");
    CResult_CFr_ptr_Vec_uint8_t external_nullifier_result = ffi_poseidon_hash_pair(epoch, rln_identifier);
    if (!external_nullifier_result.ok)
    {
        fprintf(stderr, "External nullifier hash error: %s\n", external_nullifier_result.err.ptr);
        ffi_c_string_free(external_nullifier_result.err);
        return EXIT_FAILURE;
    }
    CFr_t *external_nullifier = external_nullifier_result.ok;

    debug = ffi_cfr_debug(external_nullifier);
    printf(" - external_nullifier = %s\n", debug.ptr);
    ffi_c_string_free(debug);

    printf("\nCreating message_id\n");
    CFr_t *message_id = ffi_uint_to_cfr(0);

    debug = ffi_cfr_debug(message_id);
    printf(" - message_id = %s\n", debug.ptr);
    ffi_c_string_free(debug);

    printf("\nCreating RLN Witness\n");
#ifdef STATELESS
    CResult_FFI_RLNWitnessInput_ptr_Vec_uint8_t witness_result = ffi_rln_witness_input_new(
        identity_secret,
        user_message_limit,
        message_id,
        &path_elements,
        &identity_path_index,
        x,
        external_nullifier);

    if (!witness_result.ok)
    {
        fprintf(stderr, "RLN Witness creation error: %s\n", witness_result.err.ptr);
        ffi_c_string_free(witness_result.err);
        return EXIT_FAILURE;
    }
    FFI_RLNWitnessInput_t *witness = witness_result.ok;
    printf("RLN Witness created successfully\n");
#else
    CResult_FFI_RLNWitnessInput_ptr_Vec_uint8_t witness_result = ffi_rln_witness_input_new(
        identity_secret,
        user_message_limit,
        message_id,
        &merkle_proof->path_elements,
        &merkle_proof->path_index,
        x,
        external_nullifier);

    if (!witness_result.ok)
    {
        fprintf(stderr, "RLN Witness creation error: %s\n", witness_result.err.ptr);
        ffi_c_string_free(witness_result.err);
        return EXIT_FAILURE;
    }
    FFI_RLNWitnessInput_t *witness = witness_result.ok;
    printf("RLN Witness created successfully\n");
#endif

    printf("\nRLNWitnessInput serialization: RLNWitnessInput <-> bytes\n");
    CResult_Vec_uint8_Vec_uint8_t ser_witness_result = ffi_rln_witness_to_bytes_le(&witness);
    if (ser_witness_result.err.ptr)
    {
        fprintf(stderr, "Witness serialization error: %s\n", ser_witness_result.err.ptr);
        ffi_c_string_free(ser_witness_result.err);
        return EXIT_FAILURE;
    }
    Vec_uint8_t ser_witness = ser_witness_result.ok;

    debug = ffi_vec_u8_debug(&ser_witness);
    printf(" - serialized witness = %s\n", debug.ptr);
    ffi_c_string_free(debug);

    CResult_FFI_RLNWitnessInput_ptr_Vec_uint8_t deser_witness_result = ffi_bytes_le_to_rln_witness(&ser_witness);
    if (!deser_witness_result.ok)
    {
        fprintf(stderr, "Witness deserialization error: %s\n", deser_witness_result.err.ptr);
        ffi_c_string_free(deser_witness_result.err);
        return EXIT_FAILURE;
    }

    FFI_RLNWitnessInput_t *deser_witness = deser_witness_result.ok;
    printf(" - witness deserialized successfully\n");

    ffi_rln_witness_input_free(deser_witness);
    ffi_vec_u8_free(ser_witness);

    printf("\nGenerating RLN Proof\n");
    CResult_FFI_RLNProof_ptr_Vec_uint8_t proof_gen_result = ffi_generate_rln_proof(
        &rln,
        &witness);

    if (!proof_gen_result.ok)
    {
        fprintf(stderr, "Proof generation error: %s\n", proof_gen_result.err.ptr);
        ffi_c_string_free(proof_gen_result.err);
        return EXIT_FAILURE;
    }

    FFI_RLNProof_t *rln_proof = proof_gen_result.ok;
    printf("Proof generated successfully\n");

    printf("\nGetting proof values\n");
    FFI_RLNProofValues_t *proof_values = ffi_rln_proof_get_values(&rln_proof);

    CFr_t *y = ffi_rln_proof_values_get_y(&proof_values);
    debug = ffi_cfr_debug(y);
    printf(" - y = %s\n", debug.ptr);
    ffi_c_string_free(debug);
    ffi_cfr_free(y);

    CFr_t *nullifier = ffi_rln_proof_values_get_nullifier(&proof_values);
    debug = ffi_cfr_debug(nullifier);
    printf(" - nullifier = %s\n", debug.ptr);
    ffi_c_string_free(debug);
    ffi_cfr_free(nullifier);

    CFr_t *root = ffi_rln_proof_values_get_root(&proof_values);
    debug = ffi_cfr_debug(root);
    printf(" - root = %s\n", debug.ptr);
    ffi_c_string_free(debug);
    ffi_cfr_free(root);

    CFr_t *x_val = ffi_rln_proof_values_get_x(&proof_values);
    debug = ffi_cfr_debug(x_val);
    printf(" - x = %s\n", debug.ptr);
    ffi_c_string_free(debug);
    ffi_cfr_free(x_val);

    CFr_t *ext_nullifier = ffi_rln_proof_values_get_external_nullifier(&proof_values);
    debug = ffi_cfr_debug(ext_nullifier);
    printf(" - external_nullifier = %s\n", debug.ptr);
    ffi_c_string_free(debug);
    ffi_cfr_free(ext_nullifier);

    printf("\nRLNProof serialization: RLNProof <-> bytes\n");
    CResult_Vec_uint8_Vec_uint8_t ser_proof_result = ffi_rln_proof_to_bytes_le(&rln_proof);
    if (ser_proof_result.err.ptr)
    {
        fprintf(stderr, "Proof serialization error: %s\n", ser_proof_result.err.ptr);
        ffi_c_string_free(ser_proof_result.err);
        return EXIT_FAILURE;
    }
    Vec_uint8_t ser_proof = ser_proof_result.ok;

    debug = ffi_vec_u8_debug(&ser_proof);
    printf(" - serialized proof = %s\n", debug.ptr);
    ffi_c_string_free(debug);

    CResult_FFI_RLNProof_ptr_Vec_uint8_t deser_proof_result = ffi_bytes_le_to_rln_proof(&ser_proof);
    if (!deser_proof_result.ok)
    {
        fprintf(stderr, "Proof deserialization error: %s\n", deser_proof_result.err.ptr);
        ffi_c_string_free(deser_proof_result.err);
        return EXIT_FAILURE;
    }

    FFI_RLNProof_t *deser_proof = deser_proof_result.ok;
    printf(" - proof deserialized successfully\n");

    printf("\nRLNProofValues serialization: RLNProofValues <-> bytes\n");
    Vec_uint8_t ser_proof_values = ffi_rln_proof_values_to_bytes_le(&proof_values);

    debug = ffi_vec_u8_debug(&ser_proof_values);
    printf(" - serialized proof_values = %s\n", debug.ptr);
    ffi_c_string_free(debug);

    CResult_FFI_RLNProofValues_ptr_Vec_uint8_t deser_proof_values_result = ffi_bytes_le_to_rln_proof_values(&ser_proof_values);
    if (!deser_proof_values_result.ok)
    {
        fprintf(stderr, "Proof values deserialization error: %s\n", deser_proof_values_result.err.ptr);
        ffi_c_string_free(deser_proof_values_result.err);
        return EXIT_FAILURE;
    }
    FFI_RLNProofValues_t *deser_proof_values = deser_proof_values_result.ok;
    printf(" - proof_values deserialized successfully\n");

    CFr_t *deser_external_nullifier = ffi_rln_proof_values_get_external_nullifier(&deser_proof_values);
    debug = ffi_cfr_debug(deser_external_nullifier);
    printf(" - deserialized external_nullifier = %s\n", debug.ptr);
    ffi_c_string_free(debug);
    ffi_cfr_free(deser_external_nullifier);

    ffi_rln_proof_values_free(deser_proof_values);
    ffi_vec_u8_free(ser_proof_values);
    ffi_rln_proof_free(deser_proof);
    ffi_vec_u8_free(ser_proof);

    printf("\nVerifying Proof\n");
#ifdef STATELESS
    Vec_CFr_t roots = ffi_vec_cfr_from_cfr(computed_root);
    CBoolResult_t verify_err = ffi_verify_with_roots(&rln, &rln_proof, &roots, x);
#else
    CBoolResult_t verify_err = ffi_verify_rln_proof(&rln, &rln_proof, x);
#endif

    if (!verify_err.ok)
    {
        fprintf(stderr, "Proof verification error: %s\n", verify_err.err.ptr);
        ffi_c_string_free(verify_err.err);
        return EXIT_FAILURE;
    }

    printf("Proof verified successfully\n");

    ffi_rln_proof_free(rln_proof);

    printf("\nSimulating double-signaling attack (same epoch, different message)\n");

    printf("\nHashing second signal\n");
    uint8_t signal2[32] = {11, 12, 13, 14, 15, 16, 17, 18, 19, 20};
    Vec_uint8_t signal2_vec = {signal2, 32, 32};
    CResult_CFr_ptr_Vec_uint8_t x2_result = ffi_hash_to_field_le(&signal2_vec);
    if (!x2_result.ok)
    {
        fprintf(stderr, "Hash second signal error: %s\n", x2_result.err.ptr);
        ffi_c_string_free(x2_result.err);
        return EXIT_FAILURE;
    }
    CFr_t *x2 = x2_result.ok;

    debug = ffi_cfr_debug(x2);
    printf(" - x2 = %s\n", debug.ptr);
    ffi_c_string_free(debug);

    printf("\nCreating second message with the same id\n");
    CFr_t *message_id2 = ffi_uint_to_cfr(0);

    debug = ffi_cfr_debug(message_id2);
    printf(" - message_id2 = %s\n", debug.ptr);
    ffi_c_string_free(debug);

    printf("\nCreating second RLN Witness\n");
#ifdef STATELESS
    CResult_FFI_RLNWitnessInput_ptr_Vec_uint8_t witness_result2 = ffi_rln_witness_input_new(
        identity_secret,
        user_message_limit,
        message_id2,
        &path_elements,
        &identity_path_index,
        x2,
        external_nullifier);

    if (!witness_result2.ok)
    {
        fprintf(stderr, "Second RLN Witness creation error: %s\n", witness_result2.err.ptr);
        ffi_c_string_free(witness_result2.err);
        return EXIT_FAILURE;
    }
    FFI_RLNWitnessInput_t *witness2 = witness_result2.ok;
    printf("Second RLN Witness created successfully\n");
#else
    CResult_FFI_RLNWitnessInput_ptr_Vec_uint8_t witness_result2 = ffi_rln_witness_input_new(
        identity_secret,
        user_message_limit,
        message_id2,
        &merkle_proof->path_elements,
        &merkle_proof->path_index,
        x2,
        external_nullifier);

    if (!witness_result2.ok)
    {
        fprintf(stderr, "Second RLN Witness creation error: %s\n", witness_result2.err.ptr);
        ffi_c_string_free(witness_result2.err);
        return EXIT_FAILURE;
    }
    FFI_RLNWitnessInput_t *witness2 = witness_result2.ok;
    printf("Second RLN Witness created successfully\n");
#endif
    printf("\nGenerating second RLN Proof\n");
    CResult_FFI_RLNProof_ptr_Vec_uint8_t proof_gen_result2 = ffi_generate_rln_proof(
        &rln,
        &witness2);

    if (!proof_gen_result2.ok)
    {
        fprintf(stderr, "Second proof generation error: %s\n", proof_gen_result2.err.ptr);
        ffi_c_string_free(proof_gen_result2.err);
        return EXIT_FAILURE;
    }

    FFI_RLNProof_t *rln_proof2 = proof_gen_result2.ok;
    printf("Second proof generated successfully\n");

    FFI_RLNProofValues_t *proof_values2 = ffi_rln_proof_get_values(&rln_proof2);

    printf("\nVerifying second proof\n");
#ifdef STATELESS
    CBoolResult_t verify_err2 = ffi_verify_with_roots(&rln, &rln_proof2, &roots, x2);
#else
    CBoolResult_t verify_err2 = ffi_verify_rln_proof(&rln, &rln_proof2, x2);
#endif

    if (!verify_err2.ok)
    {
        fprintf(stderr, "Proof verification error: %s\n", verify_err2.err.ptr);
        ffi_c_string_free(verify_err2.err);
        return EXIT_FAILURE;
    }

    printf("Second proof verified successfully\n");

    ffi_rln_proof_free(rln_proof2);

    printf("\nRecovering identity secret\n");
    CResult_CFr_ptr_Vec_uint8_t recover_result = ffi_recover_id_secret(&proof_values, &proof_values2);
    if (!recover_result.ok)
    {
        fprintf(stderr, "Identity recovery error: %s\n", recover_result.err.ptr);
        ffi_c_string_free(recover_result.err);
        return EXIT_FAILURE;
    }

    CFr_t *recovered_secret = recover_result.ok;

    debug = ffi_cfr_debug(recovered_secret);
    printf(" - recovered_secret = %s\n", debug.ptr);
    ffi_c_string_free(debug);

    debug = ffi_cfr_debug(identity_secret);
    printf(" - original_secret = %s\n", debug.ptr);
    ffi_c_string_free(debug);

    printf("Slashing successful: Identity is recovered!\n");

    ffi_cfr_free(recovered_secret);

    ffi_rln_proof_values_free(proof_values2);
    ffi_rln_proof_values_free(proof_values);
    ffi_cfr_free(x2);
    ffi_cfr_free(message_id2);

#ifdef STATELESS
    ffi_rln_witness_input_free(witness2);
    ffi_rln_witness_input_free(witness);
    ffi_vec_cfr_free(roots);
    ffi_vec_cfr_free(path_elements);
    for (size_t i = 0; i < TREE_DEPTH - 1; i++)
    {
        ffi_cfr_free(default_hashes[i]);
    }
    ffi_cfr_free(default_leaf);
    ffi_cfr_free(computed_root);
#else
    ffi_rln_witness_input_free(witness2);
    ffi_rln_witness_input_free(witness);
    ffi_merkle_proof_free(merkle_proof);
#endif

    ffi_cfr_free(rate_commitment);
    ffi_cfr_free(x);
    ffi_cfr_free(epoch);
    ffi_cfr_free(rln_identifier);
    ffi_cfr_free(external_nullifier);
    ffi_cfr_free(user_message_limit);
    ffi_cfr_free(message_id);
    ffi_vec_cfr_free(keys);
    ffi_rln_free(rln);

    return EXIT_SUCCESS;
}
124 rln/ffi_nim_examples/README.md Normal file
@@ -0,0 +1,124 @@
# RLN FFI Nim example

This example demonstrates how to use the RLN C FFI from Nim in both stateless and non-stateless modes. It covers:

- Creating an RLN handle (stateless or with a Merkle tree backend)
- Generating identity keys and commitments
- Building a witness (mock Merkle path in stateless mode, real Merkle proof in non-stateless mode)
- Generating and verifying a proof
- Serializing/deserializing FFI objects (CFr, Vec\<CFr>, RLNWitnessInput, RLNProof, RLNProofValues)
- Simulating a double-signaling attack and recovering the identity secret

## Build the RLN library

From the repository root:

```bash
# Stateless build (no tree APIs)
cargo build -p rln --release --no-default-features --features stateless

# Non-stateless build (with tree APIs)
cargo build -p rln --release
```

This produces the shared library in `target/release`:

- macOS: `librln.dylib`
- Linux: `librln.so`
- Windows: `rln.dll`

## Build the Nim example (two modes)

From this directory:

```bash
# Stateless mode (no tree APIs, uses a mock Merkle path)
nim c -d:release -d:ffiStateless main.nim

# Non-stateless mode (uses the exported tree APIs to insert a leaf and fetch a proof)
nim c -d:release main.nim
```

Notes:

- The example links dynamically. If your OS linker cannot find the library at runtime, set an rpath or an environment variable as shown below.
- The example picks a platform-specific default library name automatically. You can override it with `-d:RLN_LIB:"/absolute/path/to/lib"` if needed.

## Run the example

Ensure the dynamic loader can find the RLN library, then run the binary.

macOS:

```bash
DYLD_LIBRARY_PATH=../../target/release ./main
```

Linux:

```bash
LD_LIBRARY_PATH=../../target/release ./main
```

Windows (PowerShell):

```powershell
$env:PATH = "$PWD\..\..\target\release;$env:PATH"
./main.exe
```

You should see detailed output showing each step, for example:

```text
Creating RLN instance
RLN instance created successfully

Generating identity keys
Identity generated
 - identity_secret = ...
 - id_commitment = ...

Creating message limit
 - user_message_limit = ...

Computing rate commitment
 - rate_commitment = ...

CFr serialization: CFr <-> bytes
 - serialized rate_commitment = ...
 - deserialized rate_commitment = ...

Vec<CFr> serialization: Vec<CFr> <-> bytes
 - serialized keys = ...
 - deserialized keys = ...

... (Merkle path, hashing, witness, proof, verification, and slashing steps) ...

Proof verified successfully
Slashing successful: Identity is recovered!
```

## What the example does

### Stateless mode

1. Creates an RLN handle via the stateless constructor.
2. Generates identity keys and sets a `user_message_limit` and a `message_id`.
3. Hashes a signal, an epoch, and an RLN identifier to field elements.
4. Computes `rateCommitment = Poseidon(id_commitment, user_message_limit)`.
5. Builds a mock Merkle path for an empty depth-20 tree at index 0 (no exported tree APIs):
   - Path siblings: the level-0 sibling is `0`, then each level uses the precomputed default hashes `H(0,0)`, `H(H(0,0),H(0,0))`, ...
   - Path indices: all zeros (left child at every level)
   - Root: folds the path upwards with `rateCommitment` at index 0
6. Builds the witness, generates the proof, and verifies it with `ffi_verify_with_roots`, passing a one-element roots vector containing the computed root (see the sketch after this list).
7. Simulates a double-signaling attack and recovers the identity secret from two proofs.

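For reference, the root computation in steps 5-6 condenses to the following Nim sketch (error handling and intermediate `ffi_cfr_free` calls are elided; the names are the FFI bindings declared in `main.nim` below):

```nim
const treeDepth = 20
let zero = ffi_cfr_zero()

# Default subtree hashes of an empty tree: H(0,0), H(H(0,0),H(0,0)), ...
var defaults: array[treeDepth - 1, ptr CFr]
defaults[0] = ffi_poseidon_hash_pair(zero, zero).ok
for i in 1 .. treeDepth - 2:
  defaults[i] = ffi_poseidon_hash_pair(defaults[i - 1], defaults[i - 1]).ok

# Fold upwards from index 0: the leaf is always a left child, so the sibling
# at every level is that level's default hash.
var root = ffi_poseidon_hash_pair(rateCommitment, zero).ok
for i in 1 .. treeDepth - 1:
  root = ffi_poseidon_hash_pair(root, defaults[i - 1]).ok
```
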
### Non-stateless mode

1. Creates an RLN handle with a Merkle tree backend and configuration.
2. Generates identity keys and computes `rateCommitment = Poseidon(id_commitment, user_message_limit)`.
3. Inserts the leaf with `ffi_set_next_leaf` and fetches a real Merkle path for index 0 via `ffi_get_merkle_proof`.
4. Builds the witness from the exported proof, generates the proof, and verifies it with `ffi_verify_rln_proof` using the current tree root (see the sketch after this list).
5. Simulates a double-signaling attack and recovers the identity secret from two proofs.
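
Condensed, steps 3-4 look like this in Nim (same caveats: error handling elided, names as in `main.nim` below):

```nim
discard ffi_set_next_leaf(addr rln, rateCommitment)   # insert the leaf
let leafIndex = ffi_leaves_set(addr rln) - 1          # index of the leaf just inserted
let merkleProof = ffi_get_merkle_proof(addr rln, leafIndex).ok

var witness = ffi_rln_witness_input_new(identitySecret,
  userMessageLimit, messageId, addr merkleProof.path_elements,
  addr merkleProof.path_index, x, externalNullifier).ok
var proof = ffi_generate_rln_proof(addr rln, addr witness).ok
let verified = ffi_verify_rln_proof(addr rln, addr proof, x).ok
```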
rln/ffi_nim_examples/main.nim (new file, 940 lines)
@@ -0,0 +1,940 @@
# Embed rpaths to find Cargo's built library relative to the executable
when defined(macosx):
  {.passL: "-Wl,-rpath,@executable_path/../../target/release".}
when defined(linux):
  {.passL: "-Wl,-rpath,'$ORIGIN/../../target/release'".}

# Portable dynlib name with override capability (-d:RLN_LIB:"...")
when defined(macosx):
  const RLN_LIB* {.strdefine.} = "librln.dylib"
elif defined(linux):
  const RLN_LIB* {.strdefine.} = "librln.so"
elif defined(windows):
  const RLN_LIB* {.strdefine.} = "rln.dll"
else:
  const RLN_LIB* {.strdefine.} = "rln"

# FFI objects
type
  CSize* = csize_t
  CFr* = object
  FFI_RLN* = object
  FFI_RLNProof* = object
  FFI_RLNWitnessInput* = object

  Vec_CFr* = object
    dataPtr*: ptr CFr
    len*: CSize
    cap*: CSize

  Vec_uint8* = object
    dataPtr*: ptr uint8
    len*: CSize
    cap*: CSize

  SliceRefU8* = object
    dataPtr*: ptr uint8
    len*: CSize

  FFI_MerkleProof* = object
    path_elements*: Vec_CFr
    path_index*: Vec_uint8

  CResultRLNPtrVecU8* = object
    ok*: ptr FFI_RLN
    err*: Vec_uint8

  CResultProofPtrVecU8* = object
    ok*: ptr FFI_RLNProof
    err*: Vec_uint8

  CResultWitnessInputPtrVecU8* = object
    ok*: ptr FFI_RLNWitnessInput
    err*: Vec_uint8

  FFI_RLNProofValues* = object

  CResultCFrPtrVecU8* = object
    ok*: ptr CFr
    err*: Vec_uint8

  CResultRLNProofValuesPtrVecU8* = object
    ok*: ptr FFI_RLNProofValues
    err*: Vec_uint8

  CResultMerkleProofPtrVecU8* = object
    ok*: ptr FFI_MerkleProof
    err*: Vec_uint8

  CResultVecCFrVecU8* = object
    ok*: Vec_CFr
    err*: Vec_uint8

  CResultVecU8VecU8* = object
    ok*: Vec_uint8
    err*: Vec_uint8

  CResultBigIntJsonVecU8* = object
    ok*: Vec_uint8
    err*: Vec_uint8

  CBoolResult* = object
    ok*: bool
    err*: Vec_uint8

# CFr functions
proc ffi_cfr_zero*(): ptr CFr {.importc: "ffi_cfr_zero", cdecl, dynlib: RLN_LIB.}
proc ffi_cfr_one*(): ptr CFr {.importc: "ffi_cfr_one", cdecl, dynlib: RLN_LIB.}
proc ffi_cfr_free*(x: ptr CFr) {.importc: "ffi_cfr_free", cdecl, dynlib: RLN_LIB.}
proc ffi_uint_to_cfr*(value: uint32): ptr CFr {.importc: "ffi_uint_to_cfr", cdecl, dynlib: RLN_LIB.}
proc ffi_cfr_debug*(cfr: ptr CFr): Vec_uint8 {.importc: "ffi_cfr_debug", cdecl, dynlib: RLN_LIB.}
proc ffi_cfr_to_bytes_le*(cfr: ptr CFr): Vec_uint8 {.importc: "ffi_cfr_to_bytes_le", cdecl, dynlib: RLN_LIB.}
proc ffi_cfr_to_bytes_be*(cfr: ptr CFr): Vec_uint8 {.importc: "ffi_cfr_to_bytes_be", cdecl, dynlib: RLN_LIB.}
proc ffi_bytes_le_to_cfr*(bytes: ptr Vec_uint8): CResultCFrPtrVecU8 {.importc: "ffi_bytes_le_to_cfr", cdecl, dynlib: RLN_LIB.}
proc ffi_bytes_be_to_cfr*(bytes: ptr Vec_uint8): CResultCFrPtrVecU8 {.importc: "ffi_bytes_be_to_cfr", cdecl, dynlib: RLN_LIB.}

# Vec<CFr> functions
proc ffi_vec_cfr_new*(capacity: CSize): Vec_CFr {.importc: "ffi_vec_cfr_new", cdecl, dynlib: RLN_LIB.}
proc ffi_vec_cfr_from_cfr*(cfr: ptr CFr): Vec_CFr {.importc: "ffi_vec_cfr_from_cfr", cdecl, dynlib: RLN_LIB.}
proc ffi_vec_cfr_push*(v: ptr Vec_CFr, cfr: ptr CFr) {.importc: "ffi_vec_cfr_push", cdecl, dynlib: RLN_LIB.}
proc ffi_vec_cfr_len*(v: ptr Vec_CFr): CSize {.importc: "ffi_vec_cfr_len", cdecl, dynlib: RLN_LIB.}
proc ffi_vec_cfr_get*(v: ptr Vec_CFr, i: CSize): ptr CFr {.importc: "ffi_vec_cfr_get", cdecl, dynlib: RLN_LIB.}
proc ffi_vec_cfr_to_bytes_le*(v: ptr Vec_CFr): Vec_uint8 {.importc: "ffi_vec_cfr_to_bytes_le", cdecl, dynlib: RLN_LIB.}
proc ffi_vec_cfr_to_bytes_be*(v: ptr Vec_CFr): Vec_uint8 {.importc: "ffi_vec_cfr_to_bytes_be", cdecl, dynlib: RLN_LIB.}
proc ffi_bytes_le_to_vec_cfr*(bytes: ptr Vec_uint8): CResultVecCFrVecU8 {.importc: "ffi_bytes_le_to_vec_cfr", cdecl, dynlib: RLN_LIB.}
proc ffi_bytes_be_to_vec_cfr*(bytes: ptr Vec_uint8): CResultVecCFrVecU8 {.importc: "ffi_bytes_be_to_vec_cfr", cdecl, dynlib: RLN_LIB.}
proc ffi_vec_cfr_debug*(v: ptr Vec_CFr): Vec_uint8 {.importc: "ffi_vec_cfr_debug", cdecl, dynlib: RLN_LIB.}
proc ffi_vec_cfr_free*(v: Vec_CFr) {.importc: "ffi_vec_cfr_free", cdecl, dynlib: RLN_LIB.}

# Vec<u8> functions
proc ffi_vec_u8_to_bytes_le*(v: ptr Vec_uint8): Vec_uint8 {.importc: "ffi_vec_u8_to_bytes_le", cdecl, dynlib: RLN_LIB.}
proc ffi_vec_u8_to_bytes_be*(v: ptr Vec_uint8): Vec_uint8 {.importc: "ffi_vec_u8_to_bytes_be", cdecl, dynlib: RLN_LIB.}
proc ffi_bytes_le_to_vec_u8*(bytes: ptr Vec_uint8): CResultVecU8VecU8 {.importc: "ffi_bytes_le_to_vec_u8", cdecl, dynlib: RLN_LIB.}
proc ffi_bytes_be_to_vec_u8*(bytes: ptr Vec_uint8): CResultVecU8VecU8 {.importc: "ffi_bytes_be_to_vec_u8", cdecl, dynlib: RLN_LIB.}
proc ffi_vec_u8_debug*(v: ptr Vec_uint8): Vec_uint8 {.importc: "ffi_vec_u8_debug", cdecl, dynlib: RLN_LIB.}
proc ffi_vec_u8_free*(v: Vec_uint8) {.importc: "ffi_vec_u8_free", cdecl, dynlib: RLN_LIB.}

# Hashing functions
proc ffi_hash_to_field_le*(input: ptr Vec_uint8): CResultCFrPtrVecU8 {.importc: "ffi_hash_to_field_le", cdecl, dynlib: RLN_LIB.}
proc ffi_hash_to_field_be*(input: ptr Vec_uint8): CResultCFrPtrVecU8 {.importc: "ffi_hash_to_field_be", cdecl, dynlib: RLN_LIB.}
proc ffi_poseidon_hash_pair*(a: ptr CFr, b: ptr CFr): CResultCFrPtrVecU8 {.importc: "ffi_poseidon_hash_pair", cdecl, dynlib: RLN_LIB.}

# Keygen functions
proc ffi_key_gen*(): CResultVecCFrVecU8 {.importc: "ffi_key_gen", cdecl, dynlib: RLN_LIB.}
proc ffi_seeded_key_gen*(seed: ptr Vec_uint8): CResultVecCFrVecU8 {.importc: "ffi_seeded_key_gen", cdecl, dynlib: RLN_LIB.}
proc ffi_extended_key_gen*(): CResultVecCFrVecU8 {.importc: "ffi_extended_key_gen", cdecl, dynlib: RLN_LIB.}
proc ffi_seeded_extended_key_gen*(seed: ptr Vec_uint8): CResultVecCFrVecU8 {.importc: "ffi_seeded_extended_key_gen", cdecl, dynlib: RLN_LIB.}

# RLN instance functions
when defined(ffiStateless):
  proc ffi_rln_new*(): CResultRLNPtrVecU8 {.importc: "ffi_rln_new", cdecl, dynlib: RLN_LIB.}
  proc ffi_rln_new_with_params*(zkey_data: ptr Vec_uint8,
      graph_data: ptr Vec_uint8): CResultRLNPtrVecU8 {.importc: "ffi_rln_new_with_params", cdecl, dynlib: RLN_LIB.}
else:
  proc ffi_rln_new*(treeDepth: CSize, config: cstring): CResultRLNPtrVecU8 {.importc: "ffi_rln_new", cdecl, dynlib: RLN_LIB.}
  proc ffi_rln_new_with_params*(treeDepth: CSize, zkey_data: ptr Vec_uint8,
      graph_data: ptr Vec_uint8, config: cstring): CResultRLNPtrVecU8 {.importc: "ffi_rln_new_with_params", cdecl, dynlib: RLN_LIB.}

proc ffi_rln_free*(rln: ptr FFI_RLN) {.importc: "ffi_rln_free", cdecl, dynlib: RLN_LIB.}

# Witness input functions
proc ffi_rln_witness_input_new*(
  identity_secret: ptr CFr,
  user_message_limit: ptr CFr,
  message_id: ptr CFr,
  path_elements: ptr Vec_CFr,
  identity_path_index: ptr Vec_uint8,
  x: ptr CFr,
  external_nullifier: ptr CFr
): CResultWitnessInputPtrVecU8 {.importc: "ffi_rln_witness_input_new", cdecl, dynlib: RLN_LIB.}
proc ffi_rln_witness_to_bytes_le*(witness: ptr ptr FFI_RLNWitnessInput): CResultVecU8VecU8 {.importc: "ffi_rln_witness_to_bytes_le", cdecl, dynlib: RLN_LIB.}
proc ffi_rln_witness_to_bytes_be*(witness: ptr ptr FFI_RLNWitnessInput): CResultVecU8VecU8 {.importc: "ffi_rln_witness_to_bytes_be", cdecl, dynlib: RLN_LIB.}
proc ffi_bytes_le_to_rln_witness*(bytes: ptr Vec_uint8): CResultWitnessInputPtrVecU8 {.importc: "ffi_bytes_le_to_rln_witness", cdecl, dynlib: RLN_LIB.}
proc ffi_bytes_be_to_rln_witness*(bytes: ptr Vec_uint8): CResultWitnessInputPtrVecU8 {.importc: "ffi_bytes_be_to_rln_witness", cdecl, dynlib: RLN_LIB.}
proc ffi_rln_witness_to_bigint_json*(witness: ptr ptr FFI_RLNWitnessInput): CResultBigIntJsonVecU8 {.importc: "ffi_rln_witness_to_bigint_json", cdecl, dynlib: RLN_LIB.}
proc ffi_rln_witness_input_free*(witness: ptr FFI_RLNWitnessInput) {.importc: "ffi_rln_witness_input_free", cdecl, dynlib: RLN_LIB.}

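# Note: proof- and tree-related calls below take `ptr ptr` parameters because
# the C API expects the address of the handle pointer (the C example passes
# `&rln` and `&witness2`); hence the `addr rln` / `addr witness` call sites
# further down.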
# Proof generation/verification functions
proc ffi_generate_rln_proof*(
  rln: ptr ptr FFI_RLN,
  witness: ptr ptr FFI_RLNWitnessInput
): CResultProofPtrVecU8 {.importc: "ffi_generate_rln_proof", cdecl, dynlib: RLN_LIB.}

proc ffi_generate_rln_proof_with_witness*(
  rln: ptr ptr FFI_RLN,
  calculated_witness: ptr Vec_uint8,
  witness: ptr ptr FFI_RLNWitnessInput
): CResultProofPtrVecU8 {.importc: "ffi_generate_rln_proof_with_witness", cdecl, dynlib: RLN_LIB.}

when not defined(ffiStateless):
  proc ffi_verify_rln_proof*(
    rln: ptr ptr FFI_RLN,
    proof: ptr ptr FFI_RLNProof,
    x: ptr CFr
  ): CBoolResult {.importc: "ffi_verify_rln_proof", cdecl, dynlib: RLN_LIB.}

proc ffi_verify_with_roots*(
  rln: ptr ptr FFI_RLN,
  proof: ptr ptr FFI_RLNProof,
  roots: ptr Vec_CFr,
  x: ptr CFr
): CBoolResult {.importc: "ffi_verify_with_roots", cdecl, dynlib: RLN_LIB.}

proc ffi_rln_proof_free*(p: ptr FFI_RLNProof) {.importc: "ffi_rln_proof_free", cdecl, dynlib: RLN_LIB.}

# Merkle tree operations (non-stateless mode)
when not defined(ffiStateless):
  proc ffi_set_tree*(rln: ptr ptr FFI_RLN, tree_depth: CSize): CBoolResult {.importc: "ffi_set_tree", cdecl, dynlib: RLN_LIB.}
  proc ffi_delete_leaf*(rln: ptr ptr FFI_RLN, index: CSize): CBoolResult {.importc: "ffi_delete_leaf", cdecl, dynlib: RLN_LIB.}
  proc ffi_set_leaf*(rln: ptr ptr FFI_RLN, index: CSize, leaf: ptr CFr): CBoolResult {.importc: "ffi_set_leaf", cdecl, dynlib: RLN_LIB.}
  proc ffi_get_leaf*(rln: ptr ptr FFI_RLN, index: CSize): CResultCFrPtrVecU8 {.importc: "ffi_get_leaf", cdecl, dynlib: RLN_LIB.}
  proc ffi_set_next_leaf*(rln: ptr ptr FFI_RLN, leaf: ptr CFr): CBoolResult {.importc: "ffi_set_next_leaf", cdecl, dynlib: RLN_LIB.}
  proc ffi_set_leaves_from*(rln: ptr ptr FFI_RLN, index: CSize, leaves: ptr Vec_CFr): CBoolResult {.importc: "ffi_set_leaves_from", cdecl, dynlib: RLN_LIB.}
  proc ffi_init_tree_with_leaves*(rln: ptr ptr FFI_RLN, leaves: ptr Vec_CFr): CBoolResult {.importc: "ffi_init_tree_with_leaves", cdecl, dynlib: RLN_LIB.}
  proc ffi_atomic_operation*(rln: ptr ptr FFI_RLN, index: CSize, leaves: ptr Vec_CFr,
      indices: ptr Vec_uint8): CBoolResult {.importc: "ffi_atomic_operation", cdecl, dynlib: RLN_LIB.}
  proc ffi_seq_atomic_operation*(rln: ptr ptr FFI_RLN, leaves: ptr Vec_CFr,
      indices: ptr Vec_uint8): CBoolResult {.importc: "ffi_seq_atomic_operation", cdecl, dynlib: RLN_LIB.}
  proc ffi_get_root*(rln: ptr ptr FFI_RLN): ptr CFr {.importc: "ffi_get_root", cdecl, dynlib: RLN_LIB.}
  proc ffi_leaves_set*(rln: ptr ptr FFI_RLN): CSize {.importc: "ffi_leaves_set", cdecl, dynlib: RLN_LIB.}
  proc ffi_get_merkle_proof*(rln: ptr ptr FFI_RLN, index: CSize): CResultMerkleProofPtrVecU8 {.importc: "ffi_get_merkle_proof", cdecl, dynlib: RLN_LIB.}
  proc ffi_set_metadata*(rln: ptr ptr FFI_RLN, metadata: ptr Vec_uint8): CBoolResult {.importc: "ffi_set_metadata", cdecl, dynlib: RLN_LIB.}
  proc ffi_get_metadata*(rln: ptr ptr FFI_RLN): CResultVecU8VecU8 {.importc: "ffi_get_metadata", cdecl, dynlib: RLN_LIB.}
  proc ffi_flush*(rln: ptr ptr FFI_RLN): CBoolResult {.importc: "ffi_flush", cdecl, dynlib: RLN_LIB.}
  proc ffi_merkle_proof_free*(p: ptr FFI_MerkleProof) {.importc: "ffi_merkle_proof_free", cdecl, dynlib: RLN_LIB.}

# Identity secret recovery
proc ffi_recover_id_secret*(proof_values_1: ptr ptr FFI_RLNProofValues,
    proof_values_2: ptr ptr FFI_RLNProofValues): CResultCFrPtrVecU8 {.importc: "ffi_recover_id_secret", cdecl, dynlib: RLN_LIB.}

# RLNProof serialization
proc ffi_rln_proof_to_bytes_le*(proof: ptr ptr FFI_RLNProof): CResultVecU8VecU8 {.importc: "ffi_rln_proof_to_bytes_le", cdecl, dynlib: RLN_LIB.}
proc ffi_rln_proof_to_bytes_be*(proof: ptr ptr FFI_RLNProof): CResultVecU8VecU8 {.importc: "ffi_rln_proof_to_bytes_be", cdecl, dynlib: RLN_LIB.}
proc ffi_bytes_le_to_rln_proof*(bytes: ptr Vec_uint8): CResultProofPtrVecU8 {.importc: "ffi_bytes_le_to_rln_proof", cdecl, dynlib: RLN_LIB.}
proc ffi_bytes_be_to_rln_proof*(bytes: ptr Vec_uint8): CResultProofPtrVecU8 {.importc: "ffi_bytes_be_to_rln_proof", cdecl, dynlib: RLN_LIB.}

# RLNProofValues functions
proc ffi_rln_proof_get_values*(proof: ptr ptr FFI_RLNProof): ptr FFI_RLNProofValues {.importc: "ffi_rln_proof_get_values", cdecl, dynlib: RLN_LIB.}
proc ffi_rln_proof_values_get_y*(pv: ptr ptr FFI_RLNProofValues): ptr CFr {.importc: "ffi_rln_proof_values_get_y", cdecl, dynlib: RLN_LIB.}
proc ffi_rln_proof_values_get_nullifier*(pv: ptr ptr FFI_RLNProofValues): ptr CFr {.importc: "ffi_rln_proof_values_get_nullifier", cdecl, dynlib: RLN_LIB.}
proc ffi_rln_proof_values_get_root*(pv: ptr ptr FFI_RLNProofValues): ptr CFr {.importc: "ffi_rln_proof_values_get_root", cdecl, dynlib: RLN_LIB.}
proc ffi_rln_proof_values_get_x*(pv: ptr ptr FFI_RLNProofValues): ptr CFr {.importc: "ffi_rln_proof_values_get_x", cdecl, dynlib: RLN_LIB.}
proc ffi_rln_proof_values_get_external_nullifier*(pv: ptr ptr FFI_RLNProofValues): ptr CFr {.importc: "ffi_rln_proof_values_get_external_nullifier", cdecl, dynlib: RLN_LIB.}
proc ffi_rln_proof_values_to_bytes_le*(pv: ptr ptr FFI_RLNProofValues): Vec_uint8 {.importc: "ffi_rln_proof_values_to_bytes_le", cdecl, dynlib: RLN_LIB.}
proc ffi_rln_proof_values_to_bytes_be*(pv: ptr ptr FFI_RLNProofValues): Vec_uint8 {.importc: "ffi_rln_proof_values_to_bytes_be", cdecl, dynlib: RLN_LIB.}
proc ffi_bytes_le_to_rln_proof_values*(bytes: ptr Vec_uint8): CResultRLNProofValuesPtrVecU8 {.importc: "ffi_bytes_le_to_rln_proof_values", cdecl, dynlib: RLN_LIB.}
proc ffi_bytes_be_to_rln_proof_values*(bytes: ptr Vec_uint8): CResultRLNProofValuesPtrVecU8 {.importc: "ffi_bytes_be_to_rln_proof_values", cdecl, dynlib: RLN_LIB.}
proc ffi_rln_proof_values_free*(pv: ptr FFI_RLNProofValues) {.importc: "ffi_rln_proof_values_free", cdecl, dynlib: RLN_LIB.}

# Helper functions
proc asVecU8*(buf: var seq[uint8]): Vec_uint8 =
  # Borrows buf's backing memory; the seq must stay alive while the Vec is used
  result.dataPtr = if buf.len == 0: nil else: addr buf[0]
  result.len = CSize(buf.len)
  result.cap = CSize(buf.len)

proc asString*(v: Vec_uint8): string =
  if v.dataPtr.isNil or v.len == 0: return ""
  result = newString(v.len.int)
  copyMem(addr result[0], v.dataPtr, v.len.int)

proc ffi_c_string_free*(s: Vec_uint8) {.importc: "ffi_c_string_free", cdecl, dynlib: RLN_LIB.}

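# Ownership convention for the walkthrough below: every pointer or vector an
# ffi_* call returns is owned by the caller and must be released with the
# matching ffi_*_free function; error buffers are released with
# ffi_c_string_free after printing.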
when isMainModule:
  echo "Creating RLN instance"

  var rlnRes: CResultRLNPtrVecU8
  when defined(ffiStateless):
    rlnRes = ffi_rln_new()
  else:
    let config_path = """../resources/tree_depth_20/config.json""".cstring
    rlnRes = ffi_rln_new(CSize(20), config_path)

  if rlnRes.ok.isNil:
    stderr.writeLine "Initial RLN instance creation error: ", asString(rlnRes.err)
    ffi_c_string_free(rlnRes.err)
    quit 1

  var rln = rlnRes.ok
  echo "RLN instance created successfully"

  echo "\nGenerating identity keys"
  var keysResult = ffi_key_gen()
  if keysResult.err.dataPtr != nil:
    let errMsg = asString(keysResult.err)
    ffi_c_string_free(keysResult.err)
    echo "Key generation error: ", errMsg
    quit 1
  var keys = keysResult.ok
  let identitySecret = ffi_vec_cfr_get(addr keys, CSize(0))
  let idCommitment = ffi_vec_cfr_get(addr keys, CSize(1))
  echo "Identity generated"

  block:
    let debug = ffi_cfr_debug(identitySecret)
    echo " - identity_secret = ", asString(debug)
    ffi_c_string_free(debug)

  block:
    let debug = ffi_cfr_debug(idCommitment)
    echo " - id_commitment = ", asString(debug)
    ffi_c_string_free(debug)

  echo "\nCreating message limit"
  let userMessageLimit = ffi_uint_to_cfr(1'u32)

  block:
    let debug = ffi_cfr_debug(userMessageLimit)
    echo " - user_message_limit = ", asString(debug)
    ffi_c_string_free(debug)

  echo "\nComputing rate commitment"
  let rateCommitmentResult = ffi_poseidon_hash_pair(idCommitment, userMessageLimit)
  if rateCommitmentResult.ok.isNil:
    let errMsg = asString(rateCommitmentResult.err)
    ffi_c_string_free(rateCommitmentResult.err)
    echo "Rate commitment hash error: ", errMsg
    quit 1
  let rateCommitment = rateCommitmentResult.ok

  block:
    let debug = ffi_cfr_debug(rateCommitment)
    echo " - rate_commitment = ", asString(debug)
    ffi_c_string_free(debug)

  echo "\nCFr serialization: CFr <-> bytes"
  var serRateCommitment = ffi_cfr_to_bytes_be(rateCommitment)

  block:
    let debug = ffi_vec_u8_debug(addr serRateCommitment)
    echo " - serialized rate_commitment = ", asString(debug)
    ffi_c_string_free(debug)

  let deserRateCommitmentResult = ffi_bytes_be_to_cfr(addr serRateCommitment)
  if deserRateCommitmentResult.ok.isNil:
    stderr.writeLine "Rate commitment deserialization error: ", asString(deserRateCommitmentResult.err)
    ffi_c_string_free(deserRateCommitmentResult.err)
    quit 1
  let deserRateCommitment = deserRateCommitmentResult.ok

  block:
    let debug = ffi_cfr_debug(deserRateCommitment)
    echo " - deserialized rate_commitment = ", asString(debug)
    ffi_c_string_free(debug)

  ffi_vec_u8_free(serRateCommitment)
  ffi_cfr_free(deserRateCommitment)

  echo "\nVec<CFr> serialization: Vec<CFr> <-> bytes"
  var serKeys = ffi_vec_cfr_to_bytes_be(addr keys)

  block:
    let debug = ffi_vec_u8_debug(addr serKeys)
    echo " - serialized keys = ", asString(debug)
    ffi_c_string_free(debug)

  let deserKeysResult = ffi_bytes_be_to_vec_cfr(addr serKeys)
  if deserKeysResult.err.dataPtr != nil:
    stderr.writeLine "Keys deserialization error: ", asString(deserKeysResult.err)
    ffi_c_string_free(deserKeysResult.err)
    quit 1

  block:
    var okKeys = deserKeysResult.ok
    let debug = ffi_vec_cfr_debug(addr okKeys)
    echo " - deserialized keys = ", asString(debug)
    ffi_c_string_free(debug)

  ffi_vec_cfr_free(deserKeysResult.ok)
  ffi_vec_u8_free(serKeys)

  when defined(ffiStateless):
    const treeDepth = 20
    const CFR_SIZE = 32

    echo "\nBuilding Merkle path for stateless mode"

    let defaultLeaf = ffi_cfr_zero()
    var defaultHashes: array[treeDepth-1, ptr CFr]
    block:
      let hashResult = ffi_poseidon_hash_pair(defaultLeaf, defaultLeaf)
      if hashResult.ok.isNil:
        let errMsg = asString(hashResult.err)
        ffi_c_string_free(hashResult.err)
        echo "Poseidon hash error: ", errMsg
        quit 1
      defaultHashes[0] = hashResult.ok
    for i in 1..treeDepth-2:
      let hashResult = ffi_poseidon_hash_pair(defaultHashes[i-1], defaultHashes[i-1])
      if hashResult.ok.isNil:
        let errMsg = asString(hashResult.err)
        ffi_c_string_free(hashResult.err)
        echo "Poseidon hash error: ", errMsg
        quit 1
      defaultHashes[i] = hashResult.ok

    var pathElements = ffi_vec_cfr_new(CSize(treeDepth))
    ffi_vec_cfr_push(addr pathElements, defaultLeaf)
    for i in 0..treeDepth-2:
      ffi_vec_cfr_push(addr pathElements, defaultHashes[i])

    echo "\nVec<CFr> serialization: Vec<CFr> <-> bytes"
    var serPathElements = ffi_vec_cfr_to_bytes_be(addr pathElements)

    block:
      let debug = ffi_vec_u8_debug(addr serPathElements)
      echo " - serialized path_elements = ", asString(debug)
      ffi_c_string_free(debug)

    let deserPathElements = ffi_bytes_be_to_vec_cfr(addr serPathElements)
    if deserPathElements.err.dataPtr != nil:
      stderr.writeLine "Path elements deserialization error: ", asString(deserPathElements.err)
      ffi_c_string_free(deserPathElements.err)
      quit 1

    block:
      var okPathElems = deserPathElements.ok
      let debug = ffi_vec_cfr_debug(addr okPathElems)
      echo " - deserialized path_elements = ", asString(debug)
      ffi_c_string_free(debug)

    ffi_vec_cfr_free(deserPathElements.ok)
    ffi_vec_u8_free(serPathElements)

    var pathIndexSeq = newSeq[uint8](treeDepth)
    var identityPathIndex = asVecU8(pathIndexSeq)

    echo "\nVec<uint8> serialization: Vec<uint8> <-> bytes"
    var serPathIndex = ffi_vec_u8_to_bytes_be(addr identityPathIndex)

    block:
      let debug = ffi_vec_u8_debug(addr serPathIndex)
      echo " - serialized path_index = ", asString(debug)
      ffi_c_string_free(debug)

    let deserPathIndex = ffi_bytes_be_to_vec_u8(addr serPathIndex)
    if deserPathIndex.err.dataPtr != nil:
      stderr.writeLine "Path index deserialization error: ", asString(deserPathIndex.err)
      ffi_c_string_free(deserPathIndex.err)
      quit 1

    block:
      var okPathIdx = deserPathIndex.ok
      let debug = ffi_vec_u8_debug(addr okPathIdx)
      echo " - deserialized path_index = ", asString(debug)
      ffi_c_string_free(debug)

    ffi_vec_u8_free(deserPathIndex.ok)
    ffi_vec_u8_free(serPathIndex)

    echo "\nComputing Merkle root for stateless mode"
    echo " - computing root for index 0 with rate_commitment"
    let rootResult = ffi_poseidon_hash_pair(rateCommitment, defaultLeaf)
    if rootResult.ok.isNil:
      let errMsg = asString(rootResult.err)
      ffi_c_string_free(rootResult.err)
      echo "Poseidon hash error: ", errMsg
      quit 1
    var computedRoot = rootResult.ok
    for i in 1..treeDepth-1:
      let nextResult = ffi_poseidon_hash_pair(computedRoot, defaultHashes[i-1])
      if nextResult.ok.isNil:
        let errMsg = asString(nextResult.err)
        ffi_c_string_free(nextResult.err)
        echo "Poseidon hash error: ", errMsg
        quit 1
      let next = nextResult.ok
      ffi_cfr_free(computedRoot)
      computedRoot = next

    block:
      let debug = ffi_cfr_debug(computedRoot)
      echo " - computed_root = ", asString(debug)
      ffi_c_string_free(debug)
  else:
    echo "\nAdding rate_commitment to tree"
    var rcPtr = rateCommitment
    let setErr = ffi_set_next_leaf(addr rln, rcPtr)
    if not setErr.ok:
      stderr.writeLine "Set next leaf error: ", asString(setErr.err)
      ffi_c_string_free(setErr.err)
      quit 1

    let leafIndex = ffi_leaves_set(addr rln) - 1
    echo " - added to tree at index ", leafIndex

    echo "\nGetting Merkle proof"
    let proofResult = ffi_get_merkle_proof(addr rln, leafIndex)
    if proofResult.ok.isNil:
      stderr.writeLine "Get proof error: ", asString(proofResult.err)
      ffi_c_string_free(proofResult.err)
      quit 1
    let merkleProof = proofResult.ok
    echo " - proof obtained (depth: ", merkleProof.path_elements.len, ")"

  echo "\nHashing signal"
  var signal: array[32, uint8] = [1'u8, 2, 3, 4, 5, 6, 7, 8, 9, 10, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
  var signalVec = Vec_uint8(dataPtr: cast[ptr uint8](addr signal[0]),
    len: CSize(signal.len), cap: CSize(signal.len))
  let xResult = ffi_hash_to_field_be(addr signalVec)
  if xResult.ok.isNil:
    stderr.writeLine "Hash signal error: ", asString(xResult.err)
    ffi_c_string_free(xResult.err)
    quit 1
  let x = xResult.ok

  block:
    let debug = ffi_cfr_debug(x)
    echo " - x = ", asString(debug)
    ffi_c_string_free(debug)

  echo "\nHashing epoch"
  let epochStr = "test-epoch"
  var epochBytes = newSeq[uint8](epochStr.len)
  for i in 0..<epochStr.len: epochBytes[i] = uint8(epochStr[i])
  var epochVec = asVecU8(epochBytes)
  let epochResult = ffi_hash_to_field_be(addr epochVec)
  if epochResult.ok.isNil:
    stderr.writeLine "Hash epoch error: ", asString(epochResult.err)
    ffi_c_string_free(epochResult.err)
    quit 1
  let epoch = epochResult.ok

  block:
    let debug = ffi_cfr_debug(epoch)
    echo " - epoch = ", asString(debug)
    ffi_c_string_free(debug)

  echo "\nHashing RLN identifier"
  let rlnIdStr = "test-rln-identifier"
  var rlnIdBytes = newSeq[uint8](rlnIdStr.len)
  for i in 0..<rlnIdStr.len: rlnIdBytes[i] = uint8(rlnIdStr[i])
  var rlnIdVec = asVecU8(rlnIdBytes)
  let rlnIdentifierResult = ffi_hash_to_field_be(addr rlnIdVec)
  if rlnIdentifierResult.ok.isNil:
    stderr.writeLine "Hash RLN identifier error: ", asString(rlnIdentifierResult.err)
    ffi_c_string_free(rlnIdentifierResult.err)
    quit 1
  let rlnIdentifier = rlnIdentifierResult.ok

  block:
    let debug = ffi_cfr_debug(rlnIdentifier)
    echo " - rln_identifier = ", asString(debug)
    ffi_c_string_free(debug)

  echo "\nComputing Poseidon hash for external nullifier"
  let externalNullifierResult = ffi_poseidon_hash_pair(epoch, rlnIdentifier)
  if externalNullifierResult.ok.isNil:
    let errMsg = asString(externalNullifierResult.err)
    ffi_c_string_free(externalNullifierResult.err)
    echo "External nullifier hash error: ", errMsg
    quit 1
  let externalNullifier = externalNullifierResult.ok

  block:
    let debug = ffi_cfr_debug(externalNullifier)
    echo " - external_nullifier = ", asString(debug)
    ffi_c_string_free(debug)

  echo "\nCreating message_id"
  let messageId = ffi_uint_to_cfr(0'u32)

  block:
    let debug = ffi_cfr_debug(messageId)
    echo " - message_id = ", asString(debug)
    ffi_c_string_free(debug)

  echo "\nCreating RLN Witness"
  when defined(ffiStateless):
    var witnessRes = ffi_rln_witness_input_new(identitySecret,
      userMessageLimit, messageId, addr pathElements, addr identityPathIndex,
      x, externalNullifier)
    if witnessRes.ok.isNil:
      stderr.writeLine "RLN Witness creation error: ", asString(witnessRes.err)
      ffi_c_string_free(witnessRes.err)
      quit 1
    var witness = witnessRes.ok
    echo "RLN Witness created successfully"
  else:
    var witnessRes = ffi_rln_witness_input_new(identitySecret,
      userMessageLimit, messageId, addr merkleProof.path_elements,
      addr merkleProof.path_index, x, externalNullifier)
    if witnessRes.ok.isNil:
      stderr.writeLine "RLN Witness creation error: ", asString(witnessRes.err)
      ffi_c_string_free(witnessRes.err)
      quit 1
    var witness = witnessRes.ok
    echo "RLN Witness created successfully"

  echo "\nRLNWitnessInput serialization: RLNWitnessInput <-> bytes"
  let serWitnessResult = ffi_rln_witness_to_bytes_be(addr witness)
  if serWitnessResult.err.dataPtr != nil:
    stderr.writeLine "Witness serialization error: ", asString(serWitnessResult.err)
    ffi_c_string_free(serWitnessResult.err)
    quit 1
  var serWitness = serWitnessResult.ok

  block:
    let debug = ffi_vec_u8_debug(addr serWitness)
    echo " - serialized witness = ", asString(debug)
    ffi_c_string_free(debug)

  let deserWitnessResult = ffi_bytes_be_to_rln_witness(addr serWitness)
  if deserWitnessResult.ok.isNil:
    stderr.writeLine "Witness deserialization error: ", asString(deserWitnessResult.err)
    ffi_c_string_free(deserWitnessResult.err)
    quit 1

  echo " - witness deserialized successfully"
  ffi_rln_witness_input_free(deserWitnessResult.ok)
  ffi_vec_u8_free(serWitness)

  echo "\nGenerating RLN Proof"
  var proofRes = ffi_generate_rln_proof(addr rln, addr witness)

  if proofRes.ok.isNil:
    stderr.writeLine "Proof generation error: ", asString(proofRes.err)
    ffi_c_string_free(proofRes.err)
    quit 1

  var proof = proofRes.ok
  echo "Proof generated successfully"

  echo "\nGetting proof values"
  var proofValues = ffi_rln_proof_get_values(addr proof)

  block:
    let y = ffi_rln_proof_values_get_y(addr proofValues)
    let debug = ffi_cfr_debug(y)
    echo " - y = ", asString(debug)
    ffi_c_string_free(debug)
    ffi_cfr_free(y)

  block:
    let nullifier = ffi_rln_proof_values_get_nullifier(addr proofValues)
    let debug = ffi_cfr_debug(nullifier)
    echo " - nullifier = ", asString(debug)
    ffi_c_string_free(debug)
    ffi_cfr_free(nullifier)

  block:
    let root = ffi_rln_proof_values_get_root(addr proofValues)
    let debug = ffi_cfr_debug(root)
    echo " - root = ", asString(debug)
    ffi_c_string_free(debug)
    ffi_cfr_free(root)

  block:
    let xVal = ffi_rln_proof_values_get_x(addr proofValues)
    let debug = ffi_cfr_debug(xVal)
    echo " - x = ", asString(debug)
    ffi_c_string_free(debug)
    ffi_cfr_free(xVal)

  block:
    let extNullifier = ffi_rln_proof_values_get_external_nullifier(addr proofValues)
    let debug = ffi_cfr_debug(extNullifier)
    echo " - external_nullifier = ", asString(debug)
    ffi_c_string_free(debug)
    ffi_cfr_free(extNullifier)

  echo "\nRLNProof serialization: RLNProof <-> bytes"
  let serProofResult = ffi_rln_proof_to_bytes_be(addr proof)
  if serProofResult.err.dataPtr != nil:
    stderr.writeLine "Proof serialization error: ", asString(serProofResult.err)
    ffi_c_string_free(serProofResult.err)
    quit 1
  var serProof = serProofResult.ok

  block:
    let debug = ffi_vec_u8_debug(addr serProof)
    echo " - serialized proof = ", asString(debug)
    ffi_c_string_free(debug)

  let deserProofResult = ffi_bytes_be_to_rln_proof(addr serProof)
  if deserProofResult.ok.isNil:
    stderr.writeLine "Proof deserialization error: ", asString(deserProofResult.err)
    ffi_c_string_free(deserProofResult.err)
    quit 1

  var deserProof = deserProofResult.ok
  echo " - proof deserialized successfully"

  echo "\nRLNProofValues serialization: RLNProofValues <-> bytes"
  var serProofValues = ffi_rln_proof_values_to_bytes_be(addr proofValues)

  block:
    let debug = ffi_vec_u8_debug(addr serProofValues)
    echo " - serialized proof_values = ", asString(debug)
    ffi_c_string_free(debug)

  let deserProofValuesResult = ffi_bytes_be_to_rln_proof_values(addr serProofValues)
  if deserProofValuesResult.ok.isNil:
    stderr.writeLine "Proof values deserialization error: ", asString(deserProofValuesResult.err)
    ffi_c_string_free(deserProofValuesResult.err)
    quit 1
  var deserProofValues = deserProofValuesResult.ok
  echo " - proof_values deserialized successfully"

  block:
    let deserExternalNullifier = ffi_rln_proof_values_get_external_nullifier(addr deserProofValues)
    let debug = ffi_cfr_debug(deserExternalNullifier)
    echo " - deserialized external_nullifier = ", asString(debug)
    ffi_c_string_free(debug)
    ffi_cfr_free(deserExternalNullifier)

  ffi_rln_proof_values_free(deserProofValues)
  ffi_vec_u8_free(serProofValues)
  ffi_rln_proof_free(deserProof)
  ffi_vec_u8_free(serProof)

  echo "\nVerifying Proof"
  when defined(ffiStateless):
    var roots = ffi_vec_cfr_from_cfr(computedRoot)
    let verifyErr = ffi_verify_with_roots(addr rln, addr proof, addr roots, x)
  else:
    let verifyErr = ffi_verify_rln_proof(addr rln, addr proof, x)

  if not verifyErr.ok:
    stderr.writeLine "Proof verification error: ", asString(verifyErr.err)
    ffi_c_string_free(verifyErr.err)
    quit 1

  echo "Proof verified successfully"

  ffi_rln_proof_free(proof)

  echo "\nSimulating double-signaling attack (same epoch, different message)"

  echo "\nHashing second signal"
  var signal2: array[32, uint8] = [11'u8, 12, 13, 14, 15, 16, 17, 18, 19, 20, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
  var signal2Vec = Vec_uint8(dataPtr: cast[ptr uint8](addr signal2[0]),
    len: CSize(signal2.len), cap: CSize(signal2.len))
  let x2Result = ffi_hash_to_field_be(addr signal2Vec)
  if x2Result.ok.isNil:
    stderr.writeLine "Hash second signal error: ", asString(x2Result.err)
    ffi_c_string_free(x2Result.err)
    quit 1
  let x2 = x2Result.ok

  block:
    let debug = ffi_cfr_debug(x2)
    echo " - x2 = ", asString(debug)
    ffi_c_string_free(debug)

  echo "\nCreating second message with the same id"
  let messageId2 = ffi_uint_to_cfr(0'u32)

  block:
    let debug = ffi_cfr_debug(messageId2)
    echo " - message_id2 = ", asString(debug)
    ffi_c_string_free(debug)

  echo "\nCreating second RLN Witness"
  when defined(ffiStateless):
    var witnessRes2 = ffi_rln_witness_input_new(identitySecret,
      userMessageLimit, messageId2, addr pathElements, addr identityPathIndex,
      x2, externalNullifier)
    if witnessRes2.ok.isNil:
      stderr.writeLine "Second RLN Witness creation error: ", asString(witnessRes2.err)
      ffi_c_string_free(witnessRes2.err)
      quit 1
    var witness2 = witnessRes2.ok
    echo "Second RLN Witness created successfully"
  else:
    var witnessRes2 = ffi_rln_witness_input_new(identitySecret,
      userMessageLimit, messageId2, addr merkleProof.path_elements,
      addr merkleProof.path_index, x2, externalNullifier)
    if witnessRes2.ok.isNil:
      stderr.writeLine "Second RLN Witness creation error: ", asString(witnessRes2.err)
      ffi_c_string_free(witnessRes2.err)
      quit 1
    var witness2 = witnessRes2.ok
    echo "Second RLN Witness created successfully"

  echo "\nGenerating second RLN Proof"
  var proofRes2 = ffi_generate_rln_proof(addr rln, addr witness2)

  if proofRes2.ok.isNil:
    stderr.writeLine "Second proof generation error: ", asString(proofRes2.err)
    ffi_c_string_free(proofRes2.err)
    quit 1

  var proof2 = proofRes2.ok
  echo "Second proof generated successfully"

  var proofValues2 = ffi_rln_proof_get_values(addr proof2)

  echo "\nVerifying second proof"
  when defined(ffiStateless):
    let verifyErr2 = ffi_verify_with_roots(addr rln, addr proof2, addr roots, x2)
  else:
    let verifyErr2 = ffi_verify_rln_proof(addr rln, addr proof2, x2)

  if not verifyErr2.ok:
    stderr.writeLine "Proof verification error: ", asString(verifyErr2.err)
    ffi_c_string_free(verifyErr2.err)
    quit 1

  echo "Second proof verified successfully"

  echo "\nRecovering identity secret"
  let recoverRes = ffi_recover_id_secret(addr proofValues, addr proofValues2)
  if recoverRes.ok.isNil:
    stderr.writeLine "Identity recovery error: ", asString(recoverRes.err)
    ffi_c_string_free(recoverRes.err)
    quit 1

  let recoveredSecret = recoverRes.ok

  block:
    let debug = ffi_cfr_debug(recoveredSecret)
    echo " - recovered_secret = ", asString(debug)
    ffi_c_string_free(debug)

  block:
    let debug = ffi_cfr_debug(identitySecret)
    echo " - original_secret = ", asString(debug)
    ffi_c_string_free(debug)

  echo "Slashing successful: Identity is recovered!"
  ffi_cfr_free(recoveredSecret)

  ffi_rln_proof_values_free(proofValues2)
  ffi_rln_proof_values_free(proofValues)
  ffi_rln_proof_free(proof2)
  ffi_cfr_free(x2)
  ffi_cfr_free(messageId2)

  when defined(ffiStateless):
    ffi_rln_witness_input_free(witness2)
    ffi_rln_witness_input_free(witness)
    ffi_vec_cfr_free(roots)
    ffi_vec_cfr_free(pathElements)
    for i in 0..treeDepth-2:
      ffi_cfr_free(defaultHashes[i])
    ffi_cfr_free(defaultLeaf)
    ffi_cfr_free(computedRoot)
  else:
    ffi_rln_witness_input_free(witness2)
    ffi_rln_witness_input_free(witness)
    ffi_merkle_proof_free(merkleProof)

  ffi_cfr_free(rateCommitment)
  ffi_cfr_free(x)
  ffi_cfr_free(epoch)
  ffi_cfr_free(rlnIdentifier)
  ffi_cfr_free(externalNullifier)
  ffi_cfr_free(userMessageLimit)
  ffi_cfr_free(messageId)
  ffi_vec_cfr_free(keys)
  ffi_rln_free(rln)
rln/resources/tree_depth_20/config.json (new file, 9 lines)
@@ -0,0 +1,9 @@
{
  "path": "./database",
  "temporary": false,
  "cache_capacity": 1073741824,
  "flush_every_ms": 500,
  "mode": "HighThroughput",
  "use_compression": false,
  "tree_depth": 20
}
BIN rln/resources/tree_depth_20/graph.bin (new file, binary file not shown)
BIN rln/resources/tree_depth_20/rln.wasm (new file, binary file not shown)
BIN rln/resources/tree_depth_20/rln_final.arkzkey (new file, binary file not shown)
Binary files not shown (4).
@@ -1,119 +0,0 @@
{
 "protocol": "groth16",
 "curve": "bn128",
 "nPublic": 6,
 "vk_alpha_1": [
  "1805378556360488226980822394597799963030511477964155500103132920745199284516",
  "11990395240534218699464972016456017378439762088320057798320175886595281336136",
  "1"
 ],
 "vk_beta_2": [
  ["11031529986141021025408838211017932346992429731488270384177563837022796743627", "16042159910707312759082561183373181639420894978640710177581040523252926273854"],
  ["20112698439519222240302944148895052359035104222313380895334495118294612255131", "19441583024670359810872018179190533814486480928824742448673677460151702019379"],
  ["1", "0"]
 ],
 "vk_gamma_2": [
  ["10857046999023057135944570762232829481370756359578518086990519993285655852781", "11559732032986387107991004021392285783925812861821192530917403151452391805634"],
  ["8495653923123431417604973247489272438418190587263600148770280649306958101930", "4082367875863433681332203403145435568316851327593401208105741076214120093531"],
  ["1", "0"]
 ],
 "vk_delta_2": [
  ["1342791402398183550129987853701397066695422166542200371137242980909975744720", "19885954793721639146517398722913034453263197732511169431324269951156805454588"],
  ["16612518449808520746616592899100682320852224744311197908486719118388461103870", "13039435290897389787786546960964558630619663289413586834851804020863949546009"],
  ["1", "0"]
 ],
 "vk_alphabeta_12": [
  [
   ["5151991366823434428398919091000210787450832786814248297320989361921939794156", "15735191313289001022885148627913534790382722933676436876510746491415970766821"],
   ["3387907257437913904447588318761906430938415556102110876587455322225272831272", "1998779853452712881084781956683721603875246565720647583735935725110674288056"],
   ["14280074182991498185075387990446437410077692353432005297922275464876153151820", "17092408446352310039633488224969232803092763095456307462247653153107223117633"]
  ],
  [
   ["4359046709531668109201634396816565829237358165496082832279660960675584351266", "4511888308846208349307186938266411423935335853916317436093178288331845821336"],
   ["11429499807090785857812316277335883295048773373068683863667725283965356423273", "16232274853200678548795010078253506586114563833318973594428907292096178657392"],
   ["18068999605870933925311275504102553573815570223888590384919752303726860800970", "17309569111965782732372130116757295842160193489132771344011460471298173784984"]
  ]
 ],
 "IC": [
  ["15907620619058468322652190166474219459106695372760190199814463422116003944385", "15752765921940703867480319151728055971288798043197983667046402260506178676501", "1"],
  ["12004081423498474638814710157503496372594892372197913146719480190853290407272", "17759993271504587923309435837545182941635937261719294500288793819648071033469", "1"],
  ["878120019311612655450010384994897394984265086410869146105626241891073100410", "17631186298933191134732246976686754514124819009836710500647157641262968661294", "1"],
  ["14710016919630225372037989028011020715054625029990218653012745498368446893907", "2581293501049347486538806758240731445964309309490885835380825245889909387041", "1"],
  ["766327921864693063481261933507417084013182964450768912480746815296334678928", "18104222034822903557262264275808261481286672296559910954337205847153944954509", "1"],
  ["8877686447180479408315100041907552504213694351585462004774320248566787828012", "15836202093850379814510995758762098170932781831518064786308541653541698178373", "1"],
  ["19567388833538990982537236781224917793757180861915757860561618079730704818311", "3535132838196675082818592669173684593624477421910576112671761297886253127546", "1"]
 ]
}
Binary files not shown (2).
@@ -1,119 +0,0 @@
{
 "protocol": "groth16",
 "curve": "bn128",
 "nPublic": 6,
 "vk_alpha_1": [
  "1805378556360488226980822394597799963030511477964155500103132920745199284516",
  "11990395240534218699464972016456017378439762088320057798320175886595281336136",
  "1"
 ],
 "vk_beta_2": [
  ["11031529986141021025408838211017932346992429731488270384177563837022796743627", "16042159910707312759082561183373181639420894978640710177581040523252926273854"],
  ["20112698439519222240302944148895052359035104222313380895334495118294612255131", "19441583024670359810872018179190533814486480928824742448673677460151702019379"],
  ["1", "0"]
 ],
 "vk_gamma_2": [
  ["10857046999023057135944570762232829481370756359578518086990519993285655852781", "11559732032986387107991004021392285783925812861821192530917403151452391805634"],
  ["8495653923123431417604973247489272438418190587263600148770280649306958101930", "4082367875863433681332203403145435568316851327593401208105741076214120093531"],
  ["1", "0"]
 ],
 "vk_delta_2": [
  ["1948496782571164085469528023647105317580208688174386157591917599801657832035", "20445814069256658101339037520922621162739470138213615104905368409238414511981"],
  ["10024680869920840984813249386422727863826862577760330492647062850849851925340", "10512156247842686783409460795717734694774542185222602679117887145206209285142"],
  ["1", "0"]
 ],
 "vk_alphabeta_12": [
  [
   ["5151991366823434428398919091000210787450832786814248297320989361921939794156", "15735191313289001022885148627913534790382722933676436876510746491415970766821"],
   ["3387907257437913904447588318761906430938415556102110876587455322225272831272", "1998779853452712881084781956683721603875246565720647583735935725110674288056"],
   ["14280074182991498185075387990446437410077692353432005297922275464876153151820", "17092408446352310039633488224969232803092763095456307462247653153107223117633"]
  ],
  [
   ["4359046709531668109201634396816565829237358165496082832279660960675584351266", "4511888308846208349307186938266411423935335853916317436093178288331845821336"],
   ["11429499807090785857812316277335883295048773373068683863667725283965356423273", "16232274853200678548795010078253506586114563833318973594428907292096178657392"],
   ["18068999605870933925311275504102553573815570223888590384919752303726860800970", "17309569111965782732372130116757295842160193489132771344011460471298173784984"]
  ]
 ],
 "IC": [
  ["18693301901828818437917730940595978397160482710354161265484535387752523310572", "17985273354976640088538673802000794244421192643855111089693820179790551470769", "1"],
  ["21164641723988537620541455173278629777250883365474191521194244273980931825942", "998385854410718613441067082771678946155853656328717326195057262123686425518", "1"],
  ["21666968581672145768705229094968410656430989593283335488162701230986314747515", "17996457608540683483506630273632100555125353447506062045735279661096094677264", "1"],
  ["20137761979695192602424300886442379728165712610493092740175904438282083668117", "19184814924890679891263780109959113289320127263583260218200636509492157834679", "1"],
  ["10943171273393803842589314082509655332154393332394322726077270895078286354146", "10872472035685319847811233167729172672344935625121511932198535224727331126439", "1"],
  ["13049169779481227658517545034348883391527506091990880778783387628208561946597", "10083689369261379027228809473568899816311684698866922944902456565434209079955", "1"],
  ["19633516378466409167014413361365552102431118630694133723053441455184566611083", "8059525100726933978719058611146131904598011633549012007359165766216730722269", "1"]
 ]
}
rln/src/bin/generate-headers.rs (new file, 5 lines)
@@ -0,0 +1,5 @@
use rln::ffi;

fn main() -> std::io::Result<()> {
    ffi::generate_headers()
}
@@ -1,239 +0,0 @@
|
||||
// This crate provides interfaces for the zero-knowledge circuit and keys
|
||||
|
||||
use ark_bn254::{
|
||||
Bn254, Fq as ArkFq, Fq2 as ArkFq2, Fr as ArkFr, G1Affine as ArkG1Affine,
|
||||
G1Projective as ArkG1Projective, G2Affine as ArkG2Affine, G2Projective as ArkG2Projective,
|
||||
};
|
||||
use ark_circom::{read_zkey, WitnessCalculator};
|
||||
use ark_groth16::{ProvingKey, VerifyingKey};
|
||||
use ark_relations::r1cs::ConstraintMatrices;
|
||||
use num_bigint::BigUint;
|
||||
use once_cell::sync::OnceCell;
|
||||
use serde_json::Value;
|
||||
use std::fs::File;
|
||||
use std::io::{Cursor, Error, ErrorKind, Result};
|
||||
use std::path::Path;
|
||||
use std::str::FromStr;
|
||||
use std::sync::Mutex;
|
||||
use wasmer::{Module, Store};
|
||||
|
||||
const ZKEY_FILENAME: &str = "rln_final.zkey";
|
||||
const VK_FILENAME: &str = "verifying_key.json";
|
||||
const WASM_FILENAME: &str = "rln.wasm";
|
||||
|
||||
// These parameters are used for tests
|
||||
// Note that the circuit and keys in TEST_RESOURCES_FOLDER are compiled for Merkle trees of height 15, 19 and 20
|
||||
// Changing these parameters to other values than these defaults will cause zkSNARK proof verification to fail
|
||||
pub const TEST_PARAMETERS_INDEX: usize = 2;
|
||||
pub const TEST_TREE_HEIGHT: usize = [15, 19, 20][TEST_PARAMETERS_INDEX];
|
||||
pub const TEST_RESOURCES_FOLDER: &str = [
|
||||
"./resources/tree_height_15/",
|
||||
"./resources/tree_height_19/",
|
||||
"./resources/tree_height_20/",
|
||||
][TEST_PARAMETERS_INDEX];
|
||||
|
||||
// The following types define the pairing friendly elliptic curve, the underlying finite fields and groups default to this module
|
||||
// Note that proofs are serialized assuming Fr to be 4x8 = 32 bytes in size. Hence, changing to a curve with different encoding will make proof verification to fail
|
||||
pub type Curve = Bn254;
|
||||
pub type Fr = ArkFr;
|
||||
pub type Fq = ArkFq;
|
||||
pub type Fq2 = ArkFq2;
|
||||
pub type G1Affine = ArkG1Affine;
|
||||
pub type G1Projective = ArkG1Projective;
|
||||
pub type G2Affine = ArkG2Affine;
|
||||
pub type G2Projective = ArkG2Projective;
|
||||
|
||||
// Loads the proving key using a bytes vector
|
||||
pub fn zkey_from_raw(zkey_data: &Vec<u8>) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
|
||||
if !zkey_data.is_empty() {
|
||||
let mut c = Cursor::new(zkey_data);
|
||||
let proving_key_and_matrices = read_zkey(&mut c)?;
|
||||
Ok(proving_key_and_matrices)
|
||||
} else {
|
||||
Err(Error::new(ErrorKind::NotFound, "No proving key found!"))
|
||||
}
|
||||
}
|
||||
|
||||
// Loads the proving key
|
||||
pub fn zkey_from_folder(
|
||||
resources_folder: &str,
|
||||
) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
|
||||
let zkey_path = format!("{resources_folder}{ZKEY_FILENAME}");
|
||||
if Path::new(&zkey_path).exists() {
|
||||
let mut file = File::open(&zkey_path)?;
|
||||
let proving_key_and_matrices = read_zkey(&mut file)?;
|
||||
Ok(proving_key_and_matrices)
|
||||
} else {
|
||||
Err(Error::new(ErrorKind::NotFound, "No proving key found!"))
|
||||
}
|
||||
}
|
||||
|
// Loads the verification key from a bytes vector
pub fn vk_from_raw(vk_data: &Vec<u8>, zkey_data: &Vec<u8>) -> Result<VerifyingKey<Curve>> {
    let verifying_key: VerifyingKey<Curve>;

    if !vk_data.is_empty() {
        verifying_key = vk_from_vector(vk_data);
        Ok(verifying_key)
    } else if !zkey_data.is_empty() {
        let (proving_key, _matrices) = zkey_from_raw(zkey_data)?;
        verifying_key = proving_key.vk;
        Ok(verifying_key)
    } else {
        Err(Error::new(
            ErrorKind::NotFound,
            "No proving/verification key found!",
        ))
    }
}

// Loads the verification key
pub fn vk_from_folder(resources_folder: &str) -> Result<VerifyingKey<Curve>> {
    let vk_path = format!("{resources_folder}{VK_FILENAME}");
    let zkey_path = format!("{resources_folder}{ZKEY_FILENAME}");

    let verifying_key: VerifyingKey<Curve>;

    if Path::new(&vk_path).exists() {
        verifying_key = vk_from_json(&vk_path);
        Ok(verifying_key)
    } else if Path::new(&zkey_path).exists() {
        let (proving_key, _matrices) = zkey_from_folder(resources_folder)?;
        verifying_key = proving_key.vk;
        Ok(verifying_key)
    } else {
        Err(Error::new(
            ErrorKind::NotFound,
            "No proving/verification key found!",
        ))
    }
}

static WITNESS_CALCULATOR: OnceCell<Mutex<WitnessCalculator>> = OnceCell::new();

// Initializes the witness calculator using a bytes vector
pub fn circom_from_raw(wasm_buffer: Vec<u8>) -> &'static Mutex<WitnessCalculator> {
    WITNESS_CALCULATOR.get_or_init(|| {
        let store = Store::default();
        let module = Module::new(&store, wasm_buffer).unwrap();
        let result =
            WitnessCalculator::from_module(module).expect("Failed to create witness calculator");
        Mutex::new(result)
    })
}

// Initializes the witness calculator
pub fn circom_from_folder(resources_folder: &str) -> &'static Mutex<WitnessCalculator> {
    // We read the wasm file
    let wasm_path = format!("{resources_folder}{WASM_FILENAME}");
    let wasm_buffer = std::fs::read(&wasm_path).unwrap();
    circom_from_raw(wasm_buffer)
}

// The following function implementations are taken/adapted from https://github.com/gakonst/ark-circom/blob/1732e15d6313fe176b0b1abb858ac9e095d0dbd7/src/zkey.rs

// Utilities to convert a json verification key into a groth16::VerifyingKey
fn fq_from_str(s: &str) -> Fq {
    Fq::try_from(BigUint::from_str(s).unwrap()).unwrap()
}

// Extracts the element in G1 corresponding to its JSON serialization
fn json_to_g1(json: &Value, key: &str) -> G1Affine {
    let els: Vec<String> = json
        .get(key)
        .unwrap()
        .as_array()
        .unwrap()
        .iter()
        .map(|i| i.as_str().unwrap().to_string())
        .collect();
    G1Affine::from(G1Projective::new(
        fq_from_str(&els[0]),
        fq_from_str(&els[1]),
        fq_from_str(&els[2]),
    ))
}

// Extracts the vector of G1 elements corresponding to its JSON serialization
fn json_to_g1_vec(json: &Value, key: &str) -> Vec<G1Affine> {
    let els: Vec<Vec<String>> = json
        .get(key)
        .unwrap()
        .as_array()
        .unwrap()
        .iter()
        .map(|i| {
            i.as_array()
                .unwrap()
                .iter()
                .map(|x| x.as_str().unwrap().to_string())
                .collect::<Vec<String>>()
        })
        .collect();

    els.iter()
        .map(|coords| {
            G1Affine::from(G1Projective::new(
                fq_from_str(&coords[0]),
                fq_from_str(&coords[1]),
                fq_from_str(&coords[2]),
            ))
        })
        .collect()
}

// Extracts the element in G2 corresponding to its JSON serialization
fn json_to_g2(json: &Value, key: &str) -> G2Affine {
    let els: Vec<Vec<String>> = json
        .get(key)
        .unwrap()
        .as_array()
        .unwrap()
        .iter()
        .map(|i| {
            i.as_array()
                .unwrap()
                .iter()
                .map(|x| x.as_str().unwrap().to_string())
                .collect::<Vec<String>>()
        })
        .collect();

    let x = Fq2::new(fq_from_str(&els[0][0]), fq_from_str(&els[0][1]));
    let y = Fq2::new(fq_from_str(&els[1][0]), fq_from_str(&els[1][1]));
    let z = Fq2::new(fq_from_str(&els[2][0]), fq_from_str(&els[2][1]));
    G2Affine::from(G2Projective::new(x, y, z))
}

// Converts JSON to a VerifyingKey
fn to_verifying_key(json: serde_json::Value) -> VerifyingKey<Curve> {
    VerifyingKey {
        alpha_g1: json_to_g1(&json, "vk_alpha_1"),
        beta_g2: json_to_g2(&json, "vk_beta_2"),
        gamma_g2: json_to_g2(&json, "vk_gamma_2"),
        delta_g2: json_to_g2(&json, "vk_delta_2"),
        gamma_abc_g1: json_to_g1_vec(&json, "IC"),
    }
}

// Computes the verification key from its JSON serialization
fn vk_from_json(vk_path: &str) -> VerifyingKey<Curve> {
    let json = std::fs::read_to_string(vk_path).unwrap();
    let json: Value = serde_json::from_str(&json).unwrap();

    to_verifying_key(json)
}

// Computes the verification key from a bytes vector containing its JSON serialization
fn vk_from_vector(vk: &[u8]) -> VerifyingKey<Curve> {
    let json = String::from_utf8(vk.to_vec()).expect("Found invalid UTF-8");
    let json: Value = serde_json::from_str(&json).unwrap();

    to_verifying_key(json)
}

// Checks that the verification key is consistent with the proving key
pub fn check_vk_from_zkey(resources_folder: &str, verifying_key: VerifyingKey<Curve>) {
    let (proving_key, _matrices) = zkey_from_folder(resources_folder).unwrap();
    assert_eq!(proving_key.vk, verifying_key);
}
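A short sketch of how the verification-key helpers compose, again assuming the test resources folder; `check_vk_from_zkey` panics on mismatch, so this is test-style code:

    // Load the vk (from verifying_key.json, or derived from rln_final.zkey as a
    // fallback) and assert it matches the vk embedded in the proving key.
    let verifying_key = vk_from_folder(TEST_RESOURCES_FOLDER)?;
    check_vk_from_zkey(TEST_RESOURCES_FOLDER, verifying_key);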
25 rln/src/circuit/error.rs Normal file
@@ -0,0 +1,25 @@
/// Errors that can occur during zkey reading operations
#[derive(Debug, thiserror::Error)]
pub enum ZKeyReadError {
    #[error("Empty zkey bytes provided")]
    EmptyBytes,
    #[error("{0}")]
    SerializationError(#[from] ark_serialize::SerializationError),
}

/// Errors that can occur during witness calculation
#[derive(Debug, thiserror::Error)]
pub enum WitnessCalcError {
    #[error("Failed to deserialize witness calculation graph: {0}")]
    GraphDeserialization(#[from] std::io::Error),
    #[error("Failed to evaluate witness calculation graph: {0}")]
    GraphEvaluation(String),
    #[error("Invalid input length for '{name}': expected {expected}, got {actual}")]
    InvalidInputLength {
        name: String,
        expected: usize,
        actual: usize,
    },
    #[error("Missing required input: {0}")]
    MissingInput(String),
}
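A minimal sketch of how these `thiserror` variants behave; the input name below is illustrative, not a real circuit signal:

    // Display output comes straight from the #[error(...)] attributes.
    let err = WitnessCalcError::InvalidInputLength {
        name: "pathElements".to_string(), // hypothetical input name
        expected: 20,
        actual: 19,
    };
    assert_eq!(
        err.to_string(),
        "Invalid input length for 'pathElements': expected 20, got 19"
    );
    // The #[from] attributes enable `?`-propagation, e.g. a std::io::Error
    // converts automatically into WitnessCalcError::GraphDeserialization.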
113 rln/src/circuit/iden3calc.rs Normal file
@@ -0,0 +1,113 @@
// This crate is based on the code by iden3. Its preimage can be found here:
// https://github.com/iden3/circom-witnesscalc/blob/5cb365b6e4d9052ecc69d4567fcf5bc061c20e94/src/lib.rs

mod graph;
mod proto;
mod storage;

use std::collections::HashMap;

use graph::Node;
use ruint::aliases::U256;
use storage::deserialize_witnesscalc_graph;
use zeroize::zeroize_flat_type;

use self::graph::fr_to_u256;
use super::{error::WitnessCalcError, Fr};
use crate::utils::FrOrSecret;

pub(crate) type InputSignalsInfo = HashMap<String, (usize, usize)>;

pub(crate) fn calc_witness<I: IntoIterator<Item = (String, Vec<FrOrSecret>)>>(
    inputs: I,
    graph_data: &[u8],
) -> Result<Vec<Fr>, WitnessCalcError> {
    let mut inputs: HashMap<String, Vec<U256>> = inputs
        .into_iter()
        .map(|(key, value)| {
            (
                key,
                value
                    .iter()
                    .map(|f_| match f_ {
                        FrOrSecret::IdSecret(s) => s.to_u256(),
                        FrOrSecret::Fr(f) => fr_to_u256(f),
                    })
                    .collect(),
            )
        })
        .collect();

    let (nodes, signals, input_mapping): (Vec<Node>, Vec<usize>, InputSignalsInfo) =
        deserialize_witnesscalc_graph(std::io::Cursor::new(graph_data))?;

    let mut inputs_buffer = get_inputs_buffer(get_inputs_size(&nodes));

    populate_inputs(&inputs, &input_mapping, &mut inputs_buffer)?;

    if let Some(v) = inputs.get_mut("identitySecret") {
        // DO NOT USE: `unsafe { zeroize_flat_type(v) }` would only clear the Vec struct
        // itself (pointer/length/capacity), not the pointed-to data, and can cause memory leaks

        for val in v.iter_mut() {
            unsafe { zeroize_flat_type(val) };
        }
    }

    let res = graph::evaluate(&nodes, inputs_buffer.as_slice(), &signals)
        .map_err(WitnessCalcError::GraphEvaluation)?;

    for val in inputs_buffer.iter_mut() {
        unsafe { zeroize_flat_type(val) };
    }

    Ok(res)
}

fn get_inputs_size(nodes: &[Node]) -> usize {
    let mut start = false;
    let mut max_index = 0usize;
    for &node in nodes.iter() {
        if let Node::Input(i) = node {
            if i > max_index {
                max_index = i;
            }
            start = true
        } else if start {
            break;
        }
    }
    max_index + 1
}

fn populate_inputs(
    input_list: &HashMap<String, Vec<U256>>,
    inputs_info: &InputSignalsInfo,
    input_buffer: &mut [U256],
) -> Result<(), WitnessCalcError> {
    for (key, value) in input_list {
        let (offset, len) = inputs_info
            .get(key)
            .ok_or_else(|| WitnessCalcError::MissingInput(key.clone()))?;

        if *len != value.len() {
            return Err(WitnessCalcError::InvalidInputLength {
                name: key.clone(),
                expected: *len,
                actual: value.len(),
            });
        }

        for (i, v) in value.iter().enumerate() {
            input_buffer[offset + i] = *v;
        }
    }

    Ok(())
}

/// Allocates inputs vec with position 0 set to 1
fn get_inputs_buffer(size: usize) -> Vec<U256> {
    let mut inputs = vec![U256::ZERO; size];
    inputs[0] = U256::from(1);
    inputs
}
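A small sketch of the buffer contract between `get_inputs_buffer` and `populate_inputs` (signal name and offsets invented for illustration): position 0 always holds the constant-one signal, and each named input is copied to its mapped `(offset, len)` range:

    let mut buffer = get_inputs_buffer(4);
    let mut mapping: InputSignalsInfo = HashMap::new();
    mapping.insert("x".to_string(), (1, 2)); // "x" occupies offsets 1..3
    let mut values: HashMap<String, Vec<U256>> = HashMap::new();
    values.insert("x".to_string(), vec![U256::from(7), U256::from(9)]);
    populate_inputs(&values, &mapping, &mut buffer).unwrap();
    assert_eq!(
        buffer,
        vec![U256::from(1), U256::from(7), U256::from(9), U256::ZERO]
    );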
584 rln/src/circuit/iden3calc/graph.rs Normal file
@@ -0,0 +1,584 @@
// This crate is based on the code by iden3. Its preimage can be found here:
// https://github.com/iden3/circom-witnesscalc/blob/5cb365b6e4d9052ecc69d4567fcf5bc061c20e94/src/graph.rs

use std::cmp::Ordering;

use ark_ff::{BigInt, BigInteger, One, PrimeField, Zero};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress, Validate};
use ruint::{aliases::U256, uint};
use serde::{Deserialize, Serialize};

use super::proto;
use crate::circuit::Fr;

const M: U256 =
    uint!(21888242871839275222246405745257275088548364400416034343698204186575808495617_U256);

fn ark_se<S, A: CanonicalSerialize>(a: &A, s: S) -> Result<S::Ok, S::Error>
where
    S: serde::Serializer,
{
    let mut bytes = vec![];
    a.serialize_with_mode(&mut bytes, Compress::Yes)
        .map_err(serde::ser::Error::custom)?;
    s.serialize_bytes(&bytes)
}

fn ark_de<'de, D, A: CanonicalDeserialize>(data: D) -> Result<A, D::Error>
where
    D: serde::de::Deserializer<'de>,
{
    let s: Vec<u8> = serde::de::Deserialize::deserialize(data)?;
    let a = A::deserialize_with_mode(s.as_slice(), Compress::Yes, Validate::Yes);
    a.map_err(serde::de::Error::custom)
}

#[inline(always)]
pub(crate) fn fr_to_u256(x: &Fr) -> U256 {
    U256::from_limbs(x.into_bigint().0)
}

#[inline(always)]
pub(crate) fn u256_to_fr(x: &U256) -> Result<Fr, String> {
    Fr::from_bigint(BigInt::new(x.into_limbs()))
        .ok_or_else(|| "Failed to convert U256 to Fr".to_string())
}

#[derive(Hash, PartialEq, Eq, Debug, Clone, Copy, Serialize, Deserialize)]
pub(crate) enum Operation {
    Mul,
    Div,
    Add,
    Sub,
    Pow,
    Idiv,
    Mod,
    Eq,
    Neq,
    Lt,
    Gt,
    Leq,
    Geq,
    Land,
    Lor,
    Shl,
    Shr,
    Bor,
    Band,
    Bxor,
}

impl Operation {
    fn eval_fr(&self, a: Fr, b: Fr) -> Result<Fr, String> {
        use Operation::*;
        match self {
            Mul => Ok(a * b),
            // Circuit execution should always return a value, so in case of
            // division by 0 we return 0; the resulting proof will simply be
            // invalid in the end.
            Div => {
                if b.is_zero() {
                    Ok(Fr::zero())
                } else {
                    Ok(a / b)
                }
            }
            Add => Ok(a + b),
            Sub => Ok(a - b),
            // Modular exponentiation to prevent overflow and keep result in field
            Pow => {
                let a_u256 = fr_to_u256(&a);
                let b_u256 = fr_to_u256(&b);
                let result = a_u256.pow_mod(b_u256, M);
                u256_to_fr(&result)
            }
            // Integer division (not field division)
            Idiv => {
                if b.is_zero() {
                    Ok(Fr::zero())
                } else {
                    let a_u256 = fr_to_u256(&a);
                    let b_u256 = fr_to_u256(&b);
                    u256_to_fr(&(a_u256 / b_u256))
                }
            }
            // Integer modulo (not field arithmetic)
            Mod => {
                if b.is_zero() {
                    Ok(Fr::zero())
                } else {
                    let a_u256 = fr_to_u256(&a);
                    let b_u256 = fr_to_u256(&b);
                    u256_to_fr(&(a_u256 % b_u256))
                }
            }
            Eq => Ok(match a.cmp(&b) {
                Ordering::Equal => Fr::one(),
                _ => Fr::zero(),
            }),
            Neq => Ok(match a.cmp(&b) {
                Ordering::Equal => Fr::zero(),
                _ => Fr::one(),
            }),
            Lt => u256_to_fr(&u_lt(&fr_to_u256(&a), &fr_to_u256(&b))),
            Gt => u256_to_fr(&u_gt(&fr_to_u256(&a), &fr_to_u256(&b))),
            Leq => u256_to_fr(&u_lte(&fr_to_u256(&a), &fr_to_u256(&b))),
            Geq => u256_to_fr(&u_gte(&fr_to_u256(&a), &fr_to_u256(&b))),
            Land => Ok(if a.is_zero() || b.is_zero() {
                Fr::zero()
            } else {
                Fr::one()
            }),
            Lor => Ok(if a.is_zero() && b.is_zero() {
                Fr::zero()
            } else {
                Fr::one()
            }),
            Shl => shl(a, b),
            Shr => shr(a, b),
            Bor => bit_or(a, b),
            Band => bit_and(a, b),
            Bxor => bit_xor(a, b),
        }
    }
}

impl From<&Operation> for proto::DuoOp {
    fn from(v: &Operation) -> Self {
        match v {
            Operation::Mul => proto::DuoOp::Mul,
            Operation::Div => proto::DuoOp::Div,
            Operation::Add => proto::DuoOp::Add,
            Operation::Sub => proto::DuoOp::Sub,
            Operation::Pow => proto::DuoOp::Pow,
            Operation::Idiv => proto::DuoOp::Idiv,
            Operation::Mod => proto::DuoOp::Mod,
            Operation::Eq => proto::DuoOp::Eq,
            Operation::Neq => proto::DuoOp::Neq,
            Operation::Lt => proto::DuoOp::Lt,
            Operation::Gt => proto::DuoOp::Gt,
            Operation::Leq => proto::DuoOp::Leq,
            Operation::Geq => proto::DuoOp::Geq,
            Operation::Land => proto::DuoOp::Land,
            Operation::Lor => proto::DuoOp::Lor,
            Operation::Shl => proto::DuoOp::Shl,
            Operation::Shr => proto::DuoOp::Shr,
            Operation::Bor => proto::DuoOp::Bor,
            Operation::Band => proto::DuoOp::Band,
            Operation::Bxor => proto::DuoOp::Bxor,
        }
    }
}

#[derive(Hash, PartialEq, Eq, Debug, Clone, Copy, Serialize, Deserialize)]
pub(crate) enum UnoOperation {
    Neg,
    Id, // identity - just return self
}

impl UnoOperation {
    fn eval_fr(&self, a: Fr) -> Result<Fr, String> {
        match self {
            UnoOperation::Neg => {
                if a.is_zero() {
                    Ok(Fr::zero())
                } else {
                    let mut x = Fr::MODULUS;
                    x.sub_with_borrow(&a.into_bigint());
                    Fr::from_bigint(x).ok_or_else(|| "Failed to compute negation".to_string())
                }
            }
            _ => Err(format!(
                "uno operator {:?} not implemented for Montgomery",
                self
            )),
        }
    }
}

impl From<&UnoOperation> for proto::UnoOp {
    fn from(v: &UnoOperation) -> Self {
        match v {
            UnoOperation::Neg => proto::UnoOp::Neg,
            UnoOperation::Id => proto::UnoOp::Id,
        }
    }
}

#[derive(Hash, PartialEq, Eq, Debug, Clone, Copy, Serialize, Deserialize)]
pub(crate) enum TresOperation {
    TernCond,
}

impl TresOperation {
    fn eval_fr(&self, a: Fr, b: Fr, c: Fr) -> Result<Fr, String> {
        match self {
            TresOperation::TernCond => {
                if a.is_zero() {
                    Ok(c)
                } else {
                    Ok(b)
                }
            }
        }
    }
}

impl From<&TresOperation> for proto::TresOp {
    fn from(v: &TresOperation) -> Self {
        match v {
            TresOperation::TernCond => proto::TresOp::TernCond,
        }
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub(crate) enum Node {
    Input(usize),
    Constant(U256),
    #[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
    MontConstant(Fr),
    UnoOp(UnoOperation, usize),
    Op(Operation, usize, usize),
    TresOp(TresOperation, usize, usize, usize),
}

pub(crate) fn evaluate(
    nodes: &[Node],
    inputs: &[U256],
    outputs: &[usize],
) -> Result<Vec<Fr>, String> {
    // Evaluate the graph.
    let mut values = Vec::with_capacity(nodes.len());
    for &node in nodes.iter() {
        let value = match node {
            Node::Constant(c) => u256_to_fr(&c)?,
            Node::MontConstant(c) => c,
            Node::Input(i) => u256_to_fr(&inputs[i])?,
            Node::Op(op, a, b) => op.eval_fr(values[a], values[b])?,
            Node::UnoOp(op, a) => op.eval_fr(values[a])?,
            Node::TresOp(op, a, b, c) => op.eval_fr(values[a], values[b], values[c])?,
        };
        values.push(value);
    }

    // Convert from Montgomery form and return the outputs.
    let mut out = vec![Fr::from(0); outputs.len()];
    for i in 0..outputs.len() {
        out[i] = values[outputs[i]];
    }

    Ok(out)
}
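A toy illustration (not from the codebase) of the evaluation model above: nodes refer only to earlier node indices, and `outputs` selects which node values are returned:

    // Graph computing inputs[0] + 2:
    let nodes = vec![
        Node::Input(0),                 // node 0: reads inputs[0]
        Node::Constant(U256::from(2)),  // node 1: the literal 2
        Node::Op(Operation::Add, 0, 1), // node 2: node 0 + node 1
    ];
    let inputs = [U256::from(40)];
    let out = evaluate(&nodes, &inputs, &[2]).unwrap();
    assert_eq!(out, vec![Fr::from(42u64)]);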
fn shl(a: Fr, b: Fr) -> Result<Fr, String> {
    if b.is_zero() {
        return Ok(a);
    }

    if b.cmp(&Fr::from(Fr::MODULUS_BIT_SIZE)).is_ge() {
        return Ok(Fr::zero());
    }

    let n = b.into_bigint().0[0] as u32;
    let a = a.into_bigint();
    Fr::from_bigint(a << n).ok_or_else(|| "Failed to compute left shift".to_string())
}

fn shr(a: Fr, b: Fr) -> Result<Fr, String> {
    if b.is_zero() {
        return Ok(a);
    }

    match b.cmp(&Fr::from(254u64)) {
        Ordering::Equal => return Ok(Fr::zero()),
        Ordering::Greater => return Ok(Fr::zero()),
        _ => (),
    };

    let mut n = b.into_bigint().to_bytes_le()[0];
    let mut result = a.into_bigint();
    let c = result.as_mut();
    while n >= 64 {
        for i in 0..3 {
            c[i as usize] = c[(i + 1) as usize];
        }
        c[3] = 0;
        n -= 64;
    }

    if n == 0 {
        return Fr::from_bigint(result).ok_or_else(|| "Failed to compute right shift".to_string());
    }

    let mask: u64 = (1 << n) - 1;
    let mut carrier: u64 = c[3] & mask;
    c[3] >>= n;
    for i in (0..3).rev() {
        let new_carrier = c[i] & mask;
        c[i] = (c[i] >> n) | (carrier << (64 - n));
        carrier = new_carrier;
    }
    Fr::from_bigint(result).ok_or_else(|| "Failed to compute right shift".to_string())
}

fn bit_and(a: Fr, b: Fr) -> Result<Fr, String> {
    let a = a.into_bigint();
    let b = b.into_bigint();
    let c: [u64; 4] = [
        a.0[0] & b.0[0],
        a.0[1] & b.0[1],
        a.0[2] & b.0[2],
        a.0[3] & b.0[3],
    ];
    let mut d: BigInt<4> = BigInt::new(c);
    if d > Fr::MODULUS {
        d.sub_with_borrow(&Fr::MODULUS);
    }

    Fr::from_bigint(d).ok_or_else(|| "Failed to compute bitwise AND".to_string())
}

fn bit_or(a: Fr, b: Fr) -> Result<Fr, String> {
    let a = a.into_bigint();
    let b = b.into_bigint();
    let c: [u64; 4] = [
        a.0[0] | b.0[0],
        a.0[1] | b.0[1],
        a.0[2] | b.0[2],
        a.0[3] | b.0[3],
    ];
    let mut d: BigInt<4> = BigInt::new(c);
    if d > Fr::MODULUS {
        d.sub_with_borrow(&Fr::MODULUS);
    }

    Fr::from_bigint(d).ok_or_else(|| "Failed to compute bitwise OR".to_string())
}

fn bit_xor(a: Fr, b: Fr) -> Result<Fr, String> {
    let a = a.into_bigint();
    let b = b.into_bigint();
    let c: [u64; 4] = [
        a.0[0] ^ b.0[0],
        a.0[1] ^ b.0[1],
        a.0[2] ^ b.0[2],
        a.0[3] ^ b.0[3],
    ];
    let mut d: BigInt<4> = BigInt::new(c);
    if d > Fr::MODULUS {
        d.sub_with_borrow(&Fr::MODULUS);
    }

    Fr::from_bigint(d).ok_or_else(|| "Failed to compute bitwise XOR".to_string())
}

// M / 2
const HALF_M: U256 =
    uint!(10944121435919637611123202872628637544274182200208017171849102093287904247808_U256);

fn u_gte(a: &U256, b: &U256) -> U256 {
    let a_neg = &HALF_M < a;
    let b_neg = &HALF_M < b;

    match (a_neg, b_neg) {
        (false, false) => U256::from(a >= b),
        (true, false) => uint!(0_U256),
        (false, true) => uint!(1_U256),
        (true, true) => U256::from(a >= b),
    }
}

fn u_lte(a: &U256, b: &U256) -> U256 {
    let a_neg = &HALF_M < a;
    let b_neg = &HALF_M < b;

    match (a_neg, b_neg) {
        (false, false) => U256::from(a <= b),
        (true, false) => uint!(1_U256),
        (false, true) => uint!(0_U256),
        (true, true) => U256::from(a <= b),
    }
}

fn u_gt(a: &U256, b: &U256) -> U256 {
    let a_neg = &HALF_M < a;
    let b_neg = &HALF_M < b;

    match (a_neg, b_neg) {
        (false, false) => U256::from(a > b),
        (true, false) => uint!(0_U256),
        (false, true) => uint!(1_U256),
        (true, true) => U256::from(a > b),
    }
}

fn u_lt(a: &U256, b: &U256) -> U256 {
    let a_neg = &HALF_M < a;
    let b_neg = &HALF_M < b;

    match (a_neg, b_neg) {
        (false, false) => U256::from(a < b),
        (true, false) => uint!(1_U256),
        (false, true) => uint!(0_U256),
        (true, true) => U256::from(a < b),
    }
}

#[cfg(test)]
mod test {
    use std::{ops::Div, str::FromStr};

    use ruint::uint;

    use super::*;

    #[test]
    fn test_ok() {
        let a = Fr::from(4u64);
        let b = Fr::from(2u64);
        let c = shl(a, b).unwrap();
        assert_eq!(c.cmp(&Fr::from(16u64)), Ordering::Equal)
    }

    #[test]
    fn test_div() {
        assert_eq!(
            Operation::Div
                .eval_fr(Fr::from(2u64), Fr::from(3u64))
                .unwrap(),
            Fr::from_str(
                "7296080957279758407415468581752425029516121466805344781232734728858602831873"
            )
            .unwrap()
        );

        assert_eq!(
            Operation::Div
                .eval_fr(Fr::from(6u64), Fr::from(2u64))
                .unwrap(),
            Fr::from_str("3").unwrap()
        );

        assert_eq!(
            Operation::Div
                .eval_fr(Fr::from(7u64), Fr::from(2u64))
                .unwrap(),
            Fr::from_str(
                "10944121435919637611123202872628637544274182200208017171849102093287904247812"
            )
            .unwrap()
        );
    }

    #[test]
    fn test_idiv() {
        assert_eq!(
            Operation::Idiv
                .eval_fr(Fr::from(2u64), Fr::from(3u64))
                .unwrap(),
            Fr::from_str("0").unwrap()
        );

        assert_eq!(
            Operation::Idiv
                .eval_fr(Fr::from(6u64), Fr::from(2u64))
                .unwrap(),
            Fr::from_str("3").unwrap()
        );

        assert_eq!(
            Operation::Idiv
                .eval_fr(Fr::from(7u64), Fr::from(2u64))
                .unwrap(),
            Fr::from_str("3").unwrap()
        );
    }

    #[test]
    fn test_fr_mod() {
        assert_eq!(
            Operation::Mod
                .eval_fr(Fr::from(7u64), Fr::from(2u64))
                .unwrap(),
            Fr::from_str("1").unwrap()
        );

        assert_eq!(
            Operation::Mod
                .eval_fr(Fr::from(7u64), Fr::from(9u64))
                .unwrap(),
            Fr::from_str("7").unwrap()
        );
    }

    #[test]
    fn test_u_gte() {
        let result = u_gte(&uint!(10_U256), &uint!(3_U256));
        assert_eq!(result, uint!(1_U256));

        let result = u_gte(&uint!(3_U256), &uint!(3_U256));
        assert_eq!(result, uint!(1_U256));

        let result = u_gte(&uint!(2_U256), &uint!(3_U256));
        assert_eq!(result, uint!(0_U256));

        // -1 >= 3 => 0
        let result = u_gte(
            &uint!(
                21888242871839275222246405745257275088548364400416034343698204186575808495616_U256
            ),
            &uint!(3_U256),
        );
        assert_eq!(result, uint!(0_U256));

        // -1 >= -2 => 1
        let result = u_gte(
            &uint!(
                21888242871839275222246405745257275088548364400416034343698204186575808495616_U256
            ),
            &uint!(
                21888242871839275222246405745257275088548364400416034343698204186575808495615_U256
            ),
        );
        assert_eq!(result, uint!(1_U256));

        // -2 >= -1 => 0
        let result = u_gte(
            &uint!(
                21888242871839275222246405745257275088548364400416034343698204186575808495615_U256
            ),
            &uint!(
                21888242871839275222246405745257275088548364400416034343698204186575808495616_U256
            ),
        );
        assert_eq!(result, uint!(0_U256));

        // -2 == -2 => 1
        let result = u_gte(
            &uint!(
                21888242871839275222246405745257275088548364400416034343698204186575808495615_U256
            ),
            &uint!(
                21888242871839275222246405745257275088548364400416034343698204186575808495615_U256
            ),
        );
        assert_eq!(result, uint!(1_U256));
    }

    #[test]
    fn test_x() {
        let x = M.div(uint!(2_U256));

        println!("x: {:?}", x.as_limbs());
        println!("x: {M}");
    }

    #[test]
    fn test_2() {
        let nodes: Vec<Node> = vec![];
        // let node = nodes[0];
        let node = nodes.first();
        println!("{node:?}");
    }
}
117 rln/src/circuit/iden3calc/proto.rs Normal file
@@ -0,0 +1,117 @@
// This crate has been generated by prost-build during compilation of the code by iden3
// and modified manually. The *.proto file used to generate it can be found here:
// https://github.com/iden3/circom-witnesscalc/blob/5cb365b6e4d9052ecc69d4567fcf5bc061c20e94/protos/messages.proto

use std::collections::HashMap;

#[derive(Clone, PartialEq, prost::Message)]
pub(crate) struct BigUInt {
    #[prost(bytes = "vec", tag = "1")]
    pub value_le: Vec<u8>,
}
#[derive(Clone, Copy, PartialEq, prost::Message)]
pub(crate) struct InputNode {
    #[prost(uint32, tag = "1")]
    pub idx: u32,
}
#[derive(Clone, PartialEq, prost::Message)]
pub(crate) struct ConstantNode {
    #[prost(message, optional, tag = "1")]
    pub value: Option<BigUInt>,
}
#[derive(Clone, Copy, PartialEq, prost::Message)]
pub(crate) struct UnoOpNode {
    #[prost(enumeration = "UnoOp", tag = "1")]
    pub op: i32,
    #[prost(uint32, tag = "2")]
    pub a_idx: u32,
}
#[derive(Clone, Copy, PartialEq, prost::Message)]
pub(crate) struct DuoOpNode {
    #[prost(enumeration = "DuoOp", tag = "1")]
    pub op: i32,
    #[prost(uint32, tag = "2")]
    pub a_idx: u32,
    #[prost(uint32, tag = "3")]
    pub b_idx: u32,
}
#[derive(Clone, Copy, PartialEq, prost::Message)]
pub(crate) struct TresOpNode {
    #[prost(enumeration = "TresOp", tag = "1")]
    pub op: i32,
    #[prost(uint32, tag = "2")]
    pub a_idx: u32,
    #[prost(uint32, tag = "3")]
    pub b_idx: u32,
    #[prost(uint32, tag = "4")]
    pub c_idx: u32,
}
#[derive(Clone, PartialEq, prost::Message)]
pub(crate) struct Node {
    #[prost(oneof = "node::Node", tags = "1, 2, 3, 4, 5")]
    pub node: Option<node::Node>,
}
/// Nested message and enum types in `Node`.
pub(crate) mod node {
    #[derive(Clone, PartialEq, prost::Oneof)]
    pub(crate) enum Node {
        #[prost(message, tag = "1")]
        Input(super::InputNode),
        #[prost(message, tag = "2")]
        Constant(super::ConstantNode),
        #[prost(message, tag = "3")]
        UnoOp(super::UnoOpNode),
        #[prost(message, tag = "4")]
        DuoOp(super::DuoOpNode),
        #[prost(message, tag = "5")]
        TresOp(super::TresOpNode),
    }
}
#[derive(Clone, Copy, PartialEq, prost::Message)]
pub(crate) struct SignalDescription {
    #[prost(uint32, tag = "1")]
    pub offset: u32,
    #[prost(uint32, tag = "2")]
    pub len: u32,
}
#[derive(Clone, PartialEq, prost::Message)]
pub(crate) struct GraphMetadata {
    #[prost(uint32, repeated, tag = "1")]
    pub witness_signals: Vec<u32>,
    #[prost(map = "string, message", tag = "2")]
    pub inputs: HashMap<String, SignalDescription>,
}
#[derive(Clone, Copy, Debug, PartialEq, prost::Enumeration)]
pub(crate) enum DuoOp {
    Mul = 0,
    Div = 1,
    Add = 2,
    Sub = 3,
    Pow = 4,
    Idiv = 5,
    Mod = 6,
    Eq = 7,
    Neq = 8,
    Lt = 9,
    Gt = 10,
    Leq = 11,
    Geq = 12,
    Land = 13,
    Lor = 14,
    Shl = 15,
    Shr = 16,
    Bor = 17,
    Band = 18,
    Bxor = 19,
}

#[derive(Clone, Copy, Debug, PartialEq, prost::Enumeration)]
pub(crate) enum UnoOp {
    Neg = 0,
    Id = 1,
}

#[derive(Clone, Copy, Debug, PartialEq, prost::Enumeration)]
pub(crate) enum TresOp {
    TernCond = 0,
}
528 rln/src/circuit/iden3calc/storage.rs Normal file
@@ -0,0 +1,528 @@
// This crate is based on the code by iden3. Its preimage can be found here:
// https://github.com/iden3/circom-witnesscalc/blob/5cb365b6e4d9052ecc69d4567fcf5bc061c20e94/src/storage.rs

use std::io::{Read, Write};

use ark_ff::PrimeField;
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use prost::Message;

use super::{
    graph::{self, Operation, TresOperation, UnoOperation},
    proto, InputSignalsInfo,
};
use crate::circuit::Fr;

/// Format of the wtns.graph file:
/// + magic line: wtns.graph.001
/// + 8 bytes unsigned LE 64-bit integer: number of nodes
/// + series of protobuf serialized nodes. Each node prefixed by varint length
/// + protobuf serialized GraphMetadata
/// + 8 bytes unsigned LE 64-bit integer: offset of GraphMetadata message
const WITNESSCALC_GRAPH_MAGIC: &[u8] = b"wtns.graph.001";
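A hedged sketch of how the trailing metadata offset lets a reader jump straight to `GraphMetadata` without decoding every node (the file path is illustrative; the round-trip test at the bottom of this file does the same thing):

    let bytes = std::fs::read("graph.bin")?; // hypothetical graph file
    // The final 8 bytes are the little-endian byte offset of the metadata message.
    let off = u64::from_le_bytes(bytes[bytes.len() - 8..].try_into().unwrap()) as usize;
    let mut rw = WriteBackReader::new(std::io::Cursor::new(&bytes[off..]));
    let metadata: proto::GraphMetadata = read_message(&mut rw)?;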
const MAX_VARINT_LENGTH: usize = 10;

impl TryFrom<proto::Node> for graph::Node {
    type Error = std::io::Error;

    fn try_from(value: proto::Node) -> Result<Self, Self::Error> {
        let node = value.node.ok_or_else(|| {
            std::io::Error::new(
                std::io::ErrorKind::InvalidData,
                "Proto::Node must have a node field",
            )
        })?;
        match node {
            proto::node::Node::Input(input_node) => Ok(graph::Node::Input(input_node.idx as usize)),
            proto::node::Node::Constant(constant_node) => {
                let i = constant_node.value.ok_or_else(|| {
                    std::io::Error::new(
                        std::io::ErrorKind::InvalidData,
                        "Constant node must have a value",
                    )
                })?;
                Ok(graph::Node::MontConstant(Fr::from_le_bytes_mod_order(
                    i.value_le.as_slice(),
                )))
            }
            proto::node::Node::UnoOp(uno_op_node) => {
                let op = proto::UnoOp::try_from(uno_op_node.op).map_err(|_| {
                    std::io::Error::new(
                        std::io::ErrorKind::InvalidData,
                        "UnoOp must be valid enum value",
                    )
                })?;
                Ok(graph::Node::UnoOp(op.into(), uno_op_node.a_idx as usize))
            }
            proto::node::Node::DuoOp(duo_op_node) => {
                let op = proto::DuoOp::try_from(duo_op_node.op).map_err(|_| {
                    std::io::Error::new(
                        std::io::ErrorKind::InvalidData,
                        "DuoOp must be valid enum value",
                    )
                })?;
                Ok(graph::Node::Op(
                    op.into(),
                    duo_op_node.a_idx as usize,
                    duo_op_node.b_idx as usize,
                ))
            }
            proto::node::Node::TresOp(tres_op_node) => {
                let op = proto::TresOp::try_from(tres_op_node.op).map_err(|_| {
                    std::io::Error::new(
                        std::io::ErrorKind::InvalidData,
                        "TresOp must be valid enum value",
                    )
                })?;
                Ok(graph::Node::TresOp(
                    op.into(),
                    tres_op_node.a_idx as usize,
                    tres_op_node.b_idx as usize,
                    tres_op_node.c_idx as usize,
                ))
            }
        }
    }
}

impl From<&graph::Node> for proto::node::Node {
    fn from(node: &graph::Node) -> Self {
        match node {
            graph::Node::Input(i) => proto::node::Node::Input(proto::InputNode { idx: *i as u32 }),
            graph::Node::Constant(_) => {
                panic!("We are not supposed to write Constant to the witnesscalc graph. All Constant should be converted to MontConstant.");
            }
            graph::Node::UnoOp(op, a) => {
                let op = proto::UnoOp::from(op);
                proto::node::Node::UnoOp(proto::UnoOpNode {
                    op: op as i32,
                    a_idx: *a as u32,
                })
            }
            graph::Node::Op(op, a, b) => proto::node::Node::DuoOp(proto::DuoOpNode {
                op: proto::DuoOp::from(op) as i32,
                a_idx: *a as u32,
                b_idx: *b as u32,
            }),
            graph::Node::TresOp(op, a, b, c) => proto::node::Node::TresOp(proto::TresOpNode {
                op: proto::TresOp::from(op) as i32,
                a_idx: *a as u32,
                b_idx: *b as u32,
                c_idx: *c as u32,
            }),
            graph::Node::MontConstant(c) => {
                let bi = Into::<num_bigint::BigUint>::into(*c);
                let i = proto::BigUInt {
                    value_le: bi.to_bytes_le(),
                };
                proto::node::Node::Constant(proto::ConstantNode { value: Some(i) })
            }
        }
    }
}

impl From<proto::UnoOp> for UnoOperation {
    fn from(value: proto::UnoOp) -> Self {
        match value {
            proto::UnoOp::Neg => UnoOperation::Neg,
            proto::UnoOp::Id => UnoOperation::Id,
        }
    }
}

impl From<proto::DuoOp> for Operation {
    fn from(value: proto::DuoOp) -> Self {
        match value {
            proto::DuoOp::Mul => Operation::Mul,
            proto::DuoOp::Div => Operation::Div,
            proto::DuoOp::Add => Operation::Add,
            proto::DuoOp::Sub => Operation::Sub,
            proto::DuoOp::Pow => Operation::Pow,
            proto::DuoOp::Idiv => Operation::Idiv,
            proto::DuoOp::Mod => Operation::Mod,
            proto::DuoOp::Eq => Operation::Eq,
            proto::DuoOp::Neq => Operation::Neq,
            proto::DuoOp::Lt => Operation::Lt,
            proto::DuoOp::Gt => Operation::Gt,
            proto::DuoOp::Leq => Operation::Leq,
            proto::DuoOp::Geq => Operation::Geq,
            proto::DuoOp::Land => Operation::Land,
            proto::DuoOp::Lor => Operation::Lor,
            proto::DuoOp::Shl => Operation::Shl,
            proto::DuoOp::Shr => Operation::Shr,
            proto::DuoOp::Bor => Operation::Bor,
            proto::DuoOp::Band => Operation::Band,
            proto::DuoOp::Bxor => Operation::Bxor,
        }
    }
}

impl From<proto::TresOp> for graph::TresOperation {
    fn from(value: proto::TresOp) -> Self {
        match value {
            proto::TresOp::TernCond => TresOperation::TernCond,
        }
    }
}

#[allow(dead_code)]
pub(crate) fn serialize_witnesscalc_graph<T: Write>(
    mut w: T,
    nodes: &Vec<graph::Node>,
    witness_signals: &[usize],
    input_signals: &InputSignalsInfo,
) -> std::io::Result<()> {
    let mut ptr = 0usize;
    w.write_all(WITNESSCALC_GRAPH_MAGIC)?;
    ptr += WITNESSCALC_GRAPH_MAGIC.len();

    w.write_u64::<LittleEndian>(nodes.len() as u64)?;
    ptr += 8;

    let metadata = proto::GraphMetadata {
        witness_signals: witness_signals
            .iter()
            .map(|x| *x as u32)
            .collect::<Vec<u32>>(),
        inputs: input_signals
            .iter()
            .map(|(k, v)| {
                let sig = proto::SignalDescription {
                    offset: v.0 as u32,
                    len: v.1 as u32,
                };
                (k.clone(), sig)
            })
            .collect(),
    };

    // capacity of buf should be enough to hold the largest message + 10 bytes
    // of varint length
    let mut buf = Vec::with_capacity(metadata.encoded_len() + MAX_VARINT_LENGTH);

    for node in nodes {
        let node_pb = proto::Node {
            node: Some(proto::node::Node::from(node)),
        };

        assert_eq!(buf.len(), 0);
        node_pb.encode_length_delimited(&mut buf)?;
        ptr += buf.len();

        w.write_all(&buf)?;
        buf.clear();
    }

    metadata.encode_length_delimited(&mut buf)?;
    w.write_all(&buf)?;
    buf.clear();

    w.write_u64::<LittleEndian>(ptr as u64)?;

    Ok(())
}

fn read_message_length<R: Read>(rw: &mut WriteBackReader<R>) -> std::io::Result<usize> {
    let mut buf = [0u8; MAX_VARINT_LENGTH];
    let bytes_read = rw.read(&mut buf)?;
    if bytes_read == 0 {
        return Err(std::io::Error::new(
            std::io::ErrorKind::UnexpectedEof,
            "Unexpected EOF",
        ));
    }

    let len_delimiter = prost::decode_length_delimiter(buf.as_ref())?;

    let lnln = prost::length_delimiter_len(len_delimiter);

    if lnln < bytes_read {
        rw.write_all(&buf[lnln..bytes_read])?;
    }

    Ok(len_delimiter)
}

fn read_message<R: Read, M: Message + std::default::Default>(
    rw: &mut WriteBackReader<R>,
) -> std::io::Result<M> {
    let ln = read_message_length(rw)?;
    let mut buf = vec![0u8; ln];
    let bytes_read = rw.read(&mut buf)?;
    if bytes_read != ln {
        return Err(std::io::Error::new(
            std::io::ErrorKind::UnexpectedEof,
            "Unexpected EOF",
        ));
    }

    let msg = prost::Message::decode(&buf[..])?;

    Ok(msg)
}

pub(crate) fn deserialize_witnesscalc_graph(
    r: impl Read,
) -> std::io::Result<(Vec<graph::Node>, Vec<usize>, InputSignalsInfo)> {
    let mut br = WriteBackReader::new(r);
    let mut magic = [0u8; WITNESSCALC_GRAPH_MAGIC.len()];

    br.read_exact(&mut magic)?;

    if !magic.eq(WITNESSCALC_GRAPH_MAGIC) {
        return Err(std::io::Error::new(
            std::io::ErrorKind::InvalidData,
            "Invalid magic",
        ));
    }

    let nodes_num = br.read_u64::<LittleEndian>()?;
    let mut nodes = Vec::with_capacity(nodes_num as usize);
    for _ in 0..nodes_num {
        let n: proto::Node = read_message(&mut br)?;
        nodes.push(n.try_into()?);
    }

    let md: proto::GraphMetadata = read_message(&mut br)?;

    let witness_signals = md
        .witness_signals
        .iter()
        .map(|x| *x as usize)
        .collect::<Vec<usize>>();

    let input_signals = md
        .inputs
        .iter()
        .map(|(k, v)| (k.clone(), (v.offset as usize, v.len as usize)))
        .collect::<InputSignalsInfo>();

    Ok((nodes, witness_signals, input_signals))
}

struct WriteBackReader<R: Read> {
    reader: R,
    buffer: Vec<u8>,
}

impl<R: Read> WriteBackReader<R> {
    fn new(reader: R) -> Self {
        WriteBackReader {
            reader,
            buffer: Vec::new(),
        }
    }
}

impl<R: Read> Read for WriteBackReader<R> {
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        if buf.is_empty() {
            return Ok(0);
        }

        let mut n = 0usize;

        if !self.buffer.is_empty() {
            n = std::cmp::min(buf.len(), self.buffer.len());
            self.buffer[self.buffer.len() - n..]
                .iter()
                .rev()
                .enumerate()
                .for_each(|(i, x)| {
                    buf[i] = *x;
                });
            self.buffer.truncate(self.buffer.len() - n);
        }

        while n < buf.len() {
            let m = self.reader.read(&mut buf[n..])?;
            if m == 0 {
                break;
            }
            n += m;
        }

        Ok(n)
    }
}

impl<R: Read> Write for WriteBackReader<R> {
    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
        self.buffer.reserve(buf.len());
        self.buffer.extend(buf.iter().rev());
        Ok(buf.len())
    }

    fn flush(&mut self) -> std::io::Result<()> {
        Ok(())
    }
}

#[cfg(test)]
mod test {
    use core::str::FromStr;
    use std::collections::HashMap;

    use byteorder::ByteOrder;
    use graph::{Operation, TresOperation, UnoOperation};

    use super::*;

    #[test]
    fn test_read_message() {
        let mut buf = Vec::new();
        let n1 = proto::Node {
            node: Some(proto::node::Node::Input(proto::InputNode { idx: 1 })),
        };
        n1.encode_length_delimited(&mut buf).unwrap();

        let n2 = proto::Node {
            node: Some(proto::node::Node::Input(proto::InputNode { idx: 2 })),
        };
        n2.encode_length_delimited(&mut buf).unwrap();

        let mut reader = std::io::Cursor::new(&buf);

        let mut rw = WriteBackReader::new(&mut reader);

        let got_n1: proto::Node = read_message(&mut rw).unwrap();
        assert!(n1.eq(&got_n1));

        let got_n2: proto::Node = read_message(&mut rw).unwrap();
        assert!(n2.eq(&got_n2));

        assert_eq!(reader.position(), buf.len() as u64);
    }

    #[test]
    fn test_read_message_variant() {
        let nodes = vec![
            proto::Node {
                node: Some(proto::node::Node::from(&graph::Node::Input(0))),
            },
            proto::Node {
                node: Some(proto::node::Node::from(&graph::Node::MontConstant(
                    Fr::from_str("1").unwrap(),
                ))),
            },
            proto::Node {
                node: Some(proto::node::Node::from(&graph::Node::UnoOp(
                    UnoOperation::Id,
                    4,
                ))),
            },
            proto::Node {
                node: Some(proto::node::Node::from(&graph::Node::Op(
                    Operation::Mul,
                    5,
                    6,
                ))),
            },
            proto::Node {
                node: Some(proto::node::Node::from(&graph::Node::TresOp(
                    TresOperation::TernCond,
                    7,
                    8,
                    9,
                ))),
            },
        ];

        let mut buf = Vec::new();
        for n in &nodes {
            n.encode_length_delimited(&mut buf).unwrap();
        }

        let mut nodes_got: Vec<proto::Node> = Vec::new();
        let mut reader = std::io::Cursor::new(&buf);
        let mut rw = WriteBackReader::new(&mut reader);
        for _ in 0..nodes.len() {
            nodes_got.push(read_message(&mut rw).unwrap());
        }

        assert_eq!(nodes, nodes_got);
    }

    #[test]
    fn test_write_back_reader() {
        let data = [1u8, 2, 3, 4, 5, 6];
        let mut r = WriteBackReader::new(std::io::Cursor::new(&data));

        let buf = &mut [0u8; 5];
        r.read_exact(buf).unwrap();
        assert_eq!(buf, &[1, 2, 3, 4, 5]);

        // return [4, 5] to reader
        r.write_all(&buf[3..]).unwrap();
        // return [2, 3] to reader
        r.write_all(&buf[1..3]).unwrap();

        buf.fill(0);

        // read 3 bytes, expect [2, 3, 4] after returns
        let mut n = r.read(&mut buf[..3]).unwrap();
        assert_eq!(n, 3);
        assert_eq!(buf, &[2, 3, 4, 0, 0]);

        buf.fill(0);

        // read everything left in reader
        n = r.read(buf).unwrap();
        assert_eq!(n, 2);
        assert_eq!(buf, &[5, 6, 0, 0, 0]);
    }

    #[test]
    fn test_deserialize_inputs() {
        let nodes = vec![
            graph::Node::Input(0),
            graph::Node::MontConstant(Fr::from_str("1").unwrap()),
            graph::Node::UnoOp(UnoOperation::Id, 4),
            graph::Node::Op(Operation::Mul, 5, 6),
            graph::Node::TresOp(TresOperation::TernCond, 7, 8, 9),
        ];

        let witness_signals = vec![4, 1];

        let mut input_signals: InputSignalsInfo = HashMap::new();
        input_signals.insert("sig1".to_string(), (1, 3));
        input_signals.insert("sig2".to_string(), (5, 1));

        let mut tmp = Vec::new();
        serialize_witnesscalc_graph(&mut tmp, &nodes, &witness_signals, &input_signals).unwrap();

        let mut reader = std::io::Cursor::new(&tmp);

        let (nodes_res, witness_signals_res, input_signals_res) =
            deserialize_witnesscalc_graph(&mut reader).unwrap();

        assert_eq!(nodes, nodes_res);
        assert_eq!(input_signals, input_signals_res);
        assert_eq!(witness_signals, witness_signals_res);

        let metadata_start = LittleEndian::read_u64(&tmp[tmp.len() - 8..]);

        let mt_reader = std::io::Cursor::new(&tmp[metadata_start as usize..]);
        let mut rw = WriteBackReader::new(mt_reader);
        let metadata: proto::GraphMetadata = read_message(&mut rw).unwrap();

        let metadata_want = proto::GraphMetadata {
            witness_signals: vec![4, 1],
            inputs: input_signals
                .iter()
                .map(|(k, v)| {
                    (
                        k.clone(),
                        proto::SignalDescription {
                            offset: v.0 as u32,
                            len: v.1 as u32,
                        },
                    )
                })
                .collect(),
        };

        assert_eq!(metadata, metadata_want);
    }
}
151 rln/src/circuit/mod.rs Normal file
@@ -0,0 +1,151 @@
// This crate provides interfaces for the zero-knowledge circuit and keys

pub(crate) mod error;
pub(crate) mod iden3calc;
pub(crate) mod qap;

#[cfg(not(target_arch = "wasm32"))]
use std::sync::LazyLock;

use ark_bn254::{
    Bn254, Fq as ArkFq, Fq2 as ArkFq2, Fr as ArkFr, G1Affine as ArkG1Affine,
    G1Projective as ArkG1Projective, G2Affine as ArkG2Affine, G2Projective as ArkG2Projective,
};
use ark_ff::Field;
use ark_groth16::{
    Proof as ArkProof, ProvingKey as ArkProvingKey, VerifyingKey as ArkVerifyingKey,
};
use ark_relations::r1cs::ConstraintMatrices;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};

use self::error::ZKeyReadError;

#[cfg(not(target_arch = "wasm32"))]
const GRAPH_BYTES: &[u8] = include_bytes!("../../resources/tree_depth_20/graph.bin");

#[cfg(not(target_arch = "wasm32"))]
const ARKZKEY_BYTES: &[u8] = include_bytes!("../../resources/tree_depth_20/rln_final.arkzkey");

#[cfg(not(target_arch = "wasm32"))]
static ARKZKEY: LazyLock<Zkey> = LazyLock::new(|| {
    read_arkzkey_from_bytes_uncompressed(ARKZKEY_BYTES).expect("Default zkey must be valid")
});

pub const DEFAULT_TREE_DEPTH: usize = 20;
pub const COMPRESS_PROOF_SIZE: usize = 128;

// The following types define the pairing-friendly elliptic curve and the underlying finite fields and groups used by default in this module
// Note that proofs are serialized assuming Fr to be 4x8 = 32 bytes in size. Hence, changing to a curve with a different encoding will cause proof verification to fail

/// BN254 pairing-friendly elliptic curve.
pub type Curve = Bn254;

/// Scalar field Fr of the BN254 curve.
pub type Fr = ArkFr;

/// Base field Fq of the BN254 curve.
pub type Fq = ArkFq;

/// Quadratic extension field element for the BN254 curve.
pub type Fq2 = ArkFq2;

/// Affine representation of a G1 group element on the BN254 curve.
pub type G1Affine = ArkG1Affine;

/// Projective representation of a G1 group element on the BN254 curve.
pub type G1Projective = ArkG1Projective;

/// Affine representation of a G2 group element on the BN254 curve.
pub type G2Affine = ArkG2Affine;

/// Projective representation of a G2 group element on the BN254 curve.
pub type G2Projective = ArkG2Projective;

/// Groth16 proof for the BN254 curve.
pub type Proof = ArkProof<Curve>;

/// Proving key for the Groth16 proof system.
pub type ProvingKey = ArkProvingKey<Curve>;

/// Combination of the proving key and constraint matrices.
pub type Zkey = (ArkProvingKey<Curve>, ConstraintMatrices<Fr>);

/// Verifying key for the Groth16 proof system.
pub type VerifyingKey = ArkVerifyingKey<Curve>;

/// Loads the zkey from raw bytes
pub fn zkey_from_raw(zkey_data: &[u8]) -> Result<Zkey, ZKeyReadError> {
    if zkey_data.is_empty() {
        return Err(ZKeyReadError::EmptyBytes);
    }

    let proving_key_and_matrices = read_arkzkey_from_bytes_uncompressed(zkey_data)?;

    Ok(proving_key_and_matrices)
}

// Loads the default zkey, embedded from the resources folder at compile time
#[cfg(not(target_arch = "wasm32"))]
pub fn zkey_from_folder() -> &'static Zkey {
    &ARKZKEY
}

// Loads the default graph, embedded from the resources folder at compile time
#[cfg(not(target_arch = "wasm32"))]
pub fn graph_from_folder() -> &'static [u8] {
    GRAPH_BYTES
}
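A brief usage sketch of the embedded-resources API above; both calls are cheap after first use, since the zkey is lazily deserialized exactly once and the graph is a static byte slice:

    // zkey_from_folder returns &'static Zkey; destructuring binds references.
    let (proving_key, matrices) = zkey_from_folder();
    let graph_data: &[u8] = graph_from_folder();
    assert!(matrices.num_instance_variables > 0);
    assert!(!graph_data.is_empty());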
// The following functions and structs are based on code from ark-zkey:
// https://github.com/zkmopro/ark-zkey/blob/main/src/lib.rs#L106

#[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug, PartialEq)]
struct SerializableProvingKey(ArkProvingKey<Curve>);

#[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug, PartialEq)]
struct SerializableConstraintMatrices<F: Field> {
    num_instance_variables: usize,
    num_witness_variables: usize,
    num_constraints: usize,
    a_num_non_zero: usize,
    b_num_non_zero: usize,
    c_num_non_zero: usize,
    a: SerializableMatrix<F>,
    b: SerializableMatrix<F>,
    c: SerializableMatrix<F>,
}

#[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug, PartialEq)]
struct SerializableMatrix<F: Field> {
    pub data: Vec<Vec<(F, usize)>>,
}

fn read_arkzkey_from_bytes_uncompressed(arkzkey_data: &[u8]) -> Result<Zkey, ZKeyReadError> {
    if arkzkey_data.is_empty() {
        return Err(ZKeyReadError::EmptyBytes);
    }

    let mut cursor = std::io::Cursor::new(arkzkey_data);

    let serialized_proving_key =
        SerializableProvingKey::deserialize_uncompressed_unchecked(&mut cursor)?;

    let serialized_constraint_matrices =
        SerializableConstraintMatrices::deserialize_uncompressed_unchecked(&mut cursor)?;

    let proving_key: ProvingKey = serialized_proving_key.0;
    let constraint_matrices: ConstraintMatrices<Fr> = ConstraintMatrices {
        num_instance_variables: serialized_constraint_matrices.num_instance_variables,
        num_witness_variables: serialized_constraint_matrices.num_witness_variables,
        num_constraints: serialized_constraint_matrices.num_constraints,
        a_num_non_zero: serialized_constraint_matrices.a_num_non_zero,
        b_num_non_zero: serialized_constraint_matrices.b_num_non_zero,
        c_num_non_zero: serialized_constraint_matrices.c_num_non_zero,
        a: serialized_constraint_matrices.a.data,
        b: serialized_constraint_matrices.b.data,
        c: serialized_constraint_matrices.c.data,
    };
    let zkey = (proving_key, constraint_matrices);

    Ok(zkey)
}
119 rln/src/circuit/qap.rs Normal file
@@ -0,0 +1,119 @@
|
||||
// This crate is based on the code by arkworks. Its preimage can be found here:
|
||||
// https://github.com/arkworks-rs/circom-compat/blob/3c95ed98e23a408b4d99a53e483a9bba39685a4e/src/circom/qap.rs
|
||||
|
||||
use ark_ff::PrimeField;
|
||||
use ark_groth16::r1cs_to_qap::{evaluate_constraint, LibsnarkReduction, R1CSToQAP};
|
||||
use ark_poly::EvaluationDomain;
|
||||
use ark_relations::r1cs::{ConstraintMatrices, ConstraintSystemRef, SynthesisError};
|
||||
use ark_std::{cfg_into_iter, cfg_iter, cfg_iter_mut, vec};
|
||||
#[cfg(feature = "parallel")]
|
||||
use rayon::iter::{
|
||||
IndexedParallelIterator, IntoParallelIterator, IntoParallelRefIterator,
|
||||
IntoParallelRefMutIterator, ParallelIterator,
|
||||
};
|
||||
|
||||
/// Implements the witness map used by snarkjs. The arkworks witness map calculates the
|
||||
/// coefficients of H through computing (AB-C)/Z in the evaluation domain and going back to the
|
||||
/// coefficients domain. snarkjs instead precomputes the Lagrange form of the powers of tau bases
/// in a domain twice as large and the witness map is computed as the odd coefficients of (AB-C)
/// in that domain. This serves as HZ when computing the C proof element.
pub(crate) struct CircomReduction;

impl R1CSToQAP for CircomReduction {
    #[allow(clippy::type_complexity)]
    fn instance_map_with_evaluation<F: PrimeField, D: EvaluationDomain<F>>(
        cs: ConstraintSystemRef<F>,
        t: &F,
    ) -> Result<(Vec<F>, Vec<F>, Vec<F>, F, usize, usize), SynthesisError> {
        LibsnarkReduction::instance_map_with_evaluation::<F, D>(cs, t)
    }

    fn witness_map_from_matrices<F: PrimeField, D: EvaluationDomain<F>>(
        matrices: &ConstraintMatrices<F>,
        num_inputs: usize,
        num_constraints: usize,
        full_assignment: &[F],
    ) -> Result<Vec<F>, SynthesisError> {
        let zero = F::zero();
        let domain =
            D::new(num_constraints + num_inputs).ok_or(SynthesisError::PolynomialDegreeTooLarge)?;
        let domain_size = domain.size();

        let mut a = vec![zero; domain_size];
        let mut b = vec![zero; domain_size];

        #[allow(unexpected_cfgs)]
        cfg_iter_mut!(a[..num_constraints])
            .zip(cfg_iter_mut!(b[..num_constraints]))
            .zip(cfg_iter!(&matrices.a))
            .zip(cfg_iter!(&matrices.b))
            .for_each(|(((a, b), at_i), bt_i)| {
                *a = evaluate_constraint(at_i, full_assignment);
                *b = evaluate_constraint(bt_i, full_assignment);
            });

        {
            let start = num_constraints;
            let end = start + num_inputs;
            a[start..end].clone_from_slice(&full_assignment[..num_inputs]);
        }

        let mut c = vec![zero; domain_size];
        #[allow(unexpected_cfgs)]
        cfg_iter_mut!(c[..num_constraints])
            .zip(&a)
            .zip(&b)
            .for_each(|((c_i, &a), &b)| {
                *c_i = a * b;
            });

        domain.ifft_in_place(&mut a);
        domain.ifft_in_place(&mut b);

        let root_of_unity = {
            let domain_size_double = 2 * domain_size;
            let domain_double =
                D::new(domain_size_double).ok_or(SynthesisError::PolynomialDegreeTooLarge)?;
            domain_double.element(1)
        };
        D::distribute_powers_and_mul_by_const(&mut a, root_of_unity, F::one());
        D::distribute_powers_and_mul_by_const(&mut b, root_of_unity, F::one());

        domain.fft_in_place(&mut a);
        domain.fft_in_place(&mut b);

        let mut ab = domain.mul_polynomials_in_evaluation_domain(&a, &b);
        drop(a);
        drop(b);

        domain.ifft_in_place(&mut c);
        D::distribute_powers_and_mul_by_const(&mut c, root_of_unity, F::one());
        domain.fft_in_place(&mut c);

        #[allow(unexpected_cfgs)]
        cfg_iter_mut!(ab)
            .zip(c)
            .for_each(|(ab_i, c_i)| *ab_i -= &c_i);

        Ok(ab)
    }

    fn h_query_scalars<F: PrimeField, D: EvaluationDomain<F>>(
        max_power: usize,
        t: F,
        _: F,
        delta_inverse: F,
    ) -> Result<Vec<F>, SynthesisError> {
        // the usual H query has domain-1 powers. Z has domain powers. So HZ has 2*domain-1 powers.
        #[allow(unexpected_cfgs)]
        let mut scalars = cfg_into_iter!(0..2 * max_power + 1)
            .map(|i| delta_inverse * t.pow([i as u64]))
            .collect::<Vec<_>>();
        let domain_size = scalars.len();
        let domain = D::new(domain_size).ok_or(SynthesisError::PolynomialDegreeTooLarge)?;
        // generate the lagrange coefficients
        domain.ifft_in_place(&mut scalars);
        #[allow(unexpected_cfgs)]
        Ok(cfg_into_iter!(scalars).skip(1).step_by(2).collect())
    }
}
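// Illustrative check of the doubled-domain trick (not part of the original file;
// assumes ark-bn254 and ark-poly as dev-dependencies). Evaluating a polynomial on
// the odd-indexed points of a size-2n domain equals scaling coefficient i by g^i,
// with g the 2n-th root of unity, then evaluating on the size-n domain -- which is
// what distribute_powers_and_mul_by_const followed by fft_in_place compute above.
#[cfg(test)]
fn odd_coefficients_shift_sketch() {
    use ark_bn254::Fr;
    use ark_ff::Field;
    use ark_poly::{EvaluationDomain, GeneralEvaluationDomain};

    let n = 4;
    let domain = GeneralEvaluationDomain::<Fr>::new(n).unwrap();
    let domain_double = GeneralEvaluationDomain::<Fr>::new(2 * n).unwrap();
    let coeffs: Vec<Fr> = (1..=n as u64).map(Fr::from).collect();

    // Direct route: evaluate on the doubled domain, keep odd-indexed evaluations.
    let odd: Vec<Fr> = domain_double
        .fft(&coeffs)
        .into_iter()
        .skip(1)
        .step_by(2)
        .collect();

    // Shifted route: scale coefficient i by g^i, then FFT on the small domain.
    let g = domain_double.element(1);
    let shifted: Vec<Fr> = coeffs
        .iter()
        .enumerate()
        .map(|(i, c)| *c * g.pow([i as u64]))
        .collect();

    assert_eq!(odd, domain.fft(&shifted));
}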
83 rln/src/error.rs (Normal file)
@@ -0,0 +1,83 @@
use std::{array::TryFromSliceError, num::TryFromIntError};

use ark_relations::r1cs::SynthesisError;
use num_bigint::{BigInt, ParseBigIntError};
use thiserror::Error;
use zerokit_utils::error::{FromConfigError, HashError, ZerokitMerkleTreeError};

use crate::circuit::{
    error::{WitnessCalcError, ZKeyReadError},
    Fr,
};

/// Errors that can occur during RLN utility operations (conversions, parsing, etc.)
#[derive(Debug, thiserror::Error)]
pub enum UtilsError {
    #[error("Expected radix 10 or 16")]
    WrongRadix,
    #[error("Failed to parse big integer: {0}")]
    ParseBigInt(#[from] ParseBigIntError),
    #[error("Failed to convert to usize: {0}")]
    ToUsize(#[from] TryFromIntError),
    #[error("Failed to convert from slice: {0}")]
    FromSlice(#[from] TryFromSliceError),
    #[error("Input data too short: expected at least {expected} bytes, got {actual} bytes")]
    InsufficientData { expected: usize, actual: usize },
}

/// Errors that can occur during RLN protocol operations (proof generation, verification, etc.)
#[derive(Debug, thiserror::Error)]
pub enum ProtocolError {
    #[error("Error producing proof: {0}")]
    Synthesis(#[from] SynthesisError),
    #[error("RLN utility error: {0}")]
    Utils(#[from] UtilsError),
    #[error("Error calculating witness: {0}")]
    WitnessCalc(#[from] WitnessCalcError),
    #[error("Expected to read {0} bytes but read only {1} bytes")]
    InvalidReadLen(usize, usize),
    #[error("Cannot convert bigint {0:?} to biguint")]
    BigUintConversion(BigInt),
    #[error("Message id ({0}) is not within user_message_limit ({1})")]
    InvalidMessageId(Fr, Fr),
    #[error("Merkle proof length mismatch: expected {0}, got {1}")]
    InvalidMerkleProofLength(usize, usize),
    #[error("External nullifiers mismatch: {0} != {1}")]
    ExternalNullifierMismatch(Fr, Fr),
    #[error("Cannot recover secret: division by zero")]
    DivisionByZero,
    #[error("Merkle tree operation error: {0}")]
    MerkleTree(#[from] ZerokitMerkleTreeError),
    #[error("Hash computation error: {0}")]
    Hash(#[from] HashError),
    #[error("Proof serialization error: {0}")]
    SerializationError(#[from] ark_serialize::SerializationError),
}

/// Errors that can occur during proof verification
#[derive(Error, Debug)]
pub enum VerifyError {
    #[error("Invalid proof provided")]
    InvalidProof,
    #[error("Expected one of the provided roots")]
    InvalidRoot,
    #[error("Signal value does not match")]
    InvalidSignal,
}

/// Top-level RLN error type encompassing all RLN operations
#[derive(Debug, thiserror::Error)]
pub enum RLNError {
    #[error("Configuration error: {0}")]
    Config(#[from] FromConfigError),
    #[error("Merkle tree error: {0}")]
    MerkleTree(#[from] ZerokitMerkleTreeError),
    #[error("Hash error: {0}")]
    Hash(#[from] HashError),
    #[error("ZKey error: {0}")]
    ZKey(#[from] ZKeyReadError),
    #[error("Protocol error: {0}")]
    Protocol(#[from] ProtocolError),
    #[error("Verification error: {0}")]
    Verify(#[from] VerifyError),
}
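// Illustrative propagation (not part of the original file): the #[from]
// attributes generate From impls, so lower-level errors bubble into RLNError
// with a plain `?`.
#[allow(dead_code)]
fn error_propagation_sketch() -> Result<(), RLNError> {
    let res: Result<(), ProtocolError> = Err(ProtocolError::DivisionByZero);
    // ProtocolError -> RLNError::Protocol via the generated From impl.
    res?;
    Ok(())
}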
786 rln/src/ffi.rs (deleted)
@@ -1,786 +0,0 @@
// This crate implements the public Foreign Function Interface (FFI) for the RLN module

use std::slice;

use crate::public::RLN;

/// Buffer struct is taken from
/// https://github.com/celo-org/celo-threshold-bls-rs/blob/master/crates/threshold-bls-ffi/src/ffi.rs
///
/// Also heavily inspired by https://github.com/kilic/rln/blob/master/src/ffi.rs

#[repr(C)]
#[derive(Clone, Debug, PartialEq)]
pub struct Buffer {
    pub ptr: *const u8,
    pub len: usize,
}

impl From<&[u8]> for Buffer {
    fn from(src: &[u8]) -> Self {
        Self {
            ptr: &src[0] as *const u8,
            len: src.len(),
        }
    }
}

impl<'a> From<&Buffer> for &'a [u8] {
    fn from(src: &Buffer) -> &'a [u8] {
        unsafe { slice::from_raw_parts(src.ptr, src.len) }
    }
}
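// Illustrative round trip (not part of the original file): &[u8] -> Buffer -> &[u8].
// Note that the From<&[u8]> impl above indexes src[0], so constructing a Buffer
// from an empty slice panics; callers must pass non-empty data. Buffer only
// borrows, so the source bytes must outlive every view derived from it.
#[cfg(test)]
fn buffer_round_trip_sketch() {
    let data: Vec<u8> = vec![1, 2, 3];
    let buf = Buffer::from(&data[..]);
    let view: &[u8] = <&[u8]>::from(&buf);
    assert_eq!(view, &data[..]);
}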
// TODO: check if there are security implications by using this clippy
// #[allow(clippy::not_unsafe_ptr_arg_deref)]

////////////////////////////////////////////////////////
// RLN APIs
////////////////////////////////////////////////////////

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn new(tree_height: usize, input_buffer: *const Buffer, ctx: *mut *mut RLN) -> bool {
    let input_data = <&[u8]>::from(unsafe { &*input_buffer });
    let rln = RLN::new(tree_height, input_data);
    unsafe { *ctx = Box::into_raw(Box::new(rln)) };
    true
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn new_with_params(
    tree_height: usize,
    circom_buffer: *const Buffer,
    zkey_buffer: *const Buffer,
    vk_buffer: *const Buffer,
    ctx: *mut *mut RLN,
) -> bool {
    let circom_data = <&[u8]>::from(unsafe { &*circom_buffer });
    let zkey_data = <&[u8]>::from(unsafe { &*zkey_buffer });
    let vk_data = <&[u8]>::from(unsafe { &*vk_buffer });
    let rln = RLN::new_with_params(tree_height, circom_data, zkey_data, vk_data);
    unsafe { *ctx = Box::into_raw(Box::new(rln)) };
    true
}
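// Illustrative only (not an exported API of this file): the constructors above
// hand ownership out via Box::into_raw and no matching destructor appears here,
// so a hypothetical Rust-side reclaim that lets Drop run would look like this:
#[allow(dead_code)]
unsafe fn destroy_rln_sketch(ptr: *mut RLN) {
    // Safety: ptr must come from Box::into_raw above and must not be used again.
    drop(Box::from_raw(ptr));
}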
////////////////////////////////////////////////////////
// Merkle tree APIs
////////////////////////////////////////////////////////
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn set_tree(ctx: *mut RLN, tree_height: usize) -> bool {
    let rln = unsafe { &mut *ctx };
    rln.set_tree(tree_height).is_ok()
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn delete_leaf(ctx: *mut RLN, index: usize) -> bool {
    let rln = unsafe { &mut *ctx };
    rln.delete_leaf(index).is_ok()
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn set_leaf(ctx: *mut RLN, index: usize, input_buffer: *const Buffer) -> bool {
    let rln = unsafe { &mut *ctx };
    let input_data = <&[u8]>::from(unsafe { &*input_buffer });
    rln.set_leaf(index, input_data).is_ok()
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn set_next_leaf(ctx: *mut RLN, input_buffer: *const Buffer) -> bool {
    let rln = unsafe { &mut *ctx };
    let input_data = <&[u8]>::from(unsafe { &*input_buffer });
    rln.set_next_leaf(input_data).is_ok()
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn set_leaves(ctx: *mut RLN, input_buffer: *const Buffer) -> bool {
    let rln = unsafe { &mut *ctx };
    let input_data = <&[u8]>::from(unsafe { &*input_buffer });
    rln.set_leaves(input_data).is_ok()
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn get_root(ctx: *const RLN, output_buffer: *mut Buffer) -> bool {
    let rln = unsafe { &*ctx };
    let mut output_data: Vec<u8> = Vec::new();
    if rln.get_root(&mut output_data).is_ok() {
        unsafe { *output_buffer = Buffer::from(&output_data[..]) };
        std::mem::forget(output_data);
        true
    } else {
        std::mem::forget(output_data);
        false
    }
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn get_proof(ctx: *const RLN, index: usize, output_buffer: *mut Buffer) -> bool {
    let rln = unsafe { &*ctx };
    let mut output_data: Vec<u8> = Vec::new();
    if rln.get_proof(index, &mut output_data).is_ok() {
        unsafe { *output_buffer = Buffer::from(&output_data[..]) };
        std::mem::forget(output_data);
        true
    } else {
        std::mem::forget(output_data);
        false
    }
}
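// Illustrative only (not part of the original file): get_root/get_proof write into
// a caller-provided Buffer and std::mem::forget the backing Vec, so the bytes are
// owned by the caller afterwards. A Rust-side reclaim would also need the Vec's
// capacity; the sketch below assumes capacity == len, which the code above does
// not guarantee, so a real API would have to carry capacity as well.
#[allow(dead_code)]
unsafe fn free_output_buffer_sketch(buf: Buffer) {
    // Safety: only sound if buf came from a forgotten Vec<u8> whose capacity == len.
    drop(Vec::from_raw_parts(buf.ptr as *mut u8, buf.len, buf.len));
}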
////////////////////////////////////////////////////////
// zkSNARKs APIs
////////////////////////////////////////////////////////
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn prove(
    ctx: *mut RLN,
    input_buffer: *const Buffer,
    output_buffer: *mut Buffer,
) -> bool {
    let rln = unsafe { &mut *ctx };
    let input_data = <&[u8]>::from(unsafe { &*input_buffer });
    let mut output_data: Vec<u8> = Vec::new();

    if rln.prove(input_data, &mut output_data).is_ok() {
        unsafe { *output_buffer = Buffer::from(&output_data[..]) };
        std::mem::forget(output_data);
        true
    } else {
        std::mem::forget(output_data);
        false
    }
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn verify(
    ctx: *const RLN,
    proof_buffer: *const Buffer,
    proof_is_valid_ptr: *mut bool,
) -> bool {
    let rln = unsafe { &*ctx };
    let proof_data = <&[u8]>::from(unsafe { &*proof_buffer });
    if match rln.verify(proof_data) {
        Ok(verified) => verified,
        Err(_) => return false,
    } {
        unsafe { *proof_is_valid_ptr = true };
    } else {
        unsafe { *proof_is_valid_ptr = false };
    };
    true
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn generate_rln_proof(
    ctx: *mut RLN,
    input_buffer: *const Buffer,
    output_buffer: *mut Buffer,
) -> bool {
    let rln = unsafe { &mut *ctx };
    let input_data = <&[u8]>::from(unsafe { &*input_buffer });
    let mut output_data: Vec<u8> = Vec::new();

    if rln.generate_rln_proof(input_data, &mut output_data).is_ok() {
        unsafe { *output_buffer = Buffer::from(&output_data[..]) };
        std::mem::forget(output_data);
        true
    } else {
        std::mem::forget(output_data);
        false
    }
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn verify_rln_proof(
    ctx: *const RLN,
    proof_buffer: *const Buffer,
    proof_is_valid_ptr: *mut bool,
) -> bool {
    let rln = unsafe { &*ctx };
    let proof_data = <&[u8]>::from(unsafe { &*proof_buffer });
    if match rln.verify_rln_proof(proof_data) {
        Ok(verified) => verified,
        Err(_) => return false,
    } {
        unsafe { *proof_is_valid_ptr = true };
    } else {
        unsafe { *proof_is_valid_ptr = false };
    };
    true
}
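// Illustrative helper (not part of the original file): verify() expects the
// serialized zk-proof immediately followed by the serialized proof values,
// exactly as the tests below assemble by hand.
#[allow(dead_code)]
fn build_verify_input_sketch(serialized_proof: &[u8], serialized_proof_values: &[u8]) -> Vec<u8> {
    let mut verify_data = Vec::with_capacity(serialized_proof.len() + serialized_proof_values.len());
    verify_data.extend_from_slice(serialized_proof);
    verify_data.extend_from_slice(serialized_proof_values);
    verify_data
}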
////////////////////////////////////////////////////////
// Utils
////////////////////////////////////////////////////////
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn key_gen(ctx: *const RLN, output_buffer: *mut Buffer) -> bool {
    let rln = unsafe { &*ctx };
    let mut output_data: Vec<u8> = Vec::new();
    if rln.key_gen(&mut output_data).is_ok() {
        unsafe { *output_buffer = Buffer::from(&output_data[..]) };
        std::mem::forget(output_data);
        true
    } else {
        std::mem::forget(output_data);
        false
    }
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn hash(
    ctx: *mut RLN,
    input_buffer: *const Buffer,
    output_buffer: *mut Buffer,
) -> bool {
    let rln = unsafe { &mut *ctx };
    let input_data = <&[u8]>::from(unsafe { &*input_buffer });
    let mut output_data: Vec<u8> = Vec::new();

    if rln.hash(input_data, &mut output_data).is_ok() {
        unsafe { *output_buffer = Buffer::from(&output_data[..]) };
        std::mem::forget(output_data);
        true
    } else {
        std::mem::forget(output_data);
        false
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::circuit::*;
    use crate::poseidon_hash::poseidon_hash;
    use crate::protocol::*;
    use crate::utils::*;
    use ark_std::{rand::thread_rng, UniformRand};
    use rand::Rng;
    use std::fs::File;
    use std::io::Read;
    use std::mem::MaybeUninit;
    use std::time::{Duration, Instant};

    #[test]
    // We test batch Merkle tree additions
    fn test_merkle_operations_ffi() {
        let tree_height = TEST_TREE_HEIGHT;
        let no_of_leaves = 256;

        // We generate a vector of random leaves
        let mut leaves: Vec<Fr> = Vec::new();
        let mut rng = thread_rng();
        for _ in 0..no_of_leaves {
            leaves.push(Fr::rand(&mut rng));
        }

        // We create a RLN instance
        let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
        let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
        let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
        assert!(success, "RLN object creation failed");
        let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };

        // We first add leaves one by one specifying the index
        for (i, leaf) in leaves.iter().enumerate() {
            // We serialize the leaf and set it at the provided index
            let leaf_ser = fr_to_bytes_le(&leaf);
            let input_buffer = &Buffer::from(leaf_ser.as_ref());
            let success = set_leaf(rln_pointer, i, input_buffer);
            assert!(success, "set leaf call failed");
        }

        // We get the root of the tree obtained adding one leaf per time
        let mut output_buffer = MaybeUninit::<Buffer>::uninit();
        let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
        assert!(success, "get root call failed");
        let output_buffer = unsafe { output_buffer.assume_init() };
        let result_data = <&[u8]>::from(&output_buffer).to_vec();
        let (root_single, _) = bytes_le_to_fr(&result_data);

        // We reset the tree to default
        let success = set_tree(rln_pointer, tree_height);
        assert!(success, "set tree call failed");

        // We add leaves one by one using the internal index (new leaves go in the next available position)
        for leaf in &leaves {
            let leaf_ser = fr_to_bytes_le(&leaf);
            let input_buffer = &Buffer::from(leaf_ser.as_ref());
            let success = set_next_leaf(rln_pointer, input_buffer);
            assert!(success, "set next leaf call failed");
        }

        // We get the root of the tree obtained adding leaves using the internal index
        let mut output_buffer = MaybeUninit::<Buffer>::uninit();
        let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
        assert!(success, "get root call failed");
        let output_buffer = unsafe { output_buffer.assume_init() };
        let result_data = <&[u8]>::from(&output_buffer).to_vec();
        let (root_next, _) = bytes_le_to_fr(&result_data);

        // We check if roots are the same
        assert_eq!(root_single, root_next);

        // We reset the tree to default
        let success = set_tree(rln_pointer, tree_height);
        assert!(success, "set tree call failed");

        // We add leaves in a batch into the tree
        let leaves_ser = vec_fr_to_bytes_le(&leaves);
        let input_buffer = &Buffer::from(leaves_ser.as_ref());
        let success = set_leaves(rln_pointer, input_buffer);
        assert!(success, "set leaves call failed");

        // We get the root of the tree obtained adding leaves in batch
        let mut output_buffer = MaybeUninit::<Buffer>::uninit();
        let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
        assert!(success, "get root call failed");
        let output_buffer = unsafe { output_buffer.assume_init() };
        let result_data = <&[u8]>::from(&output_buffer).to_vec();
        let (root_batch, _) = bytes_le_to_fr(&result_data);

        // We check if roots are the same
        assert_eq!(root_single, root_batch);

        // We now delete all leaves set and check if the root corresponds to the empty tree root
        // delete calls over indexes higher than no_of_leaves are ignored and will not increase self.tree.next_index
        let delete_range = 2 * no_of_leaves;
        for i in 0..delete_range {
            let success = delete_leaf(rln_pointer, i);
            assert!(success, "delete leaf call failed");
        }

        // We get the root of the tree obtained deleting all leaves
        let mut output_buffer = MaybeUninit::<Buffer>::uninit();
        let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
        assert!(success, "get root call failed");
        let output_buffer = unsafe { output_buffer.assume_init() };
        let result_data = <&[u8]>::from(&output_buffer).to_vec();
        let (root_delete, _) = bytes_le_to_fr(&result_data);

        // We reset the tree to default
        let success = set_tree(rln_pointer, tree_height);
        assert!(success, "set tree call failed");

        // We get the root of the empty tree
        let mut output_buffer = MaybeUninit::<Buffer>::uninit();
        let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
        assert!(success, "get root call failed");
        let output_buffer = unsafe { output_buffer.assume_init() };
        let result_data = <&[u8]>::from(&output_buffer).to_vec();
        let (root_empty, _) = bytes_le_to_fr(&result_data);

        // We check if roots are the same
        assert_eq!(root_delete, root_empty);
    }
    #[test]
    // This test is similar to the one in lib, but uses only the public C API
    fn test_merkle_proof_ffi() {
        let tree_height = TEST_TREE_HEIGHT;
        let leaf_index = 3;

        // We create a RLN instance
        let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
        let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
        let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
        assert!(success, "RLN object creation failed");
        let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };

        // generate identity
        let identity_secret = hash_to_field(b"test-merkle-proof");
        let id_commitment = poseidon_hash(&vec![identity_secret]);

        // We prepare id_commitment and we set the leaf at provided index
        let leaf_ser = fr_to_bytes_le(&id_commitment);
        let input_buffer = &Buffer::from(leaf_ser.as_ref());
        let success = set_leaf(rln_pointer, leaf_index, input_buffer);
        assert!(success, "set leaf call failed");

        // We obtain the Merkle tree root
        let mut output_buffer = MaybeUninit::<Buffer>::uninit();
        let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
        assert!(success, "get root call failed");
        let output_buffer = unsafe { output_buffer.assume_init() };
        let result_data = <&[u8]>::from(&output_buffer).to_vec();
        let (root, _) = bytes_le_to_fr(&result_data);

        // We obtain the Merkle proof for the leaf
        let mut output_buffer = MaybeUninit::<Buffer>::uninit();
        let success = get_proof(rln_pointer, leaf_index, output_buffer.as_mut_ptr());
        assert!(success, "get merkle proof call failed");
        let output_buffer = unsafe { output_buffer.assume_init() };
        let result_data = <&[u8]>::from(&output_buffer).to_vec();

        let (path_elements, read) = bytes_le_to_vec_fr(&result_data);
        let (identity_path_index, _) = bytes_le_to_vec_u8(&result_data[read..].to_vec());

        // We check correct computation of the path and indexes
        let mut expected_path_elements = vec![
            str_to_fr(
                "0x0000000000000000000000000000000000000000000000000000000000000000",
                16,
            ),
            str_to_fr(
                "0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
                16,
            ),
            str_to_fr(
                "0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
                16,
            ),
            str_to_fr(
                "0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
                16,
            ),
            str_to_fr(
                "0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
                16,
            ),
            str_to_fr(
                "0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
                16,
            ),
            str_to_fr(
                "0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
                16,
            ),
            str_to_fr(
                "0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
                16,
            ),
            str_to_fr(
                "0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
                16,
            ),
            str_to_fr(
                "0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
                16,
            ),
            str_to_fr(
                "0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
                16,
            ),
            str_to_fr(
                "0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
                16,
            ),
            str_to_fr(
                "0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
                16,
            ),
            str_to_fr(
                "0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
                16,
            ),
            str_to_fr(
                "0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
                16,
            ),
        ];

        let mut expected_identity_path_index: Vec<u8> =
            vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];

        // We add the remaining elements for the cases TEST_TREE_HEIGHT = 19 and 20
        if TEST_TREE_HEIGHT == 19 || TEST_TREE_HEIGHT == 20 {
            expected_path_elements.append(&mut vec![
                str_to_fr(
                    "0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
                    16,
                ),
                str_to_fr(
                    "0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
                    16,
                ),
                str_to_fr(
                    "0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
                    16,
                ),
                str_to_fr(
                    "0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
                    16,
                ),
            ]);
            expected_identity_path_index.append(&mut vec![0, 0, 0, 0]);
        }

        if TEST_TREE_HEIGHT == 20 {
            expected_path_elements.append(&mut vec![str_to_fr(
                "0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
                16,
            )]);
            expected_identity_path_index.append(&mut vec![0]);
        }

        assert_eq!(path_elements, expected_path_elements);
        assert_eq!(identity_path_index, expected_identity_path_index);

        // We double check that the proof computed from the public API is correct
        let root_from_proof =
            compute_tree_root(&id_commitment, &path_elements, &identity_path_index, false);

        assert_eq!(root, root_from_proof);
    }
    #[test]
    // Benchmarks proof generation and verification
    fn test_groth16_proofs_performance_ffi() {
        let tree_height = TEST_TREE_HEIGHT;

        // We create a RLN instance
        let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
        let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
        let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
        assert!(success, "RLN object creation failed");
        let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };

        // We compute some benchmarks regarding proof and verify API calls
        // Note that circuit loading requires some initial overhead.
        // Once the circuit is loaded (i.e., when the RLN object is created), proof generation and verification times should be similar at each call.
        let sample_size = 100;
        let mut prove_time: u128 = 0;
        let mut verify_time: u128 = 0;

        for _ in 0..sample_size {
            // We generate random witness instances and relative proof values
            let rln_witness = random_rln_witness(tree_height);
            let proof_values = proof_values_from_witness(&rln_witness);

            // We serialize the witness for the prove call
            let rln_witness_ser = serialize_witness(&rln_witness);
            let input_buffer = &Buffer::from(rln_witness_ser.as_ref());
            let mut output_buffer = MaybeUninit::<Buffer>::uninit();
            let now = Instant::now();
            let success = prove(rln_pointer, input_buffer, output_buffer.as_mut_ptr());
            prove_time += now.elapsed().as_nanos();
            assert!(success, "prove call failed");
            let output_buffer = unsafe { output_buffer.assume_init() };

            // We read the returned proof and we append proof values for verify
            let serialized_proof = <&[u8]>::from(&output_buffer).to_vec();
            let serialized_proof_values = serialize_proof_values(&proof_values);
            let mut verify_data = Vec::<u8>::new();
            verify_data.extend(&serialized_proof);
            verify_data.extend(&serialized_proof_values);

            // We prepare input proof values and we call verify
            let input_buffer = &Buffer::from(verify_data.as_ref());
            let mut proof_is_valid: bool = false;
            let proof_is_valid_ptr = &mut proof_is_valid as *mut bool;
            let now = Instant::now();
            let success = verify(rln_pointer, input_buffer, proof_is_valid_ptr);
            verify_time += now.elapsed().as_nanos();
            assert!(success, "verify call failed");
            assert!(proof_is_valid);
        }

        println!(
            "Average prove API call time: {:?}",
            Duration::from_nanos((prove_time / sample_size).try_into().unwrap())
        );
        println!(
            "Average verify API call time: {:?}",
            Duration::from_nanos((verify_time / sample_size).try_into().unwrap())
        );
    }
    #[test]
    // Creating a RLN with raw data should generate same results as using a path to resources
    fn test_rln_raw_ffi() {
        let tree_height = TEST_TREE_HEIGHT;

        // We create a RLN instance using a resource folder path
        let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
        let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
        let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
        assert!(success, "RLN object creation failed");
        let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };

        // We obtain the root from the RLN instance
        let mut output_buffer = MaybeUninit::<Buffer>::uninit();
        let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
        assert!(success, "get root call failed");
        let output_buffer = unsafe { output_buffer.assume_init() };
        let result_data = <&[u8]>::from(&output_buffer).to_vec();
        let (root_rln_folder, _) = bytes_le_to_fr(&result_data);

        // Reading the raw data from the files required for instantiating a RLN instance using raw data
        let circom_path = format!("./resources/tree_height_{TEST_TREE_HEIGHT}/rln.wasm");
        let mut circom_file = File::open(&circom_path).expect("no file found");
        let metadata = std::fs::metadata(&circom_path).expect("unable to read metadata");
        let mut circom_buffer = vec![0; metadata.len() as usize];
        circom_file
            .read_exact(&mut circom_buffer)
            .expect("buffer overflow");

        let zkey_path = format!("./resources/tree_height_{TEST_TREE_HEIGHT}/rln_final.zkey");
        let mut zkey_file = File::open(&zkey_path).expect("no file found");
        let metadata = std::fs::metadata(&zkey_path).expect("unable to read metadata");
        let mut zkey_buffer = vec![0; metadata.len() as usize];
        zkey_file
            .read_exact(&mut zkey_buffer)
            .expect("buffer overflow");

        let vk_path = format!("./resources/tree_height_{TEST_TREE_HEIGHT}/verification_key.json");

        let mut vk_file = File::open(&vk_path).expect("no file found");
        let metadata = std::fs::metadata(&vk_path).expect("unable to read metadata");
        let mut vk_buffer = vec![0; metadata.len() as usize];
        vk_file.read_exact(&mut vk_buffer).expect("buffer overflow");

        let circom_data = &Buffer::from(&circom_buffer[..]);
        let zkey_data = &Buffer::from(&zkey_buffer[..]);
        let vk_data = &Buffer::from(&vk_buffer[..]);

        // Creating a RLN instance passing the raw data
        let mut rln_pointer_raw_bytes = MaybeUninit::<*mut RLN>::uninit();
        let success = new_with_params(
            tree_height,
            circom_data,
            zkey_data,
            vk_data,
            rln_pointer_raw_bytes.as_mut_ptr(),
        );
        assert!(success, "RLN object creation failed");
        let rln_pointer2 = unsafe { &mut *rln_pointer_raw_bytes.assume_init() };

        // We obtain the root from the RLN instance containing raw data
        let mut output_buffer = MaybeUninit::<Buffer>::uninit();
        let success = get_root(rln_pointer2, output_buffer.as_mut_ptr());
        assert!(success, "get root call failed");
        let output_buffer = unsafe { output_buffer.assume_init() };
        let result_data = <&[u8]>::from(&output_buffer).to_vec();
        let (root_rln_raw, _) = bytes_le_to_fr(&result_data);

        // And compare that the same root was generated
        assert_eq!(root_rln_folder, root_rln_raw);
    }
    #[test]
    // Computes and verifies an RLN ZK proof using FFI APIs
    fn test_rln_proof_ffi() {
        let tree_height = TEST_TREE_HEIGHT;
        let no_of_leaves = 256;

        // We generate a vector of random leaves
        let mut leaves: Vec<Fr> = Vec::new();
        let mut rng = thread_rng();
        for _ in 0..no_of_leaves {
            leaves.push(Fr::rand(&mut rng));
        }

        // We create a RLN instance
        let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
        let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
        let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
        assert!(success, "RLN object creation failed");
        let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };

        // We add leaves in a batch into the tree
        let leaves_ser = vec_fr_to_bytes_le(&leaves);
        let input_buffer = &Buffer::from(leaves_ser.as_ref());
        let success = set_leaves(rln_pointer, input_buffer);
        assert!(success, "set leaves call failed");

        // We generate a new identity pair
        let mut output_buffer = MaybeUninit::<Buffer>::uninit();
        let success = key_gen(rln_pointer, output_buffer.as_mut_ptr());
        assert!(success, "key gen call failed");
        let output_buffer = unsafe { output_buffer.assume_init() };
        let result_data = <&[u8]>::from(&output_buffer).to_vec();
        let (identity_secret, read) = bytes_le_to_fr(&result_data);
        let (id_commitment, _) = bytes_le_to_fr(&result_data[read..].to_vec());

        // We set id_commitment as the next leaf; its index will be equal to no_of_leaves
        let leaf_ser = fr_to_bytes_le(&id_commitment);
        let input_buffer = &Buffer::from(leaf_ser.as_ref());
        let success = set_next_leaf(rln_pointer, input_buffer);
        assert!(success, "set next leaf call failed");

        let identity_index: u64 = no_of_leaves;

        // We generate a random signal
        let mut rng = rand::thread_rng();
        let signal: [u8; 32] = rng.gen();
        let signal_len = u64::try_from(signal.len()).unwrap();

        // We generate a random epoch
        let epoch = hash_to_field(b"test-epoch");

        // We prepare input for generate_rln_proof API
        // input_data is [ id_key<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
        let mut serialized: Vec<u8> = Vec::new();
        serialized.append(&mut fr_to_bytes_le(&identity_secret));
        serialized.append(&mut identity_index.to_le_bytes().to_vec());
        serialized.append(&mut fr_to_bytes_le(&epoch));
        serialized.append(&mut signal_len.to_le_bytes().to_vec());
        serialized.append(&mut signal.to_vec());

        // We call generate_rln_proof
        let input_buffer = &Buffer::from(serialized.as_ref());
        let mut output_buffer = MaybeUninit::<Buffer>::uninit();
        let success = generate_rln_proof(rln_pointer, input_buffer, output_buffer.as_mut_ptr());
        assert!(success, "generate rln proof call failed");
        let output_buffer = unsafe { output_buffer.assume_init() };
        // result_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> ]
        let mut proof_data = <&[u8]>::from(&output_buffer).to_vec();

        // We prepare input for verify_rln_proof API
        // input_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
        // that is [ proof_data | signal_len<8> | signal<var> ]
        proof_data.append(&mut signal_len.to_le_bytes().to_vec());
        proof_data.append(&mut signal.to_vec());

        // We call verify_rln_proof
        let input_buffer = &Buffer::from(proof_data.as_ref());
        let mut proof_is_valid: bool = false;
        let proof_is_valid_ptr = &mut proof_is_valid as *mut bool;
        let success = verify_rln_proof(rln_pointer, input_buffer, proof_is_valid_ptr);
        assert!(success, "verify call failed");
        assert!(proof_is_valid);
    }
    #[test]
    // Tests hash to field using FFI APIs
    fn test_hash_to_field_ffi() {
        let tree_height = TEST_TREE_HEIGHT;

        // We create a RLN instance
        let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
        let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
        let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
        assert!(success, "RLN object creation failed");
        let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };

        let mut rng = rand::thread_rng();
        let signal: [u8; 32] = rng.gen();

        // We hash the signal through the FFI call
        let input_buffer = &Buffer::from(signal.as_ref());
        let mut output_buffer = MaybeUninit::<Buffer>::uninit();
        let success = hash(rln_pointer, input_buffer, output_buffer.as_mut_ptr());
        assert!(success, "hash call failed");
        let output_buffer = unsafe { output_buffer.assume_init() };

        // We read the returned hash and compare it against a direct hash_to_field call
        let serialized_hash = <&[u8]>::from(&output_buffer).to_vec();
        let (hash1, _) = bytes_le_to_fr(&serialized_hash);

        let hash2 = hash_to_field(&signal);

        assert_eq!(hash1, hash2);
    }
}
566 rln/src/ffi/ffi_rln.rs (Normal file)
@@ -0,0 +1,566 @@
#![allow(non_camel_case_types)]

use num_bigint::BigInt;
use safer_ffi::{boxed::Box_, derive_ReprC, ffi_export, prelude::repr_c};
#[cfg(not(feature = "stateless"))]
use {safer_ffi::prelude::char_p, std::fs::File, std::io::Read};

use super::ffi_utils::{CBoolResult, CFr, CResult};
use crate::prelude::*;

#[cfg(not(feature = "stateless"))]
const MAX_CONFIG_SIZE: u64 = 1024 * 1024; // 1MB

// FFI_RLN

#[derive_ReprC]
#[repr(opaque)]
pub struct FFI_RLN(pub(crate) RLN);
// RLN initialization APIs

#[cfg(not(feature = "stateless"))]
#[ffi_export]
pub fn ffi_rln_new(
    tree_depth: usize,
    config_path: char_p::Ref<'_>,
) -> CResult<repr_c::Box<FFI_RLN>, repr_c::String> {
    let config_str = File::open(config_path.to_str())
        .and_then(|mut file| {
            let metadata = file.metadata()?;
            if metadata.len() > MAX_CONFIG_SIZE {
                return Err(std::io::Error::new(
                    std::io::ErrorKind::InvalidData,
                    format!(
                        "Config file too large: {} bytes (max {} bytes)",
                        metadata.len(),
                        MAX_CONFIG_SIZE
                    ),
                ));
            }
            let mut s = String::new();
            file.read_to_string(&mut s)?;
            Ok(s)
        })
        .unwrap_or_default();

    match RLN::new(tree_depth, config_str.as_str()) {
        Ok(rln) => CResult {
            ok: Some(Box_::new(FFI_RLN(rln))),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}

#[cfg(feature = "stateless")]
#[ffi_export]
pub fn ffi_rln_new() -> CResult<repr_c::Box<FFI_RLN>, repr_c::String> {
    match RLN::new() {
        Ok(rln) => CResult {
            ok: Some(Box_::new(FFI_RLN(rln))),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}

#[cfg(not(feature = "stateless"))]
#[ffi_export]
pub fn ffi_rln_new_with_params(
    tree_depth: usize,
    zkey_data: &repr_c::Vec<u8>,
    graph_data: &repr_c::Vec<u8>,
    config_path: char_p::Ref<'_>,
) -> CResult<repr_c::Box<FFI_RLN>, repr_c::String> {
    let config_str = File::open(config_path.to_str())
        .and_then(|mut file| {
            let metadata = file.metadata()?;
            if metadata.len() > MAX_CONFIG_SIZE {
                return Err(std::io::Error::new(
                    std::io::ErrorKind::InvalidData,
                    format!(
                        "Config file too large: {} bytes (max {} bytes)",
                        metadata.len(),
                        MAX_CONFIG_SIZE
                    ),
                ));
            }
            let mut s = String::new();
            file.read_to_string(&mut s)?;
            Ok(s)
        })
        .unwrap_or_default();

    match RLN::new_with_params(
        tree_depth,
        zkey_data.to_vec(),
        graph_data.to_vec(),
        config_str.as_str(),
    ) {
        Ok(rln) => CResult {
            ok: Some(Box_::new(FFI_RLN(rln))),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}

#[cfg(feature = "stateless")]
#[ffi_export]
pub fn ffi_rln_new_with_params(
    zkey_data: &repr_c::Vec<u8>,
    graph_data: &repr_c::Vec<u8>,
) -> CResult<repr_c::Box<FFI_RLN>, repr_c::String> {
    match RLN::new_with_params(zkey_data.to_vec(), graph_data.to_vec()) {
        Ok(rln) => CResult {
            ok: Some(Box_::new(FFI_RLN(rln))),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}

#[ffi_export]
pub fn ffi_rln_free(rln: repr_c::Box<FFI_RLN>) {
    drop(rln);
}
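// Illustrative usage (not part of the original file): driving the stateless
// constructor from Rust. File names are placeholders; the Vec<u8> ->
// repr_c::Vec<u8> conversion used here is provided by safer-ffi.
#[cfg(all(test, feature = "stateless"))]
fn rln_new_free_sketch() {
    let zkey: repr_c::Vec<u8> = std::fs::read("rln_final.zkey").expect("zkey file").into();
    let graph: repr_c::Vec<u8> = std::fs::read("graph.bin").expect("graph file").into();
    let result = ffi_rln_new_with_params(&zkey, &graph);
    match (result.ok, result.err) {
        (Some(rln), _) => ffi_rln_free(rln),
        (_, Some(err)) => eprintln!("rln init failed: {:?}", err),
        _ => unreachable!("CResult carries either ok or err"),
    }
}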
// RLNProof

#[derive_ReprC]
#[repr(opaque)]
pub struct FFI_RLNProof(pub(crate) RLNProof);

#[ffi_export]
pub fn ffi_rln_proof_get_values(
    rln_proof: &repr_c::Box<FFI_RLNProof>,
) -> repr_c::Box<FFI_RLNProofValues> {
    Box_::new(FFI_RLNProofValues(rln_proof.0.proof_values))
}

#[ffi_export]
pub fn ffi_rln_proof_to_bytes_le(
    rln_proof: &repr_c::Box<FFI_RLNProof>,
) -> CResult<repr_c::Vec<u8>, repr_c::String> {
    match rln_proof_to_bytes_le(&rln_proof.0) {
        Ok(bytes) => CResult {
            ok: Some(bytes.into()),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}

#[ffi_export]
pub fn ffi_rln_proof_to_bytes_be(
    rln_proof: &repr_c::Box<FFI_RLNProof>,
) -> CResult<repr_c::Vec<u8>, repr_c::String> {
    match rln_proof_to_bytes_be(&rln_proof.0) {
        Ok(bytes) => CResult {
            ok: Some(bytes.into()),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}

#[ffi_export]
pub fn ffi_bytes_le_to_rln_proof(
    bytes: &repr_c::Vec<u8>,
) -> CResult<repr_c::Box<FFI_RLNProof>, repr_c::String> {
    match bytes_le_to_rln_proof(bytes) {
        Ok((rln_proof, _)) => CResult {
            ok: Some(Box_::new(FFI_RLNProof(rln_proof))),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}

#[ffi_export]
pub fn ffi_bytes_be_to_rln_proof(
    bytes: &repr_c::Vec<u8>,
) -> CResult<repr_c::Box<FFI_RLNProof>, repr_c::String> {
    match bytes_be_to_rln_proof(bytes) {
        Ok((rln_proof, _)) => CResult {
            ok: Some(Box_::new(FFI_RLNProof(rln_proof))),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}

#[ffi_export]
pub fn ffi_rln_proof_free(rln_proof: repr_c::Box<FFI_RLNProof>) {
    drop(rln_proof);
}
// RLNWitnessInput

#[derive_ReprC]
#[repr(opaque)]
pub struct FFI_RLNWitnessInput(pub(crate) RLNWitnessInput);

#[ffi_export]
pub fn ffi_rln_witness_input_new(
    identity_secret: &CFr,
    user_message_limit: &CFr,
    message_id: &CFr,
    path_elements: &repr_c::Vec<CFr>,
    identity_path_index: &repr_c::Vec<u8>,
    x: &CFr,
    external_nullifier: &CFr,
) -> CResult<repr_c::Box<FFI_RLNWitnessInput>, repr_c::String> {
    let mut identity_secret_fr = identity_secret.0;
    let path_elements: Vec<Fr> = path_elements.iter().map(|cfr| cfr.0).collect();
    let identity_path_index: Vec<u8> = identity_path_index.iter().copied().collect();
    match RLNWitnessInput::new(
        IdSecret::from(&mut identity_secret_fr),
        user_message_limit.0,
        message_id.0,
        path_elements,
        identity_path_index,
        x.0,
        external_nullifier.0,
    ) {
        Ok(witness) => CResult {
            ok: Some(Box_::new(FFI_RLNWitnessInput(witness))),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}

#[ffi_export]
pub fn ffi_rln_witness_to_bytes_le(
    witness: &repr_c::Box<FFI_RLNWitnessInput>,
) -> CResult<repr_c::Vec<u8>, repr_c::String> {
    match rln_witness_to_bytes_le(&witness.0) {
        Ok(bytes) => CResult {
            ok: Some(bytes.into()),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}

#[ffi_export]
pub fn ffi_rln_witness_to_bytes_be(
    witness: &repr_c::Box<FFI_RLNWitnessInput>,
) -> CResult<repr_c::Vec<u8>, repr_c::String> {
    match rln_witness_to_bytes_be(&witness.0) {
        Ok(bytes) => CResult {
            ok: Some(bytes.into()),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}

#[ffi_export]
pub fn ffi_bytes_le_to_rln_witness(
    bytes: &repr_c::Vec<u8>,
) -> CResult<repr_c::Box<FFI_RLNWitnessInput>, repr_c::String> {
    match bytes_le_to_rln_witness(bytes) {
        Ok((witness, _)) => CResult {
            ok: Some(Box_::new(FFI_RLNWitnessInput(witness))),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}

#[ffi_export]
pub fn ffi_bytes_be_to_rln_witness(
    bytes: &repr_c::Vec<u8>,
) -> CResult<repr_c::Box<FFI_RLNWitnessInput>, repr_c::String> {
    match bytes_be_to_rln_witness(bytes) {
        Ok((witness, _)) => CResult {
            ok: Some(Box_::new(FFI_RLNWitnessInput(witness))),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}

#[ffi_export]
pub fn ffi_rln_witness_to_bigint_json(
    witness: &repr_c::Box<FFI_RLNWitnessInput>,
) -> CResult<repr_c::String, repr_c::String> {
    match rln_witness_to_bigint_json(&witness.0) {
        Ok(json) => CResult {
            ok: Some(json.to_string().into()),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}

#[ffi_export]
pub fn ffi_rln_witness_input_free(witness: repr_c::Box<FFI_RLNWitnessInput>) {
    drop(witness);
}
// RLNProofValues

#[derive_ReprC]
#[repr(opaque)]
pub struct FFI_RLNProofValues(pub(crate) RLNProofValues);

#[ffi_export]
pub fn ffi_rln_proof_values_get_y(pv: &repr_c::Box<FFI_RLNProofValues>) -> repr_c::Box<CFr> {
    CFr::from(pv.0.y).into()
}

#[ffi_export]
pub fn ffi_rln_proof_values_get_nullifier(
    pv: &repr_c::Box<FFI_RLNProofValues>,
) -> repr_c::Box<CFr> {
    CFr::from(pv.0.nullifier).into()
}

#[ffi_export]
pub fn ffi_rln_proof_values_get_root(pv: &repr_c::Box<FFI_RLNProofValues>) -> repr_c::Box<CFr> {
    CFr::from(pv.0.root).into()
}

#[ffi_export]
pub fn ffi_rln_proof_values_get_x(pv: &repr_c::Box<FFI_RLNProofValues>) -> repr_c::Box<CFr> {
    CFr::from(pv.0.x).into()
}

#[ffi_export]
pub fn ffi_rln_proof_values_get_external_nullifier(
    pv: &repr_c::Box<FFI_RLNProofValues>,
) -> repr_c::Box<CFr> {
    CFr::from(pv.0.external_nullifier).into()
}

#[ffi_export]
pub fn ffi_rln_proof_values_to_bytes_le(pv: &repr_c::Box<FFI_RLNProofValues>) -> repr_c::Vec<u8> {
    rln_proof_values_to_bytes_le(&pv.0).into()
}

#[ffi_export]
pub fn ffi_rln_proof_values_to_bytes_be(pv: &repr_c::Box<FFI_RLNProofValues>) -> repr_c::Vec<u8> {
    rln_proof_values_to_bytes_be(&pv.0).into()
}

#[ffi_export]
pub fn ffi_bytes_le_to_rln_proof_values(
    bytes: &repr_c::Vec<u8>,
) -> CResult<repr_c::Box<FFI_RLNProofValues>, repr_c::String> {
    match bytes_le_to_rln_proof_values(bytes) {
        Ok((pv, _)) => CResult {
            ok: Some(Box_::new(FFI_RLNProofValues(pv))),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(format!("{:?}", err).into()),
        },
    }
}

#[ffi_export]
pub fn ffi_bytes_be_to_rln_proof_values(
    bytes: &repr_c::Vec<u8>,
) -> CResult<repr_c::Box<FFI_RLNProofValues>, repr_c::String> {
    match bytes_be_to_rln_proof_values(bytes) {
        Ok((pv, _)) => CResult {
            ok: Some(Box_::new(FFI_RLNProofValues(pv))),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(format!("{:?}", err).into()),
        },
    }
}

#[ffi_export]
pub fn ffi_rln_proof_values_free(proof_values: repr_c::Box<FFI_RLNProofValues>) {
    drop(proof_values);
}
// Proof generation APIs

#[ffi_export]
pub fn ffi_generate_rln_proof(
    rln: &repr_c::Box<FFI_RLN>,
    witness: &repr_c::Box<FFI_RLNWitnessInput>,
) -> CResult<repr_c::Box<FFI_RLNProof>, repr_c::String> {
    match rln.0.generate_rln_proof(&witness.0) {
        Ok((proof, proof_values)) => {
            let rln_proof = RLNProof {
                proof_values,
                proof,
            };
            CResult {
                ok: Some(Box_::new(FFI_RLNProof(rln_proof))),
                err: None,
            }
        }
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}

#[ffi_export]
pub fn ffi_generate_rln_proof_with_witness(
    rln: &repr_c::Box<FFI_RLN>,
    calculated_witness: &repr_c::Vec<repr_c::String>,
    witness: &repr_c::Box<FFI_RLNWitnessInput>,
) -> CResult<repr_c::Box<FFI_RLNProof>, repr_c::String> {
    let calculated_witness_bigint: Result<Vec<BigInt>, _> = calculated_witness
        .iter()
        .map(|s| {
            // repr_c::String is guaranteed to hold valid UTF-8, so revalidation is skipped here.
            let s_str = unsafe { std::str::from_utf8_unchecked(s.as_bytes()) };
            s_str.parse::<BigInt>()
        })
        .collect();

    let calculated_witness_bigint = match calculated_witness_bigint {
        Ok(w) => w,
        Err(err) => {
            return CResult {
                ok: None,
                err: Some(format!("Failed to parse witness: {}", err).into()),
            }
        }
    };

    match rln
        .0
        .generate_rln_proof_with_witness(calculated_witness_bigint, &witness.0)
    {
        Ok((proof, proof_values)) => {
            let rln_proof = RLNProof {
                proof_values,
                proof,
            };
            CResult {
                ok: Some(Box_::new(FFI_RLNProof(rln_proof))),
                err: None,
            }
        }
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}
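// Illustrative flow (not part of the original file): a witness built with
// ffi_rln_witness_input_new feeds ffi_generate_rln_proof, and the resulting
// proof carries its public values alongside the zk-proof.
#[allow(dead_code)]
fn prove_roundtrip_sketch(
    rln: &repr_c::Box<FFI_RLN>,
    witness: &repr_c::Box<FFI_RLNWitnessInput>,
) {
    if let Some(rln_proof) = ffi_generate_rln_proof(rln, witness).ok {
        // Public values (y, nullifier, root, x, external_nullifier) travel with the proof.
        let values = ffi_rln_proof_get_values(&rln_proof);
        ffi_rln_proof_values_free(values);
        ffi_rln_proof_free(rln_proof);
    }
}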
// Proof verification APIs

#[cfg(not(feature = "stateless"))]
#[ffi_export]
pub fn ffi_verify_rln_proof(
    rln: &repr_c::Box<FFI_RLN>,
    rln_proof: &repr_c::Box<FFI_RLNProof>,
    x: &CFr,
) -> CBoolResult {
    match rln
        .0
        .verify_rln_proof(&rln_proof.0.proof, &rln_proof.0.proof_values, &x.0)
    {
        Ok(verified) => CBoolResult {
            ok: verified,
            err: None,
        },
        Err(err) => CBoolResult {
            ok: false,
            err: Some(err.to_string().into()),
        },
    }
}

#[ffi_export]
pub fn ffi_verify_with_roots(
    rln: &repr_c::Box<FFI_RLN>,
    rln_proof: &repr_c::Box<FFI_RLNProof>,
    roots: &repr_c::Vec<CFr>,
    x: &CFr,
) -> CBoolResult {
    let roots_fr: Vec<Fr> = roots.iter().map(|cfr| cfr.0).collect();

    match rln.0.verify_with_roots(
        &rln_proof.0.proof,
        &rln_proof.0.proof_values,
        &x.0,
        &roots_fr,
    ) {
        Ok(verified) => CBoolResult {
            ok: verified,
            err: None,
        },
        Err(err) => CBoolResult {
            ok: false,
            err: Some(err.to_string().into()),
        },
    }
}
// Identity secret recovery API

#[ffi_export]
pub fn ffi_recover_id_secret(
    proof_values_1: &repr_c::Box<FFI_RLNProofValues>,
    proof_values_2: &repr_c::Box<FFI_RLNProofValues>,
) -> CResult<repr_c::Box<CFr>, repr_c::String> {
    match recover_id_secret(&proof_values_1.0, &proof_values_2.0) {
        Ok(secret) => CResult {
            ok: Some(Box_::new(CFr::from(*secret))),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}
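// Illustrative slashing sketch (not part of the original file). RLN shares are
// points on a degree-one polynomial whose constant term is the identity secret,
// so two proofs under the same external nullifier are enough to interpolate it.
#[allow(dead_code)]
fn slash_sketch(
    pv1: &repr_c::Box<FFI_RLNProofValues>,
    pv2: &repr_c::Box<FFI_RLNProofValues>,
) {
    match ffi_recover_id_secret(pv1, pv2) {
        CResult { ok: Some(secret), .. } => {
            // `secret` is the double-signaler's identity secret as a CFr.
            drop(secret);
        }
        CResult { err: Some(err), .. } => eprintln!("recovery failed: {:?}", err),
        _ => {}
    }
}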
269 rln/src/ffi/ffi_tree.rs (Normal file)
@@ -0,0 +1,269 @@
#![allow(non_camel_case_types)]
#![cfg(not(feature = "stateless"))]

use safer_ffi::{boxed::Box_, derive_ReprC, ffi_export, prelude::repr_c};

use super::{
    ffi_rln::FFI_RLN,
    ffi_utils::{CBoolResult, CFr, CResult},
};

// MerkleProof

#[derive_ReprC]
#[repr(C)]
pub struct FFI_MerkleProof {
    pub path_elements: repr_c::Vec<CFr>,
    pub path_index: repr_c::Vec<u8>,
}

#[ffi_export]
pub fn ffi_merkle_proof_free(merkle_proof: repr_c::Box<FFI_MerkleProof>) {
    drop(merkle_proof);
}
// Merkle tree management APIs

#[ffi_export]
pub fn ffi_set_tree(rln: &mut repr_c::Box<FFI_RLN>, tree_depth: usize) -> CBoolResult {
    match rln.0.set_tree(tree_depth) {
        Ok(_) => CBoolResult {
            ok: true,
            err: None,
        },
        Err(err) => CBoolResult {
            ok: false,
            err: Some(err.to_string().into()),
        },
    }
}

// Merkle tree leaf operations

#[ffi_export]
pub fn ffi_delete_leaf(rln: &mut repr_c::Box<FFI_RLN>, index: usize) -> CBoolResult {
    match rln.0.delete_leaf(index) {
        Ok(_) => CBoolResult {
            ok: true,
            err: None,
        },
        Err(err) => CBoolResult {
            ok: false,
            err: Some(err.to_string().into()),
        },
    }
}

#[ffi_export]
pub fn ffi_set_leaf(rln: &mut repr_c::Box<FFI_RLN>, index: usize, leaf: &CFr) -> CBoolResult {
    match rln.0.set_leaf(index, leaf.0) {
        Ok(_) => CBoolResult {
            ok: true,
            err: None,
        },
        Err(err) => CBoolResult {
            ok: false,
            err: Some(err.to_string().into()),
        },
    }
}

#[ffi_export]
pub fn ffi_get_leaf(
    rln: &repr_c::Box<FFI_RLN>,
    index: usize,
) -> CResult<repr_c::Box<CFr>, repr_c::String> {
    match rln.0.get_leaf(index) {
        Ok(leaf) => CResult {
            ok: Some(CFr::from(leaf).into()),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}

#[ffi_export]
pub fn ffi_leaves_set(rln: &repr_c::Box<FFI_RLN>) -> usize {
    rln.0.leaves_set()
}

#[ffi_export]
pub fn ffi_set_next_leaf(rln: &mut repr_c::Box<FFI_RLN>, leaf: &CFr) -> CBoolResult {
    match rln.0.set_next_leaf(leaf.0) {
        Ok(_) => CBoolResult {
            ok: true,
            err: None,
        },
        Err(err) => CBoolResult {
            ok: false,
            err: Some(err.to_string().into()),
        },
    }
}

#[ffi_export]
pub fn ffi_set_leaves_from(
    rln: &mut repr_c::Box<FFI_RLN>,
    index: usize,
    leaves: &repr_c::Vec<CFr>,
) -> CBoolResult {
    let leaves_vec: Vec<_> = leaves.iter().map(|cfr| cfr.0).collect();
    match rln.0.set_leaves_from(index, leaves_vec) {
        Ok(_) => CBoolResult {
            ok: true,
            err: None,
        },
        Err(err) => CBoolResult {
            ok: false,
            err: Some(err.to_string().into()),
        },
    }
}

#[ffi_export]
pub fn ffi_init_tree_with_leaves(
    rln: &mut repr_c::Box<FFI_RLN>,
    leaves: &repr_c::Vec<CFr>,
) -> CBoolResult {
    let leaves_vec: Vec<_> = leaves.iter().map(|cfr| cfr.0).collect();
    match rln.0.init_tree_with_leaves(leaves_vec) {
        Ok(_) => CBoolResult {
            ok: true,
            err: None,
        },
        Err(err) => CBoolResult {
            ok: false,
            err: Some(err.to_string().into()),
        },
    }
}
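// Illustrative batch seeding (not part of the original file): initialize a fresh
// tree with a batch of leaves in one call rather than leaf-by-leaf. The final
// assert assumes leaves are placed from index 0, as the function name suggests.
#[allow(dead_code)]
fn seed_tree_sketch(rln: &mut repr_c::Box<FFI_RLN>, leaves: &repr_c::Vec<CFr>) {
    let res = ffi_init_tree_with_leaves(rln, leaves);
    assert!(res.ok, "batch init failed: {:?}", res.err);
    // After a successful init, ffi_leaves_set reports how many slots are occupied.
    assert_eq!(ffi_leaves_set(rln), leaves.len());
}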
// Atomic operations
|
||||
|
||||
#[ffi_export]
|
||||
pub fn ffi_atomic_operation(
|
||||
rln: &mut repr_c::Box<FFI_RLN>,
|
||||
index: usize,
|
||||
leaves: &repr_c::Vec<CFr>,
|
||||
indices: &repr_c::Vec<usize>,
|
||||
) -> CBoolResult {
|
||||
let leaves_vec: Vec<_> = leaves.iter().map(|cfr| cfr.0).collect();
|
||||
let indices_vec: Vec<_> = indices.iter().copied().collect();
|
||||
match rln.0.atomic_operation(index, leaves_vec, indices_vec) {
|
||||
Ok(_) => CBoolResult {
|
||||
ok: true,
|
||||
err: None,
|
||||
},
|
||||
Err(err) => CBoolResult {
|
||||
ok: false,
|
||||
err: Some(err.to_string().into()),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
#[ffi_export]
|
||||
pub fn ffi_seq_atomic_operation(
|
||||
rln: &mut repr_c::Box<FFI_RLN>,
|
||||
leaves: &repr_c::Vec<CFr>,
|
||||
indices: &repr_c::Vec<u8>,
|
||||
) -> CBoolResult {
|
||||
let index = rln.0.leaves_set();
|
||||
let leaves_vec: Vec<_> = leaves.iter().map(|cfr| cfr.0).collect();
|
||||
let indices_vec: Vec<_> = indices.iter().map(|x| *x as usize).collect();
|
||||
match rln.0.atomic_operation(index, leaves_vec, indices_vec) {
|
||||
Ok(_) => CBoolResult {
|
||||
ok: true,
|
||||
err: None,
|
||||
},
|
||||
Err(err) => CBoolResult {
|
||||
ok: false,
|
||||
err: Some(err.to_string().into()),
|
||||
},
|
||||
}
|
||||
}
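
// Illustrative sketch (not part of the diff): ffi_seq_atomic_operation is just
// ffi_atomic_operation with the insertion index pinned to the next free leaf.
// Assuming an FFI_RLN handle `rln` created earlier in this file, appending a
// batch while resetting earlier leaves in one shot would look like:
//
//     let leaves = ffi_vec_cfr_from_cfr(&ffi_cfr_one());   // leaves to append
//     let removals: repr_c::Vec<u8> = vec![0u8].into();    // leaf indices to reset
//     let res = ffi_seq_atomic_operation(&mut rln, &leaves, &removals);
//     assert!(res.ok, "batch should apply atomically");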

// Root and proof operations

#[ffi_export]
pub fn ffi_get_root(rln: &repr_c::Box<FFI_RLN>) -> repr_c::Box<CFr> {
    CFr::from(rln.0.get_root()).into()
}

#[ffi_export]
pub fn ffi_get_merkle_proof(
    rln: &repr_c::Box<FFI_RLN>,
    index: usize,
) -> CResult<repr_c::Box<FFI_MerkleProof>, repr_c::String> {
    match rln.0.get_merkle_proof(index) {
        Ok((path_elements, path_index)) => {
            let path_elements: repr_c::Vec<CFr> = path_elements
                .iter()
                .map(|fr| CFr::from(*fr))
                .collect::<Vec<_>>()
                .into();

            let path_index: repr_c::Vec<u8> = path_index.into();

            let merkle_proof = FFI_MerkleProof {
                path_elements,
                path_index,
            };

            CResult {
                ok: Some(Box_::new(merkle_proof)),
                err: None,
            }
        }
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}
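
// Sketch of an assumed usage pattern (not part of the diff): a returned
// FFI_MerkleProof can be folded back into the root with the same convention
// the tree uses, hashing left or right according to each path_index bit:
//
//     let mut acc = leaf; // Fr value of the proven leaf
//     for (elem, bit) in proof.path_elements.iter().zip(proof.path_index.iter()) {
//         acc = match *bit {
//             0 => poseidon_hash(&[acc, elem.0]).unwrap(),
//             _ => poseidon_hash(&[elem.0, acc]).unwrap(),
//         };
//     }
//     assert_eq!(acc, ffi_get_root(&rln).0);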

// Persistent metadata APIs

#[ffi_export]
pub fn ffi_set_metadata(rln: &mut repr_c::Box<FFI_RLN>, metadata: &repr_c::Vec<u8>) -> CBoolResult {
    match rln.0.set_metadata(metadata) {
        Ok(_) => CBoolResult {
            ok: true,
            err: None,
        },
        Err(err) => CBoolResult {
            ok: false,
            err: Some(err.to_string().into()),
        },
    }
}

#[ffi_export]
pub fn ffi_get_metadata(rln: &repr_c::Box<FFI_RLN>) -> CResult<repr_c::Vec<u8>, repr_c::String> {
    match rln.0.get_metadata() {
        Ok(metadata) => CResult {
            ok: Some(metadata.into()),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}

#[ffi_export]
pub fn ffi_flush(rln: &mut repr_c::Box<FFI_RLN>) -> CBoolResult {
    match rln.0.flush() {
        Ok(_) => CBoolResult {
            ok: true,
            err: None,
        },
        Err(err) => CBoolResult {
            ok: false,
            err: Some(err.to_string().into()),
        },
    }
}
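
Taken together, the metadata and flush exports give callers a small durable side channel next to the tree. A minimal usage sketch, assuming the `FFI_RLN` handle `rln` comes from the constructor exported earlier in this file:

    let payload: repr_c::Vec<u8> = b"last_synced_block=123".to_vec().into();
    assert!(ffi_set_metadata(&mut rln, &payload).ok);
    assert!(ffi_flush(&mut rln).ok); // persist before reading back
    let restored = ffi_get_metadata(&rln).ok.unwrap();
    assert_eq!(&restored[..], &payload[..]);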

rln/src/ffi/ffi_utils.rs (new file, 407 lines)
@@ -0,0 +1,407 @@
#![allow(non_camel_case_types)]

use std::ops::Deref;

use safer_ffi::{
    boxed::Box_,
    derive_ReprC, ffi_export,
    prelude::{repr_c, ReprC},
};

use crate::prelude::*;

// CResult

#[derive_ReprC]
#[repr(C)]
pub struct CResult<T: ReprC, Err: ReprC> {
    pub ok: Option<T>,
    pub err: Option<Err>,
}

// CBoolResult

#[derive_ReprC]
#[repr(C)]
pub struct CBoolResult {
    pub ok: bool,
    pub err: Option<repr_c::String>,
}
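
// Sketch (not in the diff): the repeated Ok/Err match blocks across the FFI
// surface could be collapsed with a small adapter like this, assuming only the
// CBoolResult shape defined above:
//
//     fn to_bool_result<E: ToString>(r: Result<(), E>) -> CBoolResult {
//         match r {
//             Ok(_) => CBoolResult { ok: true, err: None },
//             Err(e) => CBoolResult { ok: false, err: Some(e.to_string().into()) },
//         }
//     }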

// CFr

#[derive_ReprC]
#[repr(opaque)]
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct CFr(pub(crate) Fr);

impl Deref for CFr {
    type Target = Fr;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl From<Fr> for CFr {
    fn from(fr: Fr) -> Self {
        Self(fr)
    }
}

impl From<CFr> for repr_c::Box<CFr> {
    fn from(cfr: CFr) -> Self {
        Box_::new(cfr)
    }
}

impl From<&CFr> for repr_c::Box<CFr> {
    fn from(cfr: &CFr) -> Self {
        CFr(cfr.0).into()
    }
}

impl PartialEq<Fr> for CFr {
    fn eq(&self, other: &Fr) -> bool {
        self.0 == *other
    }
}

#[ffi_export]
pub fn ffi_cfr_zero() -> repr_c::Box<CFr> {
    CFr::from(Fr::from(0)).into()
}

#[ffi_export]
pub fn ffi_cfr_one() -> repr_c::Box<CFr> {
    CFr::from(Fr::from(1)).into()
}

#[ffi_export]
pub fn ffi_cfr_to_bytes_le(cfr: &CFr) -> repr_c::Vec<u8> {
    fr_to_bytes_le(&cfr.0).into()
}

#[ffi_export]
pub fn ffi_cfr_to_bytes_be(cfr: &CFr) -> repr_c::Vec<u8> {
    fr_to_bytes_be(&cfr.0).into()
}

#[ffi_export]
pub fn ffi_bytes_le_to_cfr(bytes: &repr_c::Vec<u8>) -> CResult<repr_c::Box<CFr>, repr_c::String> {
    match bytes_le_to_fr(bytes) {
        Ok((cfr, _)) => CResult {
            ok: Some(CFr(cfr).into()),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(format!("{:?}", err).into()),
        },
    }
}

#[ffi_export]
pub fn ffi_bytes_be_to_cfr(bytes: &repr_c::Vec<u8>) -> CResult<repr_c::Box<CFr>, repr_c::String> {
    match bytes_be_to_fr(bytes) {
        Ok((cfr, _)) => CResult {
            ok: Some(CFr(cfr).into()),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(format!("{:?}", err).into()),
        },
    }
}

#[ffi_export]
pub fn ffi_uint_to_cfr(value: u32) -> repr_c::Box<CFr> {
    CFr::from(Fr::from(value)).into()
}

#[ffi_export]
pub fn ffi_cfr_debug(cfr: Option<&CFr>) -> repr_c::String {
    match cfr {
        Some(cfr) => format!("{:?}", cfr.0).into(),
        None => "None".into(),
    }
}

#[ffi_export]
pub fn ffi_cfr_free(cfr: repr_c::Box<CFr>) {
    drop(cfr);
}
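
// Round-trip sketch (illustrative, not in the diff): the byte helpers are
// inverse per endianness, so a CFr survives a LE (or BE) byte round trip:
//
//     let one = ffi_cfr_one();
//     let bytes = ffi_cfr_to_bytes_le(&one);
//     let back = ffi_bytes_le_to_cfr(&bytes).ok.unwrap();
//     assert_eq!(*back, *one);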

// Vec<CFr>

#[ffi_export]
pub fn ffi_vec_cfr_new(capacity: usize) -> repr_c::Vec<CFr> {
    Vec::with_capacity(capacity).into()
}

#[ffi_export]
pub fn ffi_vec_cfr_from_cfr(cfr: &CFr) -> repr_c::Vec<CFr> {
    vec![*cfr].into()
}

#[ffi_export]
pub fn ffi_vec_cfr_push(v: &mut safer_ffi::Vec<CFr>, cfr: &CFr) {
    let mut new: Vec<CFr> = std::mem::replace(v, Vec::new().into()).into();
    if new.len() == new.capacity() {
        new.reserve_exact(1);
    }
    new.push(*cfr);
    *v = new.into();
}
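
// Note on the push above (editorial, not in the diff): the FFI-safe vector is a
// fixed (ptr, len, cap) triple, so growth is delegated to the Rust side by
// temporarily moving the buffer back into a std::vec::Vec. Usage sketch:
//
//     let mut v = ffi_vec_cfr_new(0);
//     ffi_vec_cfr_push(&mut v, &ffi_cfr_one());
//     assert_eq!(ffi_vec_cfr_len(&v), 1);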

#[ffi_export]
pub fn ffi_vec_cfr_len(v: &repr_c::Vec<CFr>) -> usize {
    v.len()
}

#[ffi_export]
pub fn ffi_vec_cfr_get(v: &repr_c::Vec<CFr>, i: usize) -> Option<&CFr> {
    v.get(i)
}

#[ffi_export]
pub fn ffi_vec_cfr_to_bytes_le(vec: &repr_c::Vec<CFr>) -> repr_c::Vec<u8> {
    let vec_fr: Vec<Fr> = vec.iter().map(|cfr| cfr.0).collect();
    vec_fr_to_bytes_le(&vec_fr).into()
}

#[ffi_export]
pub fn ffi_vec_cfr_to_bytes_be(vec: &repr_c::Vec<CFr>) -> repr_c::Vec<u8> {
    let vec_fr: Vec<Fr> = vec.iter().map(|cfr| cfr.0).collect();
    vec_fr_to_bytes_be(&vec_fr).into()
}

#[ffi_export]
pub fn ffi_bytes_le_to_vec_cfr(
    bytes: &repr_c::Vec<u8>,
) -> CResult<repr_c::Vec<CFr>, repr_c::String> {
    match bytes_le_to_vec_fr(bytes) {
        Ok((vec_fr, _)) => {
            let vec_cfr: Vec<CFr> = vec_fr.into_iter().map(CFr).collect();
            CResult {
                ok: Some(vec_cfr.into()),
                err: None,
            }
        }
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}

#[ffi_export]
pub fn ffi_bytes_be_to_vec_cfr(
    bytes: &repr_c::Vec<u8>,
) -> CResult<repr_c::Vec<CFr>, repr_c::String> {
    match bytes_be_to_vec_fr(bytes) {
        Ok((vec_fr, _)) => {
            let vec_cfr: Vec<CFr> = vec_fr.into_iter().map(CFr).collect();
            CResult {
                ok: Some(vec_cfr.into()),
                err: None,
            }
        }
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}

#[ffi_export]
pub fn ffi_vec_cfr_debug(v: Option<&repr_c::Vec<CFr>>) -> repr_c::String {
    match v {
        Some(v) => {
            let vec_fr: Vec<Fr> = v.iter().map(|cfr| cfr.0).collect();
            format!("{:?}", vec_fr).into()
        }
        None => "None".into(),
    }
}

#[ffi_export]
pub fn ffi_vec_cfr_free(v: repr_c::Vec<CFr>) {
    drop(v);
}

// Vec<u8>

#[ffi_export]
pub fn ffi_vec_u8_to_bytes_le(vec: &repr_c::Vec<u8>) -> repr_c::Vec<u8> {
    vec_u8_to_bytes_le(vec).into()
}

#[ffi_export]
pub fn ffi_vec_u8_to_bytes_be(vec: &repr_c::Vec<u8>) -> repr_c::Vec<u8> {
    vec_u8_to_bytes_be(vec).into()
}

#[ffi_export]
pub fn ffi_bytes_le_to_vec_u8(bytes: &repr_c::Vec<u8>) -> CResult<repr_c::Vec<u8>, repr_c::String> {
    match bytes_le_to_vec_u8(bytes) {
        Ok((vec, _)) => CResult {
            ok: Some(vec.into()),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}

#[ffi_export]
pub fn ffi_bytes_be_to_vec_u8(bytes: &repr_c::Vec<u8>) -> CResult<repr_c::Vec<u8>, repr_c::String> {
    match bytes_be_to_vec_u8(bytes) {
        Ok((vec, _)) => CResult {
            ok: Some(vec.into()),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}

#[ffi_export]
pub fn ffi_vec_u8_debug(v: Option<&repr_c::Vec<u8>>) -> repr_c::String {
    match v {
        Some(v) => format!("{:x?}", v.deref()).into(),
        None => "None".into(),
    }
}

#[ffi_export]
pub fn ffi_vec_u8_free(v: repr_c::Vec<u8>) {
    drop(v);
}

// Utility APIs

#[ffi_export]
pub fn ffi_hash_to_field_le(input: &repr_c::Vec<u8>) -> CResult<repr_c::Box<CFr>, repr_c::String> {
    match hash_to_field_le(input) {
        Ok(hash_result) => CResult {
            ok: Some(CFr::from(hash_result).into()),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(format!("{:?}", err).into()),
        },
    }
}

#[ffi_export]
pub fn ffi_hash_to_field_be(input: &repr_c::Vec<u8>) -> CResult<repr_c::Box<CFr>, repr_c::String> {
    match hash_to_field_be(input) {
        Ok(hash_result) => CResult {
            ok: Some(CFr::from(hash_result).into()),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(format!("{:?}", err).into()),
        },
    }
}

#[ffi_export]
pub fn ffi_poseidon_hash_pair(a: &CFr, b: &CFr) -> CResult<repr_c::Box<CFr>, repr_c::String> {
    match poseidon_hash(&[a.0, b.0]) {
        Ok(hash_result) => CResult {
            ok: Some(CFr::from(hash_result).into()),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(format!("{:?}", err).into()),
        },
    }
}

#[ffi_export]
pub fn ffi_key_gen() -> CResult<repr_c::Vec<CFr>, repr_c::String> {
    match keygen() {
        Ok((identity_secret, id_commitment)) => CResult {
            ok: Some(vec![CFr(*identity_secret), CFr(id_commitment)].into()),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(format!("{:?}", err).into()),
        },
    }
}

#[ffi_export]
pub fn ffi_seeded_key_gen(seed: &repr_c::Vec<u8>) -> CResult<repr_c::Vec<CFr>, repr_c::String> {
    match seeded_keygen(seed) {
        Ok((identity_secret, id_commitment)) => CResult {
            ok: Some(vec![CFr(identity_secret), CFr(id_commitment)].into()),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(format!("{:?}", err).into()),
        },
    }
}

#[ffi_export]
pub fn ffi_extended_key_gen() -> CResult<repr_c::Vec<CFr>, repr_c::String> {
    match extended_keygen() {
        Ok((identity_trapdoor, identity_nullifier, identity_secret, id_commitment)) => CResult {
            ok: Some(
                vec![
                    CFr(identity_trapdoor),
                    CFr(identity_nullifier),
                    CFr(identity_secret),
                    CFr(id_commitment),
                ]
                .into(),
            ),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(format!("{:?}", err).into()),
        },
    }
}

#[ffi_export]
pub fn ffi_seeded_extended_key_gen(
    seed: &repr_c::Vec<u8>,
) -> CResult<repr_c::Vec<CFr>, repr_c::String> {
    match extended_seeded_keygen(seed) {
        Ok((identity_trapdoor, identity_nullifier, identity_secret, id_commitment)) => CResult {
            ok: Some(
                vec![
                    CFr(identity_trapdoor),
                    CFr(identity_nullifier),
                    CFr(identity_secret),
                    CFr(id_commitment),
                ]
                .into(),
            ),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(format!("{:?}", err).into()),
        },
    }
}

#[ffi_export]
pub fn ffi_c_string_free(s: repr_c::String) {
    drop(s);
}
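
The key generation exports return flat CFr vectors whose layout the caller must know: two elements for the basic variants, four for the extended ones. A usage sketch with the deterministic seeded variant (seed value illustrative):

    let seed: repr_c::Vec<u8> = b"some deterministic seed".to_vec().into();
    let keys = ffi_seeded_key_gen(&seed).ok.unwrap(); // [identity_secret, id_commitment]
    assert_eq!(ffi_vec_cfr_len(&keys), 2);
    let id_commitment = ffi_vec_cfr_get(&keys, 1).unwrap();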

rln/src/ffi/mod.rs (new file, 10 lines)
@@ -0,0 +1,10 @@
#![cfg(not(target_arch = "wasm32"))]

pub mod ffi_rln;
pub mod ffi_tree;
pub mod ffi_utils;

#[cfg(feature = "headers")]
pub fn generate_headers() -> std::io::Result<()> {
    safer_ffi::headers::builder().to_file("rln.h")?.generate()
}
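
A C header can then be produced from a tiny generator, sketched here under the assumption that the crate is named `rln` and is built with the `headers` feature (the binary name and path are illustrative):

    // e.g. rln/src/bin/generate_headers.rs, run via `cargo run --features headers`
    fn main() -> std::io::Result<()> {
        rln::ffi::generate_headers()
    }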

rln/src/hashers.rs (new file, 82 lines)
@@ -0,0 +1,82 @@
// This crate instantiates the Poseidon hash algorithm.

use once_cell::sync::Lazy;
use tiny_keccak::{Hasher, Keccak};
use zerokit_utils::{error::HashError, poseidon::Poseidon};

use crate::{
    circuit::Fr,
    error::UtilsError,
    utils::{bytes_be_to_fr, bytes_le_to_fr},
};

/// These indexed constants hardcode the supported round parameter tuples (t, RF, RN, SKIP_MATRICES) for the Bn254 scalar field.
/// SKIP_MATRICES is the index of the randomly generated secure MDS matrix.
/// TODO: generate these parameters
const ROUND_PARAMS: [(usize, usize, usize, usize); 8] = [
    (2, 8, 56, 0),
    (3, 8, 57, 0),
    (4, 8, 56, 0),
    (5, 8, 60, 0),
    (6, 8, 60, 0),
    (7, 8, 63, 0),
    (8, 8, 64, 0),
    (9, 8, 63, 0),
];

/// Poseidon hash wrapper over the above parameters.
static POSEIDON: Lazy<Poseidon<Fr>> = Lazy::new(|| Poseidon::<Fr>::from(&ROUND_PARAMS));

pub fn poseidon_hash(input: &[Fr]) -> Result<Fr, HashError> {
    let hash = POSEIDON.hash(input)?;
    Ok(hash)
}

/// The zerokit RLN Merkle tree Hasher.
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct PoseidonHash;

/// The default Hasher trait used by the Merkle tree implementation in utils.
impl zerokit_utils::merkle_tree::Hasher for PoseidonHash {
    type Fr = Fr;
    type Error = HashError;

    fn default_leaf() -> Self::Fr {
        Self::Fr::from(0)
    }

    fn hash(inputs: &[Self::Fr]) -> Result<Self::Fr, Self::Error> {
        poseidon_hash(inputs)
    }
}

/// Hashes an arbitrary signal to the underlying prime field.
pub fn hash_to_field_le(signal: &[u8]) -> Result<Fr, UtilsError> {
    // We hash the input signal using Keccak256
    let mut hash = [0; 32];
    let mut hasher = Keccak::v256();
    hasher.update(signal);
    hasher.finalize(&mut hash);

    // We export the hash as a field element
    let (el, _) = bytes_le_to_fr(hash.as_ref())?;

    Ok(el)
}

/// Hashes an arbitrary signal to the underlying prime field.
pub fn hash_to_field_be(signal: &[u8]) -> Result<Fr, UtilsError> {
    // We hash the input signal using Keccak256
    let mut hash = [0; 32];
    let mut hasher = Keccak::v256();
    hasher.update(signal);
    hasher.finalize(&mut hash);

    // Reverse the bytes to get the big endian representation
    hash.reverse();

    // We export the hash as a field element
    let (el, _) = bytes_be_to_fr(hash.as_ref())?;

    Ok(el)
}
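
Both variants digest the same Keccak256 output and differ only in how the 32 bytes are read into the field: a big-endian read of the reversed digest denotes the same integer as a little-endian read of the original. A sketch of that invariant (whenever both conversions succeed, they should agree):

    let le = hash_to_field_le(b"signal").unwrap();
    let be = hash_to_field_be(b"signal").unwrap();
    assert_eq!(le, be); // same integer, hence the same field element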

rln/src/lib.rs (447 changed lines)
@@ -1,432 +1,31 @@
#![allow(dead_code)]

pub mod circuit;
pub mod error;
pub mod ffi;
pub mod merkle_tree;
pub mod poseidon_constants;
pub mod poseidon_hash;
pub mod hashers;
pub mod pm_tree_adapter;
pub mod poseidon_tree;
pub mod prelude;
pub mod protocol;
pub mod public;
pub mod utils;

#[cfg(test)]
mod test {
    // Ensure that only one Merkle tree feature is enabled at a time
    #[cfg(any(
        all(feature = "fullmerkletree", feature = "optimalmerkletree"),
        all(feature = "fullmerkletree", feature = "pmtree-ft"),
        all(feature = "optimalmerkletree", feature = "pmtree-ft"),
    ))]
    compile_error!(
        "Only one of `fullmerkletree`, `optimalmerkletree`, or `pmtree-ft` can be enabled at a time."
    );

    use crate::circuit::{
        circom_from_folder, vk_from_folder, zkey_from_folder, Fr, TEST_RESOURCES_FOLDER,
        TEST_TREE_HEIGHT,
    };
    use crate::poseidon_hash::poseidon_hash;
    use crate::poseidon_tree::PoseidonTree;
    use crate::protocol::*;
    use crate::utils::str_to_fr;

    // Input generated with https://github.com/oskarth/zk-kit/commit/b6a872f7160c7c14e10a0ea40acab99cbb23c9a8
    const WITNESS_JSON_15: &str = r#"
    {
        "identity_secret": "12825549237505733615964533204745049909430608936689388901883576945030025938736",
        "path_elements": [
            "18622655742232062119094611065896226799484910997537830749762961454045300666333",
            "20590447254980891299813706518821659736846425329007960381537122689749540452732",
            "7423237065226347324353380772367382631490014989348495481811164164159255474657",
            "11286972368698509976183087595462810875513684078608517520839298933882497716792",
            "3607627140608796879659380071776844901612302623152076817094415224584923813162",
            "19712377064642672829441595136074946683621277828620209496774504837737984048981",
            "20775607673010627194014556968476266066927294572720319469184847051418138353016",
            "3396914609616007258851405644437304192397291162432396347162513310381425243293",
            "21551820661461729022865262380882070649935529853313286572328683688269863701601",
            "6573136701248752079028194407151022595060682063033565181951145966236778420039",
            "12413880268183407374852357075976609371175688755676981206018884971008854919922",
            "14271763308400718165336499097156975241954733520325982997864342600795471836726",
            "20066985985293572387227381049700832219069292839614107140851619262827735677018",
            "9394776414966240069580838672673694685292165040808226440647796406499139370960",
            "11331146992410411304059858900317123658895005918277453009197229807340014528524"
        ],
        "identity_path_index": [1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        "x": "8143228284048792769012135629627737459844825626241842423967352803501040982",
        "epoch": "0x0000005b612540fc986b42322f8cb91c2273afad58ed006fdba0c97b4b16b12f",
        "rln_identifier": "11412926387081627876309792396682864042420635853496105400039841573530884328439"
    }
    "#;

    // Input generated with protocol::random_rln_witness
    const WITNESS_JSON_19: &str = r#"
    {
        "identity_secret": "922538810348594125658702672067738675294669207539999802857585668079702330450",
        "path_elements": [
            "16059714054680148404543504061485737353203416489071538960876865983954285286166",
            "3041470753871943901334053763207316028823782848445723460227667780327106380356",
            "2557297527793326315072058421057853700096944625924483912548759909801348042183",
            "6677578602456189582427063963562590713054668181987223110955234085327917303436",
            "2250827150965576973906150764756422151438812678308727218463995574869267980301",
            "1895457427602709606993445561553433669787657053834360973759981803464906070980",
            "11033689991077061346803816826729204895841441316315304395980565540264104346466",
            "18588752216879570844240300406954267039026327526134910835334500497981810174976",
            "19346480964028499661277403659363466542857230928032088490855656809181891953123",
            "21460193770370072688835316363068413651465631481105148051902686770759127189327",
            "20906347653364838502964722817589315918082261023317339146393355650507243340078",
            "13466599592974387800162739317046838825289754472645703919149409009404541432954",
            "9617165663598957201253074168824246164494443748556931540348223968573884172285",
            "6936463137584425684797785981770877165377386163416057257854261010817156666898",
            "369902028235468424790098825415813437044876310542601948037281422841675126849",
            "13510969869821080499683463562609720931680005714401083864659516045615497273644",
            "2567921390740781421487331055530491683313154421589525170472201828596388395736",
            "14360870889466292805403568662660511177232987619663547772298178013674025998478",
            "4735344599616284973799984501493858013178071155960162022656706545116168334293"
        ],
        "identity_path_index": [1, 0, 1, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0],
        "x": "6427050788896290028100534859169645070970780055911091444144195464808120686416",
        "epoch": "0x2bd155d9f85c741044da6909d144f9cc5ce8e0d545a9ed4921b156e8b8569bab",
        "rln_identifier": "2193983000213424579594329476781986065965849144986973472766961413131458022566"
    }
    "#;

    const WITNESS_JSON_20: &str = r#"
    {
        "identity_secret": "13732353453861280511150022598793312186188599006979552959297495195757997428306",
        "path_elements": [
            "20463525608687844300981085488128968694844212760055234622292326942405619575964",
            "8040856403709217901175408904825741112286158901303127670929462145501210871313",
            "3776499751255585163563840252112871568402966629435152937692711318702338789837",
            "19415813252626942110541463414404411443562242499365750694284604341271149125679",
            "19414720788761208006634240390286942738242262010168559813148115573784354129237",
            "17680594732844291740094158892269696200077963275550625226493856898849422516043",
            "16009199741350632715210088346611798597033333293348807000623441780059543674510",
            "18743496911007535170857676824393811326863602477260615792503039058813338644738",
            "1029572792321380246989475723806770724699749375691788486434716005338938722216",
            "21713138150151063186050010182615713685603650963220209951496401043119768920892",
            "6713732504049401389983008178456811894856018247924860823028704114266363984580",
            "2746686888799473963221285145390361693256731812094259845879519459924507786594",
            "18620748467731297359505500266677881218553438497271819903304075323783392031715",
            "2446201221122671119406471414204229600430018713181038717206670749886932158104",
            "12037171942017611311954851302868199608036334625783560875426350283156617524597",
            "21798743392351780927808323348278035105395367759688979232116905142049921734349",
            "17450230289417496971557215666910229260621413088991137405744457922069827319039",
            "20936854099128086256353520300046664152516566958630447858438908748907198510485",
            "13513344965831154386658059617477268600255664386844920822248038939666265737046",
            "15546319496880899251450021422131511560001766832580480193115646510655765306630"
        ],
        "identity_path_index": [0, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0],
        "x": "18073935665561339809445069958310044423750771681863480888589546877024349720547",
        "epoch": "0x147e4c23a43a1ddca78d94bcd28147f62ca74b3dc7e56bb0a314a954b9f0e567",
        "rln_identifier": "2193983000213424579594329476781986065965849144986973472766961413131458022566"
    }
    "#;

    #[test]
    // We test Merkle tree generation, proofs and verification
    fn test_merkle_proof() {
        let tree_height = TEST_TREE_HEIGHT;
        let leaf_index = 3;

        // generate identity
        let identity_secret = hash_to_field(b"test-merkle-proof");
        let id_commitment = poseidon_hash(&vec![identity_secret]);

        // generate Merkle tree
        let default_leaf = Fr::from(0);
        let mut tree = PoseidonTree::new(tree_height, default_leaf);
        tree.set(leaf_index, id_commitment.into()).unwrap();

        // We check correct computation of the root
        let root = tree.root();

        if TEST_TREE_HEIGHT == 15 {
            assert_eq!(
                root,
                str_to_fr(
                    "0x1984f2e01184aef5cb974640898a5f5c25556554e2b06d99d4841badb8b198cd",
                    16
                )
            );
        } else if TEST_TREE_HEIGHT == 19 {
            assert_eq!(
                root,
                str_to_fr(
                    "0x219ceb53f2b1b7a6cf74e80d50d44d68ecb4a53c6cc65b25593c8d56343fb1fe",
                    16
                )
            );
        } else if TEST_TREE_HEIGHT == 20 {
            assert_eq!(
                root,
                str_to_fr(
                    "0x21947ffd0bce0c385f876e7c97d6a42eec5b1fe935aab2f01c1f8a8cbcc356d2",
                    16
                )
            );
        }

        let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
        let path_elements = merkle_proof.get_path_elements();
        let identity_path_index = merkle_proof.get_path_index();

        // We check correct computation of the path and indexes
        // These values refer to TEST_TREE_HEIGHT == 16
        let mut expected_path_elements = vec![
            str_to_fr(
                "0x0000000000000000000000000000000000000000000000000000000000000000",
                16,
            ),
            str_to_fr(
                "0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
                16,
            ),
            str_to_fr(
                "0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
                16,
            ),
            str_to_fr(
                "0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
                16,
            ),
            str_to_fr(
                "0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
                16,
            ),
            str_to_fr(
                "0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
                16,
            ),
            str_to_fr(
                "0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
                16,
            ),
            str_to_fr(
                "0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
                16,
            ),
            str_to_fr(
                "0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
                16,
            ),
            str_to_fr(
                "0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
                16,
            ),
            str_to_fr(
                "0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
                16,
            ),
            str_to_fr(
                "0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
                16,
            ),
            str_to_fr(
                "0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
                16,
            ),
            str_to_fr(
                "0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
                16,
            ),
            str_to_fr(
                "0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
                16,
            ),
        ];

        let mut expected_identity_path_index: Vec<u8> =
            vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];

        // We add the remaining elements for the case TEST_TREE_HEIGHT = 20
        if TEST_TREE_HEIGHT == 19 || TEST_TREE_HEIGHT == 20 {
            expected_path_elements.append(&mut vec![
                str_to_fr(
                    "0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
                    16,
                ),
                str_to_fr(
                    "0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
                    16,
                ),
                str_to_fr(
                    "0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
                    16,
                ),
                str_to_fr(
                    "0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
                    16,
                ),
            ]);
            expected_identity_path_index.append(&mut vec![0, 0, 0, 0]);
        }

        if TEST_TREE_HEIGHT == 20 {
            expected_path_elements.append(&mut vec![str_to_fr(
                "0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
                16,
            )]);
            expected_identity_path_index.append(&mut vec![0]);
        }

        assert_eq!(path_elements, expected_path_elements);
        assert_eq!(identity_path_index, expected_identity_path_index);

        // We check correct verification of the proof
        assert!(tree.verify(&id_commitment, &merkle_proof).unwrap());
    }

    #[test]
    // We test RLN proof generation and verification
    fn test_witness_from_json() {
        // We generate all relevant keys
        let proving_key = zkey_from_folder(TEST_RESOURCES_FOLDER).unwrap();
        let verification_key = vk_from_folder(TEST_RESOURCES_FOLDER).unwrap();
        let builder = circom_from_folder(TEST_RESOURCES_FOLDER);

        // We compute the witness from the json input example
        let mut witness_json: &str = "";

        if TEST_TREE_HEIGHT == 15 {
            witness_json = WITNESS_JSON_15;
        } else if TEST_TREE_HEIGHT == 19 {
            witness_json = WITNESS_JSON_19;
        } else if TEST_TREE_HEIGHT == 20 {
            witness_json = WITNESS_JSON_20;
        }

        let rln_witness = rln_witness_from_json(witness_json);

        // Let's generate a zkSNARK proof
        let proof = generate_proof(builder, &proving_key, &rln_witness).unwrap();

        let proof_values = proof_values_from_witness(&rln_witness);

        // Let's verify the proof
        let verified = verify_proof(&verification_key, &proof, &proof_values);

        assert!(verified.unwrap());
    }

    #[test]
    // We test RLN proof generation and verification
    fn test_end_to_end() {
        let tree_height = TEST_TREE_HEIGHT;
        let leaf_index = 3;

        // Generate identity pair
        let (identity_secret, id_commitment) = keygen();

        // generate Merkle tree
        let default_leaf = Fr::from(0);
        let mut tree = PoseidonTree::new(tree_height, default_leaf);
        tree.set(leaf_index, id_commitment.into()).unwrap();

        let merkle_proof = tree.proof(leaf_index).expect("proof should exist");

        let signal = b"hey hey";
        let x = hash_to_field(signal);

        // We set the remaining values to random ones
        let epoch = hash_to_field(b"test-epoch");
        //let rln_identifier = hash_to_field(b"test-rln-identifier");

        let rln_witness: RLNWitnessInput = rln_witness_from_values(
            identity_secret,
            &merkle_proof,
            x,
            epoch, /*, rln_identifier*/
        );

        // We generate all relevant keys
        let proving_key = zkey_from_folder(TEST_RESOURCES_FOLDER).unwrap();
        let verification_key = vk_from_folder(TEST_RESOURCES_FOLDER).unwrap();
        let builder = circom_from_folder(TEST_RESOURCES_FOLDER);

        // Let's generate a zkSNARK proof
        let proof = generate_proof(builder, &proving_key, &rln_witness).unwrap();

        let proof_values = proof_values_from_witness(&rln_witness);

        // Let's verify the proof
        let success = verify_proof(&verification_key, &proof, &proof_values).unwrap();

        assert!(success);
    }

    #[test]
    fn test_witness_serialization() {
        // We test witness serialization
        let mut witness_json: &str = "";

        if TEST_TREE_HEIGHT == 15 {
            witness_json = WITNESS_JSON_15;
        } else if TEST_TREE_HEIGHT == 19 {
            witness_json = WITNESS_JSON_19;
        } else if TEST_TREE_HEIGHT == 20 {
            witness_json = WITNESS_JSON_20;
        }

        let rln_witness = rln_witness_from_json(witness_json);

        let ser = serialize_witness(&rln_witness);
        let (deser, _) = deserialize_witness(&ser);
        assert_eq!(rln_witness, deser);

        // We test proof values serialization
        let proof_values = proof_values_from_witness(&rln_witness);
        let ser = serialize_proof_values(&proof_values);
        let (deser, _) = deserialize_proof_values(&ser);
        assert_eq!(proof_values, deser);
    }
}

// Ensure that the `stateless` feature is not enabled with any Merkle tree features
#[cfg(all(
    feature = "stateless",
    any(
        feature = "fullmerkletree",
        feature = "optimalmerkletree",
        feature = "pmtree-ft"
    )
))]
compile_error!("Cannot enable any Merkle tree features with stateless");

@@ -1,666 +0,0 @@
// This crate provides different implementations of Merkle tree
// Currently two interchangeable implementations are supported:
// - FullMerkleTree: each tree node is stored
// - OptimalMerkleTree: only nodes used to prove accumulation of set leaves are stored
// Library defaults are set in the poseidon_tree crate
//
// Merkle tree implementations are adapted from https://github.com/kilic/rln/blob/master/src/merkle.rs
// and https://github.com/worldcoin/semaphore-rs/blob/d462a4372f1fd9c27610f2acfe4841fab1d396aa/src/merkle_tree.rs

//!
//! # To do
//!
//! * Disk based storage backend (using mmapped files should be easy)
//! * Implement serialization for tree and Merkle proof

use std::collections::HashMap;
use std::io;
use std::{
    cmp::max,
    fmt::Debug,
    iter::{once, repeat, successors},
};

/// In the Hasher trait we define the node type, the default leaf
/// and the hash function used to initialize a Merkle tree implementation
pub trait Hasher {
    /// Type of the leaf and tree node
    type Fr: Copy + Clone + Eq;

    /// Returns the default tree leaf
    fn default_leaf() -> Self::Fr;

    /// Utility to compute the hash of an intermediate node
    fn hash(input: &[Self::Fr]) -> Self::Fr;
}
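
// A minimal sketch of the trait contract (illustrative only, not part of the
// original file): any type with a copyable node representation can back the
// trees below, e.g. a toy XOR "hash" over u64 nodes, handy for unit-testing
// tree mechanics without a real hash function:
//
//     struct XorHasher;
//     impl Hasher for XorHasher {
//         type Fr = u64;
//         fn default_leaf() -> u64 { 0 }
//         fn hash(input: &[u64]) -> u64 { input.iter().fold(0, |acc, x| acc ^ x) }
//     }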

////////////////////////////////////////////////////////////
/// Optimal Merkle Tree Implementation
////////////////////////////////////////////////////////////

/// The Merkle tree structure
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct OptimalMerkleTree<H>
where
    H: Hasher,
{
    /// The depth of the tree, i.e. the number of levels from leaf to root
    depth: usize,

    /// The nodes cached from the empty part of the tree (where leaves are set to default).
    /// Since the rightmost part of the tree is usually changed much later than its creation,
    /// we can prove accumulation of elements in the leftmost part, with no need to initialize the full tree
    /// and by caching a few intermediate nodes up to the root computed from default leaves
    cached_nodes: Vec<H::Fr>,

    /// The tree nodes
    nodes: HashMap<(usize, usize), H::Fr>,

    // The next available (i.e., never used) tree index. Equivalently, the number of leaves added to the tree
    // (deletions leave next_index unchanged)
    next_index: usize,
}

/// The Merkle proof
/// Contains a vector of (node, branch_index) that defines the proof path elements and branch direction (1 or 0)
#[derive(Clone, PartialEq, Eq)]
pub struct OptimalMerkleProof<H: Hasher>(pub Vec<(H::Fr, u8)>);

/// Implementations

impl<H: Hasher> OptimalMerkleTree<H> {
    pub fn default(depth: usize) -> Self {
        OptimalMerkleTree::<H>::new(depth, H::default_leaf())
    }

    /// Creates a new `MerkleTree`
    /// depth - the height of the tree made only of hash nodes. 2^depth is the maximum number of leaves
    pub fn new(depth: usize, default_leaf: H::Fr) -> Self {
        let mut cached_nodes: Vec<H::Fr> = Vec::with_capacity(depth + 1);
        cached_nodes.push(default_leaf);
        for i in 0..depth {
            cached_nodes.push(H::hash(&[cached_nodes[i]; 2]));
        }
        cached_nodes.reverse();
        OptimalMerkleTree {
            cached_nodes,
            depth,
            nodes: HashMap::new(),
            next_index: 0,
        }
    }

    // Returns the depth of the tree
    pub fn depth(&self) -> usize {
        self.depth
    }

    // Returns the capacity of the tree, i.e. the maximum number of accumulatable leaves
    pub fn capacity(&self) -> usize {
        1 << self.depth
    }

    // Returns the total number of leaves set
    pub fn leaves_set(&mut self) -> usize {
        self.next_index
    }

    #[must_use]
    // Returns the root of the tree
    pub fn root(&self) -> H::Fr {
        self.get_node(0, 0)
    }

    // Sets a leaf at the specified tree index
    pub fn set(&mut self, index: usize, leaf: H::Fr) -> io::Result<()> {
        if index >= self.capacity() {
            return Err(io::Error::new(
                io::ErrorKind::InvalidInput,
                "index exceeds set size",
            ));
        }
        self.nodes.insert((self.depth, index), leaf);
        self.recalculate_from(index);
        self.next_index = max(self.next_index, index + 1);
        Ok(())
    }

    // Sets a leaf at the next available index
    pub fn update_next(&mut self, leaf: H::Fr) -> io::Result<()> {
        self.set(self.next_index, leaf)?;
        Ok(())
    }

    // Deletes a leaf at a certain index by setting it to its default value (next_index is not updated)
    pub fn delete(&mut self, index: usize) -> io::Result<()> {
        // We reset the leaf only if we previously set a leaf at that index
        if index < self.next_index {
            self.set(index, H::default_leaf())?;
        }
        Ok(())
    }

    // Computes a Merkle proof for the leaf at the specified index
    pub fn proof(&self, index: usize) -> io::Result<OptimalMerkleProof<H>> {
        if index >= self.capacity() {
            return Err(io::Error::new(
                io::ErrorKind::InvalidInput,
                "index exceeds set size",
            ));
        }
        let mut witness = Vec::<(H::Fr, u8)>::with_capacity(self.depth);
        let mut i = index;
        let mut depth = self.depth;
        loop {
            i ^= 1;
            witness.push((self.get_node(depth, i), (1 - (i & 1)).try_into().unwrap()));
            i >>= 1;
            depth -= 1;
            if depth == 0 {
                break;
            }
        }
        assert_eq!(i, 0);
        Ok(OptimalMerkleProof(witness))
    }

    // Verifies a Merkle proof with respect to the input leaf and the tree root
    pub fn verify(&self, leaf: &H::Fr, witness: &OptimalMerkleProof<H>) -> io::Result<bool> {
        if witness.length() != self.depth {
            return Err(io::Error::new(
                io::ErrorKind::InvalidInput,
                "witness length doesn't match tree depth",
            ));
        }
        let expected_root = witness.compute_root_from(leaf);
        Ok(expected_root.eq(&self.root()))
    }

    // Utilities for updating the tree nodes

    fn get_node(&self, depth: usize, index: usize) -> H::Fr {
        *self
            .nodes
            .get(&(depth, index))
            .unwrap_or_else(|| &self.cached_nodes[depth])
    }

    fn get_leaf(&self, index: usize) -> H::Fr {
        self.get_node(self.depth, index)
    }

    fn hash_couple(&mut self, depth: usize, index: usize) -> H::Fr {
        let b = index & !1;
        H::hash(&[self.get_node(depth, b), self.get_node(depth, b + 1)])
    }

    fn recalculate_from(&mut self, index: usize) {
        let mut i = index;
        let mut depth = self.depth;
        loop {
            let h = self.hash_couple(depth, i);
            i >>= 1;
            depth -= 1;
            self.nodes.insert((depth, i), h);
            if depth == 0 {
                break;
            }
        }
        assert_eq!(depth, 0);
        assert_eq!(i, 0);
    }
}

impl<H: Hasher> OptimalMerkleProof<H> {
    #[must_use]
    // Returns the length of a Merkle proof
    pub fn length(&self) -> usize {
        self.0.len()
    }

    /// Computes the leaf index corresponding to a Merkle proof
    #[must_use]
    pub fn leaf_index(&self) -> usize {
        // In the current implementation, the path indexes in a proof correspond to the binary representation of the leaf index
        let mut binary_repr = self.get_path_index();
        binary_repr.reverse();
        binary_repr
            .into_iter()
            .fold(0, |acc, digit| (acc << 1) + usize::from(digit))
    }
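
    // Worked example (not in the original): for depth 3 and leaf index 6 = 0b110,
    // get_path_index() returns [0, 1, 1] ordered leaf-to-root (least significant
    // bit first); reversing gives [1, 1, 0], and the fold rebuilds
    // (1 << 2) + (1 << 1) + 0 = 6.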

    #[must_use]
    /// Returns the path elements forming a Merkle proof
    pub fn get_path_elements(&self) -> Vec<H::Fr> {
        self.0.iter().map(|x| x.0).collect()
    }

    /// Returns the path indexes forming a Merkle proof
    #[must_use]
    pub fn get_path_index(&self) -> Vec<u8> {
        self.0.iter().map(|x| x.1).collect()
    }

    #[must_use]
    /// Computes the Merkle root by iteratively hashing a Merkle proof with a given input leaf
    pub fn compute_root_from(&self, leaf: &H::Fr) -> H::Fr {
        let mut acc: H::Fr = *leaf;
        for w in self.0.iter() {
            if w.1 == 0 {
                acc = H::hash(&[acc, w.0]);
            } else {
                acc = H::hash(&[w.0, acc]);
            }
        }
        acc
    }
}

// Debug formatting for printing an (Optimal) Merkle proof
impl<H> Debug for OptimalMerkleProof<H>
where
    H: Hasher,
    H::Fr: Debug,
{
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_tuple("Proof").field(&self.0).finish()
    }
}

////////////////////////////////////////////////////////////
/// Full Merkle Tree Implementation
////////////////////////////////////////////////////////////

/// Merkle tree with all leaf and intermediate hashes stored
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct FullMerkleTree<H: Hasher> {
    /// The depth of the tree, i.e. the number of levels from leaf to root
    depth: usize,

    /// The nodes cached from the empty part of the tree (where leaves are set to default).
    /// Since the rightmost part of the tree is usually changed much later than its creation,
    /// we can prove accumulation of elements in the leftmost part, with no need to initialize the full tree
    /// and by caching a few intermediate nodes up to the root computed from default leaves
    cached_nodes: Vec<H::Fr>,

    /// The tree nodes
    nodes: Vec<H::Fr>,

    // The next available (i.e., never used) tree index. Equivalently, the number of leaves added to the tree
    // (deletions leave next_index unchanged)
    next_index: usize,
}

/// Element of a Merkle proof
#[derive(Clone, Copy, PartialEq, Eq)]
pub enum FullMerkleBranch<H: Hasher> {
    /// Left branch taken, value is the right sibling hash.
    Left(H::Fr),

    /// Right branch taken, value is the left sibling hash.
    Right(H::Fr),
}

/// Merkle proof path, bottom to top.
#[derive(Clone, PartialEq, Eq)]
pub struct FullMerkleProof<H: Hasher>(pub Vec<FullMerkleBranch<H>>);

/// Implementations

impl<H: Hasher> FullMerkleTree<H> {
    pub fn default(depth: usize) -> Self {
        FullMerkleTree::<H>::new(depth, H::default_leaf())
    }

    /// Creates a new `MerkleTree`
    /// depth - the height of the tree made only of hash nodes. 2^depth is the maximum number of leaves
    pub fn new(depth: usize, initial_leaf: H::Fr) -> Self {
        // Compute cached node values, leaf to root
        let cached_nodes = successors(Some(initial_leaf), |prev| Some(H::hash(&[*prev, *prev])))
            .take(depth + 1)
            .collect::<Vec<_>>();

        // Compute node values
        let nodes = cached_nodes
            .iter()
            .rev()
            .enumerate()
            .flat_map(|(levels, hash)| repeat(hash).take(1 << levels))
            .cloned()
            .collect::<Vec<_>>();
        debug_assert!(nodes.len() == (1 << (depth + 1)) - 1);

        let next_index = 0;

        Self {
            depth,
            cached_nodes,
            nodes,
            next_index,
        }
    }

    // Returns the depth of the tree
    pub fn depth(&self) -> usize {
        self.depth
    }

    // Returns the capacity of the tree, i.e. the maximum number of accumulatable leaves
    pub fn capacity(&self) -> usize {
        1 << self.depth
    }

    // Returns the total number of leaves set
    pub fn leaves_set(&mut self) -> usize {
        self.next_index
    }

    #[must_use]
    // Returns the root of the tree
    pub fn root(&self) -> H::Fr {
        self.nodes[0]
    }

    // Sets a leaf at the specified tree index
    pub fn set(&mut self, leaf: usize, hash: H::Fr) -> io::Result<()> {
        self.set_range(leaf, once(hash))?;
        self.next_index = max(self.next_index, leaf + 1);
        Ok(())
    }

    // Sets tree nodes, starting from the start index
    // Function specific to the FullMerkleTree implementation
    fn set_range<I: IntoIterator<Item = H::Fr>>(
        &mut self,
        start: usize,
        hashes: I,
    ) -> io::Result<()> {
        let index = self.capacity() + start - 1;
        let mut count = 0;
        // TODO: Error/panic when hashes is longer than available leaves
        for (leaf, hash) in self.nodes[index..].iter_mut().zip(hashes) {
            *leaf = hash;
            count += 1;
        }
        if count != 0 {
            self.update_nodes(index, index + (count - 1));
            self.next_index = max(self.next_index, start + count);
        }
        Ok(())
    }

    // Sets a leaf at the next available index
    pub fn update_next(&mut self, leaf: H::Fr) -> io::Result<()> {
        self.set(self.next_index, leaf)?;
        Ok(())
    }

    // Deletes a leaf at a certain index by setting it to its default value (next_index is not updated)
    pub fn delete(&mut self, index: usize) -> io::Result<()> {
        // We reset the leaf only if we previously set a leaf at that index
        if index < self.next_index {
            self.set(index, H::default_leaf())?;
        }
        Ok(())
    }

    // Computes a Merkle proof for the leaf at the specified index
    pub fn proof(&self, leaf: usize) -> io::Result<FullMerkleProof<H>> {
        if leaf >= self.capacity() {
            return Err(io::Error::new(
                io::ErrorKind::InvalidInput,
                "index exceeds set size",
            ));
        }
        let mut index = self.capacity() + leaf - 1;
        let mut path = Vec::with_capacity(self.depth + 1);
        while let Some(parent) = self.parent(index) {
            // Add proof for node at index to parent
            path.push(match index & 1 {
                1 => FullMerkleBranch::Left(self.nodes[index + 1]),
                0 => FullMerkleBranch::Right(self.nodes[index - 1]),
                _ => unreachable!(),
            });
            index = parent;
        }
        Ok(FullMerkleProof(path))
    }

    // Verifies a Merkle proof with respect to the input leaf and the tree root
    pub fn verify(&self, hash: &H::Fr, proof: &FullMerkleProof<H>) -> io::Result<bool> {
        Ok(proof.compute_root_from(hash) == self.root())
    }

    // Utilities for updating the tree nodes

    /// For a given node index, return the parent node index
    /// Returns None if there is no parent (root node)
    fn parent(&self, index: usize) -> Option<usize> {
        if index == 0 {
            None
        } else {
            Some(((index + 1) >> 1) - 1)
        }
    }

    /// For a given node index, return the index of the first (left) child.
    fn first_child(&self, index: usize) -> usize {
        (index << 1) + 1
    }

    fn levels(&self, index: usize) -> usize {
        // `n.next_power_of_two()` will return `n` iff `n` is a power of two.
        // The extra offset corrects this.
        (index + 2).next_power_of_two().trailing_zeros() as usize - 1
    }
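
    // Worked example (not in the original): with this 0-based heap layout the root
    // index 0 is at level 0, indices 1-2 at level 1, and 3-6 at level 2; e.g.
    // levels(4) = (4 + 2).next_power_of_two().trailing_zeros() - 1 = 3 - 1 = 2.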

    fn update_nodes(&mut self, start: usize, end: usize) {
        debug_assert_eq!(self.levels(start), self.levels(end));
        if let (Some(start), Some(end)) = (self.parent(start), self.parent(end)) {
            for parent in start..=end {
                let child = self.first_child(parent);
                self.nodes[parent] = H::hash(&[self.nodes[child], self.nodes[child + 1]]);
            }
            self.update_nodes(start, end);
        }
    }
}

impl<H: Hasher> FullMerkleProof<H> {
    #[must_use]
    // Returns the length of a Merkle proof
    pub fn length(&self) -> usize {
        self.0.len()
    }

    /// Computes the leaf index corresponding to a Merkle proof
    #[must_use]
    pub fn leaf_index(&self) -> usize {
        self.0.iter().rev().fold(0, |index, branch| match branch {
            FullMerkleBranch::Left(_) => index << 1,
            FullMerkleBranch::Right(_) => (index << 1) + 1,
        })
    }

    #[must_use]
    /// Returns the path elements forming a Merkle proof
    pub fn get_path_elements(&self) -> Vec<H::Fr> {
        self.0
            .iter()
            .map(|x| match x {
                FullMerkleBranch::Left(value) | FullMerkleBranch::Right(value) => *value,
            })
            .collect()
    }

    /// Returns the path indexes forming a Merkle proof
    #[must_use]
    pub fn get_path_index(&self) -> Vec<u8> {
        self.0
            .iter()
            .map(|branch| match branch {
                FullMerkleBranch::Left(_) => 0,
                FullMerkleBranch::Right(_) => 1,
            })
            .collect()
    }

    /// Computes the Merkle root by iteratively hashing a Merkle proof with a given input leaf
    #[must_use]
    pub fn compute_root_from(&self, hash: &H::Fr) -> H::Fr {
        self.0.iter().fold(*hash, |hash, branch| match branch {
            FullMerkleBranch::Left(sibling) => H::hash(&[hash, *sibling]),
            FullMerkleBranch::Right(sibling) => H::hash(&[*sibling, hash]),
        })
    }
}

// Debug formatting for printing a (Full) Merkle proof branch
impl<H> Debug for FullMerkleBranch<H>
where
    H: Hasher,
    H::Fr: Debug,
{
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Left(arg0) => f.debug_tuple("Left").field(arg0).finish(),
            Self::Right(arg0) => f.debug_tuple("Right").field(arg0).finish(),
        }
    }
}

// Debug formatting for printing a (Full) Merkle proof
impl<H> Debug for FullMerkleProof<H>
where
    H: Hasher,
    H::Fr: Debug,
{
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_tuple("Proof").field(&self.0).finish()
    }
}

////////////////////////////////////////////////////////////
/// Tests
////////////////////////////////////////////////////////////

// Tests adapted from https://github.com/worldcoin/semaphore-rs/blob/d462a4372f1fd9c27610f2acfe4841fab1d396aa/src/merkle_tree.rs
#[cfg(test)]
mod test {
    use super::*;
    use hex_literal::hex;
    use tiny_keccak::{Hasher as _, Keccak};

    struct Keccak256;

    impl Hasher for Keccak256 {
        type Fr = [u8; 32];

        fn default_leaf() -> Self::Fr {
            [0; 32]
        }

        fn hash(inputs: &[Self::Fr]) -> Self::Fr {
            let mut output = [0; 32];
            let mut hasher = Keccak::v256();
            for element in inputs {
                hasher.update(element);
            }
            hasher.finalize(&mut output);
            output
        }
    }

    #[test]
    fn test_root() {
        let leaves = [
            hex!("0000000000000000000000000000000000000000000000000000000000000001"),
            hex!("0000000000000000000000000000000000000000000000000000000000000002"),
            hex!("0000000000000000000000000000000000000000000000000000000000000003"),
            hex!("0000000000000000000000000000000000000000000000000000000000000004"),
        ];

        let default_tree_root =
            hex!("b4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30");

        let roots = [
            hex!("c1ba1812ff680ce84c1d5b4f1087eeb08147a4d510f3496b2849df3a73f5af95"),
            hex!("893760ec5b5bee236f29e85aef64f17139c3c1b7ff24ce64eb6315fca0f2485b"),
            hex!("222ff5e0b5877792c2bc1670e2ccd0c2c97cd7bb1672a57d598db05092d3d72c"),
            hex!("a9bb8c3f1f12e9aa903a50c47f314b57610a3ab32f2d463293f58836def38d36"),
        ];

        let mut tree = FullMerkleTree::<Keccak256>::new(2, [0; 32]);
        assert_eq!(tree.root(), default_tree_root);
        for i in 0..leaves.len() {
            tree.set(i, leaves[i]).unwrap();
            assert_eq!(tree.root(), roots[i]);
        }

        let mut tree = OptimalMerkleTree::<Keccak256>::new(2, [0; 32]);
        assert_eq!(tree.root(), default_tree_root);
        for i in 0..leaves.len() {
            tree.set(i, leaves[i]).unwrap();
            assert_eq!(tree.root(), roots[i]);
        }
    }

    #[test]
    fn test_proof() {
        let leaves = [
            hex!("0000000000000000000000000000000000000000000000000000000000000001"),
            hex!("0000000000000000000000000000000000000000000000000000000000000002"),
            hex!("0000000000000000000000000000000000000000000000000000000000000003"),
            hex!("0000000000000000000000000000000000000000000000000000000000000004"),
        ];

        // We test the FullMerkleTree implementation
        let mut tree = FullMerkleTree::<Keccak256>::new(2, [0; 32]);
        for i in 0..leaves.len() {
            // We set the leaves
            tree.set(i, leaves[i]).unwrap();

            // We compute a Merkle proof
            let proof = tree.proof(i).expect("index should be set");

            // We verify that the Merkle proof corresponds to the right leaf index
            assert_eq!(proof.leaf_index(), i);

            // We verify the proof
            assert!(tree.verify(&leaves[i], &proof).unwrap());

            // We ensure that the Merkle proof and the leaf generate the same root as the tree
            assert_eq!(proof.compute_root_from(&leaves[i]), tree.root());

            // We check that the proof is not valid for another leaf
            assert!(!tree
                .verify(&leaves[(i + 1) % leaves.len()], &proof)
                .unwrap());
        }

        // We test the OptimalMerkleTree implementation
        let mut tree = OptimalMerkleTree::<Keccak256>::new(2, [0; 32]);
        for i in 0..leaves.len() {
            // We set the leaves
            tree.set(i, leaves[i]).unwrap();

            // We compute a Merkle proof
            let proof = tree.proof(i).expect("index should be set");

            // We verify that the Merkle proof corresponds to the right leaf index
            assert_eq!(proof.leaf_index(), i);

            // We verify the proof
            assert!(tree.verify(&leaves[i], &proof).unwrap());

            // We ensure that the Merkle proof and the leaf generate the same root as the tree
            assert_eq!(proof.compute_root_from(&leaves[i]), tree.root());

            // We check that the proof is not valid for another leaf
            assert!(!tree
                .verify(&leaves[(i + 1) % leaves.len()], &proof)
                .unwrap());
        }
    }
}

rln/src/pm_tree_adapter.rs (new file, 532 lines)
@@ -0,0 +1,532 @@
#![cfg(feature = "pmtree-ft")]

use std::{fmt::Debug, path::PathBuf, str::FromStr};

use serde_json::Value;
use tempfile::Builder;
use zerokit_utils::{
    error::{FromConfigError, ZerokitMerkleTreeError},
    merkle_tree::{ZerokitMerkleProof, ZerokitMerkleTree},
    pm_tree::{
        pmtree,
        pmtree::{tree::Key, Database, Hasher, PmtreeErrorKind},
        Config, Mode, SledDB,
    },
};

use crate::{
    circuit::Fr,
    hashers::{poseidon_hash, PoseidonHash},
    utils::{bytes_le_to_fr, fr_to_bytes_le},
};

const METADATA_KEY: [u8; 8] = *b"metadata";

pub struct PmTree {
    tree: pmtree::MerkleTree<SledDB, PoseidonHash>,
    /// Flags for the leaves set up to next_index.
    /// An entry is 0 if the leaf is empty and 1 otherwise.
    cached_leaves_indices: Vec<u8>,
    // Metadata that an application may use to store additional information
    metadata: Vec<u8>,
}

pub struct PmTreeProof {
    proof: pmtree::tree::MerkleProof<PoseidonHash>,
}

pub type FrOf<H> = <H as Hasher>::Fr;

// The pmtree Hasher trait used by the pmtree Merkle tree
impl Hasher for PoseidonHash {
    type Fr = Fr;

    fn serialize(value: Self::Fr) -> pmtree::Value {
        fr_to_bytes_le(&value)
    }

    fn deserialize(value: pmtree::Value) -> Self::Fr {
        // TODO: allow errors to be handled properly in the pmtree Hasher trait
        let (fr, _) = bytes_le_to_fr(&value).expect("Fr deserialization must be valid");
        fr
    }

    fn default_leaf() -> Self::Fr {
        Fr::from(0)
    }

    fn hash(inputs: &[Self::Fr]) -> Self::Fr {
        // TODO: allow errors to be handled properly in the pmtree Hasher trait
        poseidon_hash(inputs).expect("Poseidon hash must be valid")
    }
}
|
||||
|
||||
fn default_tmp_path() -> Result<PathBuf, std::io::Error> {
|
||||
Ok(Builder::new()
|
||||
.prefix("pmtree-")
|
||||
.tempfile()?
|
||||
.into_temp_path()
|
||||
.to_path_buf())
|
||||
}
|
||||
|
||||
const DEFAULT_TEMPORARY: bool = true;
|
||||
const DEFAULT_CACHE_CAPACITY: u64 = 1073741824; // 1 Gigabyte
|
||||
const DEFAULT_FLUSH_EVERY_MS: u64 = 500; // 500 Milliseconds
|
||||
const DEFAULT_MODE: Mode = Mode::HighThroughput;
|
||||
const DEFAULT_USE_COMPRESSION: bool = false;
|
||||
|
||||
pub struct PmtreeConfigBuilder {
|
||||
path: Option<PathBuf>,
|
||||
temporary: bool,
|
||||
cache_capacity: u64,
|
||||
flush_every_ms: u64,
|
||||
mode: Mode,
|
||||
use_compression: bool,
|
||||
}
|
||||
|
||||
impl Default for PmtreeConfigBuilder {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl PmtreeConfigBuilder {
|
||||
pub fn new() -> Self {
|
||||
PmtreeConfigBuilder {
|
||||
path: None,
|
||||
temporary: DEFAULT_TEMPORARY,
|
||||
cache_capacity: DEFAULT_CACHE_CAPACITY,
|
||||
flush_every_ms: DEFAULT_FLUSH_EVERY_MS,
|
||||
mode: DEFAULT_MODE,
|
||||
use_compression: DEFAULT_USE_COMPRESSION,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn path<P: Into<PathBuf>>(mut self, path: P) -> Self {
|
||||
self.path = Some(path.into());
|
||||
self
|
||||
}
|
||||
|
||||
pub fn temporary(mut self, temporary: bool) -> Self {
|
||||
self.temporary = temporary;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn cache_capacity(mut self, capacity: u64) -> Self {
|
||||
self.cache_capacity = capacity;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn flush_every_ms(mut self, ms: u64) -> Self {
|
||||
self.flush_every_ms = ms;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn mode(mut self, mode: Mode) -> Self {
|
||||
self.mode = mode;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn use_compression(mut self, compression: bool) -> Self {
|
||||
self.use_compression = compression;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn build(self) -> Result<PmtreeConfig, FromConfigError> {
|
||||
let path = match (self.temporary, self.path) {
|
||||
(true, None) => default_tmp_path()?,
|
||||
(false, None) => return Err(FromConfigError::MissingPath),
|
||||
(true, Some(path)) if path.exists() => return Err(FromConfigError::PathExists),
|
||||
(_, Some(path)) => path,
|
||||
};
|
||||
|
||||
let config = Config::new()
|
||||
.temporary(self.temporary)
|
||||
.path(path)
|
||||
.cache_capacity(self.cache_capacity)
|
||||
.flush_every_ms(Some(self.flush_every_ms))
|
||||
.mode(self.mode)
|
||||
.use_compression(self.use_compression);
|
||||
|
||||
Ok(PmtreeConfig(config))
|
||||
}
|
||||
}
|
||||
|
||||
pub struct PmtreeConfig(Config);
|
||||
|
||||
impl PmtreeConfig {
|
||||
pub fn builder() -> PmtreeConfigBuilder {
|
||||
PmtreeConfigBuilder::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for PmtreeConfig {
|
||||
type Err = FromConfigError;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
let config: Value = serde_json::from_str(s)?;
|
||||
|
||||
let path = config["path"].as_str();
|
||||
let path = path.map(PathBuf::from);
|
||||
let temporary = config["temporary"].as_bool();
|
||||
let cache_capacity = config["cache_capacity"].as_u64();
|
||||
let flush_every_ms = config["flush_every_ms"].as_u64();
|
||||
let mode = match config["mode"].as_str() {
|
||||
Some("HighThroughput") => Mode::HighThroughput,
|
||||
Some("LowSpace") => Mode::LowSpace,
|
||||
_ => Mode::HighThroughput,
|
||||
};
|
||||
let use_compression = config["use_compression"].as_bool();
|
||||
|
||||
if let (Some(true), Some(path)) = (temporary, path.as_ref()) {
|
||||
if path.exists() {
|
||||
return Err(FromConfigError::PathExists);
|
||||
}
|
||||
}
|
||||
|
||||
let default_tmp_path = default_tmp_path()?;
|
||||
let config = Config::new()
|
||||
.temporary(temporary.unwrap_or(DEFAULT_TEMPORARY))
|
||||
.path(path.unwrap_or(default_tmp_path))
|
||||
.cache_capacity(cache_capacity.unwrap_or(DEFAULT_CACHE_CAPACITY))
|
||||
.flush_every_ms(flush_every_ms)
|
||||
.mode(mode)
|
||||
.use_compression(use_compression.unwrap_or(false));
|
||||
Ok(PmtreeConfig(config))
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for PmtreeConfig {
|
||||
fn default() -> Self {
|
||||
Self::builder()
|
||||
.build()
|
||||
.expect("Default PmtreeConfig must be valid")
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for PmtreeConfig {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
self.0.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl Clone for PmtreeConfig {
|
||||
fn clone(&self) -> Self {
|
||||
PmtreeConfig(self.0.clone())
|
||||
}
|
||||
}
|
||||
|
||||
impl ZerokitMerkleTree for PmTree {
|
||||
type Proof = PmTreeProof;
|
||||
type Hasher = PoseidonHash;
|
||||
type Config = PmtreeConfig;
|
||||
|
||||
fn default(depth: usize) -> Result<Self, ZerokitMerkleTreeError> {
|
||||
let default_config = PmtreeConfig::default();
|
||||
PmTree::new(depth, Self::Hasher::default_leaf(), default_config)
|
||||
}
|
||||
|
||||
fn new(
|
||||
depth: usize,
|
||||
_default_leaf: FrOf<Self::Hasher>,
|
||||
config: Self::Config,
|
||||
) -> Result<Self, ZerokitMerkleTreeError> {
|
||||
let tree_loaded = pmtree::MerkleTree::load(config.clone().0);
|
||||
let tree = match tree_loaded {
|
||||
Ok(tree) => tree,
|
||||
Err(_) => pmtree::MerkleTree::new(depth, config.0)?,
|
||||
};
|
||||
|
||||
Ok(PmTree {
|
||||
tree,
|
||||
cached_leaves_indices: vec![0; 1 << depth],
|
||||
metadata: Vec::new(),
|
||||
})
|
||||
}
|
||||
|
||||
fn depth(&self) -> usize {
|
||||
self.tree.depth()
|
||||
}
|
||||
|
||||
fn capacity(&self) -> usize {
|
||||
self.tree.capacity()
|
||||
}
|
||||
|
||||
fn leaves_set(&self) -> usize {
|
||||
self.tree.leaves_set()
|
||||
}
|
||||
|
||||
fn root(&self) -> FrOf<Self::Hasher> {
|
||||
self.tree.root()
|
||||
}
|
||||
|
||||
fn set(
|
||||
&mut self,
|
||||
index: usize,
|
||||
leaf: FrOf<Self::Hasher>,
|
||||
) -> Result<(), ZerokitMerkleTreeError> {
|
||||
self.tree.set(index, leaf)?;
|
||||
self.cached_leaves_indices[index] = 1;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn set_range<I: IntoIterator<Item = FrOf<Self::Hasher>>>(
|
||||
&mut self,
|
||||
start: usize,
|
||||
values: I,
|
||||
) -> Result<(), ZerokitMerkleTreeError> {
|
||||
let v = values.into_iter().collect::<Vec<_>>();
|
||||
self.tree.set_range(start, v.clone().into_iter())?;
|
||||
for i in start..v.len() {
|
||||
self.cached_leaves_indices[i] = 1
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get(&self, index: usize) -> Result<FrOf<Self::Hasher>, ZerokitMerkleTreeError> {
|
||||
self.tree
|
||||
.get(index)
|
||||
.map_err(ZerokitMerkleTreeError::PmtreeErrorKind)
|
||||
}
|
||||
|
||||
fn get_subtree_root(
|
||||
&self,
|
||||
n: usize,
|
||||
index: usize,
|
||||
) -> Result<FrOf<Self::Hasher>, ZerokitMerkleTreeError> {
|
||||
if n > self.depth() {
|
||||
return Err(ZerokitMerkleTreeError::InvalidLevel);
|
||||
}
|
||||
if index >= self.capacity() {
|
||||
return Err(ZerokitMerkleTreeError::InvalidLeaf);
|
||||
}
|
||||
if n == 0 {
|
||||
Ok(self.root())
|
||||
} else if n == self.depth() {
|
||||
self.get(index)
|
||||
} else {
|
||||
match self.tree.get_elem(Key::new(n, index >> (self.depth() - n))) {
|
||||
Ok(value) => Ok(value),
|
||||
Err(_) => Err(ZerokitMerkleTreeError::InvalidSubTreeIndex),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn get_empty_leaves_indices(&self) -> Vec<usize> {
|
||||
let next_idx = self.leaves_set();
|
||||
self.cached_leaves_indices
|
||||
.iter()
|
||||
.take(next_idx)
|
||||
.enumerate()
|
||||
.filter(|&(_, &v)| v == 0u8)
|
||||
.map(|(idx, _)| idx)
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn override_range<I: IntoIterator<Item = FrOf<Self::Hasher>>, J: IntoIterator<Item = usize>>(
|
||||
&mut self,
|
||||
start: usize,
|
||||
leaves: I,
|
||||
indices: J,
|
||||
) -> Result<(), ZerokitMerkleTreeError> {
|
||||
let leaves = leaves.into_iter().collect::<Vec<_>>();
|
||||
let mut indices = indices.into_iter().collect::<Vec<_>>();
|
||||
indices.sort();
|
||||
|
||||
match (leaves.len(), indices.len()) {
|
||||
(0, 0) => Err(ZerokitMerkleTreeError::InvalidLeaf),
|
||||
(1, 0) => self.set(start, leaves[0]),
|
||||
(0, 1) => self.delete(indices[0]),
|
||||
(_, 0) => self.set_range(start, leaves.into_iter()),
|
||||
(0, _) => self
|
||||
.remove_indices(&indices)
|
||||
.map_err(ZerokitMerkleTreeError::PmtreeErrorKind),
|
||||
(_, _) => self
|
||||
.remove_indices_and_set_leaves(start, leaves, &indices)
|
||||
.map_err(ZerokitMerkleTreeError::PmtreeErrorKind),
|
||||
}
|
||||
}
|
||||
|
||||
fn update_next(&mut self, leaf: FrOf<Self::Hasher>) -> Result<(), ZerokitMerkleTreeError> {
|
||||
self.tree
|
||||
.update_next(leaf)
|
||||
.map_err(ZerokitMerkleTreeError::PmtreeErrorKind)
|
||||
}
|
||||
|
||||
/// Delete a leaf in the merkle tree given its index
|
||||
///
|
||||
/// Deleting a leaf is done by resetting it to its default value. Note that the next_index field
|
||||
/// will not be changed (== previously used index cannot be reused - this to avoid replay
|
||||
/// attacks or unexpected and very hard to tackle issues)
|
||||
fn delete(&mut self, index: usize) -> Result<(), ZerokitMerkleTreeError> {
|
||||
self.tree
|
||||
.delete(index)
|
||||
.map_err(ZerokitMerkleTreeError::PmtreeErrorKind)?;
|
||||
self.cached_leaves_indices[index] = 0;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn proof(&self, index: usize) -> Result<Self::Proof, ZerokitMerkleTreeError> {
|
||||
let proof = self.tree.proof(index)?;
|
||||
Ok(PmTreeProof { proof })
|
||||
}
|
||||
|
||||
fn verify(
|
||||
&self,
|
||||
leaf: &FrOf<Self::Hasher>,
|
||||
merkle_proof: &Self::Proof,
|
||||
) -> Result<bool, ZerokitMerkleTreeError> {
|
||||
if self.tree.verify(leaf, &merkle_proof.proof) {
|
||||
Ok(true)
|
||||
} else {
|
||||
Err(ZerokitMerkleTreeError::InvalidMerkleProof)
|
||||
}
|
||||
}
|
||||
|
||||
fn set_metadata(&mut self, metadata: &[u8]) -> Result<(), ZerokitMerkleTreeError> {
|
||||
self.tree
|
||||
.db
|
||||
.put(METADATA_KEY, metadata.to_vec())
|
||||
.map_err(ZerokitMerkleTreeError::PmtreeErrorKind)?;
|
||||
self.metadata = metadata.to_vec();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn metadata(&self) -> Result<Vec<u8>, ZerokitMerkleTreeError> {
|
||||
if !self.metadata.is_empty() {
|
||||
return Ok(self.metadata.clone());
|
||||
}
|
||||
// if empty, try searching the db
|
||||
let data = self.tree.db.get(METADATA_KEY)?;
|
||||
|
||||
// Return empty metadata if not found, otherwise return the data
|
||||
Ok(data.unwrap_or_default())
|
||||
}
|
||||
|
||||
fn close_db_connection(&mut self) -> Result<(), ZerokitMerkleTreeError> {
|
||||
self.tree
|
||||
.db
|
||||
.close()
|
||||
.map_err(ZerokitMerkleTreeError::PmtreeErrorKind)
|
||||
}
|
||||
}
|
||||
|
||||
type PmTreeHasher = <PmTree as ZerokitMerkleTree>::Hasher;
|
||||
type FrOfPmTreeHasher = FrOf<PmTreeHasher>;
|
||||
|
||||
impl PmTree {
|
||||
fn remove_indices(&mut self, indices: &[usize]) -> Result<(), PmtreeErrorKind> {
|
||||
if indices.is_empty() {
|
||||
return Err(PmtreeErrorKind::TreeError(
|
||||
pmtree::TreeErrorKind::InvalidKey,
|
||||
));
|
||||
}
|
||||
let start = indices[0];
|
||||
let end = indices[indices.len() - 1] + 1;
|
||||
|
||||
let new_leaves = (start..end).map(|_| PmTreeHasher::default_leaf());
|
||||
|
||||
self.tree.set_range(start, new_leaves)?;
|
||||
|
||||
for i in start..end {
|
||||
self.cached_leaves_indices[i] = 0
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn remove_indices_and_set_leaves(
|
||||
&mut self,
|
||||
start: usize,
|
||||
leaves: Vec<FrOfPmTreeHasher>,
|
||||
indices: &[usize],
|
||||
) -> Result<(), PmtreeErrorKind> {
|
||||
if indices.is_empty() {
|
||||
return Err(PmtreeErrorKind::TreeError(
|
||||
pmtree::TreeErrorKind::InvalidKey,
|
||||
));
|
||||
}
|
||||
let min_index = indices[0];
|
||||
let max_index = start + leaves.len();
|
||||
|
||||
let mut set_values = vec![PmTreeHasher::default_leaf(); max_index - min_index];
|
||||
|
||||
for i in min_index..start {
|
||||
if !indices.contains(&i) {
|
||||
let value = self.tree.get(i)?;
|
||||
set_values[i - min_index] = value;
|
||||
}
|
||||
}
|
||||
|
||||
for (i, &leaf) in leaves.iter().enumerate() {
|
||||
set_values[start - min_index + i] = leaf;
|
||||
}
|
||||
|
||||
self.tree.set_range(start, set_values)?;
|
||||
|
||||
for i in indices {
|
||||
self.cached_leaves_indices[*i] = 0;
|
||||
}
|
||||
|
||||
for i in start..(max_index - min_index) {
|
||||
self.cached_leaves_indices[i] = 1
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl ZerokitMerkleProof for PmTreeProof {
|
||||
type Index = u8;
|
||||
type Hasher = PoseidonHash;
|
||||
|
||||
fn length(&self) -> usize {
|
||||
self.proof.length()
|
||||
}
|
||||
|
||||
fn leaf_index(&self) -> usize {
|
||||
self.proof.leaf_index()
|
||||
}
|
||||
|
||||
fn get_path_elements(&self) -> Vec<FrOf<Self::Hasher>> {
|
||||
self.proof.get_path_elements()
|
||||
}
|
||||
|
||||
fn get_path_index(&self) -> Vec<Self::Index> {
|
||||
self.proof.get_path_index()
|
||||
}
|
||||
|
||||
fn compute_root_from(
|
||||
&self,
|
||||
leaf: &FrOf<Self::Hasher>,
|
||||
) -> Result<FrOf<Self::Hasher>, ZerokitMerkleTreeError> {
|
||||
Ok(self.proof.compute_root_from(leaf))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_pmtree_json_config() {
|
||||
let json = r#"
|
||||
{
|
||||
"path": "pmtree-123456",
|
||||
"temporary": false,
|
||||
"cache_capacity": 1073741824,
|
||||
"flush_every_ms": 500,
|
||||
"mode": "HighThroughput",
|
||||
"use_compression": false
|
||||
}"#;
|
||||
|
||||
let _: PmtreeConfig = json.parse().unwrap();
|
||||
|
||||
let _ = PmtreeConfig::builder()
|
||||
.path(default_tmp_path().unwrap())
|
||||
.temporary(DEFAULT_TEMPORARY)
|
||||
.cache_capacity(DEFAULT_CACHE_CAPACITY)
|
||||
.mode(DEFAULT_MODE)
|
||||
.use_compression(DEFAULT_USE_COMPRESSION)
|
||||
.build()
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
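A minimal usage sketch for the adapter above, assuming the `pmtree-ft` feature is enabled and the traits are in scope; the depth and leaf value are illustrative:

use zerokit_utils::merkle_tree::{ZerokitMerkleProof, ZerokitMerkleTree};

fn pmtree_roundtrip() -> Result<(), ZerokitMerkleTreeError> {
    // Temporary on-disk tree of depth 10 with the default sled settings
    let config = PmtreeConfig::builder().temporary(true).build().expect("config");
    let mut tree = PmTree::new(10, Fr::from(0), config)?;

    // Insert a leaf at the next free index, prove it, and check the root
    tree.update_next(Fr::from(42))?;
    let proof = tree.proof(0)?;
    assert_eq!(proof.compute_root_from(&Fr::from(42))?, tree.root());
    assert!(tree.verify(&Fr::from(42), &proof)?);
    Ok(())
}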
(File diff suppressed because it is too large)
@@ -1,129 +0,0 @@
// This crate implements the Poseidon hash algorithm https://eprint.iacr.org/2019/458.pdf

// The implementation is taken from https://github.com/arnaucube/poseidon-rs/blob/233027d6075a637c29ad84a8a44f5653b81f0410/src/lib.rs
// and slightly adapted to work over arkworks field data type

use crate::circuit::Fr;
use crate::poseidon_constants::constants;
use crate::utils::*;
use ark_std::Zero;
use once_cell::sync::Lazy;

#[derive(Debug)]
pub struct Constants {
    pub c: Vec<Vec<Fr>>,
    pub m: Vec<Vec<Vec<Fr>>>,
    pub n_rounds_f: usize,
    pub n_rounds_p: Vec<usize>,
}

pub fn load_constants() -> Constants {
    let (c_str, m_str) = constants();
    let mut c: Vec<Vec<Fr>> = Vec::new();
    for i in 0..c_str.len() {
        let mut cci: Vec<Fr> = Vec::new();
        for j in 0..c_str[i].len() {
            let b: Fr = str_to_fr(c_str[i][j], 10);
            cci.push(b);
        }
        c.push(cci);
    }
    let mut m: Vec<Vec<Vec<Fr>>> = Vec::new();
    for i in 0..m_str.len() {
        let mut mi: Vec<Vec<Fr>> = Vec::new();
        for j in 0..m_str[i].len() {
            let mut mij: Vec<Fr> = Vec::new();
            for k in 0..m_str[i][j].len() {
                let b: Fr = str_to_fr(m_str[i][j][k], 10);
                mij.push(b);
            }
            mi.push(mij);
        }
        m.push(mi);
    }
    Constants {
        c,
        m,
        n_rounds_f: 8,
        n_rounds_p: vec![56, 57, 56, 60, 60, 63, 64, 63],
    }
}

pub struct Poseidon {
    constants: Constants,
}

impl Poseidon {
    pub fn new() -> Poseidon {
        Poseidon {
            constants: load_constants(),
        }
    }

    pub fn ark(&self, state: &mut [Fr], c: &[Fr], it: usize) {
        for i in 0..state.len() {
            state[i] += c[it + i];
        }
    }

    pub fn sbox(&self, n_rounds_f: usize, n_rounds_p: usize, state: &mut [Fr], i: usize) {
        if (i < n_rounds_f / 2) || (i >= n_rounds_f / 2 + n_rounds_p) {
            for j in 0..state.len() {
                let aux = state[j];
                state[j] *= state[j];
                state[j] *= state[j];
                state[j] *= aux;
            }
        } else {
            let aux = state[0];
            state[0] *= state[0];
            state[0] *= state[0];
            state[0] *= aux;
        }
    }

    pub fn mix(&self, state: &[Fr], m: &[Vec<Fr>]) -> Vec<Fr> {
        let mut new_state: Vec<Fr> = Vec::new();
        for i in 0..state.len() {
            new_state.push(Fr::zero());
            for j in 0..state.len() {
                let mut mij = m[i][j];
                mij *= state[j];
                new_state[i] += mij;
            }
        }
        new_state.clone()
    }

    pub fn hash(&self, inp: Vec<Fr>) -> Result<Fr, String> {
        let t = inp.len() + 1;
        if inp.is_empty() || (inp.len() >= self.constants.n_rounds_p.len() - 1) {
            return Err("Wrong inputs length".to_string());
        }
        let n_rounds_f = self.constants.n_rounds_f;
        let n_rounds_p = self.constants.n_rounds_p[t - 2];

        let mut state = vec![Fr::zero(); t];
        state[1..].clone_from_slice(&inp);

        for i in 0..(n_rounds_f + n_rounds_p) {
            self.ark(&mut state, &self.constants.c[t - 2], i * t);
            self.sbox(n_rounds_f, n_rounds_p, &mut state, i);
            state = self.mix(&state, &self.constants.m[t - 2]);
        }

        Ok(state[0])
    }
}

impl Default for Poseidon {
    fn default() -> Self {
        Self::new()
    }
}

// Poseidon Hash wrapper over above implementation. Adapted from semaphore-rs poseidon hash wrapper.
static POSEIDON: Lazy<Poseidon> = Lazy::new(Poseidon::new);

pub fn poseidon_hash(input: &[Fr]) -> Fr {
    POSEIDON
        .hash(input.to_vec())
        .expect("hash with fixed input size can't fail")
}
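As a quick illustration of the (now removed) wrapper above, hashing field elements is a single call; the input values here are arbitrary:

use crate::circuit::Fr;

fn poseidon_example() {
    // The wrapper panics on unsupported input lengths; small arities,
    // bounded by the hard-coded round constants, are fine.
    let h2 = poseidon_hash(&[Fr::from(1u64), Fr::from(2u64)]);
    let h1 = poseidon_hash(&[h2]);
    // Same inputs always yield the same digest
    assert_eq!(h1, poseidon_hash(&[h2]));
}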
@@ -1,109 +1,32 @@
 // This crate defines the RLN module default Merkle tree implementation and its Hasher
-// Implementation inspired by https://github.com/worldcoin/semaphore-rs/blob/d462a4372f1fd9c27610f2acfe4841fab1d396aa/src/poseidon_tree.rs
+// Implementation inspired by https://github.com/worldcoin/semaphore-rs/blob/d462a4372f1fd9c27610f2acfe4841fab1d396aa/src/poseidon_tree.rs (no differences)
+#![cfg(not(feature = "stateless"))]
 
-use crate::circuit::Fr;
-use crate::merkle_tree::*;
-use crate::poseidon_hash::poseidon_hash;
 use cfg_if::cfg_if;
 
-// The zerokit RLN default Merkle tree implementation is the OptimalMerkleTree.
-// To switch to FullMerkleTree implementation, it is enough to enable the fullmerkletree feature
+// The zerokit RLN default Merkle tree implementation is the PMTree from the vacp2p_pmtree crate
+// To switch to FullMerkleTree or OptimalMerkleTree, enable the corresponding feature in the Cargo.toml file
 
 cfg_if! {
     if #[cfg(feature = "fullmerkletree")] {
         use zerokit_utils::merkle_tree::{FullMerkleTree, FullMerkleProof};
+        use crate::hashers::PoseidonHash;
 
         pub type PoseidonTree = FullMerkleTree<PoseidonHash>;
         pub type MerkleProof = FullMerkleProof<PoseidonHash>;
-    } else {
+    } else if #[cfg(feature = "optimalmerkletree")] {
         use zerokit_utils::merkle_tree::{OptimalMerkleTree, OptimalMerkleProof};
+        use crate::hashers::PoseidonHash;
 
         pub type PoseidonTree = OptimalMerkleTree<PoseidonHash>;
         pub type MerkleProof = OptimalMerkleProof<PoseidonHash>;
-    }
-}
-
-// The zerokit RLN default Hasher
-#[derive(Clone, Copy, PartialEq, Eq)]
-pub struct PoseidonHash;
-
-impl Hasher for PoseidonHash {
-    type Fr = Fr;
-
-    fn default_leaf() -> Self::Fr {
-        Self::Fr::from(0)
-    }
-
-    fn hash(inputs: &[Self::Fr]) -> Self::Fr {
-        poseidon_hash(inputs)
-    }
-}
-
-////////////////////////////////////////////////////////////
-/// Tests
-////////////////////////////////////////////////////////////
-
-#[cfg(test)]
-mod test {
-    use super::*;
-
-    #[test]
-    /// A basic performance comparison between the two supported Merkle Tree implementations
-    fn test_merkle_implementations_performances() {
-        use std::time::{Duration, Instant};
-
-        let tree_height = 20;
-        let sample_size = 100;
-
-        let leaves: Vec<Fr> = (0..sample_size).map(|s| Fr::from(s)).collect();
-
-        let mut gen_time_full: u128 = 0;
-        let mut upd_time_full: u128 = 0;
-        let mut gen_time_opt: u128 = 0;
-        let mut upd_time_opt: u128 = 0;
-
-        for _ in 0..sample_size.try_into().unwrap() {
-            let now = Instant::now();
-            FullMerkleTree::<PoseidonHash>::default(tree_height);
-            gen_time_full += now.elapsed().as_nanos();
-
-            let now = Instant::now();
-            OptimalMerkleTree::<PoseidonHash>::default(tree_height);
-            gen_time_opt += now.elapsed().as_nanos();
-        }
-
-        let mut tree_full = FullMerkleTree::<PoseidonHash>::default(tree_height);
-        let mut tree_opt = OptimalMerkleTree::<PoseidonHash>::default(tree_height);
-        for i in 0..sample_size.try_into().unwrap() {
-            let now = Instant::now();
-            tree_full.set(i, leaves[i]).unwrap();
-            upd_time_full += now.elapsed().as_nanos();
-            let proof = tree_full.proof(i).expect("index should be set");
-            assert_eq!(proof.leaf_index(), i);
-
-            let now = Instant::now();
-            tree_opt.set(i, leaves[i]).unwrap();
-            upd_time_opt += now.elapsed().as_nanos();
-            let proof = tree_opt.proof(i).expect("index should be set");
-            assert_eq!(proof.leaf_index(), i);
-        }
-
-        println!("Average tree generation time:");
-        println!(
-            " - Full Merkle Tree: {:?}",
-            Duration::from_nanos((gen_time_full / sample_size).try_into().unwrap())
-        );
-        println!(
-            " - Optimal Merkle Tree: {:?}",
-            Duration::from_nanos((gen_time_opt / sample_size).try_into().unwrap())
-        );
-
-        println!("Average update_next execution time:");
-        println!(
-            " - Full Merkle Tree: {:?}",
-            Duration::from_nanos((upd_time_full / sample_size).try_into().unwrap())
-        );
-        println!(
-            " - Optimal Merkle Tree: {:?}",
-            Duration::from_nanos((upd_time_opt / sample_size).try_into().unwrap())
-        );
-    }
-}
+    } else if #[cfg(feature = "pmtree-ft")] {
+        use crate::pm_tree_adapter::{PmTree, PmTreeProof};
+
+        pub type PoseidonTree = PmTree;
+        pub type MerkleProof = PmTreeProof;
+    } else {
+        compile_error!("One of the features `fullmerkletree`, `optimalmerkletree`, or `pmtree-ft` must be enabled.");
+    }
+}
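With the feature-gated aliases above, downstream code is identical regardless of the backing tree. A minimal sketch of a hypothetical caller, assuming the fallible constructors of the `ZerokitMerkleTree` trait:

use zerokit_utils::merkle_tree::ZerokitMerkleTree;

fn default_tree_root() -> Result<Fr, ZerokitMerkleTreeError> {
    // PoseidonTree resolves to PmTree, FullMerkleTree, or OptimalMerkleTree
    // depending on the enabled Cargo feature; the call sites do not change.
    let mut tree = PoseidonTree::default(20)?;
    tree.update_next(Fr::from(1u64))?;
    Ok(tree.root())
}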
37 rln/src/prelude.rs Normal file
@@ -0,0 +1,37 @@
// This module re-exports the most commonly used types and functions from the RLN library

#[cfg(not(target_arch = "wasm32"))]
pub use crate::circuit::{graph_from_folder, zkey_from_folder};
#[cfg(feature = "pmtree-ft")]
pub use crate::pm_tree_adapter::{FrOf, PmTree, PmTreeProof, PmtreeConfig, PmtreeConfigBuilder};
#[cfg(not(feature = "stateless"))]
pub use crate::poseidon_tree::{MerkleProof, PoseidonTree};
#[cfg(not(feature = "stateless"))]
pub use crate::protocol::compute_tree_root;
#[cfg(not(target_arch = "wasm32"))]
pub use crate::protocol::{generate_zk_proof, verify_zk_proof};
pub use crate::{
    circuit::{
        zkey_from_raw, Curve, Fq, Fq2, Fr, G1Affine, G1Projective, G2Affine, G2Projective, Proof,
        VerifyingKey, Zkey, COMPRESS_PROOF_SIZE, DEFAULT_TREE_DEPTH,
    },
    error::{ProtocolError, RLNError, UtilsError, VerifyError},
    hashers::{hash_to_field_be, hash_to_field_le, poseidon_hash, PoseidonHash},
    protocol::{
        bytes_be_to_rln_proof, bytes_be_to_rln_proof_values, bytes_be_to_rln_witness,
        bytes_le_to_rln_proof, bytes_le_to_rln_proof_values, bytes_le_to_rln_witness,
        extended_keygen, extended_seeded_keygen, generate_zk_proof_with_witness, keygen,
        proof_values_from_witness, recover_id_secret, rln_proof_to_bytes_be, rln_proof_to_bytes_le,
        rln_proof_values_to_bytes_be, rln_proof_values_to_bytes_le, rln_witness_to_bigint_json,
        rln_witness_to_bytes_be, rln_witness_to_bytes_le, seeded_keygen, RLNProof, RLNProofValues,
        RLNWitnessInput,
    },
    public::RLN,
    utils::{
        bytes_be_to_fr, bytes_be_to_vec_fr, bytes_be_to_vec_u8, bytes_be_to_vec_usize,
        bytes_le_to_fr, bytes_le_to_vec_fr, bytes_le_to_vec_u8, bytes_le_to_vec_usize,
        fr_to_bytes_be, fr_to_bytes_le, normalize_usize_be, normalize_usize_le, str_to_fr,
        to_bigint, vec_fr_to_bytes_be, vec_fr_to_bytes_le, vec_u8_to_bytes_be, vec_u8_to_bytes_le,
        IdSecret, FR_BYTE_SIZE,
    },
};
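A short sketch of how a consumer might pull everything in through the prelude; the glob import over the `rln` crate is the assumed usage pattern, and the result types are unwrapped for brevity:

use rln::prelude::*;

fn prelude_example() {
    // One glob import exposes keygen, hashing, and (de)serialization helpers
    let (_identity_secret, id_commitment) = keygen().expect("keygen");
    let digest = poseidon_hash(&[id_commitment]).expect("poseidon");

    // Field elements round-trip through the little-endian byte helpers
    let bytes = fr_to_bytes_le(&digest);
    let (roundtrip, _) = bytes_le_to_fr(&bytes).expect("deserialize");
    assert_eq!(digest, roundtrip);
}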
@@ -1,492 +0,0 @@
// This crate collects all the underlying primitives used to implement RLN

use ark_circom::{CircomReduction, WitnessCalculator};
use ark_groth16::{
    create_proof_with_reduction_and_matrices, prepare_verifying_key,
    verify_proof as ark_verify_proof, Proof as ArkProof, ProvingKey, VerifyingKey,
};
use ark_relations::r1cs::ConstraintMatrices;
use ark_relations::r1cs::SynthesisError;
use ark_std::{rand::thread_rng, UniformRand};
use color_eyre::Result;
use num_bigint::BigInt;
use rand::Rng;
use std::sync::Mutex;
#[cfg(debug_assertions)]
use std::time::Instant;
use thiserror::Error;
use tiny_keccak::{Hasher as _, Keccak};

use crate::circuit::{Curve, Fr};
use crate::poseidon_hash::poseidon_hash;
use crate::poseidon_tree::*;
use crate::public::RLN_IDENTIFIER;
use crate::utils::*;

///////////////////////////////////////////////////////
// RLN Witness data structure and utility functions
///////////////////////////////////////////////////////

#[derive(Debug, PartialEq)]
pub struct RLNWitnessInput {
    identity_secret: Fr,
    path_elements: Vec<Fr>,
    identity_path_index: Vec<u8>,
    x: Fr,
    epoch: Fr,
    rln_identifier: Fr,
}

#[derive(Debug, PartialEq)]
pub struct RLNProofValues {
    // Public outputs:
    pub y: Fr,
    pub nullifier: Fr,
    pub root: Fr,
    // Public Inputs:
    pub x: Fr,
    pub epoch: Fr,
    pub rln_identifier: Fr,
}

pub fn serialize_witness(rln_witness: &RLNWitnessInput) -> Vec<u8> {
    let mut serialized: Vec<u8> = Vec::new();

    serialized.append(&mut fr_to_bytes_le(&rln_witness.identity_secret));
    serialized.append(&mut vec_fr_to_bytes_le(&rln_witness.path_elements));
    serialized.append(&mut vec_u8_to_bytes_le(&rln_witness.identity_path_index));
    serialized.append(&mut fr_to_bytes_le(&rln_witness.x));
    serialized.append(&mut fr_to_bytes_le(&rln_witness.epoch));
    serialized.append(&mut fr_to_bytes_le(&rln_witness.rln_identifier));

    serialized
}

pub fn deserialize_witness(serialized: &[u8]) -> (RLNWitnessInput, usize) {
    let mut all_read: usize = 0;

    let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
    all_read += read;

    let (path_elements, read) = bytes_le_to_vec_fr(&serialized[all_read..].to_vec());
    all_read += read;

    let (identity_path_index, read) = bytes_le_to_vec_u8(&serialized[all_read..].to_vec());
    all_read += read;

    let (x, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
    all_read += read;

    let (epoch, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
    all_read += read;

    let (rln_identifier, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
    all_read += read;

    // TODO: check rln_identifier against public::RLN_IDENTIFIER
    assert_eq!(serialized.len(), all_read);

    (
        RLNWitnessInput {
            identity_secret,
            path_elements,
            identity_path_index,
            x,
            epoch,
            rln_identifier,
        },
        all_read,
    )
}

// This function deserializes input for kilic's rln generate_proof public API
// https://github.com/kilic/rln/blob/7ac74183f8b69b399e3bc96c1ae8ab61c026dc43/src/public.rs#L148
// input_data is [ id_key<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
// return value is an RLN witness populated according to this information
pub fn proof_inputs_to_rln_witness(
    tree: &mut PoseidonTree,
    serialized: &[u8],
) -> (RLNWitnessInput, usize) {
    let mut all_read: usize = 0;

    let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
    all_read += read;

    let id_index = u64::from_le_bytes(serialized[all_read..all_read + 8].try_into().unwrap());
    all_read += 8;

    let (epoch, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
    all_read += read;

    let signal_len = u64::from_le_bytes(serialized[all_read..all_read + 8].try_into().unwrap());
    all_read += 8;

    let signal: Vec<u8> =
        serialized[all_read..all_read + usize::try_from(signal_len).unwrap()].to_vec();

    let merkle_proof = tree
        .proof(usize::try_from(id_index).unwrap())
        .expect("proof should exist");
    let path_elements = merkle_proof.get_path_elements();
    let identity_path_index = merkle_proof.get_path_index();

    let x = hash_to_field(&signal);

    let rln_identifier = hash_to_field(RLN_IDENTIFIER);

    (
        RLNWitnessInput {
            identity_secret,
            path_elements,
            identity_path_index,
            x,
            epoch,
            rln_identifier,
        },
        all_read,
    )
}

pub fn serialize_proof_values(rln_proof_values: &RLNProofValues) -> Vec<u8> {
    let mut serialized: Vec<u8> = Vec::new();

    serialized.append(&mut fr_to_bytes_le(&rln_proof_values.root));
    serialized.append(&mut fr_to_bytes_le(&rln_proof_values.epoch));
    serialized.append(&mut fr_to_bytes_le(&rln_proof_values.x));
    serialized.append(&mut fr_to_bytes_le(&rln_proof_values.y));
    serialized.append(&mut fr_to_bytes_le(&rln_proof_values.nullifier));
    serialized.append(&mut fr_to_bytes_le(&rln_proof_values.rln_identifier));

    serialized
}

pub fn deserialize_proof_values(serialized: &[u8]) -> (RLNProofValues, usize) {
    let mut all_read: usize = 0;

    let (root, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
    all_read += read;

    let (epoch, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
    all_read += read;

    let (x, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
    all_read += read;

    let (y, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
    all_read += read;

    let (nullifier, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
    all_read += read;

    let (rln_identifier, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
    all_read += read;

    (
        RLNProofValues {
            y,
            nullifier,
            root,
            x,
            epoch,
            rln_identifier,
        },
        all_read,
    )
}

pub fn rln_witness_from_json(input_json_str: &str) -> RLNWitnessInput {
    let input_json: serde_json::Value =
        serde_json::from_str(input_json_str).expect("JSON was not well-formatted");

    let identity_secret = str_to_fr(&input_json["identity_secret"].to_string(), 10);

    let path_elements = input_json["path_elements"]
        .as_array()
        .unwrap()
        .iter()
        .map(|v| str_to_fr(&v.to_string(), 10))
        .collect();

    let identity_path_index = input_json["identity_path_index"]
        .as_array()
        .unwrap()
        .iter()
        .map(|v| v.as_u64().unwrap() as u8)
        .collect();

    let x = str_to_fr(&input_json["x"].to_string(), 10);

    let epoch = str_to_fr(&input_json["epoch"].to_string(), 16);

    let rln_identifier = str_to_fr(&input_json["rln_identifier"].to_string(), 10);

    // TODO: check rln_identifier against public::RLN_IDENTIFIER

    RLNWitnessInput {
        identity_secret,
        path_elements,
        identity_path_index,
        x,
        epoch,
        rln_identifier,
    }
}

pub fn rln_witness_from_values(
    identity_secret: Fr,
    merkle_proof: &MerkleProof,
    x: Fr,
    epoch: Fr,
    //rln_identifier: Fr,
) -> RLNWitnessInput {
    let path_elements = merkle_proof.get_path_elements();
    let identity_path_index = merkle_proof.get_path_index();
    let rln_identifier = hash_to_field(RLN_IDENTIFIER);

    RLNWitnessInput {
        identity_secret,
        path_elements,
        identity_path_index,
        x,
        epoch,
        rln_identifier,
    }
}

pub fn random_rln_witness(tree_height: usize) -> RLNWitnessInput {
    let mut rng = thread_rng();

    let identity_secret = hash_to_field(&rng.gen::<[u8; 32]>());
    let x = hash_to_field(&rng.gen::<[u8; 32]>());
    let epoch = hash_to_field(&rng.gen::<[u8; 32]>());
    let rln_identifier = hash_to_field(RLN_IDENTIFIER); //hash_to_field(&rng.gen::<[u8; 32]>());

    let mut path_elements: Vec<Fr> = Vec::new();
    let mut identity_path_index: Vec<u8> = Vec::new();

    for _ in 0..tree_height {
        path_elements.push(hash_to_field(&rng.gen::<[u8; 32]>()));
        identity_path_index.push(rng.gen_range(0..2) as u8);
    }

    RLNWitnessInput {
        identity_secret,
        path_elements,
        identity_path_index,
        x,
        epoch,
        rln_identifier,
    }
}

pub fn proof_values_from_witness(rln_witness: &RLNWitnessInput) -> RLNProofValues {
    // y share
    let a_0 = rln_witness.identity_secret;
    let a_1 = poseidon_hash(&[a_0, rln_witness.epoch]);
    let y = rln_witness.x * a_1;
    let y = y + a_0;

    // Nullifier
    let nullifier = poseidon_hash(&[a_1, rln_witness.rln_identifier]);

    // Merkle tree root computation
    let root = compute_tree_root(
        &rln_witness.identity_secret,
        &rln_witness.path_elements,
        &rln_witness.identity_path_index,
        true,
    );

    RLNProofValues {
        y,
        nullifier,
        root,
        x: rln_witness.x,
        epoch: rln_witness.epoch,
        rln_identifier: rln_witness.rln_identifier,
    }
}

///////////////////////////////////////////////////////
// Merkle tree utility functions
///////////////////////////////////////////////////////

pub fn compute_tree_root(
    leaf: &Fr,
    path_elements: &[Fr],
    identity_path_index: &[u8],
    hash_leaf: bool,
) -> Fr {
    let mut root = *leaf;
    if hash_leaf {
        root = poseidon_hash(&[root]);
    }

    for i in 0..identity_path_index.len() {
        if identity_path_index[i] == 0 {
            root = poseidon_hash(&[root, path_elements[i]]);
        } else {
            root = poseidon_hash(&[path_elements[i], root]);
        }
    }

    root
}

///////////////////////////////////////////////////////
// Signal/nullifier utility functions
///////////////////////////////////////////////////////

// Generates a tuple (identity_secret, id_commitment) where
// identity_secret is random and id_commitment = PoseidonHash(identity_secret)
pub fn keygen() -> (Fr, Fr) {
    let mut rng = thread_rng();
    let identity_secret = Fr::rand(&mut rng);
    let id_commitment = poseidon_hash(&[identity_secret]);
    (identity_secret, id_commitment)
}

// Hashes an arbitrary signal to the underlying prime field
pub fn hash_to_field(signal: &[u8]) -> Fr {
    // We hash the input signal using Keccak256
    // (note that a bigger curve order might require a bigger hash blocksize)
    let mut hash = [0; 32];
    let mut hasher = Keccak::v256();
    hasher.update(signal);
    hasher.finalize(&mut hash);

    // We export the hash as a field element
    let (el, _) = bytes_le_to_fr(hash.as_ref());
    el
}

///////////////////////////////////////////////////////
// zkSNARK utility functions
///////////////////////////////////////////////////////

#[derive(Error, Debug)]
pub enum ProofError {
    #[error("Error reading circuit key: {0}")]
    CircuitKeyError(#[from] std::io::Error),
    #[error("Error producing witness: {0}")]
    WitnessError(color_eyre::Report),
    #[error("Error producing proof: {0}")]
    SynthesisError(#[from] SynthesisError),
}

/// Generates an RLN proof
///
/// # Errors
///
/// Returns a [`ProofError`] if proving fails.
pub fn generate_proof(
    witness_calculator: &Mutex<WitnessCalculator>,
    proving_key: &(ProvingKey<Curve>, ConstraintMatrices<Fr>),
    rln_witness: &RLNWitnessInput,
) -> Result<ArkProof<Curve>, ProofError> {
    // We convert the path indexes to field elements
    // TODO: check if necessary
    let mut path_elements = Vec::new();
    rln_witness
        .path_elements
        .iter()
        .for_each(|v| path_elements.push(to_bigint(v)));

    let mut identity_path_index = Vec::new();
    rln_witness
        .identity_path_index
        .iter()
        .for_each(|v| identity_path_index.push(BigInt::from(*v)));

    let inputs = [
        (
            "identity_secret",
            vec![to_bigint(&rln_witness.identity_secret)],
        ),
        ("path_elements", path_elements),
        ("identity_path_index", identity_path_index),
        ("x", vec![to_bigint(&rln_witness.x)]),
        ("epoch", vec![to_bigint(&rln_witness.epoch)]),
        (
            "rln_identifier",
            vec![to_bigint(&rln_witness.rln_identifier)],
        ),
    ];
    let inputs = inputs
        .into_iter()
        .map(|(name, values)| (name.to_string(), values));

    // If in debug mode, we measure and later print the time taken to compute the witness
    #[cfg(debug_assertions)]
    let now = Instant::now();

    let full_assignment = witness_calculator
        .lock()
        .expect("witness_calculator mutex should not get poisoned")
        .calculate_witness_element::<Curve, _>(inputs, false)
        .map_err(ProofError::WitnessError)?;

    #[cfg(debug_assertions)]
    println!("witness generation took: {:.2?}", now.elapsed());

    // Random values
    let mut rng = thread_rng();
    let r = Fr::rand(&mut rng);
    let s = Fr::rand(&mut rng);

    // If in debug mode, we measure and later print the time taken to compute the proof
    #[cfg(debug_assertions)]
    let now = Instant::now();

    let proof = create_proof_with_reduction_and_matrices::<_, CircomReduction>(
        &proving_key.0,
        r,
        s,
        &proving_key.1,
        proving_key.1.num_instance_variables,
        proving_key.1.num_constraints,
        full_assignment.as_slice(),
    )?;

    #[cfg(debug_assertions)]
    println!("proof generation took: {:.2?}", now.elapsed());

    Ok(proof)
}

/// Verifies a given RLN proof
///
/// # Errors
///
/// Returns a [`ProofError`] if verifying fails. Verification failure does not
/// necessarily mean the proof is incorrect.
pub fn verify_proof(
    verifying_key: &VerifyingKey<Curve>,
    proof: &ArkProof<Curve>,
    proof_values: &RLNProofValues,
) -> Result<bool, ProofError> {
    // We re-arrange proof values according to the circuit specification
    let inputs = vec![
        proof_values.y,
        proof_values.root,
        proof_values.nullifier,
        proof_values.x,
        proof_values.epoch,
        proof_values.rln_identifier,
    ];

    // Check that the proof is valid
    let pvk = prepare_verifying_key(verifying_key);
    //let pr: ArkProof<Curve> = (*proof).into();

    // If in debug mode, we measure and later print the time taken to verify the proof
    #[cfg(debug_assertions)]
    let now = Instant::now();

    let verified = ark_verify_proof(&pvk, proof, &inputs)?;

    #[cfg(debug_assertions)]
    println!("verify took: {:.2?}", now.elapsed());

    Ok(verified)
}
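To recap the share computation in proof_values_from_witness above: for identity secret a_0 and epoch e, the prover derives a_1 = PoseidonHash(a_0, e) and publishes the evaluation y = a_0 + a_1 * x at the message-dependent point x. A minimal sketch of just that arithmetic, as a hypothetical free function over the module's poseidon_hash:

use crate::circuit::Fr;

// Shamir-style share: one (x, y) point per message on the per-epoch line
fn compute_share(identity_secret: Fr, epoch: Fr, x: Fr) -> (Fr, Fr) {
    let a_0 = identity_secret;
    let a_1 = poseidon_hash(&[a_0, epoch]);
    (x, a_0 + a_1 * x)
}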
80 rln/src/protocol/keygen.rs Normal file
@@ -0,0 +1,80 @@
use ark_std::{rand::thread_rng, UniformRand};
use rand::SeedableRng;
use rand_chacha::ChaCha20Rng;
use tiny_keccak::{Hasher as _, Keccak};
use zerokit_utils::error::ZerokitMerkleTreeError;

use crate::{circuit::Fr, hashers::poseidon_hash, utils::IdSecret};

/// Generates a random RLN identity using a cryptographically secure RNG.
///
/// Returns `(identity_secret, id_commitment)` where the commitment is `PoseidonHash(identity_secret)`.
pub fn keygen() -> Result<(IdSecret, Fr), ZerokitMerkleTreeError> {
    let mut rng = thread_rng();
    let identity_secret = IdSecret::rand(&mut rng);
    let id_commitment = poseidon_hash(&[*identity_secret.clone()])?;
    Ok((identity_secret, id_commitment))
}

/// Generates an extended RLN identity compatible with Semaphore.
///
/// Returns `(identity_trapdoor, identity_nullifier, identity_secret, id_commitment)` where:
/// - `identity_secret = PoseidonHash(identity_trapdoor, identity_nullifier)`
/// - `id_commitment = PoseidonHash(identity_secret)`
pub fn extended_keygen() -> Result<(Fr, Fr, Fr, Fr), ZerokitMerkleTreeError> {
    let mut rng = thread_rng();
    let identity_trapdoor = Fr::rand(&mut rng);
    let identity_nullifier = Fr::rand(&mut rng);
    let identity_secret = poseidon_hash(&[identity_trapdoor, identity_nullifier])?;
    let id_commitment = poseidon_hash(&[identity_secret])?;
    Ok((
        identity_trapdoor,
        identity_nullifier,
        identity_secret,
        id_commitment,
    ))
}

/// Generates a deterministic RLN identity from a seed.
///
/// Uses a ChaCha20 RNG seeded with the Keccak-256 hash of the input.
/// Returns `(identity_secret, id_commitment)`. The same input always produces the same identity.
pub fn seeded_keygen(signal: &[u8]) -> Result<(Fr, Fr), ZerokitMerkleTreeError> {
    // ChaCha20 requires a seed of exactly 32 bytes.
    // We first hash the input seed signal to a 32-byte array and pass this as seed to ChaCha20
    let mut seed = [0; 32];
    let mut hasher = Keccak::v256();
    hasher.update(signal);
    hasher.finalize(&mut seed);

    let mut rng = ChaCha20Rng::from_seed(seed);
    let identity_secret = Fr::rand(&mut rng);
    let id_commitment = poseidon_hash(&[identity_secret])?;
    Ok((identity_secret, id_commitment))
}

/// Generates a deterministic extended RLN identity from a seed, compatible with Semaphore.
///
/// Uses a ChaCha20 RNG seeded with the Keccak-256 hash of the input.
/// Returns `(identity_trapdoor, identity_nullifier, identity_secret, id_commitment)`.
/// The same input always produces the same identity.
pub fn extended_seeded_keygen(signal: &[u8]) -> Result<(Fr, Fr, Fr, Fr), ZerokitMerkleTreeError> {
    // ChaCha20 requires a seed of exactly 32 bytes.
    // We first hash the input seed signal to a 32-byte array and pass this as seed to ChaCha20
    let mut seed = [0; 32];
    let mut hasher = Keccak::v256();
    hasher.update(signal);
    hasher.finalize(&mut seed);

    let mut rng = ChaCha20Rng::from_seed(seed);
    let identity_trapdoor = Fr::rand(&mut rng);
    let identity_nullifier = Fr::rand(&mut rng);
    let identity_secret = poseidon_hash(&[identity_trapdoor, identity_nullifier])?;
    let id_commitment = poseidon_hash(&[identity_secret])?;
    Ok((
        identity_trapdoor,
        identity_nullifier,
        identity_secret,
        id_commitment,
    ))
}
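The determinism of the seeded variants is easy to exercise; a quick sketch using the functions above, with arbitrary seed bytes:

fn seeded_keygen_is_deterministic() {
    let (sk1, c1) = seeded_keygen(b"the same seed").expect("keygen");
    let (sk2, c2) = seeded_keygen(b"the same seed").expect("keygen");
    // Identical seeds yield identical identities...
    assert_eq!((sk1, c1), (sk2, c2));

    // ...while a different seed yields a different commitment
    let (_, c3) = seeded_keygen(b"another seed").expect("keygen");
    assert_ne!(c1, c3);
}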
19 rln/src/protocol/mod.rs Normal file
@@ -0,0 +1,19 @@
// This crate collects all the underlying primitives used to implement RLN

mod keygen;
mod proof;
mod slashing;
mod witness;

pub use keygen::{extended_keygen, extended_seeded_keygen, keygen, seeded_keygen};
pub use proof::{
    bytes_be_to_rln_proof, bytes_be_to_rln_proof_values, bytes_le_to_rln_proof,
    bytes_le_to_rln_proof_values, generate_zk_proof, generate_zk_proof_with_witness,
    rln_proof_to_bytes_be, rln_proof_to_bytes_le, rln_proof_values_to_bytes_be,
    rln_proof_values_to_bytes_le, verify_zk_proof, RLNProof, RLNProofValues,
};
pub use slashing::recover_id_secret;
pub use witness::{
    bytes_be_to_rln_witness, bytes_le_to_rln_witness, compute_tree_root, proof_values_from_witness,
    rln_witness_to_bigint_json, rln_witness_to_bytes_be, rln_witness_to_bytes_le, RLNWitnessInput,
};
345 rln/src/protocol/proof.rs Normal file
@@ -0,0 +1,345 @@
|
||||
use ark_ff::PrimeField;
|
||||
use ark_groth16::{prepare_verifying_key, Groth16};
|
||||
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
|
||||
use ark_std::{rand::thread_rng, UniformRand};
|
||||
use num_bigint::BigInt;
|
||||
use num_traits::Signed;
|
||||
|
||||
use super::witness::{inputs_for_witness_calculation, RLNWitnessInput};
|
||||
use crate::{
|
||||
circuit::{
|
||||
iden3calc::calc_witness, qap::CircomReduction, Curve, Fr, Proof, VerifyingKey, Zkey,
|
||||
COMPRESS_PROOF_SIZE,
|
||||
},
|
||||
error::ProtocolError,
|
||||
utils::{bytes_be_to_fr, bytes_le_to_fr, fr_to_bytes_be, fr_to_bytes_le, FR_BYTE_SIZE},
|
||||
};
|
||||
|
||||
/// Complete RLN proof.
|
||||
///
|
||||
/// Combines the Groth16 proof with its public values.
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub struct RLNProof {
|
||||
pub proof: Proof,
|
||||
pub proof_values: RLNProofValues,
|
||||
}
|
||||
|
||||
/// Public values for RLN proof verification.
|
||||
///
|
||||
/// Contains the circuit's public inputs and outputs. Used in proof verification
|
||||
/// and identity secret recovery when rate limit violations are detected.
|
||||
#[derive(Debug, PartialEq, Clone, Copy)]
|
||||
pub struct RLNProofValues {
|
||||
// Public outputs:
|
||||
pub y: Fr,
|
||||
pub nullifier: Fr,
|
||||
pub root: Fr,
|
||||
// Public Inputs:
|
||||
pub x: Fr,
|
||||
pub external_nullifier: Fr,
|
||||
}
|
||||
|
||||
/// Serializes RLN proof values to little-endian bytes.
|
||||
pub fn rln_proof_values_to_bytes_le(rln_proof_values: &RLNProofValues) -> Vec<u8> {
|
||||
// Calculate capacity for Vec:
|
||||
// 5 field elements: root, external_nullifier, x, y, nullifier
|
||||
let mut bytes = Vec::with_capacity(FR_BYTE_SIZE * 5);
|
||||
|
||||
bytes.extend_from_slice(&fr_to_bytes_le(&rln_proof_values.root));
|
||||
bytes.extend_from_slice(&fr_to_bytes_le(&rln_proof_values.external_nullifier));
|
||||
bytes.extend_from_slice(&fr_to_bytes_le(&rln_proof_values.x));
|
||||
bytes.extend_from_slice(&fr_to_bytes_le(&rln_proof_values.y));
|
||||
bytes.extend_from_slice(&fr_to_bytes_le(&rln_proof_values.nullifier));
|
||||
|
||||
bytes
|
||||
}
|
||||
|
||||
/// Serializes RLN proof values to big-endian bytes.
|
||||
pub fn rln_proof_values_to_bytes_be(rln_proof_values: &RLNProofValues) -> Vec<u8> {
|
||||
// Calculate capacity for Vec:
|
||||
// 5 field elements: root, external_nullifier, x, y, nullifier
|
||||
let mut bytes = Vec::with_capacity(FR_BYTE_SIZE * 5);
|
||||
|
||||
bytes.extend_from_slice(&fr_to_bytes_be(&rln_proof_values.root));
|
||||
bytes.extend_from_slice(&fr_to_bytes_be(&rln_proof_values.external_nullifier));
|
||||
bytes.extend_from_slice(&fr_to_bytes_be(&rln_proof_values.x));
|
||||
bytes.extend_from_slice(&fr_to_bytes_be(&rln_proof_values.y));
|
||||
bytes.extend_from_slice(&fr_to_bytes_be(&rln_proof_values.nullifier));
|
||||
|
||||
bytes
|
||||
}
|
||||
|
||||
/// Deserializes RLN proof values from little-endian bytes.
|
||||
///
|
||||
/// Format: `[ root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]`
|
||||
///
|
||||
/// Returns the deserialized proof values and the number of bytes read.
|
||||
pub fn bytes_le_to_rln_proof_values(
|
||||
bytes: &[u8],
|
||||
) -> Result<(RLNProofValues, usize), ProtocolError> {
|
||||
let mut read: usize = 0;
|
||||
|
||||
let (root, el_size) = bytes_le_to_fr(&bytes[read..])?;
|
||||
read += el_size;
|
||||
|
||||
let (external_nullifier, el_size) = bytes_le_to_fr(&bytes[read..])?;
|
||||
read += el_size;
|
||||
|
||||
let (x, el_size) = bytes_le_to_fr(&bytes[read..])?;
|
||||
read += el_size;
|
||||
|
||||
let (y, el_size) = bytes_le_to_fr(&bytes[read..])?;
|
||||
read += el_size;
|
||||
|
||||
let (nullifier, el_size) = bytes_le_to_fr(&bytes[read..])?;
|
||||
read += el_size;
|
||||
|
||||
Ok((
|
||||
RLNProofValues {
|
||||
y,
|
||||
nullifier,
|
||||
root,
|
||||
x,
|
||||
external_nullifier,
|
||||
},
|
||||
read,
|
||||
))
|
||||
}
|
||||
|
||||
/// Deserializes RLN proof values from big-endian bytes.
|
||||
///
|
||||
/// Format: `[ root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]`
|
||||
///
|
||||
/// Returns the deserialized proof values and the number of bytes read.
|
||||
pub fn bytes_be_to_rln_proof_values(
|
||||
bytes: &[u8],
|
||||
) -> Result<(RLNProofValues, usize), ProtocolError> {
|
||||
let mut read: usize = 0;
|
||||
|
||||
let (root, el_size) = bytes_be_to_fr(&bytes[read..])?;
|
||||
read += el_size;
|
||||
|
||||
let (external_nullifier, el_size) = bytes_be_to_fr(&bytes[read..])?;
|
||||
read += el_size;
|
||||
|
||||
let (x, el_size) = bytes_be_to_fr(&bytes[read..])?;
|
||||
read += el_size;
|
||||
|
||||
let (y, el_size) = bytes_be_to_fr(&bytes[read..])?;
|
||||
read += el_size;
|
||||
|
||||
let (nullifier, el_size) = bytes_be_to_fr(&bytes[read..])?;
|
||||
read += el_size;
|
||||
|
||||
Ok((
|
||||
RLNProofValues {
|
||||
y,
|
||||
nullifier,
|
||||
root,
|
||||
x,
|
||||
external_nullifier,
|
||||
},
|
||||
read,
|
||||
))
|
||||
}
|
||||
|
||||
/// Serializes RLN proof to little-endian bytes.
|
||||
///
|
||||
/// Note: The Groth16 proof is always serialized in LE format (arkworks behavior),
|
||||
/// while proof_values are serialized in LE format.
|
||||
pub fn rln_proof_to_bytes_le(rln_proof: &RLNProof) -> Result<Vec<u8>, ProtocolError> {
|
||||
// Calculate capacity for Vec:
|
||||
// - 128 bytes for compressed Groth16 proof
|
||||
// - 5 field elements for proof values (root, external_nullifier, x, y, nullifier)
|
||||
let mut bytes = Vec::with_capacity(COMPRESS_PROOF_SIZE + FR_BYTE_SIZE * 5);
|
||||
|
||||
// Serialize proof (always LE format from arkworks)
|
||||
rln_proof.proof.serialize_compressed(&mut bytes)?;
|
||||
|
||||
// Serialize proof values in LE
|
||||
let proof_values_bytes = rln_proof_values_to_bytes_le(&rln_proof.proof_values);
|
||||
bytes.extend_from_slice(&proof_values_bytes);
|
||||
|
||||
Ok(bytes)
|
||||
}
|
||||
|
||||
/// Serializes RLN proof to big-endian bytes.
|
||||
///
|
||||
/// Note: The Groth16 proof is always serialized in LE format (arkworks behavior),
|
||||
/// while proof_values are serialized in BE format. This creates a mixed-endian format.
|
||||
pub fn rln_proof_to_bytes_be(rln_proof: &RLNProof) -> Result<Vec<u8>, ProtocolError> {
|
||||
// Calculate capacity for Vec:
|
||||
// - 128 bytes for compressed Groth16 proof
|
||||
// - 5 field elements for proof values (root, external_nullifier, x, y, nullifier)
|
||||
let mut bytes = Vec::with_capacity(COMPRESS_PROOF_SIZE + FR_BYTE_SIZE * 5);
|
||||
|
||||
// Serialize proof (always LE format from arkworks)
|
||||
rln_proof.proof.serialize_compressed(&mut bytes)?;
|
||||
|
||||
// Serialize proof values in BE
|
||||
let proof_values_bytes = rln_proof_values_to_bytes_be(&rln_proof.proof_values);
|
||||
bytes.extend_from_slice(&proof_values_bytes);
|
||||
|
||||
Ok(bytes)
|
||||
}
|
||||
|
||||
/// Deserializes RLN proof from little-endian bytes.
|
||||
///
|
||||
/// Format: `[ proof<128,LE> | root<32,LE> | external_nullifier<32,LE> | x<32,LE> | y<32,LE> | nullifier<32,LE> ]`
|
||||
///
|
||||
/// Returns the deserialized proof and the number of bytes read.
|
||||
pub fn bytes_le_to_rln_proof(bytes: &[u8]) -> Result<(RLNProof, usize), ProtocolError> {
|
||||
let mut read: usize = 0;
|
||||
|
||||
// Deserialize proof (always LE from arkworks)
|
||||
let proof = Proof::deserialize_compressed(&bytes[read..read + COMPRESS_PROOF_SIZE])?;
|
||||
read += COMPRESS_PROOF_SIZE;
|
||||
|
||||
// Deserialize proof values
|
||||
let (values, el_size) = bytes_le_to_rln_proof_values(&bytes[read..])?;
|
||||
read += el_size;
|
||||
|
||||
Ok((
|
||||
RLNProof {
|
||||
proof,
|
||||
proof_values: values,
|
||||
},
|
||||
read,
|
||||
))
|
||||
}
|
||||
|
||||
/// Deserializes RLN proof from big-endian bytes.
|
||||
///
|
||||
/// Format: `[ proof<128,LE> | root<32,BE> | external_nullifier<32,BE> | x<32,BE> | y<32,BE> | nullifier<32,BE> ]`
|
||||
///
|
||||
/// Note: Mixed-endian format - proof is LE (arkworks), proof_values are BE.
|
||||
///
|
||||
/// Returns the deserialized proof and the number of bytes read.
|
||||
pub fn bytes_be_to_rln_proof(bytes: &[u8]) -> Result<(RLNProof, usize), ProtocolError> {
|
||||
let mut read: usize = 0;
|
||||
|
||||
// Deserialize proof (always LE from arkworks)
|
||||
let proof = Proof::deserialize_compressed(&bytes[read..read + COMPRESS_PROOF_SIZE])?;
|
||||
read += COMPRESS_PROOF_SIZE;
|
||||
|
||||
// Deserialize proof values
|
||||
let (values, el_size) = bytes_be_to_rln_proof_values(&bytes[read..])?;
|
||||
read += el_size;
|
||||
|
||||
Ok((
|
||||
RLNProof {
|
||||
proof,
|
||||
proof_values: values,
|
||||
},
|
||||
read,
|
||||
))
|
||||
}

// zkSNARK proof generation and verification

/// Converts a calculated witness (vector of `BigInt`) to field elements.
fn calculated_witness_to_field_elements<E: ark_ec::pairing::Pairing>(
    calculated_witness: Vec<BigInt>,
) -> Result<Vec<E::ScalarField>, ProtocolError> {
    let modulus = <E::ScalarField as PrimeField>::MODULUS;

    // Convert each BigInt to a field element
    let mut field_elements = vec![];
    for w in calculated_witness.into_iter() {
        let w = if w.sign() == num_bigint::Sign::Minus {
            // Negative values are lifted to modulus - |w|, i.e. w reduced mod p
            modulus.into()
                - w.abs()
                    .to_biguint()
                    .ok_or(ProtocolError::BigUintConversion(w))?
        } else {
            w.to_biguint().ok_or(ProtocolError::BigUintConversion(w))?
        };
        field_elements.push(E::ScalarField::from(w))
    }

    Ok(field_elements)
}
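
The negative branch above lifts a witness value w < 0 to p - |w|, which is congruent to w mod p. A toy sketch, using 97 as a stand-in modulus for the real field characteristic:

// Hedged sketch: p - |w| is exactly w reduced modulo p.
use num_bigint::{BigInt, Sign};

fn negative_lift_example() {
    let p = BigInt::from(97);
    let w = BigInt::from(-5);
    let lifted = if w.sign() == Sign::Minus {
        &p - w.abs() // 97 - 5 = 92
    } else {
        w.clone()
    };
    // 92 - (-5) = 97, divisible by p, so lifted ≡ w (mod p).
    assert_eq!((&lifted - &w) % &p, BigInt::from(0));
}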

/// Generates a zkSNARK proof from pre-calculated witness values.
///
/// Use this when witness calculation is performed externally.
pub fn generate_zk_proof_with_witness(
    calculated_witness: Vec<BigInt>,
    zkey: &Zkey,
) -> Result<Proof, ProtocolError> {
    let full_assignment = calculated_witness_to_field_elements::<Curve>(calculated_witness)?;

    // Random values for the Groth16 prover
    let mut rng = thread_rng();
    let r = Fr::rand(&mut rng);
    let s = Fr::rand(&mut rng);

    let proof = Groth16::<_, CircomReduction>::create_proof_with_reduction_and_matrices(
        &zkey.0,
        r,
        s,
        &zkey.1,
        zkey.1.num_instance_variables,
        zkey.1.num_constraints,
        full_assignment.as_slice(),
    )?;

    Ok(proof)
}

/// Generates a zkSNARK proof from witness input using the provided circuit data.
pub fn generate_zk_proof(
    zkey: &Zkey,
    witness: &RLNWitnessInput,
    graph_data: &[u8],
) -> Result<Proof, ProtocolError> {
    let inputs = inputs_for_witness_calculation(witness)?
        .into_iter()
        .map(|(name, values)| (name.to_string(), values));

    let full_assignment = calc_witness(inputs, graph_data)?;

    // Random values for the Groth16 prover
    let mut rng = thread_rng();
    let r = Fr::rand(&mut rng);
    let s = Fr::rand(&mut rng);

    let proof = Groth16::<_, CircomReduction>::create_proof_with_reduction_and_matrices(
        &zkey.0,
        r,
        s,
        &zkey.1,
        zkey.1.num_instance_variables,
        zkey.1.num_constraints,
        full_assignment.as_slice(),
    )?;

    Ok(proof)
}

/// Verifies a zkSNARK proof against the verifying key and public values.
///
/// Returns `true` if the proof is cryptographically valid, `false` if verification fails.
/// Note: Verification failure may occur due to proof computation errors, not necessarily malicious proofs.
pub fn verify_zk_proof(
    verifying_key: &VerifyingKey,
    proof: &Proof,
    proof_values: &RLNProofValues,
) -> Result<bool, ProtocolError> {
    // Re-arrange the proof values according to the circuit's public input order
    let inputs = vec![
        proof_values.y,
        proof_values.root,
        proof_values.nullifier,
        proof_values.x,
        proof_values.external_nullifier,
    ];

    // Check that the proof is valid
    let pvk = prepare_verifying_key(verifying_key);

    let verified = Groth16::<_, CircomReduction>::verify_proof(&pvk, proof, &inputs)?;

    Ok(verified)
}
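
Putting prover and verifier together, a hedged end-to-end sketch (not part of the diff): it assumes the verifying key is reachable as `zkey.0.vk`, as in arkworks' `ProvingKey` layout, and that `proof_values_from_witness` from the witness module is in scope.

fn prove_and_verify_example(
    zkey: &Zkey,
    graph_data: &[u8],
    witness: &RLNWitnessInput,
) -> Result<bool, ProtocolError> {
    // Prove against the circuit, then derive the public values from the
    // same witness and verify them against the proof.
    let proof = generate_zk_proof(zkey, witness, graph_data)?;
    let proof_values = proof_values_from_witness(witness)?;
    verify_zk_proof(&zkey.0.vk, &proof, &proof_values)
}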
55 rln/src/protocol/slashing.rs Normal file
@@ -0,0 +1,55 @@
use ark_ff::AdditiveGroup;

use super::proof::RLNProofValues;
use crate::{circuit::Fr, error::ProtocolError, utils::IdSecret};

/// Computes the identity secret from two (x, y) shares.
fn compute_id_secret(share1: (Fr, Fr), share2: (Fr, Fr)) -> Result<IdSecret, ProtocolError> {
    // Assuming a_0 is the identity secret and a_1 = poseidonHash([a_0, external_nullifier, message_id]),
    // an (x, y) share satisfies the relation
    // y = a_0 + x * a_1
    let (x1, y1) = share1;
    let (x2, y2) = share2;

    // If the two input shares were computed for the same external nullifier and identity secret,
    // we can recover the latter from the linear system
    // y1 = a_0 + x1 * a_1
    // y2 = a_0 + x2 * a_1

    if (x1 - x2) != Fr::ZERO {
        let a_1 = (y1 - y2) / (x1 - x2);
        let mut a_0 = y1 - x1 * a_1;

        // If the shares come from the same polynomial, a_0 is the correctly recovered identity secret
        let id_secret = IdSecret::from(&mut a_0);
        Ok(id_secret)
    } else {
        Err(ProtocolError::DivisionByZero)
    }
}
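
A toy sketch of the interpolation above (coefficients are placeholders; `ark_bn254::Fr` is assumed to match `crate::circuit::Fr`): two points on the line y = a_0 + x * a_1 determine both coefficients, which is exactly why reusing a message slot leaks the secret.

// Hedged example: recover a_0 = 7 from two shares of the line 7 + 3x.
fn share_recovery_example() {
    use ark_bn254::Fr;

    let (a_0, a_1) = (Fr::from(7u64), Fr::from(3u64));
    let (x1, x2) = (Fr::from(1u64), Fr::from(2u64));
    let (y1, y2) = (a_0 + x1 * a_1, a_0 + x2 * a_1);

    // Same algebra as compute_id_secret: slope first, then intercept.
    let rec_a1 = (y1 - y2) / (x1 - x2);
    let rec_a0 = y1 - x1 * rec_a1;
    assert_eq!(rec_a0, a_0);
}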

/// Recovers identity secret from two proof shares with the same external nullifier.
///
/// When a user violates rate limits by generating multiple proofs in the same epoch,
/// their shares can be used to recover their identity secret through polynomial interpolation.
pub fn recover_id_secret(
    rln_proof_values_1: &RLNProofValues,
    rln_proof_values_2: &RLNProofValues,
) -> Result<IdSecret, ProtocolError> {
    let external_nullifier_1 = rln_proof_values_1.external_nullifier;
    let external_nullifier_2 = rln_proof_values_2.external_nullifier;

    // We continue only if the proof values are for the same external nullifier
    if external_nullifier_1 != external_nullifier_2 {
        return Err(ProtocolError::ExternalNullifierMismatch(
            external_nullifier_1,
            external_nullifier_2,
        ));
    }

    // We extract the two shares
    let share1 = (rln_proof_values_1.x, rln_proof_values_1.y);
    let share2 = (rln_proof_values_2.x, rln_proof_values_2.y);

    // We recover the secret
    compute_id_secret(share1, share2)
}
354 rln/src/protocol/witness.rs Normal file
@@ -0,0 +1,354 @@
use zeroize::Zeroize;

use super::proof::RLNProofValues;
use crate::{
    circuit::Fr,
    error::ProtocolError,
    hashers::poseidon_hash,
    utils::{
        bytes_be_to_fr, bytes_be_to_vec_fr, bytes_be_to_vec_u8, bytes_le_to_fr, bytes_le_to_vec_fr,
        bytes_le_to_vec_u8, fr_to_bytes_be, fr_to_bytes_le, to_bigint, vec_fr_to_bytes_be,
        vec_fr_to_bytes_le, vec_u8_to_bytes_be, vec_u8_to_bytes_le, FrOrSecret, IdSecret,
        FR_BYTE_SIZE,
    },
};

/// Witness input for RLN proof generation.
///
/// Contains the identity credentials, Merkle proof, rate-limiting parameters,
/// and signal binding data required to generate a Groth16 proof for the RLN protocol.
#[derive(Debug, PartialEq, Clone)]
pub struct RLNWitnessInput {
    identity_secret: IdSecret,
    user_message_limit: Fr,
    message_id: Fr,
    path_elements: Vec<Fr>,
    identity_path_index: Vec<u8>,
    x: Fr,
    external_nullifier: Fr,
}

impl RLNWitnessInput {
    pub fn new(
        identity_secret: IdSecret,
        user_message_limit: Fr,
        message_id: Fr,
        path_elements: Vec<Fr>,
        identity_path_index: Vec<u8>,
        x: Fr,
        external_nullifier: Fr,
    ) -> Result<Self, ProtocolError> {
        // Message ID range check
        if message_id > user_message_limit {
            return Err(ProtocolError::InvalidMessageId(
                message_id,
                user_message_limit,
            ));
        }

        // Merkle proof length check
        let path_elements_len = path_elements.len();
        let identity_path_index_len = identity_path_index.len();
        if path_elements_len != identity_path_index_len {
            return Err(ProtocolError::InvalidMerkleProofLength(
                path_elements_len,
                identity_path_index_len,
            ));
        }

        Ok(Self {
            identity_secret,
            user_message_limit,
            message_id,
            path_elements,
            identity_path_index,
            x,
            external_nullifier,
        })
    }

    pub fn identity_secret(&self) -> &IdSecret {
        &self.identity_secret
    }

    pub fn user_message_limit(&self) -> &Fr {
        &self.user_message_limit
    }

    pub fn message_id(&self) -> &Fr {
        &self.message_id
    }

    pub fn path_elements(&self) -> &[Fr] {
        &self.path_elements
    }

    pub fn identity_path_index(&self) -> &[u8] {
        &self.identity_path_index
    }

    pub fn x(&self) -> &Fr {
        &self.x
    }

    pub fn external_nullifier(&self) -> &Fr {
        &self.external_nullifier
    }
}
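
A hedged construction sketch (all literal values are placeholders): `RLNWitnessInput::new` enforces `message_id <= user_message_limit` and equal-length Merkle path vectors, so both error paths of the constructor are exercised through these arguments.

fn build_witness_example(
    identity_secret: IdSecret,
    path_elements: Vec<Fr>,
    identity_path_index: Vec<u8>,
) -> Result<RLNWitnessInput, ProtocolError> {
    RLNWitnessInput::new(
        identity_secret,
        Fr::from(100u64),    // user_message_limit
        Fr::from(1u64),      // message_id: must not exceed the limit
        path_elements,       // Merkle path siblings, leaf to root
        identity_path_index, // must have the same length as path_elements
        Fr::from(42u64),     // x: the signal binding value
        Fr::from(7u64),      // external_nullifier
    )
}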

/// Serializes an RLN witness to little-endian bytes.
pub fn rln_witness_to_bytes_le(witness: &RLNWitnessInput) -> Result<Vec<u8>, ProtocolError> {
    // Calculate capacity for Vec:
    // - 5 fixed field elements: identity_secret, user_message_limit, message_id, x, external_nullifier
    // - variable number of path elements
    // - identity_path_index (variable size)
    let mut bytes: Vec<u8> = Vec::with_capacity(
        FR_BYTE_SIZE * (5 + witness.path_elements.len()) + witness.identity_path_index.len(),
    );
    bytes.extend_from_slice(&witness.identity_secret.to_bytes_le());
    bytes.extend_from_slice(&fr_to_bytes_le(&witness.user_message_limit));
    bytes.extend_from_slice(&fr_to_bytes_le(&witness.message_id));
    bytes.extend_from_slice(&vec_fr_to_bytes_le(&witness.path_elements));
    bytes.extend_from_slice(&vec_u8_to_bytes_le(&witness.identity_path_index));
    bytes.extend_from_slice(&fr_to_bytes_le(&witness.x));
    bytes.extend_from_slice(&fr_to_bytes_le(&witness.external_nullifier));

    Ok(bytes)
}

/// Serializes an RLN witness to big-endian bytes.
pub fn rln_witness_to_bytes_be(witness: &RLNWitnessInput) -> Result<Vec<u8>, ProtocolError> {
    // Calculate capacity for Vec:
    // - 5 fixed field elements: identity_secret, user_message_limit, message_id, x, external_nullifier
    // - variable number of path elements
    // - identity_path_index (variable size)
    let mut bytes: Vec<u8> = Vec::with_capacity(
        FR_BYTE_SIZE * (5 + witness.path_elements.len()) + witness.identity_path_index.len(),
    );
    bytes.extend_from_slice(&witness.identity_secret.to_bytes_be());
    bytes.extend_from_slice(&fr_to_bytes_be(&witness.user_message_limit));
    bytes.extend_from_slice(&fr_to_bytes_be(&witness.message_id));
    bytes.extend_from_slice(&vec_fr_to_bytes_be(&witness.path_elements));
    bytes.extend_from_slice(&vec_u8_to_bytes_be(&witness.identity_path_index));
    bytes.extend_from_slice(&fr_to_bytes_be(&witness.x));
    bytes.extend_from_slice(&fr_to_bytes_be(&witness.external_nullifier));

    Ok(bytes)
}

/// Deserializes an RLN witness from little-endian bytes.
///
/// Format: `[ identity_secret<32> | user_message_limit<32> | message_id<32> | path_elements<var> | identity_path_index<var> | x<32> | external_nullifier<32> ]`
///
/// Returns the deserialized witness and the number of bytes read.
pub fn bytes_le_to_rln_witness(bytes: &[u8]) -> Result<(RLNWitnessInput, usize), ProtocolError> {
    let mut read: usize = 0;

    let (identity_secret, el_size) = IdSecret::from_bytes_le(&bytes[read..])?;
    read += el_size;

    let (user_message_limit, el_size) = bytes_le_to_fr(&bytes[read..])?;
    read += el_size;

    let (message_id, el_size) = bytes_le_to_fr(&bytes[read..])?;
    read += el_size;

    let (path_elements, el_size) = bytes_le_to_vec_fr(&bytes[read..])?;
    read += el_size;

    let (identity_path_index, el_size) = bytes_le_to_vec_u8(&bytes[read..])?;
    read += el_size;

    let (x, el_size) = bytes_le_to_fr(&bytes[read..])?;
    read += el_size;

    let (external_nullifier, el_size) = bytes_le_to_fr(&bytes[read..])?;
    read += el_size;

    if bytes.len() != read {
        return Err(ProtocolError::InvalidReadLen(bytes.len(), read));
    }

    Ok((
        RLNWitnessInput::new(
            identity_secret,
            user_message_limit,
            message_id,
            path_elements,
            identity_path_index,
            x,
            external_nullifier,
        )?,
        read,
    ))
}

/// Deserializes an RLN witness from big-endian bytes.
///
/// Format: `[ identity_secret<32> | user_message_limit<32> | message_id<32> | path_elements<var> | identity_path_index<var> | x<32> | external_nullifier<32> ]`
///
/// Returns the deserialized witness and the number of bytes read.
pub fn bytes_be_to_rln_witness(bytes: &[u8]) -> Result<(RLNWitnessInput, usize), ProtocolError> {
    let mut read: usize = 0;

    let (identity_secret, el_size) = IdSecret::from_bytes_be(&bytes[read..])?;
    read += el_size;

    let (user_message_limit, el_size) = bytes_be_to_fr(&bytes[read..])?;
    read += el_size;

    let (message_id, el_size) = bytes_be_to_fr(&bytes[read..])?;
    read += el_size;

    let (path_elements, el_size) = bytes_be_to_vec_fr(&bytes[read..])?;
    read += el_size;

    let (identity_path_index, el_size) = bytes_be_to_vec_u8(&bytes[read..])?;
    read += el_size;

    let (x, el_size) = bytes_be_to_fr(&bytes[read..])?;
    read += el_size;

    let (external_nullifier, el_size) = bytes_be_to_fr(&bytes[read..])?;
    read += el_size;

    if bytes.len() != read {
        return Err(ProtocolError::InvalidReadLen(bytes.len(), read));
    }

    Ok((
        RLNWitnessInput::new(
            identity_secret,
            user_message_limit,
            message_id,
            path_elements,
            identity_path_index,
            x,
            external_nullifier,
        )?,
        read,
    ))
}
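
A hedged round-trip sketch for the witness formats above: serialize, deserialize, and check the whole buffer was consumed (the deserializers themselves enforce this via `InvalidReadLen`).

fn witness_roundtrip_example(witness: &RLNWitnessInput) -> Result<(), ProtocolError> {
    let bytes = rln_witness_to_bytes_le(witness)?;
    let (decoded, read) = bytes_le_to_rln_witness(&bytes)?;
    assert_eq!(read, bytes.len());
    // RLNWitnessInput derives PartialEq, so whole-struct equality works.
    assert_eq!(decoded, *witness);
    Ok(())
}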

/// Converts RLN witness to JSON with BigInt string representation for the witness calculator.
pub fn rln_witness_to_bigint_json(
    witness: &RLNWitnessInput,
) -> Result<serde_json::Value, ProtocolError> {
    use num_bigint::BigInt;

    let mut path_elements = Vec::new();
    for v in witness.path_elements.iter() {
        path_elements.push(to_bigint(v).to_str_radix(10));
    }

    let mut identity_path_index = Vec::new();
    witness
        .identity_path_index
        .iter()
        .for_each(|v| identity_path_index.push(BigInt::from(*v).to_str_radix(10)));

    let inputs = serde_json::json!({
        "identitySecret": to_bigint(&witness.identity_secret).to_str_radix(10),
        "userMessageLimit": to_bigint(&witness.user_message_limit).to_str_radix(10),
        "messageId": to_bigint(&witness.message_id).to_str_radix(10),
        "pathElements": path_elements,
        "identityPathIndex": identity_path_index,
        "x": to_bigint(&witness.x).to_str_radix(10),
        "externalNullifier": to_bigint(&witness.external_nullifier).to_str_radix(10),
    });

    Ok(inputs)
}
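
For illustration, the resulting JSON has this shape (all scalars are decimal strings; values elided, path index entries are the "0"/"1" direction bits):

{
  "identitySecret": "…",
  "userMessageLimit": "…",
  "messageId": "…",
  "pathElements": ["…", "…"],
  "identityPathIndex": ["0", "1"],
  "x": "…",
  "externalNullifier": "…"
}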

/// Computes RLN proof values from witness input.
///
/// Calculates the public outputs (y, nullifier, root) that will be part of the proof.
pub fn proof_values_from_witness(
    witness: &RLNWitnessInput,
) -> Result<RLNProofValues, ProtocolError> {
    // y share: y = a_0 + x * a_1, with a_1 = poseidonHash([a_0, external_nullifier, message_id])
    let a_0 = &witness.identity_secret;
    let mut to_hash = [**a_0, witness.external_nullifier, witness.message_id];
    let a_1 = poseidon_hash(&to_hash)?;
    let y = *(a_0.clone()) + witness.x * a_1;

    // Nullifier
    let nullifier = poseidon_hash(&[a_1])?;
    to_hash[0].zeroize();

    // Merkle tree root computation
    let root = compute_tree_root(
        &witness.identity_secret,
        &witness.user_message_limit,
        &witness.path_elements,
        &witness.identity_path_index,
    )?;

    Ok(RLNProofValues {
        y,
        nullifier,
        root,
        x: witness.x,
        external_nullifier: witness.external_nullifier,
    })
}
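
Tying this to the slashing module: two witnesses that share identity_secret, external_nullifier, and message_id but bind different signals yield two shares of the same line, so the secret is recoverable. A hedged sketch (assuming the sibling module is reachable as `super::slashing`):

fn rate_limit_violation_example(
    w1: &RLNWitnessInput,
    w2: &RLNWitnessInput,
) -> Result<IdSecret, ProtocolError> {
    // w1 and w2 must differ only in x for recovery to succeed.
    let pv1 = proof_values_from_witness(w1)?;
    let pv2 = proof_values_from_witness(w2)?;
    super::slashing::recover_id_secret(&pv1, &pv2)
}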

/// Computes the Merkle tree root from identity credentials and Merkle membership proof.
pub fn compute_tree_root(
    identity_secret: &IdSecret,
    user_message_limit: &Fr,
    path_elements: &[Fr],
    identity_path_index: &[u8],
) -> Result<Fr, ProtocolError> {
    let mut to_hash = [*identity_secret.clone()];
    let id_commitment = poseidon_hash(&to_hash)?;
    to_hash[0].zeroize();

    let mut root = poseidon_hash(&[id_commitment, *user_message_limit])?;

    for i in 0..identity_path_index.len() {
        if identity_path_index[i] == 0 {
            root = poseidon_hash(&[root, path_elements[i]])?;
        } else {
            root = poseidon_hash(&[path_elements[i], root])?;
        }
    }

    Ok(root)
}
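
A hedged depth-2 sketch of the loop above: a path index bit of 0 means the running node is a left child (sibling hashed on the right), 1 means it is a right child. All values are placeholders.

fn tiny_tree_root_example(
    identity_secret: &IdSecret,
    sibling_level_0: Fr,
    sibling_level_1: Fr,
) -> Result<Fr, ProtocolError> {
    // Leaf is the right child at level 0 and the left child at level 1.
    compute_tree_root(
        identity_secret,
        &Fr::from(100u64),                   // user_message_limit
        &[sibling_level_0, sibling_level_1], // siblings, leaf to root
        &[1, 0],                             // identity_path_index bits
    )
}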

/// Prepares inputs for witness calculation from RLN witness input.
pub(super) fn inputs_for_witness_calculation(
    witness: &RLNWitnessInput,
) -> Result<[(&str, Vec<FrOrSecret>); 7], ProtocolError> {
    let mut identity_path_index = Vec::with_capacity(witness.identity_path_index.len());
    witness
        .identity_path_index
        .iter()
        .for_each(|v| identity_path_index.push(Fr::from(*v)));

    Ok([
        (
            "identitySecret",
            vec![witness.identity_secret.clone().into()],
        ),
        ("userMessageLimit", vec![witness.user_message_limit.into()]),
        ("messageId", vec![witness.message_id.into()]),
        (
            "pathElements",
            witness
                .path_elements
                .iter()
                .cloned()
                .map(Into::into)
                .collect(),
        ),
        (
            "identityPathIndex",
            identity_path_index.into_iter().map(Into::into).collect(),
        ),
        ("x", vec![witness.x.into()]),
        ("externalNullifier", vec![witness.external_nullifier.into()]),
    ])
}
1247 rln/src/public.rs
File diff suppressed because it is too large
Some files were not shown because too many files have changed in this diff