Mirror of https://github.com/vacp2p/zerokit.git, synced 2026-01-09 13:47:58 -05:00

Compare commits: 56 commits, `optimize-w...eyre-remov`
Commit SHA1s (author and date columns are empty in the source):

93f5997544, 4077357e3f, 84d9799d09, c576af8e62, 81470b9678, 9d4198c205,
c60e0c33fc, ba467d370c, ffd5851d7d, 759d312680, fb0ffd74a3, 9d8372be39,
de9c0d5072, 5c60ec7cce, 8793965650, 1930ca1610, 4b4169d7a7, 8a3e33be41,
7bb2444ba4, 00f8d039a8, e39f156fff, 8b04930583, b9d27039c3, 49e2517e15,
6621efd0bb, 4a74ff0d6c, fc823e7187, 0d5642492a, c4579e1917, e6238fd722,
5540ddc993, d8f813bc2e, c6493bd10f, dd5edd6818, 85d71a5427, 7790954c4a,
820240d8c0, fe2b224981, d3d85c3e3c, 0005b1d61f, 4931b25237, 652cc3647e,
51939be4a8, cd60af5b52, 8581ac0b78, 5937a67ee6, d96eb59e92, a372053047,
b450bfdb37, 0521c7349e, d91a5b3568, cf9dbb419d, aaa12db70d, 30d5f94181,
ccd2ead847, 7669d72f9b
## .github/labels.yml (vendored, 5 changed lines)

```diff
@@ -90,11 +90,6 @@
   description: go-waku-productionization track (Waku Product)
   color: 9DEA79

-# Tracks within zk-WASM project
-- name: track:kickoff
-  description: Kickoff track (zk-WASM)
-  color: 06B6C8
-
 # Tracks within RAD project
 - name: track:waku-specs
   description: Waku specs track (RAD)
```
## .github/workflows/ci.yml (vendored, 142 changed lines)

```diff
@@ -3,39 +3,32 @@ on:
     branches:
       - master
     paths-ignore:
-      - '**.md'
-      - '!.github/workflows/*.yml'
-      - '!multiplier/src/**'
-      - '!private-settlement/src/**'
-      - '!rln-wasm/**'
-      - '!rln/src/**'
-      - '!rln/resources/**'
-      - '!semaphore/src/**'
-      - '!utils/src/**'
+      - "**.md"
+      - "!.github/workflows/*.yml"
+      - "!rln-wasm/**"
+      - "!rln/src/**"
+      - "!rln/resources/**"
+      - "!utils/src/**"
   pull_request:
     paths-ignore:
-      - '**.md'
-      - '!.github/workflows/*.yml'
-      - '!multiplier/src/**'
-      - '!private-settlement/src/**'
-      - '!rln-wasm/**'
-      - '!rln/src/**'
-      - '!rln/resources/**'
-      - '!semaphore/src/**'
-      - '!utils/src/**'
+      - "**.md"
+      - "!.github/workflows/*.yml"
+      - "!rln-wasm/**"
+      - "!rln/src/**"
+      - "!rln/resources/**"
+      - "!utils/src/**"

 name: Tests

 jobs:
-  test:
+  utils-test:
     strategy:
       matrix:
-        platform: [ubuntu-latest, macos-latest]
-        crate: [multiplier, semaphore, rln, utils]
+        platform: [ ubuntu-latest, macos-latest ]
+        crate: [ utils ]
     runs-on: ${{ matrix.platform }}
     timeout-minutes: 60

     name: test - ${{ matrix.crate }} - ${{ matrix.platform }}
     steps:
       - name: Checkout sources
@@ -51,17 +44,49 @@ jobs:
         run: make installdeps
       - name: cargo-make test
         run: |
           cargo make test --release
         working-directory: ${{ matrix.crate }}

+  rln-test:
+    strategy:
+      matrix:
+        platform: [ ubuntu-latest, macos-latest ]
+        crate: [ rln ]
+        feature: [ "default", "arkzkey", "stateless" ]
+    runs-on: ${{ matrix.platform }}
+    timeout-minutes: 60
+
+    name: test - ${{ matrix.crate }} - ${{ matrix.platform }} - ${{ matrix.feature }}
+    steps:
+      - name: Checkout sources
+        uses: actions/checkout@v3
+      - name: Install stable toolchain
+        uses: actions-rs/toolchain@v1
+        with:
+          profile: minimal
+          toolchain: stable
+          override: true
+      - uses: Swatinem/rust-cache@v2
+      - name: Install dependencies
+        run: make installdeps
+      - name: cargo-make test
+        run: |
+          if [ ${{ matrix.feature }} == default ]; then
+            cargo make test --release
+          else
+            cargo make test_${{ matrix.feature }} --release
+          fi
+        working-directory: ${{ matrix.crate }}

   rln-wasm:
     strategy:
       matrix:
-        platform: [ubuntu-latest, macos-latest]
+        platform: [ ubuntu-latest, macos-latest ]
+        feature: [ "default", "arkzkey" ]
     runs-on: ${{ matrix.platform }}
     timeout-minutes: 60

-    name: test - rln-wasm - ${{ matrix.platform }}
+    name: test - rln-wasm - ${{ matrix.platform }} - ${{ matrix.feature }}
     steps:
       - uses: actions/checkout@v3
       - name: Install stable toolchain
@@ -73,23 +98,33 @@ jobs:
       - uses: Swatinem/rust-cache@v2
       - name: Install Dependencies
         run: make installdeps
       - name: Install wasm-pack
         uses: jetli/wasm-pack-action@v0.3.0
-      - run: cargo make build
+      - name: cargo-make build
+        run: |
+          if [ ${{ matrix.feature }} == default ]; then
+            cargo make build
+          else
+            cargo make build_${{ matrix.feature }}
+          fi
         working-directory: rln-wasm
-      - run: cargo make test --release
+      - name: cargo-make test
+        run: |
+          if [ ${{ matrix.feature }} == default ]; then
+            cargo make test --release
+          else
+            cargo make test_${{ matrix.feature }} --release
+          fi
         working-directory: rln-wasm

   lint:
     strategy:
       matrix:
         # we run lint tests only on ubuntu
-        platform: [ubuntu-latest]
-        crate: [multiplier, semaphore, rln, utils]
+        platform: [ ubuntu-latest ]
+        crate: [ rln, rln-wasm, utils ]
     runs-on: ${{ matrix.platform }}
     timeout-minutes: 60

     name: lint - ${{ matrix.crate }} - ${{ matrix.platform }}
     steps:
       - name: Checkout sources
         uses: actions/checkout@v3
@@ -110,19 +145,17 @@ jobs:
       - name: cargo clippy
         if: success() || failure()
         run: |
-          cargo clippy --release -- -D warnings
+          cargo clippy --release
         working-directory: ${{ matrix.crate }}
-      # We skip clippy on rln-wasm, since wasm target is managed by cargo make
+      # Currently not treating warnings as error, too noisy
+      # -- -D warnings

-  benchmark:
+  benchmark-utils:
     # run only in pull requests
     if: github.event_name == 'pull_request'
     strategy:
       matrix:
         # we run benchmark tests only on ubuntu
-        platform: [ubuntu-latest]
-        crate: [rln, utils]
+        platform: [ ubuntu-latest ]
+        crate: [ utils ]
     runs-on: ${{ matrix.platform }}
     timeout-minutes: 60

@@ -134,4 +167,27 @@ jobs:
       - uses: boa-dev/criterion-compare-action@v3
         with:
           branchName: ${{ github.base_ref }}
           cwd: ${{ matrix.crate }}
+
+  benchmark-rln:
+    # run only in pull requests
+    if: github.event_name == 'pull_request'
+    strategy:
+      matrix:
+        # we run benchmark tests only on ubuntu
+        platform: [ ubuntu-latest ]
+        crate: [ rln ]
+        feature: [ "default", "arkzkey" ]
+    runs-on: ${{ matrix.platform }}
+    timeout-minutes: 60
+
+    name: benchmark - ${{ matrix.platform }} - ${{ matrix.crate }} - ${{ matrix.feature }}
+    steps:
+      - name: Checkout sources
+        uses: actions/checkout@v3
+      - uses: Swatinem/rust-cache@v2
+      - uses: boa-dev/criterion-compare-action@v3
+        with:
+          branchName: ${{ github.base_ref }}
+          cwd: ${{ matrix.crate }}
+          features: ${{ matrix.feature }}
```
## .github/workflows/nightly-release.yml (vendored, 49 changed lines)

```diff
@@ -8,10 +8,14 @@ jobs:
   linux:
     strategy:
       matrix:
+        feature: [ "default", "arkzkey", "stateless" ]
         target:
           - x86_64-unknown-linux-gnu
           - aarch64-unknown-linux-gnu
-          - i686-unknown-linux-gnu
+          # - i686-unknown-linux-gnu
+        include:
+          - feature: stateless
+            cargo_args: --exclude rln-cli
     name: Linux build
     runs-on: ubuntu-latest
     steps:
@@ -29,16 +33,16 @@ jobs:
         run: make installdeps
       - name: cross build
         run: |
-          cross build --release --target ${{ matrix.target }} --workspace --exclude rln-wasm
+          cross build --release --target ${{ matrix.target }} --features ${{ matrix.feature }} --workspace ${{ matrix.cargo_args }}
           mkdir release
           cp target/${{ matrix.target }}/release/librln* release/
-          tar -czvf ${{ matrix.target }}-rln.tar.gz release/
+          tar -czvf ${{ matrix.target }}-${{ matrix.feature }}-rln.tar.gz release/

       - name: Upload archive artifact
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v4
         with:
-          name: ${{ matrix.target }}-archive
-          path: ${{ matrix.target }}-rln.tar.gz
+          name: ${{ matrix.target }}-${{ matrix.feature }}-archive
+          path: ${{ matrix.target }}-${{ matrix.feature }}-rln.tar.gz
           retention-days: 2

   macos:
@@ -46,9 +50,13 @@ jobs:
     runs-on: macos-latest
     strategy:
       matrix:
+        feature: [ "default", "arkzkey", "stateless" ]
         target:
           - x86_64-apple-darwin
           - aarch64-apple-darwin
+        include:
+          - feature: stateless
+            cargo_args: --exclude rln-cli
     steps:
       - name: Checkout sources
         uses: actions/checkout@v3
@@ -64,18 +72,18 @@ jobs:
         run: make installdeps
       - name: cross build
         run: |
-          cross build --release --target ${{ matrix.target }} --workspace --exclude rln-wasm
+          cross build --release --target ${{ matrix.target }} --features ${{ matrix.feature }} --workspace ${{ matrix.cargo_args }}
           mkdir release
           cp target/${{ matrix.target }}/release/librln* release/
-          tar -czvf ${{ matrix.target }}-rln.tar.gz release/
+          tar -czvf ${{ matrix.target }}-${{ matrix.feature }}-rln.tar.gz release/

       - name: Upload archive artifact
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v4
         with:
-          name: ${{ matrix.target }}-archive
-          path: ${{ matrix.target }}-rln.tar.gz
+          name: ${{ matrix.target }}-${{ matrix.feature }}-archive
+          path: ${{ matrix.target }}-${{ matrix.feature }}-rln.tar.gz
           retention-days: 2

   browser-rln-wasm:
     name: Browser build (RLN WASM)
     runs-on: ubuntu-latest
@@ -91,8 +99,6 @@ jobs:
       - uses: Swatinem/rust-cache@v2
       - name: Install dependencies
         run: make installdeps
-      - name: Install wasm-pack
-        uses: jetli/wasm-pack-action@v0.3.0
       - name: cross make build
         run: |
           cross make build
@@ -102,16 +108,15 @@ jobs:
         working-directory: rln-wasm

       - name: Upload archive artifact
-        uses: actions/upload-artifact@v2
+        uses: actions/upload-artifact@v4
         with:
           name: browser-rln-wasm-archive
           path: rln-wasm/browser-rln-wasm.tar.gz
           retention-days: 2

   prepare-prerelease:
     name: Prepare pre-release
-    needs: [linux, macos, browser-rln-wasm]
+    needs: [ linux, macos, browser-rln-wasm ]
     runs-on: ubuntu-latest
     steps:
       - name: Checkout code
@@ -119,8 +124,8 @@ jobs:
         with:
           ref: master
       - name: Download artifacts
-        uses: actions/download-artifact@v2
+        uses: actions/download-artifact@v4

       - name: Delete tag
         uses: dev-drprasad/delete-tag-and-release@v0.2.1
         with:
@@ -142,7 +147,7 @@ jobs:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

       - name: Delete artifacts
-        uses: geekyeggo/delete-artifact@v1
+        uses: geekyeggo/delete-artifact@v5
         with:
           failOnError: false
           name: |
```
## .gitignore (vendored, 5 changed lines)

```diff
@@ -3,12 +3,15 @@
 *.log
 tmp/
-rln/pmtree_db
+rln-cli/database

 # Generated by Cargo
 # will have compiled files and executables
 debug/
 target/
+wabt/

+# Generated by Nix
+result/

 # These are backup files generated by rustfmt
 **/*.rs.bk
```
## .gitmodules (vendored, deleted)

```diff
@@ -1,4 +0,0 @@
-[submodule "semaphore/vendor/semaphore"]
-	path = semaphore/vendor/semaphore
-	ignore = dirty
-	url = https://github.com/appliedzkp/semaphore.git
```
## CHANGELOG (file header missing in the source)

```diff
@@ -1,3 +1,5 @@
 # CHANGE LOG

+## 2023-02-28 v0.2
+
 This release contains:
@@ -10,7 +12,6 @@ This release contains:
 - Dual License under Apache 2.0 and MIT
 - RLN compiles as a static library, which can be consumed through a C FFI
-

 ## 2022-09-19 v0.1

 Initial beta release.
```
## Cargo.lock (generated, 3431 changed lines)

Diff suppressed because it is too large.
## Cargo.toml (18 changed lines)

```diff
@@ -1,13 +1,6 @@
 [workspace]
-members = [
-    "multiplier",
-    "private-settlement",
-    "semaphore",
-    "rln",
-    "rln-cli",
-    "rln-wasm",
-    "utils",
-]
+members = ["rln", "rln-cli", "rln-wasm", "utils"]
+default-members = ["rln", "rln-cli", "rln-wasm", "utils"]
 resolver = "2"

 # Compilation profile for any non-workspace member.
@@ -15,10 +8,3 @@ resolver = "2"
 # while having neglible impact on incremental build times.
 [profile.dev.package."*"]
 opt-level = 3
-
-[profile.release.package."rln-wasm"]
-# Tell `rustc` to optimize for small code size.
-opt-level = "s"
-
-[profile.release.package."semaphore"]
-codegen-units = 1
```

A further hunk follows whose file header is missing in the source; its content is cross-rs target configuration:

```diff
@@ -29,4 +29,7 @@ image = "ghcr.io/cross-rs/mips64-unknown-linux-gnuabi64:latest"
 image = "ghcr.io/cross-rs/mips64el-unknown-linux-gnuabi64:latest"

 [target.mipsel-unknown-linux-gnu]
 image = "ghcr.io/cross-rs/mipsel-unknown-linux-gnu:latest"
+
+[target.aarch64-linux-android]
+image = "ghcr.io/cross-rs/aarch64-linux-android:edge"
```
## Makefile (21 changed lines)

```diff
@@ -1,4 +1,4 @@
-.PHONY: all installdeps build test clean
+.PHONY: all installdeps build test bench clean

 all: .pre-build build

@@ -13,15 +13,21 @@ endif
 installdeps: .pre-build
 ifeq ($(shell uname),Darwin)
-# commented due to https://github.com/orgs/Homebrew/discussions/4612
-# @brew update
+	@brew update
 	@brew install cmake ninja
 else ifeq ($(shell uname),Linux)
 	@sudo apt-get update
 	@sudo apt-get install -y cmake ninja-build
 endif
 	@git clone --recursive https://github.com/WebAssembly/wabt.git
 	@cd wabt && mkdir build && cd build && cmake .. -GNinja && ninja && sudo ninja install
+	@if [ ! -d "$$HOME/.nvm" ]; then \
+		curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.40.2/install.sh | bash; \
+	fi
+	@bash -c 'export NVM_DIR="$$HOME/.nvm" && \
+		[ -s "$$NVM_DIR/nvm.sh" ] && \. "$$NVM_DIR/nvm.sh" && \
+		nvm install 22.14.0 && \
+		nvm use 22.14.0'
 	@curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh
+	@echo "\033[1;32m>>> Now run this command to activate Node.js 22.14.0: \033[1;33msource $$HOME/.nvm/nvm.sh && nvm use 22.14.0\033[0m"

 build: .pre-build
 	@cargo make build
@@ -29,5 +35,8 @@ build: .pre-build
 test: .pre-build
 	@cargo make test

+bench: .pre-build
+	@cargo make bench
+
 clean:
 	@cargo clean
```
## README.md (74 changed lines)

````diff
@@ -1,35 +1,81 @@
 # Zerokit

-A set of Zero Knowledge modules, written in Rust and designed to be used in other system programming environments.
+[](https://crates.io/crates/rln)
+[](https://github.com/vacp2p/zerokit/actions)
+[](https://opensource.org/licenses/MIT)
+[](https://opensource.org/licenses/Apache-2.0)

-## Initial scope
+A collection of Zero Knowledge modules written in Rust and designed to be used in other system programming environments.

-Focus on RLN and being able to use [Circom](https://iden3.io/circom) based
-version through ark-circom, as opposed to the native one that currently exists
-in Rust.
+## Overview

-## Acknowledgements
+Zerokit provides zero-knowledge cryptographic primitives with a focus on performance, security, and usability.
+The current focus is on the Rate-Limiting Nullifier [RLN](https://github.com/Rate-Limiting-Nullifier) implementation.

-- Uses [ark-circom](https://github.com/gakonst/ark-circom), Rust wrapper around Circom.
+The current implementation is based on the following [specification](https://github.com/vacp2p/rfc-index/blob/main/vac/raw/rln-v2.md)
+and focuses on RLNv2, which allows setting a rate limit for the number of messages a user can send.

-- Inspired by Applied ZKP group work, e.g. [zk-kit](https://github.com/appliedzkp/zk-kit).
+## Features

-- [RLN library](https://github.com/kilic/rln) written in Rust based on Bellman.
+- **RLN Implementation**: Efficient Rate-Limiting Nullifier using zkSNARKs
+- **Circom Compatibility**: Uses Circom-based circuits for RLN
+- **Cross-Platform**: Support for multiple architectures (see compatibility note below)
+- **FFI-Friendly**: Easy to integrate with other languages

-- [semaphore-rs](https://github.com/worldcoin/semaphore-rs) written in Rust based on ark-circom.
+## Architecture
+
+Zerokit currently focuses on the RLN (Rate-Limiting Nullifier) implementation using [Circom](https://iden3.io/circom) circuits through ark-circom, providing an alternative to existing native Rust implementations.

 ## Build and Test

-To install missing dependencies, run the following commands from the root folder
+> [!IMPORTANT]
+> For WASM support or x32 architecture builds, use version `0.6.1`. The current version has dependency issues for these platforms. WASM support will return in a future release.
+
+### Install Dependencies

 ```bash
 make installdeps
 ```

-To build and test all crates, run the following commands from the root folder
+### Build and Test All Crates

 ```bash
 make build
 make test
 ```

-## Release assets
+## Release Assets

-We use [`cross-rs`](https://github.com/cross-rs/cross) to cross-compile and generate release assets for rln.
+We use [`cross-rs`](https://github.com/cross-rs/cross) to cross-compile and generate release assets:
+
+```bash
+# Example: Build for specific target
+cross build --target x86_64-unknown-linux-gnu --release -p rln
+```
+
+## Used By
+
+Zerokit powers zero-knowledge functionality in:
+
+- [**nwaku**](https://github.com/waku-org/nwaku) - Nim implementation of the Waku v2 protocol
+- [**js-rln**](https://github.com/waku-org/js-rln) - JavaScript bindings for RLN
+
+## Acknowledgements
+
+- Inspired by [Applied ZKP](https://zkp.science/) group work, including [zk-kit](https://github.com/appliedzkp/zk-kit)
+- Uses [ark-circom](https://github.com/gakonst/ark-circom) for zkey and Groth16 proof generation
+- Witness calculation based on [circom-witnesscalc](https://github.com/iden3/circom-witnesscalc) by iden3.
+  The execution graph file used by this code has been generated by means of the same iden3 software.
+
+> [!IMPORTANT]
+> The circom-witnesscalc code fragments have been borrowed instead of depending on that crate,
+> because its input and output types were incompatible with the corresponding zerokit code,
+> and circom-witnesscalc has some dependencies that are redundant for our purpose.
+
+## Documentation
+
+For detailed documentation on each module:
+
+```bash
+cargo doc --open
+```
````
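To make the member-registration flow this changeset converges on concrete, here is a minimal sketch of registering one member the way the relay example later in this diff does it: generate an identity, bind it to a message limit in a rate commitment, and insert that as the next Merkle-tree leaf. It assumes the `rln` crate APIs that appear verbatim in `rln-cli/src/examples/relay.rs` below (`keygen`, `poseidon_hash`, `fr_to_bytes_le`, `RLN::leaves_set`, `RLN::set_next_leaf`); constructing the `RLN` instance itself is elided.

```rust
use std::io::Cursor;

use rln::{circuit::Fr, hashers::poseidon_hash, protocol::keygen, public::RLN, utils::fr_to_bytes_le};

// Sketch only: register one member with a given message limit.
// RLN construction (RLN::new_with_params) is shown in relay.rs below.
fn register(rln: &mut RLN, message_limit: u32) -> color_eyre::Result<usize> {
    // Fresh identity: secret hash plus public commitment.
    let (_identity_secret_hash, id_commitment) = keygen();

    // The rate commitment binds the identity to its message limit.
    let rate_commitment = poseidon_hash(&[id_commitment, Fr::from(message_limit)]);

    // The next free leaf index doubles as the user index.
    let index = rln.leaves_set();
    let mut buffer = Cursor::new(fr_to_bytes_le(&rate_commitment));
    rln.set_next_leaf(&mut buffer)?;
    Ok(index)
}
```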
## flake.lock (generated, new file, 27 lines)

```json
{
  "nodes": {
    "nixpkgs": {
      "locked": {
        "lastModified": 1740603184,
        "narHash": "sha256-t+VaahjQAWyA+Ctn2idyo1yxRIYpaDxMgHkgCNiMJa4=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "f44bd8ca21e026135061a0a57dcf3d0775b67a49",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "f44bd8ca21e026135061a0a57dcf3d0775b67a49",
        "type": "github"
      }
    },
    "root": {
      "inputs": {
        "nixpkgs": "nixpkgs"
      }
    }
  },
  "root": "root",
  "version": 7
}
```
## flake.nix (new file, 38 lines)

```nix
{
  description = "A flake for building zerokit";

  inputs = {
    # Version 24.11
    nixpkgs.url = "github:NixOS/nixpkgs?rev=f44bd8ca21e026135061a0a57dcf3d0775b67a49";
  };

  outputs = { self, nixpkgs }:
    let
      stableSystems = [
        "x86_64-linux" "aarch64-linux"
        "x86_64-darwin" "aarch64-darwin"
        "x86_64-windows" "i686-linux"
        "i686-windows"
      ];
      forAllSystems = nixpkgs.lib.genAttrs stableSystems;
      pkgsFor = forAllSystems (system: import nixpkgs { inherit system; });
    in rec
    {
      packages = forAllSystems (system: let
        pkgs = pkgsFor.${system};
      in rec {
        zerokit-android-arm64 = pkgs.callPackage ./nix/default.nix { target-platform = "aarch64-android-prebuilt"; rust-target = "aarch64-linux-android"; };
        default = zerokit-android-arm64;
      });

      devShells = forAllSystems (system: let
        pkgs = pkgsFor.${system};
      in {
        default = pkgs.mkShell {
          inputsFrom = [
            packages.${system}.default
          ];
        };
      });
    };
}
```
## Deleted: `multiplier` package manifest (32 lines; file header missing in the source)

```toml
[package]
name = "multiplier"
version = "0.3.0"
edition = "2018"
license = "MIT OR Apache-2.0"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]

# WASM operations
# wasmer = { version = "2.0" }
# fnv = { version = "1.0.3", default-features = false }
# num = { version = "0.4.0" }
# num-traits = { version = "0.2.0", default-features = false }

# ZKP Generation
# ark-ff = { version = "0.3.0", default-features = false, features = ["parallel", "asm"] }
ark-std = { version = "0.3.0", default-features = false, features = ["parallel"] }
ark-bn254 = { version = "0.3.0" }
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", features = ["parallel"] }
# ark-poly = { version = "^0.3.0", default-features = false, features = ["parallel"] }
ark-serialize = { version = "0.3.0", default-features = false }

ark-circom = { git = "https://github.com/gakonst/ark-circom", features = ["circom-2"], rev = "35ce5a9" }

# error handling
color-eyre = "0.6.1"

# decoding of data
# hex = "0.4.3"
# byteorder = "1.4.3"
```
## Deleted: `multiplier` Makefile.toml (7 lines)

```toml
[tasks.build]
command = "cargo"
args = ["build", "--release"]

[tasks.test]
command = "cargo"
args = ["test", "--release"]
```
## Deleted: `multiplier` README.md (21 lines)

````markdown
# Multiplier example

Example wrapper around a basic Circom circuit to test Circom 2 integration
through ark-circom and FFI.

## Build and Test

To build and test, run the following commands within the module folder

```bash
cargo make build
cargo make test
```

## FFI

To generate C or Nim bindings from Rust FFI, use `cbindgen` or `nbindgen`:

```
cbindgen . -o target/multiplier.h
nbindgen . -o target/multiplier.nim
```
````
Two deleted binary files are not shown.
## Deleted: `multiplier` FFI module (77 lines)

```rust
use crate::public::Multiplier;
use std::slice;

/// Buffer struct is taken from
/// https://github.com/celo-org/celo-threshold-bls-rs/blob/master/crates/threshold-bls-ffi/src/ffi.rs
///
/// Also heavily inspired by https://github.com/kilic/rln/blob/master/src/ffi.rs

#[repr(C)]
#[derive(Clone, Debug, PartialEq)]
pub struct Buffer {
    pub ptr: *const u8,
    pub len: usize,
}

impl From<&[u8]> for Buffer {
    fn from(src: &[u8]) -> Self {
        Self {
            ptr: &src[0] as *const u8,
            len: src.len(),
        }
    }
}

impl<'a> From<&Buffer> for &'a [u8] {
    fn from(src: &Buffer) -> &'a [u8] {
        unsafe { slice::from_raw_parts(src.ptr, src.len) }
    }
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn new_circuit(ctx: *mut *mut Multiplier) -> bool {
    if let Ok(mul) = Multiplier::new() {
        unsafe { *ctx = Box::into_raw(Box::new(mul)) };
        true
    } else {
        false
    }
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn prove(ctx: *const Multiplier, output_buffer: *mut Buffer) -> bool {
    println!("multiplier ffi: prove");
    let mul = unsafe { &*ctx };
    let mut output_data: Vec<u8> = Vec::new();

    match mul.prove(&mut output_data) {
        Ok(proof_data) => proof_data,
        Err(_) => return false,
    };
    unsafe { *output_buffer = Buffer::from(&output_data[..]) };
    std::mem::forget(output_data);
    true
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn verify(
    ctx: *const Multiplier,
    proof_buffer: *const Buffer,
    result_ptr: *mut u32,
) -> bool {
    println!("multiplier ffi: verify");
    let mul = unsafe { &*ctx };
    let proof_data = <&[u8]>::from(unsafe { &*proof_buffer });
    if match mul.verify(proof_data) {
        Ok(verified) => verified,
        Err(_) => return false,
    } {
        unsafe { *result_ptr = 0 };
    } else {
        unsafe { *result_ptr = 1 };
    };
    true
}
```
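The exported functions above hand results back through raw `Buffer`s, so a caller owns the context pointer and treats the returned bytes as borrowed. A hypothetical driver for this deleted API, written in Rust standing in for a C caller and assuming it is compiled against the same crate:

```rust
use std::ptr;

use multiplier::ffi::{new_circuit, prove, verify, Buffer};
use multiplier::public::Multiplier;

fn main() {
    // Opaque context pointer, owned by the caller after new_circuit.
    let mut ctx: *mut Multiplier = ptr::null_mut();
    assert!(new_circuit(&mut ctx), "circuit setup failed");

    // prove() fills a caller-visible byte buffer (leaked by the callee).
    let mut proof = Buffer {
        ptr: ptr::null(),
        len: 0,
    };
    assert!(prove(ctx, &mut proof), "proving failed");

    // verify() reports success through result_ptr; 0 means verified
    // (see the body of `verify` above).
    let mut result: u32 = 1;
    assert!(verify(ctx, &proof, &mut result), "verify call failed");
    assert_eq!(result, 0, "proof did not verify");
}
```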
## Deleted: `multiplier` crate root (2 lines)

```rust
pub mod ffi;
pub mod public;
```
## Deleted: `multiplier` example binary (49 lines)

```rust
use ark_circom::{CircomBuilder, CircomConfig};
use ark_std::rand::thread_rng;
use color_eyre::{Report, Result};

use ark_bn254::Bn254;
use ark_groth16::{
    create_random_proof as prove, generate_random_parameters, prepare_verifying_key, verify_proof,
};

fn groth16_proof_example() -> Result<()> {
    let cfg = CircomConfig::<Bn254>::new(
        "./resources/circom2_multiplier2.wasm",
        "./resources/circom2_multiplier2.r1cs",
    )?;

    let mut builder = CircomBuilder::new(cfg);
    builder.push_input("a", 3);
    builder.push_input("b", 11);

    // create an empty instance for setting it up
    let circom = builder.setup();

    let mut rng = thread_rng();
    let params = generate_random_parameters::<Bn254, _, _>(circom, &mut rng)?;

    let circom = builder.build()?;

    let inputs = circom
        .get_public_inputs()
        .ok_or(Report::msg("no public inputs"))?;

    let proof = prove(circom, &params, &mut rng)?;

    let pvk = prepare_verifying_key(&params.vk);

    match verify_proof(&pvk, &proof, &inputs) {
        Ok(_) => Ok(()),
        Err(_) => Err(Report::msg("not verified")),
    }
}

fn main() {
    println!("Hello, world!");

    match groth16_proof_example() {
        Ok(_) => println!("Success"),
        Err(_) => println!("Error"),
    }
}
```
## Deleted: `multiplier` public module (79 lines)

```rust
use ark_circom::{CircomBuilder, CircomCircuit, CircomConfig};
use ark_std::rand::thread_rng;

use ark_bn254::Bn254;
use ark_groth16::{
    create_random_proof as prove, generate_random_parameters, prepare_verifying_key, verify_proof,
    Proof, ProvingKey,
};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use color_eyre::{Report, Result};
use std::io::{Read, Write};

pub struct Multiplier {
    circom: CircomCircuit<Bn254>,
    params: ProvingKey<Bn254>,
}

impl Multiplier {
    // TODO Break this apart here
    pub fn new() -> Result<Multiplier> {
        let cfg = CircomConfig::<Bn254>::new(
            "./resources/circom2_multiplier2.wasm",
            "./resources/circom2_multiplier2.r1cs",
        )?;

        let mut builder = CircomBuilder::new(cfg);
        builder.push_input("a", 3);
        builder.push_input("b", 11);

        // create an empty instance for setting it up
        let circom = builder.setup();

        let mut rng = thread_rng();

        let params = generate_random_parameters::<Bn254, _, _>(circom, &mut rng)?;

        let circom = builder.build()?;

        Ok(Multiplier { circom, params })
    }

    // TODO Input Read
    pub fn prove<W: Write>(&self, result_data: W) -> Result<()> {
        let mut rng = thread_rng();

        // XXX: There's probably a better way to do this
        let circom = self.circom.clone();
        let params = self.params.clone();

        let proof = prove(circom, &params, &mut rng)?;

        // XXX: Unclear if this is different from other serialization(s)
        proof.serialize(result_data)?;

        Ok(())
    }

    pub fn verify<R: Read>(&self, input_data: R) -> Result<bool> {
        let proof = Proof::deserialize(input_data)?;

        let pvk = prepare_verifying_key(&self.params.vk);

        // XXX Part of input data?
        let inputs = self
            .circom
            .get_public_inputs()
            .ok_or(Report::msg("no public inputs"))?;

        let verified = verify_proof(&pvk, &proof, &inputs)?;

        Ok(verified)
    }
}

impl Default for Multiplier {
    fn default() -> Self {
        Self::new().unwrap()
    }
}
```
## Deleted: `multiplier` tests (21 lines)

```rust
#[cfg(test)]
mod tests {
    use multiplier::public::Multiplier;

    #[test]
    fn multiplier_proof() {
        let mul = Multiplier::new().unwrap();

        let mut output_data: Vec<u8> = Vec::new();
        let _ = mul.prove(&mut output_data);

        let proof_data = &output_data[..];

        // XXX Pass as arg?
        //let pvk = prepare_verifying_key(&mul.params.vk);

        let verified = mul.verify(proof_data).unwrap();

        assert!(verified);
    }
}
```
## nix/default.nix (new file, 35 lines)

```nix
{
  pkgs,
  target-platform ? "aarch64-android-prebuilt",
  rust-target ? "aarch64-linux-android",
}:

pkgs.pkgsCross.${target-platform}.rustPlatform.buildRustPackage {
  pname = "zerokit";
  version = "nightly";

  src = ../.;

  cargoLock = {
    lockFile = ../Cargo.lock;
    allowBuiltinFetchGit = true;
  };

  CARGO_HOME = "/tmp";

  buildPhase = ''
    pushd rln
    cargo rustc --crate-type=cdylib --release --lib --target=${rust-target}
    popd
  '';

  installPhase = ''
    mkdir -p $out/
    cp ./target/${rust-target}/release/librln.so $out/
  '';

  meta = with pkgs.lib; {
    description = "Zerokit";
    license = licenses.mit;
  };
}
```
## Deleted: `private-settlement` package manifest (9 lines)

```toml
[package]
name = "private-settlement"
version = "0.3.0"
edition = "2021"
license = "MIT OR Apache-2.0"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
```
## Deleted: `private-settlement` Makefile.toml (7 lines)

```toml
[tasks.build]
command = "cargo"
args = ["build", "--release"]

[tasks.test]
command = "cargo"
args = ["test", "--release"]
```
## Deleted: `private-settlement` README.md (11 lines)

````markdown
# Private Settlement Module

This module is to provide APIs to manage, compute and verify [Private Settlement](https://rfc.vac.dev/spec/44/) zkSNARK proofs and primitives.

## Build and Test

To build and test, run the following commands within the module folder

```bash
cargo make build
cargo make test
```
````
An empty one-line file was also deleted (`@@ -1 +0,0 @@`).
## Deleted: `private-settlement` tests (11 lines)

```rust
#[cfg(test)]
mod tests {
    #[cfg(test)]
    mod tests {
        #[test]
        fn it_works() {
            let result = 2 + 2;
            assert_eq!(result, 4);
        }
    }
}
```
## rln-cli/Cargo.toml (27 changed lines)

```diff
@@ -1,13 +1,27 @@
 [package]
 name = "rln-cli"
-version = "0.3.0"
+version = "0.4.0"
 edition = "2021"

+[[example]]
+name = "relay"
+path = "src/examples/relay.rs"
+
+[[example]]
+name = "stateless"
+path = "src/examples/stateless.rs"
+required-features = ["stateless"]
+
 [dependencies]
-rln = { path = "../rln" }
-clap = { version = "4.2.7", features = ["cargo", "derive", "env"]}
-clap_derive = { version = "=4.2.0" }
-color-eyre = "=0.6.2"
-# serialization
-serde_json = "1.0.48"
-serde = { version = "1.0.130", features = ["derive"] }
+rln = { path = "../rln", default-features = false }
+zerokit_utils = { path = "../utils" }
+clap = { version = "4.5.35", features = ["cargo", "derive", "env"] }
+clap_derive = { version = "4.5.32" }
+color-eyre = "0.6.3"
+serde_json = "1.0"
+serde = { version = "1.0", features = ["derive"] }
+
+[features]
+default = []
+arkzkey = ["rln/arkzkey"]
+stateless = ["rln/stateless"]
```
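The new `arkzkey` and `stateless` features are plain Cargo feature forwards to the `rln` crate, so downstream code gates on them with ordinary `cfg` attributes, the same pattern the stateless example below opens with (`#![cfg(feature = "stateless")]`). A minimal sketch of that gating:

```rust
// Compiled only when the crate is built with `--features stateless`
// (cf. `required-features` in the manifest above).
#[cfg(feature = "stateless")]
mod stateless_support {
    pub fn enabled() -> bool {
        true
    }
}

fn main() {
    // Branch at compile time on the same feature flag.
    #[cfg(feature = "stateless")]
    println!("stateless build: {}", stateless_support::enabled());

    #[cfg(not(feature = "stateless"))]
    println!("default build");
}
```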
## rln-cli/Makefile.toml (new file, 9 lines)

```toml
[tasks.build]
command = "cargo"
args = ["build"]

[tasks.test]
disabled = true

[tasks.bench]
disabled = true
```
## rln-cli/README.md (new file, 170 lines)

````markdown
# Zerokit RLN-CLI

The Zerokit RLN-CLI provides a command-line interface for interacting with the public API of the [Zerokit RLN Module](../rln/README.md).

It also contains:

+ [Relay Example](#relay-example) to demonstrate the use of the RLN module for spam prevention.
+ [Stateless Example](#stateless-example) to demonstrate the use of the RLN module for stateless features.

## Configuration

The CLI can be configured using a JSON configuration file (see the [example](example.config.json)).

You can specify the configuration file path using the `RLN_CONFIG_PATH` environment variable:

```bash
export RLN_CONFIG_PATH=example.config.json
```

Alternatively, you can provide the configuration file path as an argument for each command:

```bash
RLN_CONFIG_PATH=example.config.json cargo run -- <SUBCOMMAND> [OPTIONS]
```

If the configuration file is empty, default settings will be used, but the tree data folder will be temporary and not saved to the preconfigured path.

We recommend using the example config, as all commands (except `new` and `create-with-params`) require an initialized RLN instance.

## Feature Flags

The CLI supports optional features. To enable the **arkzkey** feature, run:

```bash
cargo run --features arkzkey -- <SUBCOMMAND> [OPTIONS]
```

For more details, refer to the [Zerokit RLN Module](../rln/README.md) documentation.

## Relay Example

The following [Example](src/examples/relay.rs) demonstrates how RLN enables spam prevention in anonymous environments for multiple users.

You can run the example using the following command:

```bash
cargo run --example relay
```

or with the **arkzkey** feature flag:

```bash
cargo run --example relay --features arkzkey
```

You can also change **MESSAGE_LIMIT** and **TREEE_HEIGHT** in the [relay.rs](src/examples/relay.rs) file to see how the RLN instance behaves with different parameters.

Customizing the **TREEE_HEIGHT** constant to a value other than the default `20` requires following the [Custom Circuit Compilation](../rln/README.md#advanced-custom-circuit-compilation) instructions.

## Stateless Example

The following [Example](src/examples/stateless.rs) demonstrates how RLN can be used for stateless features by creating the Merkle tree outside of the RLN instance.

This example functions similarly to the [Relay Example](#relay-example) but uses a stateless RLN and a separate Merkle tree.

You can run the example using the following command:

```bash
cargo run --example stateless --features stateless
```

or with the **arkzkey** feature flag:

```bash
cargo run --example stateless --features stateless,arkzkey
```

## CLI Commands

### Instance Management

To initialize a new RLN instance:

```bash
cargo run new --tree-height <HEIGHT>
```

To initialize an RLN instance with custom parameters:

```bash
cargo run new-with-params --resources-path <PATH> --tree-height <HEIGHT>
```

To update the Merkle tree height:

```bash
cargo run set-tree --tree-height <HEIGHT>
```

### Leaf Operations

To set a single leaf:

```bash
cargo run set-leaf --index <INDEX> --input <INPUT_PATH>
```

To set multiple leaves:

```bash
cargo run set-multiple-leaves --index <START_INDEX> --input <INPUT_PATH>
```

To reset multiple leaves:

```bash
cargo run reset-multiple-leaves --input <INPUT_PATH>
```

To set the next available leaf:

```bash
cargo run set-next-leaf --input <INPUT_PATH>
```

To delete a specific leaf:

```bash
cargo run delete-leaf --index <INDEX>
```

### Proof Operations

To generate a proof:

```bash
cargo run prove --input <INPUT_PATH>
```

To generate an RLN proof:

```bash
cargo run generate-proof --input <INPUT_PATH>
```

To verify a proof:

```bash
cargo run verify --input <PROOF_PATH>
```

To verify a proof with multiple Merkle roots:

```bash
cargo run verify-with-roots --input <INPUT_PATH> --roots <ROOTS_PATH>
```

### Tree Information

To retrieve the current Merkle root:

```bash
cargo run get-root
```

To obtain a Merkle proof for a specific index:

```bash
cargo run get-proof --index <INDEX>
```
````
## rln-cli/example.config.json (new file, 11 lines)

```json
{
  "tree_config": {
    "path": "database",
    "temporary": false,
    "cache_capacity": 150000,
    "flush_every_ms": 12000,
    "mode": "HighThroughput",
    "use_compression": false
  },
  "tree_height": 20
}
```
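The CLI deserializes this file with serde (see the config diff below, where the tree-specific options are kept as an untyped `serde_json::Value` in `InnerConfig`). As a standalone illustration, a sketch of the same round trip; the struct name here is hypothetical, and the field names are taken from the JSON above:

```rust
use serde::{Deserialize, Serialize};
use serde_json::Value;

// Mirrors the shape of example.config.json; the tree options stay
// untyped, matching the CLI's InnerConfig shown in the diff below.
#[derive(Debug, Serialize, Deserialize)]
struct ExampleConfig {
    tree_config: Value,
    tree_height: usize,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let contents = std::fs::read_to_string("example.config.json")?;
    let config: ExampleConfig = serde_json::from_str(&contents)?;
    println!("tree height: {}", config.tree_height);
    println!("tree config: {}", config.tree_config);
    Ok(())
}
```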
## rln-cli source: subcommand definitions (file header missing in the source)

```diff
@@ -1,49 +1,51 @@
 use std::path::PathBuf;

 use clap::Subcommand;
 use rln::circuit::TEST_TREE_HEIGHT;

 #[derive(Subcommand)]
 pub(crate) enum Commands {
     New {
         #[arg(short, long, default_value_t = TEST_TREE_HEIGHT)]
         tree_height: usize,
         /// Sets a custom config file
         #[arg(short, long)]
         config: PathBuf,
     },
     NewWithParams {
         #[arg(short, long, default_value_t = TEST_TREE_HEIGHT)]
         tree_height: usize,
         /// Sets a custom config file
         #[arg(short, long)]
         config: PathBuf,
-        #[arg(short, long)]
-        tree_config_input: PathBuf,
+        #[arg(short, long, default_value = "../rln/resources/tree_height_20")]
+        resources_path: PathBuf,
     },
     SetTree {
         #[arg(short, long, default_value_t = TEST_TREE_HEIGHT)]
         tree_height: usize,
     },
     SetLeaf {
         #[arg(short, long)]
         index: usize,
         #[arg(short, long)]
-        file: PathBuf,
+        input: PathBuf,
     },
     SetMultipleLeaves {
         #[arg(short, long)]
         index: usize,
         #[arg(short, long)]
-        file: PathBuf,
+        input: PathBuf,
     },
     ResetMultipleLeaves {
         #[arg(short, long)]
-        file: PathBuf,
+        input: PathBuf,
     },
     SetNextLeaf {
         #[arg(short, long)]
-        file: PathBuf,
+        input: PathBuf,
     },
     DeleteLeaf {
         #[arg(short, long)]
         index: usize,
     },
     GetRoot,
     GetProof {
         #[arg(short, long)]
         index: usize,
     },
     Prove {
@@ -52,7 +54,7 @@ pub(crate) enum Commands {
     },
     Verify {
         #[arg(short, long)]
-        file: PathBuf,
+        input: PathBuf,
     },
     GenerateProof {
         #[arg(short, long)]
```
## rln-cli source: configuration loading (file header missing in the source)

```diff
@@ -1,8 +1,10 @@
-use color_eyre::Result;
-use serde::{Deserialize, Serialize};
 use std::{fs::File, io::Read, path::PathBuf};

-pub const RLN_STATE_PATH: &str = "RLN_STATE_PATH";
+use color_eyre::Result;
+use serde::{Deserialize, Serialize};
+use serde_json::Value;
+
+pub const RLN_CONFIG_PATH: &str = "RLN_CONFIG_PATH";

 #[derive(Default, Serialize, Deserialize)]
 pub(crate) struct Config {
@@ -11,19 +13,26 @@ pub(crate) struct Config {

 #[derive(Default, Serialize, Deserialize)]
 pub(crate) struct InnerConfig {
-    pub file: PathBuf,
     pub tree_height: usize,
+    pub tree_config: Value,
 }

 impl Config {
     pub(crate) fn load_config() -> Result<Config> {
-        let path = PathBuf::from(std::env::var(RLN_STATE_PATH)?);
-
-        let mut file = File::open(path)?;
-
-        let mut contents = String::new();
-        file.read_to_string(&mut contents)?;
-        let state: Config = serde_json::from_str(&contents)?;
-        Ok(state)
+        match std::env::var(RLN_CONFIG_PATH) {
+            Ok(env) => {
+                let path = PathBuf::from(env);
+                let mut file = File::open(path)?;
+                let mut contents = String::new();
+                file.read_to_string(&mut contents)?;
+                let inner: InnerConfig = serde_json::from_str(&contents)?;
+                Ok(Config { inner: Some(inner) })
+            }
+            Err(_) => Ok(Config::default()),
+        }
+    }
+
+    pub(crate) fn as_bytes(&self) -> Vec<u8> {
+        serde_json::to_string(&self.inner).unwrap().into_bytes()
     }
 }
```
## rln-cli/src/examples/relay.rs (new file, 317 lines)

```rust
use std::{
    collections::HashMap,
    fs::File,
    io::{stdin, stdout, Cursor, Read, Write},
    path::{Path, PathBuf},
};

use clap::{Parser, Subcommand};
use color_eyre::{eyre::eyre, Result};
use rln::{
    circuit::Fr,
    hashers::{hash_to_field, poseidon_hash},
    protocol::{keygen, prepare_prove_input, prepare_verify_input},
    public::RLN,
    utils::{bytes_le_to_fr, fr_to_bytes_le, generate_input_buffer},
};

const MESSAGE_LIMIT: u32 = 1;

const TREEE_HEIGHT: usize = 20;

#[derive(Parser)]
#[command(author, version, about, long_about = None)]
struct Cli {
    #[command(subcommand)]
    command: Commands,
}

#[derive(Subcommand)]
enum Commands {
    List,
    Register,
    Send {
        #[arg(short, long)]
        user_index: usize,
        #[arg(short, long)]
        message_id: u32,
        #[arg(short, long)]
        signal: String,
    },
    Clear,
    Exit,
}

#[derive(Debug, Clone)]
struct Identity {
    identity_secret_hash: Fr,
    id_commitment: Fr,
}

impl Identity {
    fn new() -> Self {
        let (identity_secret_hash, id_commitment) = keygen();
        Identity {
            identity_secret_hash,
            id_commitment,
        }
    }
}

struct RLNSystem {
    rln: RLN,
    used_nullifiers: HashMap<[u8; 32], Vec<u8>>,
    local_identities: HashMap<usize, Identity>,
}

impl RLNSystem {
    fn new() -> Result<Self> {
        let mut resources: Vec<Vec<u8>> = Vec::new();
        let resources_path: PathBuf = format!("../rln/resources/tree_height_{TREEE_HEIGHT}").into();
        #[cfg(feature = "arkzkey")]
        let filenames = ["rln_final.arkzkey", "graph.bin"];
        #[cfg(not(feature = "arkzkey"))]
        let filenames = ["rln_final.zkey", "graph.bin"];
        for filename in filenames {
            let fullpath = resources_path.join(Path::new(filename));
            let mut file = File::open(&fullpath)?;
            let metadata = std::fs::metadata(&fullpath)?;
            let mut output_buffer = vec![0; metadata.len() as usize];
            file.read_exact(&mut output_buffer)?;
            resources.push(output_buffer);
        }
        let rln = RLN::new_with_params(
            TREEE_HEIGHT,
            resources[0].clone(),
            resources[1].clone(),
            generate_input_buffer(),
        )?;
        println!("RLN instance initialized successfully");
        Ok(RLNSystem {
            rln,
            used_nullifiers: HashMap::new(),
            local_identities: HashMap::new(),
        })
    }

    fn list_users(&self) {
        if self.local_identities.is_empty() {
            println!("No users registered yet.");
            return;
        }

        println!("Registered users:");
        for (index, identity) in &self.local_identities {
            println!("User Index: {index}");
            println!("+ Identity Secret Hash: {}", identity.identity_secret_hash);
            println!("+ Identity Commitment: {}", identity.id_commitment);
            println!();
        }
    }

    fn register_user(&mut self) -> Result<usize> {
        let index = self.rln.leaves_set();
        let identity = Identity::new();

        let rate_commitment = poseidon_hash(&[identity.id_commitment, Fr::from(MESSAGE_LIMIT)]);
        let mut buffer = Cursor::new(fr_to_bytes_le(&rate_commitment));
        match self.rln.set_next_leaf(&mut buffer) {
            Ok(_) => {
                println!("Registered User Index: {index}");
                println!("+ Identity secret hash: {}", identity.identity_secret_hash);
                println!("+ Identity commitment: {},", identity.id_commitment);
                self.local_identities.insert(index, identity);
            }
            Err(_) => {
                println!("Maximum user limit reached: 2^{TREEE_HEIGHT}");
            }
        };

        Ok(index)
    }

    fn generate_proof(
        &mut self,
        user_index: usize,
        message_id: u32,
        signal: &str,
        external_nullifier: Fr,
    ) -> Result<Vec<u8>> {
        let identity = match self.local_identities.get(&user_index) {
            Some(identity) => identity,
            None => return Err(eyre!("user index {user_index} not found")),
        };

        let serialized = prepare_prove_input(
            identity.identity_secret_hash,
            user_index,
            Fr::from(MESSAGE_LIMIT),
            Fr::from(message_id),
            external_nullifier,
            signal.as_bytes(),
        );
        let mut input_buffer = Cursor::new(serialized);
        let mut output_buffer = Cursor::new(Vec::new());
        self.rln
            .generate_rln_proof(&mut input_buffer, &mut output_buffer)?;

        println!("Proof generated successfully:");
        println!("+ User Index: {user_index}");
        println!("+ Message ID: {message_id}");
        println!("+ Signal: {signal}");

        Ok(output_buffer.into_inner())
    }

    fn verify_proof(&mut self, proof_data: Vec<u8>, signal: &str) -> Result<()> {
        let proof_with_signal = prepare_verify_input(proof_data.clone(), signal.as_bytes());
        let mut input_buffer = Cursor::new(proof_with_signal);

        match self.rln.verify_rln_proof(&mut input_buffer) {
            Ok(true) => {
                let nullifier = &proof_data[256..288];
                let nullifier_key: [u8; 32] = nullifier.try_into()?;

                if let Some(previous_proof) = self.used_nullifiers.get(&nullifier_key) {
                    self.handle_duplicate_message_id(previous_proof.clone(), proof_data)?;
                    return Ok(());
                }
                self.used_nullifiers.insert(nullifier_key, proof_data);
                println!("Message verified and accepted");
            }
            Ok(false) => {
                println!("Verification failed: message_id must be unique within the epoch and satisfy 0 <= message_id < MESSAGE_LIMIT: {MESSAGE_LIMIT}");
            }
            Err(err) => return Err(err),
        }
        Ok(())
    }

    fn handle_duplicate_message_id(
        &mut self,
        previous_proof: Vec<u8>,
        current_proof: Vec<u8>,
    ) -> Result<()> {
        let x = &current_proof[192..224];
        let y = &current_proof[224..256];

        let prev_x = &previous_proof[192..224];
        let prev_y = &previous_proof[224..256];
        if x == prev_x && y == prev_y {
            return Err(eyre!("this exact message and signal has already been sent"));
        }

        let mut proof1 = Cursor::new(previous_proof);
        let mut proof2 = Cursor::new(current_proof);
        let mut output = Cursor::new(Vec::new());

        match self
            .rln
            .recover_id_secret(&mut proof1, &mut proof2, &mut output)
        {
            Ok(_) => {
                let output_data = output.into_inner();
                let (leaked_identity_secret_hash, _) = bytes_le_to_fr(&output_data);

                if let Some((user_index, identity)) = self
                    .local_identities
                    .iter()
                    .find(|(_, identity)| {
                        identity.identity_secret_hash == leaked_identity_secret_hash
                    })
                    .map(|(index, identity)| (*index, identity))
                {
                    let real_identity_secret_hash = identity.identity_secret_hash;
                    if leaked_identity_secret_hash != real_identity_secret_hash {
                        Err(eyre!("identity secret hash mismatch {leaked_identity_secret_hash} != {real_identity_secret_hash}"))
                    } else {
                        println!("DUPLICATE message ID detected! Reveal identity secret hash: {leaked_identity_secret_hash}");
                        self.local_identities.remove(&user_index);
                        self.rln.delete_leaf(user_index)?;
                        println!("User index {user_index} has been SLASHED");
                        Ok(())
                    }
                } else {
                    Err(eyre!(
                        "user identity secret hash {leaked_identity_secret_hash} not found"
                    ))
                }
            }
            Err(err) => Err(eyre!("Failed to recover identity secret: {err}")),
        }
    }
}

fn main() -> Result<()> {
    println!("Initializing RLN instance...");
    print!("\x1B[2J\x1B[1;1H");
    let mut rln_system = RLNSystem::new()?;
    let rln_epoch = hash_to_field(b"epoch");
    let rln_identifier = hash_to_field(b"rln-identifier");
    let external_nullifier = poseidon_hash(&[rln_epoch, rln_identifier]);
    println!("RLN Relay Example:");
    println!("Message Limit: {MESSAGE_LIMIT}");
    println!("----------------------------------");
    println!();
    show_commands();
    loop {
        print!("\n> ");
        stdout().flush()?;
        let mut input = String::new();
        stdin().read_line(&mut input)?;
        let trimmed = input.trim();
        let args = std::iter::once("").chain(trimmed.split_whitespace());

        match Cli::try_parse_from(args) {
            Ok(cli) => match cli.command {
                Commands::List => {
                    rln_system.list_users();
                }
                Commands::Register => {
                    rln_system.register_user()?;
                }
                Commands::Send {
                    user_index,
                    message_id,
                    signal,
                } => {
                    match rln_system.generate_proof(
                        user_index,
                        message_id,
                        &signal,
                        external_nullifier,
                    ) {
                        Ok(proof) => {
                            if let Err(err) = rln_system.verify_proof(proof, &signal) {
                                println!("Verification error: {err}");
                            };
                        }
                        Err(err) => {
                            println!("Proof generation error: {err}");
                        }
                    }
                }
                Commands::Clear => {
                    print!("\x1B[2J\x1B[1;1H");
                    show_commands();
                }
                Commands::Exit => {
                    break;
                }
            },
            Err(err) => {
                eprintln!("Command error: {err}");
            }
        }
    }
    Ok(())
}

fn show_commands() {
    println!("Available commands:");
    println!("  list - List registered users");
    println!("  register - Register a new user index");
    println!("  send -u <index> -m <message_id> -s <signal> - Send a message with proof");
    println!("  clear - Clear the screen");
    println!("  exit - Exit the program");
}
```
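The slice offsets used above reflect the little-endian layout of a serialized RLN proof as this example consumes it: the signal hash `x` at bytes 192..224, `y` at 224..256, and the nullifier at 256..288. A small helper makes that assumption explicit; this is a sketch whose offsets are taken from the example code above, not from a stable public contract:

```rust
/// Field positions in a serialized RLN proof, as read by relay.rs above.
/// These offsets mirror the example's slicing and are not a stable API.
struct ProofFields<'a> {
    x: &'a [u8],         // bytes 192..224
    y: &'a [u8],         // bytes 224..256
    nullifier: &'a [u8], // bytes 256..288
}

fn proof_fields(proof: &[u8]) -> Option<ProofFields<'_>> {
    // Reject proofs that are too short instead of panicking on slice bounds.
    if proof.len() < 288 {
        return None;
    }
    Some(ProofFields {
        x: &proof[192..224],
        y: &proof[224..256],
        nullifier: &proof[256..288],
    })
}
```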
314
rln-cli/src/examples/stateless.rs
Normal file
314
rln-cli/src/examples/stateless.rs
Normal file
@@ -0,0 +1,314 @@
#![cfg(feature = "stateless")]

use std::{
    collections::HashMap,
    io::{stdin, stdout, Cursor, Write},
};

use clap::{Parser, Subcommand};
use color_eyre::{eyre::eyre, Result};
use rln::{
    circuit::{Fr, TEST_TREE_HEIGHT},
    hashers::{hash_to_field, poseidon_hash},
    poseidon_tree::PoseidonTree,
    protocol::{keygen, prepare_verify_input, rln_witness_from_values, serialize_witness},
    public::RLN,
    utils::{bytes_le_to_fr, fr_to_bytes_le},
};
use zerokit_utils::ZerokitMerkleTree;

const MESSAGE_LIMIT: u32 = 1;

type ConfigOf<T> = <T as ZerokitMerkleTree>::Config;

#[derive(Parser)]
#[command(author, version, about, long_about = None)]
struct Cli {
    #[command(subcommand)]
    command: Commands,
}

#[derive(Subcommand)]
enum Commands {
    List,
    Register,
    Send {
        #[arg(short, long)]
        user_index: usize,
        #[arg(short, long)]
        message_id: u32,
        #[arg(short, long)]
        signal: String,
    },
    Clear,
    Exit,
}

#[derive(Debug, Clone)]
struct Identity {
    identity_secret_hash: Fr,
    id_commitment: Fr,
}

impl Identity {
    fn new() -> Self {
        let (identity_secret_hash, id_commitment) = keygen();
        Identity {
            identity_secret_hash,
            id_commitment,
        }
    }
}

struct RLNSystem {
    rln: RLN,
    tree: PoseidonTree,
    used_nullifiers: HashMap<[u8; 32], Vec<u8>>,
    local_identities: HashMap<usize, Identity>,
}

impl RLNSystem {
    fn new() -> Result<Self> {
        let rln = RLN::new()?;
        let default_leaf = Fr::from(0);
        let tree = PoseidonTree::new(
            TEST_TREE_HEIGHT,
            default_leaf,
            ConfigOf::<PoseidonTree>::default(),
        )?;

        Ok(RLNSystem {
            rln,
            tree,
            used_nullifiers: HashMap::new(),
            local_identities: HashMap::new(),
        })
    }

    fn list_users(&self) {
        if self.local_identities.is_empty() {
            println!("No users registered yet.");
            return;
        }

        println!("Registered users:");
        for (index, identity) in &self.local_identities {
            println!("User Index: {index}");
            println!("+ Identity Secret Hash: {}", identity.identity_secret_hash);
            println!("+ Identity Commitment: {}", identity.id_commitment);
            println!();
        }
    }

    fn register_user(&mut self) -> Result<usize> {
        let index = self.tree.leaves_set();
        let identity = Identity::new();

        let rate_commitment = poseidon_hash(&[identity.id_commitment, Fr::from(MESSAGE_LIMIT)]);
        self.tree.update_next(rate_commitment)?;

        println!("Registered User Index: {index}");
        println!("+ Identity secret hash: {}", identity.identity_secret_hash);
        println!("+ Identity commitment: {}", identity.id_commitment);

        self.local_identities.insert(index, identity);
        Ok(index)
    }

    fn generate_proof(
        &mut self,
        user_index: usize,
        message_id: u32,
        signal: &str,
        external_nullifier: Fr,
    ) -> Result<Vec<u8>> {
        let identity = match self.local_identities.get(&user_index) {
            Some(identity) => identity,
            None => return Err(eyre!("user index {user_index} not found")),
        };

        let merkle_proof = self.tree.proof(user_index)?;
        let x = hash_to_field(signal.as_bytes());

        let rln_witness = rln_witness_from_values(
            identity.identity_secret_hash,
            &merkle_proof,
            x,
            external_nullifier,
            Fr::from(MESSAGE_LIMIT),
            Fr::from(message_id),
        )?;

        let serialized = serialize_witness(&rln_witness)?;
        let mut input_buffer = Cursor::new(serialized);
        let mut output_buffer = Cursor::new(Vec::new());

        self.rln
            .generate_rln_proof_with_witness(&mut input_buffer, &mut output_buffer)?;

        println!("Proof generated successfully:");
        println!("+ User Index: {user_index}");
        println!("+ Message ID: {message_id}");
        println!("+ Signal: {signal}");

        Ok(output_buffer.into_inner())
    }

    fn verify_proof(&mut self, proof_data: Vec<u8>, signal: &str) -> Result<()> {
        let proof_with_signal = prepare_verify_input(proof_data.clone(), signal.as_bytes());
        let mut input_buffer = Cursor::new(proof_with_signal);

        let root = self.tree.root();
        let roots_serialized = fr_to_bytes_le(&root);
        let mut roots_buffer = Cursor::new(roots_serialized);

        match self
            .rln
            .verify_with_roots(&mut input_buffer, &mut roots_buffer)
        {
            Ok(true) => {
                let nullifier = &proof_data[256..288];
                let nullifier_key: [u8; 32] = nullifier.try_into()?;

                if let Some(previous_proof) = self.used_nullifiers.get(&nullifier_key) {
                    self.handle_duplicate_message_id(previous_proof.clone(), proof_data)?;
                    return Ok(());
                }
                self.used_nullifiers.insert(nullifier_key, proof_data);
                println!("Message verified and accepted");
            }
            Ok(false) => {
                println!("Verification failed: message_id must be unique within the epoch and satisfy 0 <= message_id < MESSAGE_LIMIT: {MESSAGE_LIMIT}");
            }
            Err(err) => return Err(err.into()),
        }
        Ok(())
    }

    fn handle_duplicate_message_id(
        &mut self,
        previous_proof: Vec<u8>,
        current_proof: Vec<u8>,
    ) -> Result<()> {
        let x = &current_proof[192..224];
        let y = &current_proof[224..256];

        let prev_x = &previous_proof[192..224];
        let prev_y = &previous_proof[224..256];
        if x == prev_x && y == prev_y {
            return Err(eyre!("this exact message and signal has already been sent"));
        }

        let mut proof1 = Cursor::new(previous_proof);
        let mut proof2 = Cursor::new(current_proof);
        let mut output = Cursor::new(Vec::new());

        match self
            .rln
            .recover_id_secret(&mut proof1, &mut proof2, &mut output)
        {
            Ok(_) => {
                let output_data = output.into_inner();
                let (leaked_identity_secret_hash, _) = bytes_le_to_fr(&output_data);

                if let Some((user_index, identity)) = self
                    .local_identities
                    .iter()
                    .find(|(_, identity)| {
                        identity.identity_secret_hash == leaked_identity_secret_hash
                    })
                    .map(|(index, identity)| (*index, identity))
                {
                    let real_identity_secret_hash = identity.identity_secret_hash;
                    if leaked_identity_secret_hash != real_identity_secret_hash {
                        Err(eyre!("identity secret hash mismatch {leaked_identity_secret_hash} != {real_identity_secret_hash}"))
                    } else {
                        println!("DUPLICATE message ID detected! Reveal identity secret hash: {leaked_identity_secret_hash}");
                        self.local_identities.remove(&user_index);
                        println!("User index {user_index} has been SLASHED");
                        Ok(())
                    }
                } else {
                    Err(eyre!(
                        "user identity secret hash {leaked_identity_secret_hash} not found"
                    ))
                }
            }
            Err(err) => Err(eyre!("Failed to recover identity secret: {err}")),
        }
    }
}

fn main() -> Result<()> {
    println!("Initializing RLN instance...");
    print!("\x1B[2J\x1B[1;1H");
    let mut rln_system = RLNSystem::new()?;
    let rln_epoch = hash_to_field(b"epoch");
    let rln_identifier = hash_to_field(b"rln-identifier");
    let external_nullifier = poseidon_hash(&[rln_epoch, rln_identifier]);
    println!("RLN Stateless Relay Example:");
    println!("Message Limit: {MESSAGE_LIMIT}");
    println!("----------------------------------");
    println!();
    show_commands();

    loop {
        print!("\n> ");
        stdout().flush()?;
        let mut input = String::new();
        stdin().read_line(&mut input)?;
        let trimmed = input.trim();
        let args = std::iter::once("").chain(trimmed.split_whitespace());

        match Cli::try_parse_from(args) {
            Ok(cli) => match cli.command {
                Commands::List => {
                    rln_system.list_users();
                }
                Commands::Register => {
                    rln_system.register_user()?;
                }
                Commands::Send {
                    user_index,
                    message_id,
                    signal,
                } => {
                    match rln_system.generate_proof(
                        user_index,
                        message_id,
                        &signal,
                        external_nullifier,
                    ) {
                        Ok(proof) => {
                            if let Err(err) = rln_system.verify_proof(proof, &signal) {
                                println!("Verification error: {err}");
                            };
                        }
                        Err(err) => {
                            println!("Proof generation error: {err}");
                        }
                    }
                }
                Commands::Clear => {
                    print!("\x1B[2J\x1B[1;1H");
                    show_commands();
                }
                Commands::Exit => {
                    break;
                }
            },
            Err(err) => {
                eprintln!("Command error: {err}");
            }
        }
    }
    Ok(())
}

fn show_commands() {
    println!("Available commands:");
    println!("  list - List registered users");
    println!("  register - Register a new user index");
    println!("  send -u <index> -m <message_id> -s <signal> - Send a message with proof");
    println!("  clear - Clear the screen");
    println!("  exit - Exit the program");
}
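The verification and slashing paths above slice the serialized proof at fixed byte offsets: 192..224 and 224..256 for the Shamir share (x, y) and 256..288 for the nullifier. A rough sketch of a read-only view over that layout, with offsets taken from the example code itself rather than from any documented rln serialization contract:

```rust
/// Illustrative view over the proof bytes, assuming the offsets used by
/// the example above; the remaining regions are not interpreted here.
struct ProofView<'a>(&'a [u8]);

impl<'a> ProofView<'a> {
    /// Shamir share evaluation point (the hashed signal).
    fn x(&self) -> &'a [u8] {
        &self.0[192..224]
    }
    /// Shamir share value; two distinct (x, y) pairs under the same
    /// nullifier are enough to recover the identity secret.
    fn y(&self) -> &'a [u8] {
        &self.0[224..256]
    }
    /// Nullifier used to detect message-limit violations within an epoch.
    fn nullifier(&self) -> &'a [u8] {
        &self.0[256..288]
    }
}
```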
@@ -1,9 +1,18 @@
-use std::{fs::File, io::Read, path::Path};
+use std::{
+    fs::File,
+    io::{Cursor, Read},
+    path::Path,
+};
 
 use clap::Parser;
-use color_eyre::{Report, Result};
+use color_eyre::{eyre::Report, Result};
 use commands::Commands;
-use rln::public::RLN;
+use config::{Config, InnerConfig};
+use rln::{
+    public::RLN,
+    utils::{bytes_le_to_fr, bytes_le_to_vec_fr},
+};
+use serde_json::json;
 use state::State;
 
 mod commands;
@@ -20,74 +29,97 @@ struct Cli {
 fn main() -> Result<()> {
     let cli = Cli::parse();
 
-    let mut state = State::load_state()?;
+    let mut state = match &cli.command {
+        Some(Commands::New { .. }) | Some(Commands::NewWithParams { .. }) => State::default(),
+        _ => State::load_state()?,
+    };
 
-    match &cli.command {
-        Some(Commands::New {
-            tree_height,
-            config,
-        }) => {
-            let resources = File::open(&config)?;
-            state.rln = Some(RLN::new(*tree_height, resources)?);
+    match cli.command {
+        Some(Commands::New { tree_height }) => {
+            let config = Config::load_config()?;
+            state.rln = if let Some(InnerConfig { tree_height, .. }) = config.inner {
+                println!("Initializing RLN with custom config");
+                Some(RLN::new(tree_height, Cursor::new(config.as_bytes()))?)
+            } else {
+                println!("Initializing RLN with default config");
+                Some(RLN::new(tree_height, Cursor::new(json!({}).to_string()))?)
+            };
             Ok(())
         }
         Some(Commands::NewWithParams {
             tree_height,
-            config,
-            tree_config_input,
+            resources_path,
         }) => {
             let mut resources: Vec<Vec<u8>> = Vec::new();
-            for filename in ["rln.wasm", "rln_final.zkey", "verification_key.json"] {
-                let fullpath = config.join(Path::new(filename));
+            #[cfg(feature = "arkzkey")]
+            let filenames = ["rln_final.arkzkey", "graph.bin"];
+            #[cfg(not(feature = "arkzkey"))]
+            let filenames = ["rln_final.zkey", "graph.bin"];
+            for filename in filenames {
+                let fullpath = resources_path.join(Path::new(filename));
                 let mut file = File::open(&fullpath)?;
                 let metadata = std::fs::metadata(&fullpath)?;
-                let mut buffer = vec![0; metadata.len() as usize];
-                file.read_exact(&mut buffer)?;
-                resources.push(buffer);
+                let mut output_buffer = vec![0; metadata.len() as usize];
+                file.read_exact(&mut output_buffer)?;
+                resources.push(output_buffer);
             }
-            let tree_config_input_file = File::open(&tree_config_input)?;
-            state.rln = Some(RLN::new_with_params(
-                *tree_height,
-                resources[0].clone(),
-                resources[1].clone(),
-                resources[2].clone(),
-                tree_config_input_file,
-            )?);
+            let config = Config::load_config()?;
+            if let Some(InnerConfig {
+                tree_height,
+                tree_config,
+            }) = config.inner
+            {
+                println!("Initializing RLN with custom config");
+                state.rln = Some(RLN::new_with_params(
+                    tree_height,
+                    resources[0].clone(),
+                    resources[1].clone(),
+                    Cursor::new(tree_config.to_string().as_bytes()),
+                )?)
+            } else {
+                println!("Initializing RLN with default config");
+                state.rln = Some(RLN::new_with_params(
+                    tree_height,
+                    resources[0].clone(),
+                    resources[1].clone(),
+                    Cursor::new(json!({}).to_string()),
+                )?)
+            };
             Ok(())
         }
         Some(Commands::SetTree { tree_height }) => {
             state
                 .rln
                 .ok_or(Report::msg("no RLN instance initialized"))?
-                .set_tree(*tree_height)?;
+                .set_tree(tree_height)?;
             Ok(())
         }
-        Some(Commands::SetLeaf { index, file }) => {
-            let input_data = File::open(&file)?;
+        Some(Commands::SetLeaf { index, input }) => {
+            let input_data = File::open(input)?;
             state
                 .rln
                 .ok_or(Report::msg("no RLN instance initialized"))?
-                .set_leaf(*index, input_data)?;
+                .set_leaf(index, input_data)?;
             Ok(())
         }
-        Some(Commands::SetMultipleLeaves { index, file }) => {
-            let input_data = File::open(&file)?;
+        Some(Commands::SetMultipleLeaves { index, input }) => {
+            let input_data = File::open(input)?;
             state
                 .rln
                 .ok_or(Report::msg("no RLN instance initialized"))?
-                .set_leaves_from(*index, input_data)?;
+                .set_leaves_from(index, input_data)?;
             Ok(())
         }
-        Some(Commands::ResetMultipleLeaves { file }) => {
-            let input_data = File::open(&file)?;
+        Some(Commands::ResetMultipleLeaves { input }) => {
+            let input_data = File::open(input)?;
            state
                 .rln
                 .ok_or(Report::msg("no RLN instance initialized"))?
                 .init_tree_with_leaves(input_data)?;
             Ok(())
         }
-        Some(Commands::SetNextLeaf { file }) => {
-            let input_data = File::open(&file)?;
+        Some(Commands::SetNextLeaf { input }) => {
+            let input_data = File::open(input)?;
             state
                 .rln
                 .ok_or(Report::msg("no RLN instance initialized"))?
@@ -98,60 +130,73 @@ fn main() -> Result<()> {
             state
                 .rln
                 .ok_or(Report::msg("no RLN instance initialized"))?
-                .delete_leaf(*index)?;
-            Ok(())
-        }
-        Some(Commands::GetRoot) => {
-            let writer = std::io::stdout();
-            state
-                .rln
-                .ok_or(Report::msg("no RLN instance initialized"))?
-                .get_root(writer)?;
-            Ok(())
-        }
-        Some(Commands::GetProof { index }) => {
-            let writer = std::io::stdout();
-            state
-                .rln
-                .ok_or(Report::msg("no RLN instance initialized"))?
-                .get_proof(*index, writer)?;
+                .delete_leaf(index)?;
             Ok(())
         }
         Some(Commands::Prove { input }) => {
-            let input_data = File::open(&input)?;
-            let writer = std::io::stdout();
+            let input_data = File::open(input)?;
+            let mut output_buffer = Cursor::new(Vec::<u8>::new());
             state
                 .rln
                 .ok_or(Report::msg("no RLN instance initialized"))?
-                .prove(input_data, writer)?;
+                .prove(input_data, &mut output_buffer)?;
+            let proof = output_buffer.into_inner();
+            println!("proof: {:?}", proof);
             Ok(())
         }
-        Some(Commands::Verify { file }) => {
-            let input_data = File::open(&file)?;
-            state
+        Some(Commands::Verify { input }) => {
+            let input_data = File::open(input)?;
+            let verified = state
                 .rln
                 .ok_or(Report::msg("no RLN instance initialized"))?
                 .verify(input_data)?;
+            println!("verified: {:?}", verified);
             Ok(())
         }
         Some(Commands::GenerateProof { input }) => {
-            let input_data = File::open(&input)?;
-            let writer = std::io::stdout();
+            let input_data = File::open(input)?;
+            let mut output_buffer = Cursor::new(Vec::<u8>::new());
             state
                 .rln
                 .ok_or(Report::msg("no RLN instance initialized"))?
-                .generate_rln_proof(input_data, writer)?;
+                .generate_rln_proof(input_data, &mut output_buffer)?;
+            let proof = output_buffer.into_inner();
+            println!("proof: {:?}", proof);
             Ok(())
         }
         Some(Commands::VerifyWithRoots { input, roots }) => {
-            let input_data = File::open(&input)?;
-            let roots_data = File::open(&roots)?;
+            let input_data = File::open(input)?;
+            let roots_data = File::open(roots)?;
             state
                 .rln
                 .ok_or(Report::msg("no RLN instance initialized"))?
                 .verify_with_roots(input_data, roots_data)?;
             Ok(())
         }
+        Some(Commands::GetRoot) => {
+            let mut output_buffer = Cursor::new(Vec::<u8>::new());
+            state
+                .rln
+                .ok_or(Report::msg("no RLN instance initialized"))?
+                .get_root(&mut output_buffer)
+                .unwrap();
+            let (root, _) = bytes_le_to_fr(&output_buffer.into_inner());
+            println!("root: {root}");
+            Ok(())
+        }
+        Some(Commands::GetProof { index }) => {
+            let mut output_buffer = Cursor::new(Vec::<u8>::new());
+            state
+                .rln
+                .ok_or(Report::msg("no RLN instance initialized"))?
+                .get_proof(index, &mut output_buffer)?;
+            let output_buffer_inner = output_buffer.into_inner();
+            let (path_elements, _) = bytes_le_to_vec_fr(&output_buffer_inner)?;
+            for (index, element) in path_elements.iter().enumerate() {
+                println!("path element {}: {}", index, element);
+            }
+            Ok(())
+        }
         None => Ok(()),
     }
 }
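The reworked commands above all follow one buffer discipline: RLN methods write little-endian field elements into an output writer, and the caller decodes them afterwards. A minimal sketch of that round trip using the same rln helpers imported in this diff (the second tuple element of `bytes_le_to_fr` is assumed, from its use above, to be the number of bytes consumed):

```rust
use rln::circuit::Fr;
use rln::utils::{bytes_le_to_fr, fr_to_bytes_le};

fn main() {
    let value = Fr::from(42u64);

    // Serialize one field element to little-endian bytes.
    let bytes = fr_to_bytes_le(&value);

    // Decode it back; `_read` is assumed to be the byte count consumed,
    // which matters when several elements are concatenated.
    let (decoded, _read) = bytes_le_to_fr(&bytes);
    assert_eq!(decoded, value);
}
```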
@@ -1,20 +1,20 @@
+use std::io::Cursor;
+
 use color_eyre::Result;
 use rln::public::RLN;
-use std::fs::File;
 
 use crate::config::{Config, InnerConfig};
 
 #[derive(Default)]
-pub(crate) struct State<'a> {
-    pub rln: Option<RLN<'a>>,
+pub(crate) struct State {
+    pub rln: Option<RLN>,
 }
 
-impl<'a> State<'a> {
-    pub(crate) fn load_state() -> Result<State<'a>> {
+impl State {
+    pub(crate) fn load_state() -> Result<State> {
         let config = Config::load_config()?;
-        let rln = if let Some(InnerConfig { file, tree_height }) = config.inner {
-            let resources = File::open(&file)?;
-            Some(RLN::new(tree_height, resources)?)
+        let rln = if let Some(InnerConfig { tree_height, .. }) = config.inner {
+            Some(RLN::new(tree_height, Cursor::new(config.as_bytes()))?)
         } else {
             None
         };
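Both `main.rs` and `state.rs` now destructure a `Config` whose `inner` field optionally carries a tree height and a tree config. The actual definitions live in `rln-cli/src/config.rs`, which this diff does not show (nor the `load_config`/`as_bytes` helpers); a hypothetical mirror of the shape implied by the destructuring patterns above, assuming serde:

```rust
use serde::Deserialize;
use serde_json::Value;

/// Hypothetical mirror of the shape implied by the patterns
/// `InnerConfig { tree_height, tree_config }` used in this diff.
#[derive(Deserialize)]
struct InnerConfig {
    tree_height: usize,
    tree_config: Value,
}

#[derive(Deserialize, Default)]
struct Config {
    inner: Option<InnerConfig>,
}
```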
@@ -1,33 +1,39 @@
 [package]
 name = "rln-wasm"
-version = "0.0.13"
+version = "0.1.0"
 edition = "2021"
 license = "MIT or Apache2"
 
 [lib]
 crate-type = ["cdylib", "rlib"]
 
-[features]
-default = ["console_error_panic_hook"]
+required-features = ["stateless"]
 
 [dependencies]
-rln = { path = "../rln", default-features = false, features = ["wasm"] }
-num-bigint = { version = "0.4", default-features = false, features = ["rand", "serde"] }
-wasmer = { version = "2.3", default-features = false, features = ["js", "std"] }
-web-sys = { version = "0.3", features = ["console"] }
-getrandom = { version = "0.2.7", default-features = false, features = ["js"] }
-wasm-bindgen = "0.2.63"
-serde-wasm-bindgen = "0.4"
-js-sys = "0.3.59"
-serde_json = "1.0.85"
+rln = { path = "../rln", default-features = false }
+num-bigint = { version = "0.4.6", default-features = false, features = [
+    "rand",
+    "serde",
+] }
+wasm-bindgen = "0.2.100"
+serde-wasm-bindgen = "0.6.5"
+js-sys = "0.3.77"
+serde_json = "1.0"
 
 # The `console_error_panic_hook` crate provides better debugging of panics by
 # logging them with `console.error`. This is great for development, but requires
 # all the `std::fmt` and `std::panicking` infrastructure, so isn't great for
 # code size when deploying.
 console_error_panic_hook = { version = "0.1.7", optional = true }
+zerokit_utils = { path = "../utils" }
+
+[target.'cfg(target_arch = "wasm32")'.dependencies]
+getrandom = { version = "0.2.15", features = ["js"] }
 
 [dev-dependencies]
-wasm-bindgen-test = "0.3.13"
-wasm-bindgen-futures = "0.4.33"
+wasm-bindgen-test = "0.3.50"
+wasm-bindgen-futures = "0.4.50"
+
+[features]
+default = ["console_error_panic_hook"]
+stateless = ["rln/stateless"]
+arkzkey = ["rln/arkzkey"]
@@ -1,26 +1,53 @@
-[tasks.pack-build]
-command = "wasm-pack"
-args = ["build", "--release", "--target", "web", "--scope", "waku"]
-
-[tasks.pack-rename]
-script = "sed -i.bak 's/rln-wasm/zerokit-rln-wasm/g' pkg/package.json && rm pkg/package.json.bak"
-
 [tasks.build]
 clear = true
-dependencies = [
-    "pack-build",
-    "pack-rename",
-    "post-build"
-]
+dependencies = ["pack_build", "pack_rename"]
 
-[tasks.post-build]
-command = "wasm-strip"
-args = ["./pkg/rln_wasm_bg.wasm"]
+[tasks.build_arkzkey]
+clear = true
+dependencies = ["pack_build_arkzkey", "pack_rename"]
+
+[tasks.pack_build]
+command = "wasm-pack"
+args = ["build", "--release", "--target", "web", "--scope", "waku"]
+env = { "RUSTFLAGS" = "--cfg feature=\"stateless\"" }
+
+[tasks.pack_build_arkzkey]
+command = "wasm-pack"
+args = ["build", "--release", "--target", "web", "--scope", "waku"]
+env = { "RUSTFLAGS" = "--cfg feature=\"stateless\" --cfg feature=\"arkzkey\"" }
+
+[tasks.pack_rename]
+script = "sed -i.bak 's/rln-wasm/zerokit-rln-wasm/g' pkg/package.json && rm pkg/package.json.bak"
 
 [tasks.test]
 command = "wasm-pack"
-args = ["test", "--release", "--node"]
-dependencies = ["build"]
+args = [
+    "test",
+    "--release",
+    "--node",
+    "--target",
+    "wasm32-unknown-unknown",
+    "--",
+    "--nocapture",
+]
+env = { "RUSTFLAGS" = "--cfg feature=\"stateless\"" }
+
+[tasks.test_arkzkey]
+command = "wasm-pack"
+args = [
+    "test",
+    "--release",
+    "--node",
+    "--target",
+    "wasm32-unknown-unknown",
+    "--",
+    "--nocapture",
+]
+env = { "RUSTFLAGS" = "--cfg feature=\"stateless\" --cfg feature=\"arkzkey\"" }
+dependencies = ["build_arkzkey"]
 
 [tasks.bench]
 disabled = true
 
 [tasks.login]
 command = "wasm-pack"
@@ -1,41 +1,55 @@
 # RLN for WASM
 
 This library is used in [waku-org/js-rln](https://github.com/waku-org/js-rln/)
 
-## Building the library
-1. Install `wasm-pack`
-```
+> **Note**: This project requires `wasm-pack` for compiling Rust to WebAssembly and `cargo-make` for running the build commands. Make sure both are installed before proceeding.
+
+Install `wasm-pack`:
+
+```bash
 curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh
 ```
-2. Install `cargo-make`
-```
+
+Install `cargo-make`
+
+```bash
 cargo install cargo-make
 ```
 
-OR
+Or install everything needed for `zerokit` at the root of the repository:
 
-```
+```bash
 make installdeps
 ```
-3. Compile zerokit for `wasm32-unknown-unknown`:
-```
+
+## Building the library
+
+First, navigate to the rln-wasm directory:
+
+```bash
 cd rln-wasm
-cargo make build
 ```
-4. Compile a slimmer version of zerokit for `wasm32-unknown-unknown`:
-```
-cd rln-wasm
-cargo make post-build
-```
 
-## Running tests
+Compile zerokit for `wasm32-unknown-unknown`:
+
+```bash
+cargo make build
+```
 
-```
-cd rln-wasm
+Or compile with the **arkzkey** feature enabled
+
+```bash
+cargo make build_arkzkey
+```
+
+## Running tests and benchmarks
+
+```bash
 cargo make test
 ```
 
-## Publishing a npm package
+Or test with the **arkzkey** feature enabled
 
-```
-cd rln-wasm
-cargo make login
-cargo make publish
-```
+```bash
+cargo make test_arkzkey
+```
@@ -1,331 +1,324 @@
module.exports = async function builder(code, options) {
  options = options || {};

  let wasmModule;
  try {
    wasmModule = await WebAssembly.compile(code);
  } catch (err) {
    console.log(err);
    console.log(
      "\nTry to run circom --c in order to generate c++ code instead\n"
    );
    throw new Error(err);
  }

  let wc;

  let errStr = "";
  let msgStr = "";

  const instance = await WebAssembly.instantiate(wasmModule, {
    runtime: {
      exceptionHandler: function (code) {
        let err;
        if (code == 1) {
          err = "Signal not found.\n";
        } else if (code == 2) {
          err = "Too many signals set.\n";
        } else if (code == 3) {
          err = "Signal already set.\n";
        } else if (code == 4) {
          err = "Assert Failed.\n";
        } else if (code == 5) {
          err = "Not enough memory.\n";
        } else if (code == 6) {
          err = "Input signal array access exceeds the size.\n";
        } else {
          err = "Unknown error.\n";
        }
        throw new Error(err + errStr);
      },
      printErrorMessage: function () {
        errStr += getMessage() + "\n";
        // console.error(getMessage());
      },
      writeBufferMessage: function () {
        const msg = getMessage();
        // Any calls to `log()` will always end with a `\n`, so that's when we print and reset
        if (msg === "\n") {
          console.log(msgStr);
          msgStr = "";
        } else {
          // If we've buffered other content, put a space in between the items
          if (msgStr !== "") {
            msgStr += " ";
          }
          // Then append the message to the message we are creating
          msgStr += msg;
        }
      },
      showSharedRWMemory: function () {
        printSharedRWMemory();
      },
    },
  });

  const sanityCheck = options;
  // options &&
  // (
  //   options.sanityCheck ||
  //   options.logGetSignal ||
  //   options.logSetSignal ||
  //   options.logStartComponent ||
  //   options.logFinishComponent
  // );

  wc = new WitnessCalculator(instance, sanityCheck);
  return wc;

  function getMessage() {
    var message = "";
    var c = instance.exports.getMessageChar();
    while (c != 0) {
      message += String.fromCharCode(c);
      c = instance.exports.getMessageChar();
    }
    return message;
  }

  function printSharedRWMemory() {
    const shared_rw_memory_size = instance.exports.getFieldNumLen32();
    const arr = new Uint32Array(shared_rw_memory_size);
    for (let j = 0; j < shared_rw_memory_size; j++) {
      arr[shared_rw_memory_size - 1 - j] =
        instance.exports.readSharedRWMemory(j);
    }

    // If we've buffered other content, put a space in between the items
    if (msgStr !== "") {
      msgStr += " ";
    }
    // Then append the value to the message we are creating
    msgStr += fromArray32(arr).toString();
  }
};

class WitnessCalculator {
  constructor(instance, sanityCheck) {
    this.instance = instance;

    this.version = this.instance.exports.getVersion();
    this.n32 = this.instance.exports.getFieldNumLen32();

    this.instance.exports.getRawPrime();
    const arr = new Uint32Array(this.n32);
    for (let i = 0; i < this.n32; i++) {
      arr[this.n32 - 1 - i] = this.instance.exports.readSharedRWMemory(i);
    }
    this.prime = fromArray32(arr);

    this.witnessSize = this.instance.exports.getWitnessSize();

    this.sanityCheck = sanityCheck;
  }

  circom_version() {
    return this.instance.exports.getVersion();
  }

  async _doCalculateWitness(input, sanityCheck) {
    //input is assumed to be a map from signals to arrays of bigints
    this.instance.exports.init(this.sanityCheck || sanityCheck ? 1 : 0);
    const keys = Object.keys(input);
    var input_counter = 0;
    keys.forEach((k) => {
      const h = fnvHash(k);
      const hMSB = parseInt(h.slice(0, 8), 16);
      const hLSB = parseInt(h.slice(8, 16), 16);
      const fArr = flatArray(input[k]);
      let signalSize = this.instance.exports.getInputSignalSize(hMSB, hLSB);
      if (signalSize < 0) {
        throw new Error(`Signal ${k} not found\n`);
      }
      if (fArr.length < signalSize) {
        throw new Error(`Not enough values for input signal ${k}\n`);
      }
      if (fArr.length > signalSize) {
        throw new Error(`Too many values for input signal ${k}\n`);
      }
      for (let i = 0; i < fArr.length; i++) {
        const arrFr = toArray32(BigInt(fArr[i]) % this.prime, this.n32);
        for (let j = 0; j < this.n32; j++) {
          this.instance.exports.writeSharedRWMemory(j, arrFr[this.n32 - 1 - j]);
        }
        try {
          this.instance.exports.setInputSignal(hMSB, hLSB, i);
          input_counter++;
        } catch (err) {
          // console.log(`After adding signal ${i} of ${k}`)
          throw new Error(err);
        }
      }
    });
    if (input_counter < this.instance.exports.getInputSize()) {
      throw new Error(
        `Not all inputs have been set. Only ${input_counter} out of ${this.instance.exports.getInputSize()}`
      );
    }
  }

  async calculateWitness(input, sanityCheck) {
    const w = [];

    await this._doCalculateWitness(input, sanityCheck);

    for (let i = 0; i < this.witnessSize; i++) {
      this.instance.exports.getWitness(i);
      const arr = new Uint32Array(this.n32);
      for (let j = 0; j < this.n32; j++) {
        arr[this.n32 - 1 - j] = this.instance.exports.readSharedRWMemory(j);
      }
      w.push(fromArray32(arr));
    }

    return w;
  }

  async calculateBinWitness(input, sanityCheck) {
    const buff32 = new Uint32Array(this.witnessSize * this.n32);
    const buff = new Uint8Array(buff32.buffer);
    await this._doCalculateWitness(input, sanityCheck);

    for (let i = 0; i < this.witnessSize; i++) {
      this.instance.exports.getWitness(i);
      const pos = i * this.n32;
      for (let j = 0; j < this.n32; j++) {
        buff32[pos + j] = this.instance.exports.readSharedRWMemory(j);
      }
    }

    return buff;
  }

  async calculateWTNSBin(input, sanityCheck) {
    const buff32 = new Uint32Array(this.witnessSize * this.n32 + this.n32 + 11);
    const buff = new Uint8Array(buff32.buffer);
    await this._doCalculateWitness(input, sanityCheck);

    //"wtns"
    buff[0] = "w".charCodeAt(0);
    buff[1] = "t".charCodeAt(0);
    buff[2] = "n".charCodeAt(0);
    buff[3] = "s".charCodeAt(0);

    //version 2
    buff32[1] = 2;

    //number of sections: 2
    buff32[2] = 2;

    //id section 1
    buff32[3] = 1;

    const n8 = this.n32 * 4;
    //id section 1 length in 64bytes
    const idSection1length = 8 + n8;
    const idSection1lengthHex = idSection1length.toString(16);
    buff32[4] = parseInt(idSection1lengthHex.slice(0, 8), 16);
    buff32[5] = parseInt(idSection1lengthHex.slice(8, 16), 16);

    //this.n32
    buff32[6] = n8;

    //prime number
    this.instance.exports.getRawPrime();

    var pos = 7;
    for (let j = 0; j < this.n32; j++) {
      buff32[pos + j] = this.instance.exports.readSharedRWMemory(j);
    }
    pos += this.n32;

    // witness size
    buff32[pos] = this.witnessSize;
    pos++;

    //id section 2
    buff32[pos] = 2;
    pos++;

    // section 2 length
    const idSection2length = n8 * this.witnessSize;
    const idSection2lengthHex = idSection2length.toString(16);
    buff32[pos] = parseInt(idSection2lengthHex.slice(0, 8), 16);
    buff32[pos + 1] = parseInt(idSection2lengthHex.slice(8, 16), 16);

    pos += 2;
    for (let i = 0; i < this.witnessSize; i++) {
      this.instance.exports.getWitness(i);
      for (let j = 0; j < this.n32; j++) {
        buff32[pos + j] = this.instance.exports.readSharedRWMemory(j);
      }
      pos += this.n32;
    }

    return buff;
  }
}

function toArray32(rem, size) {
  const res = []; //new Uint32Array(size); //has no unshift
  const radix = BigInt(0x100000000);
  while (rem) {
    res.unshift(Number(rem % radix));
    rem = rem / radix;
  }
  if (size) {
    var i = size - res.length;
    while (i > 0) {
      res.unshift(0);
      i--;
    }
  }
  return res;
}

function fromArray32(arr) {
  //returns a BigInt
  var res = BigInt(0);
  const radix = BigInt(0x100000000);
  for (let i = 0; i < arr.length; i++) {
    res = res * radix + BigInt(arr[i]);
  }
  return res;
}

function flatArray(a) {
  var res = [];
  fillArray(res, a);
  return res;

  function fillArray(res, a) {
    if (Array.isArray(a)) {
      for (let i = 0; i < a.length; i++) {
        fillArray(res, a[i]);
      }
    } else {
      res.push(a);
    }
  }
}

function fnvHash(str) {
  const uint64_max = BigInt(2) ** BigInt(64);
  let hash = BigInt("0xCBF29CE484222325");
  for (var i = 0; i < str.length; i++) {
    hash ^= BigInt(str[i].charCodeAt());
    hash *= BigInt(0x100000001b3);
    hash %= uint64_max;
  }
  let shash = hash.toString(16);
  let n = 16 - shash.length;
  shash = "0".repeat(n).concat(shash);
  return shash;
}
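`calculateWTNSBin` above emits circom's `wtns` container: the magic bytes "wtns", a version word, a section count, a header section (field size in bytes, prime, witness count) and a data section holding the witness values. A rough Rust sketch of reading just the fixed preamble, assuming the little-endian `Uint32Array` layout the writer above produces:

```rust
/// Parse the fixed preamble of a `wtns` buffer as laid out by the
/// writer above (illustrative; offsets assume that exact writer).
fn parse_wtns_preamble(buff: &[u8]) -> Option<(u32, u32)> {
    // Magic "wtns" occupies the first four bytes.
    if buff.get(0..4)? != &b"wtns"[..] {
        return None;
    }
    let word = |i: usize| -> Option<u32> {
        Some(u32::from_le_bytes(buff.get(i..i + 4)?.try_into().ok()?))
    };
    let version = word(4)?; // buff32[1] in the writer
    let sections = word(8)?; // buff32[2] in the writer
    Some((version, sections))
}
```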
@@ -1,16 +1,12 @@
 #![cfg(target_arch = "wasm32")]
 
-extern crate wasm_bindgen;
-extern crate web_sys;
-
+use std::vec::Vec;
+
 use js_sys::{BigInt as JsBigInt, Object, Uint8Array};
 use num_bigint::BigInt;
 use rln::public::{hash, poseidon_hash, RLN};
-use std::vec::Vec;
 use wasm_bindgen::prelude::*;
 
-#[wasm_bindgen]
+#[wasm_bindgen(js_name = initPanicHook)]
 pub fn init_panic_hook() {
     console_error_panic_hook::set_once();
 }
@@ -19,7 +15,7 @@ pub fn init_panic_hook() {
 pub struct RLNWrapper {
     // The purpose of this wrapper is to hold a RLN instance with the 'static lifetime
     // because wasm_bindgen does not allow returning elements with lifetimes
-    instance: RLN<'static>,
+    instance: RLN,
 }
 
 // Macro to call methods with arbitrary amount of arguments,
@@ -61,24 +57,6 @@ macro_rules! call_with_output_and_error_msg {
     };
 }
 
-// Macro to call_with_error_msg methods with arbitrary amount of arguments,
-// First argument to the macro is context,
-// second is the actual method on `RLNWrapper`
-// rest are all other arguments to the method
-macro_rules! call_with_error_msg {
-    ($instance:expr, $method:ident, $error_msg:expr $(, $arg:expr)*) => {
-        {
-            let new_instance: &mut RLNWrapper = $instance.process();
-            if let Err(err) = new_instance.instance.$method($($arg.process()),*) {
-                Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
-            } else {
-                Ok(())
-            }
-        }
-    }
-}
-
 macro_rules! call {
     ($instance:expr, $method:ident $(, $arg:expr)*) => {
         {
@@ -150,8 +128,8 @@ impl<T> ProcessArg for Vec<T> {
     }
 }
 
-impl<'a> ProcessArg for *const RLN<'a> {
-    type ReturnType = &'a RLN<'a>;
+impl ProcessArg for *const RLN {
+    type ReturnType = &'static RLN;
     fn process(self) -> Self::ReturnType {
         unsafe { &*self }
     }
@@ -181,104 +159,31 @@ impl<'a> ProcessArg for &'a [u8] {
 
 #[allow(clippy::not_unsafe_ptr_arg_deref)]
 #[wasm_bindgen(js_name = newRLN)]
-pub fn wasm_new(
-    tree_height: usize,
-    zkey: Uint8Array,
-    vk: Uint8Array,
-) -> Result<*mut RLNWrapper, String> {
-    let instance = RLN::new_with_params(tree_height, zkey.to_vec(), vk.to_vec())
-        .map_err(|err| format!("{:#?}", err))?;
+pub fn wasm_new(zkey: Uint8Array) -> Result<*mut RLNWrapper, String> {
+    let instance = RLN::new_with_params(zkey.to_vec()).map_err(|err| format!("{:#?}", err))?;
     let wrapper = RLNWrapper { instance };
     Ok(Box::into_raw(Box::new(wrapper)))
 }
 
-#[allow(clippy::not_unsafe_ptr_arg_deref)]
-#[wasm_bindgen(js_name = getSerializedRLNWitness)]
-pub fn wasm_get_serialized_rln_witness(
-    ctx: *mut RLNWrapper,
-    input: Uint8Array,
-) -> Result<Uint8Array, String> {
-    let rln_witness = call!(ctx, get_serialized_rln_witness, &input.to_vec()[..])
-        .map_err(|err| format!("{:#?}", err))?;
-    Ok(Uint8Array::from(&rln_witness[..]))
-}
-
-#[allow(clippy::not_unsafe_ptr_arg_deref)]
-#[wasm_bindgen(js_name = insertMember)]
-pub fn wasm_set_next_leaf(ctx: *mut RLNWrapper, input: Uint8Array) -> Result<(), String> {
-    call_with_error_msg!(
-        ctx,
-        set_next_leaf,
-        "could not insert member into merkle tree".to_string(),
-        &input.to_vec()[..]
-    )
-}
-
-#[allow(clippy::not_unsafe_ptr_arg_deref)]
-#[wasm_bindgen(js_name = setLeavesFrom)]
-pub fn wasm_set_leaves_from(
-    ctx: *mut RLNWrapper,
-    index: usize,
-    input: Uint8Array,
-) -> Result<(), String> {
-    call_with_error_msg!(
-        ctx,
-        set_leaves_from,
-        "could not set multiple leaves".to_string(),
-        index,
-        &*input.to_vec()
-    )
-}
-
-#[allow(clippy::not_unsafe_ptr_arg_deref)]
-#[wasm_bindgen(js_name = deleteLeaf)]
-pub fn wasm_delete_leaf(ctx: *mut RLNWrapper, index: usize) -> Result<(), String> {
-    call_with_error_msg!(ctx, delete_leaf, "could not delete leaf".to_string(), index)
-}
-
-#[allow(clippy::not_unsafe_ptr_arg_deref)]
-#[wasm_bindgen(js_name = setMetadata)]
-pub fn wasm_set_metadata(ctx: *mut RLNWrapper, input: Uint8Array) -> Result<(), String> {
-    call_with_error_msg!(
-        ctx,
-        set_metadata,
-        "could not set metadata".to_string(),
-        &*input.to_vec()
-    )
-}
-
-#[allow(clippy::not_unsafe_ptr_arg_deref)]
-#[wasm_bindgen(js_name = getMetadata)]
-pub fn wasm_get_metadata(ctx: *mut RLNWrapper) -> Result<Uint8Array, String> {
-    call_with_output_and_error_msg!(ctx, get_metadata, "could not get metadata".to_string())
-}
-
-#[allow(clippy::not_unsafe_ptr_arg_deref)]
-#[wasm_bindgen(js_name = initTreeWithLeaves)]
-pub fn wasm_init_tree_with_leaves(ctx: *mut RLNWrapper, input: Uint8Array) -> Result<(), String> {
-    call_with_error_msg!(
-        ctx,
-        init_tree_with_leaves,
-        "could not init merkle tree".to_string(),
-        &*input.to_vec()
-    )
-}
-
 #[allow(clippy::not_unsafe_ptr_arg_deref)]
-#[wasm_bindgen(js_name = RLNWitnessToJson)]
-pub fn rln_witness_to_json(
+#[wasm_bindgen(js_name = rlnWitnessToJson)]
+pub fn wasm_rln_witness_to_json(
     ctx: *mut RLNWrapper,
     serialized_witness: Uint8Array,
 ) -> Result<Object, String> {
-    let inputs = call!(ctx, get_rln_witness_json, &serialized_witness.to_vec()[..])
-        .map_err(|err| err.to_string())?;
+    let inputs = call!(
+        ctx,
+        get_rln_witness_bigint_json,
+        &serialized_witness.to_vec()[..]
+    )
+    .map_err(|err| err.to_string())?;
     let js_value = serde_wasm_bindgen::to_value(&inputs).map_err(|err| err.to_string())?;
     Object::from_entries(&js_value).map_err(|err| format!("{:#?}", err))
 }
 
 #[allow(clippy::not_unsafe_ptr_arg_deref)]
-#[wasm_bindgen]
-pub fn generate_rln_proof_with_witness(
+#[wasm_bindgen(js_name = generateRLNProofWithWitness)]
+pub fn wasm_generate_rln_proof_with_witness(
     ctx: *mut RLNWrapper,
     calculated_witness: Vec<JsBigInt>,
     serialized_witness: Uint8Array,
@@ -358,17 +263,6 @@ pub fn wasm_recover_id_secret(
     )
 }
 
-#[allow(clippy::not_unsafe_ptr_arg_deref)]
-#[wasm_bindgen(js_name = verifyRLNProof)]
-pub fn wasm_verify_rln_proof(ctx: *const RLNWrapper, proof: Uint8Array) -> Result<bool, String> {
-    call_bool_method_with_error_msg!(
-        ctx,
-        verify_rln_proof,
-        "error while verifying rln proof".to_string(),
-        &proof.to_vec()[..]
-    )
-}
-
 #[allow(clippy::not_unsafe_ptr_arg_deref)]
 #[wasm_bindgen(js_name = verifyWithRoots)]
 pub fn wasm_verify_with_roots(
@@ -385,12 +279,6 @@ pub fn wasm_verify_with_roots(
     )
 }
 
-#[allow(clippy::not_unsafe_ptr_arg_deref)]
-#[wasm_bindgen(js_name = getRoot)]
-pub fn wasm_get_root(ctx: *const RLNWrapper) -> Result<Uint8Array, String> {
-    call_with_output_and_error_msg!(ctx, get_root, "could not obtain root")
-}
-
 #[wasm_bindgen(js_name = hash)]
 pub fn wasm_hash(input: Uint8Array) -> Result<Uint8Array, String> {
     fn_call_with_output_and_error_msg!(hash, "could not generate hash", &input.to_vec()[..])
@@ -3,16 +3,24 @@ const fs = require("fs");
 // Utils functions for loading circom witness calculator and reading files from test
 
 module.exports = {
-    read_file: function (path) {
-        return fs.readFileSync(path);
-    },
+  read_file: function (path) {
+    return fs.readFileSync(path);
+  },
 
-    calculateWitness: async function(circom_path, inputs){
-        const wc = require("resources/witness_calculator.js");
-        const wasmFile = fs.readFileSync(circom_path);
-        const wasmFileBuffer = wasmFile.slice(wasmFile.byteOffset, wasmFile.byteOffset + wasmFile.byteLength);
-        const witnessCalculator = await wc(wasmFileBuffer);
-        const calculatedWitness = await witnessCalculator.calculateWitness(inputs, false);
-        return JSON.stringify(calculatedWitness, (key, value) => typeof value === "bigint" ? value.toString() : value);
-    }
-}
+  calculateWitness: async function (circom_path, inputs) {
+    const wc = require("../resources/witness_calculator.js");
+    const wasmFile = fs.readFileSync(circom_path);
+    const wasmFileBuffer = wasmFile.slice(
+      wasmFile.byteOffset,
+      wasmFile.byteOffset + wasmFile.byteLength
+    );
+    const witnessCalculator = await wc(wasmFileBuffer);
+    const calculatedWitness = await witnessCalculator.calculateWitness(
+      inputs,
+      false
+    );
+    return JSON.stringify(calculatedWitness, (key, value) =>
+      typeof value === "bigint" ? value.toString() : value
+    );
+  },
+};
@@ -2,13 +2,16 @@
 
 #[cfg(test)]
 mod tests {
-    use js_sys::{BigInt as JsBigInt, Object, Uint8Array};
+    use js_sys::{BigInt as JsBigInt, Date, Object, Uint8Array};
     use rln::circuit::{Fr, TEST_TREE_HEIGHT};
     use rln::hashers::{hash_to_field, poseidon_hash};
-    use rln::utils::{bytes_le_to_fr, fr_to_bytes_le, normalize_usize};
+    use rln::poseidon_tree::PoseidonTree;
+    use rln::protocol::{prepare_verify_input, rln_witness_from_values, serialize_witness};
+    use rln::utils::{bytes_le_to_fr, fr_to_bytes_le};
     use rln_wasm::*;
     use wasm_bindgen::{prelude::*, JsValue};
     use wasm_bindgen_test::wasm_bindgen_test;
+    use zerokit_utils::merkle_tree::merkle_tree::ZerokitMerkleTree;
 
     #[wasm_bindgen(module = "src/utils.js")]
     extern "C" {
|
||||
@@ -19,130 +22,268 @@ mod tests {
|
||||
async fn calculateWitness(circom_path: &str, input: Object) -> Result<JsValue, JsValue>;
|
||||
}
|
||||
|
||||
#[cfg(feature = "arkzkey")]
|
||||
const ZKEY_PATH: &str = "../rln/resources/tree_height_20/rln_final.arkzkey";
|
||||
#[cfg(not(feature = "arkzkey"))]
|
||||
const ZKEY_PATH: &str = "../rln/resources/tree_height_20/rln_final.zkey";
|
||||
|
||||
const CIRCOM_PATH: &str = "../rln/resources/tree_height_20/rln.wasm";
|
||||
|
||||
#[wasm_bindgen_test]
|
||||
pub async fn test_basic_flow() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let circom_path = format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/rln.wasm");
|
||||
let zkey_path = format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/rln_final.zkey");
|
||||
let vk_path =
|
||||
format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/verification_key.json");
|
||||
pub async fn rln_wasm_benchmark() {
    let mut results = String::from("\nbenchmarks:\n");
    let iterations = 10;

    let zkey = read_file(&ZKEY_PATH).expect("Failed to read zkey file");

    // Benchmark wasm_new
    let start_wasm_new = Date::now();
    for _ in 0..iterations {
        let _ = wasm_new(zkey.clone()).expect("Failed to create RLN instance");
    }
    let wasm_new_result = Date::now() - start_wasm_new;

    // Create RLN instance for other benchmarks
    let rln_instance = wasm_new(zkey).expect("Failed to create RLN instance");
    let mut tree = PoseidonTree::default(TEST_TREE_HEIGHT).expect("Failed to create tree");

    // Benchmark wasm_key_gen
    let start_wasm_key_gen = Date::now();
    for _ in 0..iterations {
        let _ = wasm_key_gen(rln_instance).expect("Failed to generate keys");
    }
    let wasm_key_gen_result = Date::now() - start_wasm_key_gen;

    // Generate identity pair for other benchmarks
    let mem_keys = wasm_key_gen(rln_instance).expect("Failed to generate keys");
    let (identity_secret_hash, _) = bytes_le_to_fr(&mem_keys.subarray(0, 32).to_vec());
    let (id_commitment, _) = bytes_le_to_fr(&mem_keys.subarray(32, 64).to_vec());

    // Prepare the message
    let epoch = hash_to_field(b"test-epoch");
    let rln_identifier = hash_to_field(b"test-rln-identifier");
    let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);

    let identity_index = tree.leaves_set();

    let user_message_limit = Fr::from(100);

    let rate_commitment = poseidon_hash(&[id_commitment, user_message_limit]);
    tree.update_next(rate_commitment)
        .expect("Failed to update tree");

    let message_id = Fr::from(0);
    let signal: [u8; 32] = [0; 32];
    let x = hash_to_field(&signal);

    let merkle_proof = tree
        .proof(identity_index)
        .expect("Failed to generate merkle proof");

    let rln_witness = rln_witness_from_values(
        identity_secret_hash,
        &merkle_proof,
        x,
        external_nullifier,
        user_message_limit,
        message_id,
    )
    .expect("Failed to create RLN witness");

    let serialized_witness =
        serialize_witness(&rln_witness).expect("Failed to serialize witness");
    let witness_buffer = Uint8Array::from(&serialized_witness[..]);

    let json_inputs = wasm_rln_witness_to_json(rln_instance, witness_buffer.clone())
        .expect("Failed to convert witness to JSON");

    // Benchmark calculateWitness
    let start_calculate_witness = Date::now();
    for _ in 0..iterations {
        let _ = calculateWitness(&CIRCOM_PATH, json_inputs.clone())
            .await
            .expect("Failed to calculate witness");
    }
    let calculate_witness_result = Date::now() - start_calculate_witness;

    // Calculate witness for other benchmarks
    let calculated_witness_json = calculateWitness(&CIRCOM_PATH, json_inputs)
        .await
        .expect("Failed to calculate witness")
        .as_string()
        .expect("Failed to convert calculated witness to string");
    let calculated_witness_vec_str: Vec<String> =
        serde_json::from_str(&calculated_witness_json).expect("Failed to parse JSON");
    let calculated_witness: Vec<JsBigInt> = calculated_witness_vec_str
        .iter()
        .map(|x| JsBigInt::new(&x.into()).expect("Failed to create JsBigInt"))
        .collect();

    // Benchmark wasm_generate_rln_proof_with_witness
    let start_wasm_generate_rln_proof_with_witness = Date::now();
    for _ in 0..iterations {
        let _ = wasm_generate_rln_proof_with_witness(
            rln_instance,
            calculated_witness.clone(),
            witness_buffer.clone(),
        )
        .expect("Failed to generate proof");
    }
    let wasm_generate_rln_proof_with_witness_result =
        Date::now() - start_wasm_generate_rln_proof_with_witness;

    // Generate a proof for other benchmarks
    let proof =
        wasm_generate_rln_proof_with_witness(rln_instance, calculated_witness, witness_buffer)
            .expect("Failed to generate proof");

    let proof_data = proof.to_vec();
    let verify_input = prepare_verify_input(proof_data, &signal);
    let input_buffer = Uint8Array::from(&verify_input[..]);

    let root = tree.root();
    let roots_serialized = fr_to_bytes_le(&root);
    let roots_buffer = Uint8Array::from(&roots_serialized[..]);

    // Benchmark wasm_verify_with_roots
    let start_wasm_verify_with_roots = Date::now();
    for _ in 0..iterations {
        let _ =
            wasm_verify_with_roots(rln_instance, input_buffer.clone(), roots_buffer.clone())
                .expect("Failed to verify proof");
    }
    let wasm_verify_with_roots_result = Date::now() - start_wasm_verify_with_roots;

    // Verify the proof with the root
    let is_proof_valid = wasm_verify_with_roots(rln_instance, input_buffer, roots_buffer)
        .expect("Failed to verify proof");
    assert!(is_proof_valid, "verification failed");

    // Format and display results
    let format_duration = |duration_ms: f64| -> String {
        let avg_ms = duration_ms / (iterations as f64);
        if avg_ms >= 1000.0 {
            format!("{:.3} s", avg_ms / 1000.0)
        } else {
            format!("{:.3} ms", avg_ms)
        }
    };

    results.push_str(&format!("wasm_new: {}\n", format_duration(wasm_new_result)));
    results.push_str(&format!(
        "wasm_key_gen: {}\n",
        format_duration(wasm_key_gen_result)
    ));
    results.push_str(&format!(
        "calculateWitness: {}\n",
        format_duration(calculate_witness_result)
    ));
    results.push_str(&format!(
        "wasm_generate_rln_proof_with_witness: {}\n",
        format_duration(wasm_generate_rln_proof_with_witness_result)
    ));
    results.push_str(&format!(
        "wasm_verify_with_roots: {}\n",
        format_duration(wasm_verify_with_roots_result)
    ));

    // Log the results
    wasm_bindgen_test::console_log!("{results}");
}

||||
#[wasm_bindgen_test]
pub async fn rln_wasm_test() {
    // Read the zkey file
    let zkey = read_file(&ZKEY_PATH).expect("Failed to read zkey file");

    // Create RLN instance and separated tree
    let rln_instance = wasm_new(zkey).expect("Failed to create RLN instance");
    let mut tree = PoseidonTree::default(TEST_TREE_HEIGHT).expect("Failed to create tree");

    // Setting up the epoch and rln_identifier
    let epoch = hash_to_field(b"test-epoch");
    let rln_identifier = hash_to_field(b"test-rln-identifier");
    let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);

    // Generate identity pair
    let mem_keys = wasm_key_gen(rln_instance).expect("Failed to generate keys");
    let (identity_secret_hash, _) = bytes_le_to_fr(&mem_keys.subarray(0, 32).to_vec());
    let (id_commitment, _) = bytes_le_to_fr(&mem_keys.subarray(32, 64).to_vec());

    // Get index of the identity
    let identity_index = tree.leaves_set();

    // Setting up the user message limit
    let user_message_limit = Fr::from(100);

    // Updating the tree with the rate commitment
    let rate_commitment = poseidon_hash(&[id_commitment, user_message_limit]);
    tree.update_next(rate_commitment)
        .expect("Failed to update tree");

    // Generate merkle proof
    let merkle_proof = tree
        .proof(identity_index)
        .expect("Failed to generate merkle proof");

    // Create message id and signal
    let message_id = Fr::from(0);
    let signal: [u8; 32] = [0; 32];
    let x = hash_to_field(&signal);

    // Prepare input for witness calculation
    let rln_witness = rln_witness_from_values(
        identity_secret_hash,
        &merkle_proof,
        x,
        external_nullifier,
        user_message_limit,
        message_id,
    )
    .expect("Failed to create RLN witness");

    // Serialize the rln witness
    let serialized_witness =
        serialize_witness(&rln_witness).expect("Failed to serialize witness");
    // Convert the serialized witness to a Uint8Array
    let witness_buffer = Uint8Array::from(&serialized_witness[..]);

    // Obtaining inputs that should be sent to circom witness calculator
    let json_inputs = wasm_rln_witness_to_json(rln_instance, witness_buffer.clone())
        .expect("Failed to convert witness to JSON");

    // Calculating witness with JS
    // (Using a JSON since wasm_bindgen does not like Result<Vec<JsBigInt>,JsValue>)
    let calculated_witness_json = calculateWitness(&CIRCOM_PATH, json_inputs)
        .await
        .expect("Failed to calculate witness")
        .as_string()
        .expect("Failed to convert calculated witness to string");
    let calculated_witness_vec_str: Vec<String> =
        serde_json::from_str(&calculated_witness_json).expect("Failed to parse JSON");
    let calculated_witness: Vec<JsBigInt> = calculated_witness_vec_str
        .iter()
        .map(|x| JsBigInt::new(&x.into()).expect("Failed to create JsBigInt"))
        .collect();

    // Generate a proof from the calculated witness
    let proof =
        wasm_generate_rln_proof_with_witness(rln_instance, calculated_witness, witness_buffer)
            .expect("Failed to generate proof");

    // Prepare input for proof verification
    let proof_data = proof.to_vec();
    let verify_input = prepare_verify_input(proof_data, &signal);
    let input_buffer = Uint8Array::from(&verify_input[..]);

    // Prepare the root for verification
    let root = tree.root();
    let roots_serialized = fr_to_bytes_le(&root);
    let roots_buffer = Uint8Array::from(&roots_serialized[..]);

    // Verify the proof with the root
    let is_proof_valid = wasm_verify_with_roots(rln_instance, input_buffer, roots_buffer)
        .expect("Failed to verify proof");
    assert!(is_proof_valid, "verification failed");
}

#[wasm_bindgen_test]
fn test_metadata() {
    // Read the zkey file
    let zkey = read_file(&ZKEY_PATH).expect("Failed to read zkey file");

    // Creating an instance of RLN
    let rln_instance = wasm_new(zkey).expect("Failed to create RLN instance");

    // Inserting random metadata
    let test_metadata = Uint8Array::new(&JsValue::from_str("test"));
    wasm_set_metadata(rln_instance, test_metadata.clone()).unwrap();

    // Getting metadata
    let metadata = wasm_get_metadata(rln_instance).unwrap();

    assert_eq!(metadata.to_vec(), test_metadata.to_vec());
}

@@ -1,6 +1,6 @@
[package]
name = "rln"
version = "0.7.0"
edition = "2021"
license = "MIT OR Apache-2.0"
description = "APIs to manage, compute and verify zkSNARK proofs and RLN primitives"
@@ -15,55 +15,86 @@ bench = false
# This flag disables cargo doctests, i.e. testing example code-snippets in documentation
doctest = false

[dependencies]
# ZKP Generation
ark-bn254 = { version = "0.5.0", features = ["std"] }
ark-relations = { version = "0.5.1", features = ["std"] }
ark-ff = { version = "0.5.0", default-features = false, features = [
    "parallel",
] }
ark-ec = { version = "0.5.0", default-features = false, features = [
    "parallel",
] }
ark-std = { version = "0.5.0", default-features = false, features = [
    "parallel",
] }
ark-poly = { version = "0.5.0", default-features = false, features = [
    "parallel",
] }
ark-groth16 = { version = "0.5.0", default-features = false, features = [
    "parallel",
] }
ark-serialize = { version = "0.5.0", default-features = false, features = [
    "parallel",
] }

# error handling
color-eyre = "0.6.3"
thiserror = "2.0.12"

# utilities
byteorder = "1.5.0"
cfg-if = "1.0"
num-bigint = { version = "0.4.6", default-features = false, features = [
    "rand",
    "std",
] }
num-traits = "0.2.19"
once_cell = "1.21.3"
lazy_static = "1.5.0"
rand = "0.8.5"
rand_chacha = "0.3.1"
ruint = { version = "1.14.0", features = ["rand", "serde", "ark-ff-04"] }
tiny-keccak = { version = "2.0.2", features = ["keccak"] }
utils = { package = "zerokit_utils", version = "0.5.2", path = "../utils", default-features = false }

# serialization
prost = "0.13.5"
serde_json = "1.0"
serde = { version = "1.0", features = ["derive"] }

document-features = { version = "0.2.11", optional = true }

[dev-dependencies]
sled = "0.34.7"
criterion = { version = "0.4.0", features = ["html_reports"] }

[features]
default = ["pmtree-ft"]
fullmerkletree = ["default"]
stateless = []
arkzkey = []

# Note: pmtree feature is still experimental
pmtree-ft = ["utils/pmtree-ft"]

[[bench]]
name = "circuit_loading_arkzkey_benchmark"
harness = false
required-features = ["arkzkey"]

[[bench]]
name = "circuit_loading_benchmark"
harness = false

[[bench]]
name = "pmtree_benchmark"
harness = false

[[bench]]
name = "poseidon_tree_benchmark"
harness = false

[package.metadata.docs.rs]
all-features = true

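The `[features]` table above drives how the crate is built. As a quick illustration (a sketch, assuming a workspace checkout of zerokit), the optional backends can be enabled from the command line:

```sh
# Build with the default backend (pmtree-ft)
cargo build --release -p rln

# Enable the arkzkey backend for faster proving-key loading
cargo build --release -p rln --features arkzkey

# Build for stateless proof verification
cargo build --release -p rln --features stateless
```
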
@@ -4,7 +4,15 @@ args = ["build", "--release"]

[tasks.test]
command = "cargo"
args = ["test", "--release", "--", "--nocapture"]

[tasks.test_stateless]
command = "cargo"
args = ["test", "--release", "--features", "stateless"]

[tasks.test_arkzkey]
command = "cargo"
args = ["test", "--release", "--features", "arkzkey"]

[tasks.bench]
command = "cargo"

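These tasks are run through cargo-make. A typical invocation from the crate folder (assuming `cargo-make` is installed, e.g. via `cargo install cargo-make`) looks like:

```sh
cd rln
cargo make test            # default features, test output shown via --nocapture
cargo make test_stateless  # run the suite with the stateless feature
cargo make test_arkzkey    # run the suite with the arkzkey feature
```
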
348 rln/README.md
@@ -1,64 +1,17 @@
# Zerokit RLN Module

[](https://crates.io/crates/rln)

The Zerokit RLN Module provides a Rust implementation for working with Rate-Limiting Nullifier [RLN](https://rfc.vac.dev/spec/32/) zkSNARK proofs and primitives. This module allows you to:

- Generate and verify RLN proofs
- Work with Merkle trees for commitment storage
- Implement rate-limiting mechanisms for distributed systems

## Quick Start

> [!IMPORTANT]
> Version 0.6.1 is required for WASM support or x32 architecture. The current version doesn't support these platforms due to dependency issues. WASM support will return in a future release.

### Add RLN as dependency

We start by adding zerokit RLN to our `Cargo.toml`:

```toml
rln = { git = "https://github.com/vacp2p/zerokit" }
```

## Basic Usage Example

Note that we need to pass to the RLN object constructor the path where the graph file (`graph.bin`, built for the input tree size), the corresponding proving key (`rln_final.zkey` or `rln_final_uncompr.arkzkey`) and the verification key (`verification_key.arkvkey`, optional) are found.

In the following we will use [cursors](https://doc.rust-lang.org/std/io/struct.Cursor.html) as readers/writers for interfacing with RLN public APIs.

```rust
use rln::{
    circuit::Fr,
    hashers::{hash_to_field, poseidon_hash},
    protocol::{keygen, prepare_prove_input, prepare_verify_input},
    public::RLN,
    utils::fr_to_bytes_le,
};
use serde_json::json;
use std::io::Cursor;

fn main() {
    // 1. Initialize RLN with parameters:
    //    - the tree height;
    //    - the tree config; if it is not defined, the default value will be set
    let tree_height = 20;
    let input = Cursor::new(json!({}).to_string());
    let mut rln = RLN::new(tree_height, input).unwrap();

    // 2. Generate an identity keypair
    let (identity_secret_hash, id_commitment) = keygen();

    // 3. Add a rate commitment to the Merkle tree
    let id_index = 10;
    let user_message_limit = Fr::from(10);
    let rate_commitment = poseidon_hash(&[id_commitment, user_message_limit]);
    let mut buffer = Cursor::new(fr_to_bytes_le(&rate_commitment));
    rln.set_leaf(id_index, &mut buffer).unwrap();

    // 4. Set up the external nullifier (epoch + app identifier)
    // We generate the epoch from a date seed and ensure it is
    // mapped to a field element by hashing-to-field its content
    let epoch = hash_to_field(b"Today at noon, this year");
    // We generate the rln_identifier from a seed and ensure it is
    // mapped to a field element by hashing-to-field its content
    let rln_identifier = hash_to_field(b"test-rln-identifier");
    // We generate an external nullifier
    let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);
    // We choose a message_id satisfying 0 <= message_id < user_message_limit
    let message_id = Fr::from(1);

    // 5. Generate and verify a proof for a message
    let signal = b"RLN is awesome";

    // 6. Prepare input for the generate_rln_proof API
    // input_data is [ identity_secret<32> | id_index<8> | external_nullifier<32> | user_message_limit<32> | message_id<32> | signal_len<8> | signal<var> ]
    let prove_input = prepare_prove_input(
        identity_secret_hash,
        id_index,
        user_message_limit,
        message_id,
        external_nullifier,
        signal,
    );

    // 7. Generate a RLN proof for prove_input
    let mut input_buffer = Cursor::new(prove_input);
    let mut output_buffer = Cursor::new(Vec::<u8>::new());
    rln.generate_rln_proof(&mut input_buffer, &mut output_buffer)
        .unwrap();

    // We get the public outputs returned by the circuit evaluation.
    // The byte vector `proof_data` is serialized as `[ zk-proof | tree_root | external_nullifier | share_x | share_y | nullifier ]`.
    let proof_data = output_buffer.into_inner();

    // 8. Verify a RLN proof
    // The input buffer is serialized as `[ proof_data | signal_len | signal ]`, where `proof_data` is the output obtained from `generate_rln_proof`.
    let verify_data = prepare_verify_input(proof_data, signal);

    // We verify the zk-proof against the provided proof values
    let mut input_buffer = Cursor::new(verify_data);
    let verified = rln.verify_rln_proof(&mut input_buffer).unwrap();

    // We ensure the proof is valid
    assert!(verified);
}
```

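For readers who want to build the `generate_rln_proof` input buffer by hand rather than through `prepare_prove_input`, the following is a minimal sketch of the layout described in the comment at step 6. It assumes the `fr_to_bytes_le` and `normalize_usize` helpers from `rln::utils` (field elements serialize to 32 little-endian bytes, indices and lengths to 8); `serialize_prove_input` is a hypothetical name, not a zerokit API:

```rust
use rln::circuit::Fr;
use rln::utils::{fr_to_bytes_le, normalize_usize};

// Sketch: manual construction of the input buffer
// [ identity_secret<32> | id_index<8> | external_nullifier<32> | user_message_limit<32> | message_id<32> | signal_len<8> | signal<var> ]
fn serialize_prove_input(
    identity_secret: Fr,
    id_index: usize,
    external_nullifier: Fr,
    user_message_limit: Fr,
    message_id: Fr,
    signal: &[u8],
) -> Vec<u8> {
    let mut buf = Vec::new();
    buf.extend_from_slice(&fr_to_bytes_le(&identity_secret));
    buf.extend_from_slice(&normalize_usize(id_index));
    buf.extend_from_slice(&fr_to_bytes_le(&external_nullifier));
    buf.extend_from_slice(&fr_to_bytes_le(&user_message_limit));
    buf.extend_from_slice(&fr_to_bytes_le(&message_id));
    buf.extend_from_slice(&normalize_usize(signal.len()));
    buf.extend_from_slice(signal);
    buf
}
```
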
### Comments for the code above for point 4

The `external nullifier` includes two parameters.

The first one is `epoch`, and it's used to identify messages received in a certain time frame. It usually corresponds to the current UNIX time, but it can also be set to a random value or generated from a seed, provided that it corresponds to a field element.

The second one is `rln_identifier`, and it's used to prevent an RLN ZK proof generated for one application from being re-used in another one.

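As a concrete illustration (a minimal sketch; the seed string and the use of the system clock are our own choices, not fixed by the protocol), an external nullifier for the current UNIX epoch could be derived like this:

```rust
use rln::hashers::{hash_to_field, poseidon_hash};
use std::time::{SystemTime, UNIX_EPOCH};

// Hash the current UNIX time (in seconds) to a field element to obtain the epoch
let unix_seconds = SystemTime::now()
    .duration_since(UNIX_EPOCH)
    .expect("clock before UNIX epoch")
    .as_secs();
let epoch = hash_to_field(&unix_seconds.to_le_bytes());

// Hash an application-specific seed to a field element to obtain the rln_identifier
let rln_identifier = hash_to_field(b"my-app-v1");

// The external nullifier binds proofs to this (epoch, application) pair
let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);
```
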
### Features

- **Multiple Backend Support**: Choose between different zkey formats with feature flags (see the sketch after this list)
  - `arkzkey`: Use the optimized Arkworks-compatible zkey format (faster loading)
  - `stateless`: For stateless proof verification
- **Pre-compiled Circuits**: Ready-to-use circuits with a Merkle tree height of 20

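To opt into these backends from a downstream crate, the feature flags go on the dependency declaration. A sketch (pin the git revision as appropriate for your project):

```toml
[dependencies]
# Default backend
rln = { git = "https://github.com/vacp2p/zerokit" }

# Or, with the optimized arkzkey loader:
# rln = { git = "https://github.com/vacp2p/zerokit", features = ["arkzkey"] }

# Or, for stateless proof verification:
# rln = { git = "https://github.com/vacp2p/zerokit", features = ["stateless"] }
```
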
## Building and Testing

### Prerequisites

```sh
git clone https://github.com/vacp2p/zerokit.git
make installdeps
cd zerokit/rln
```

### Build Commands

```sh
# Build with default features
cargo make build

# Test with default features
cargo make test

# Test with specific features
cargo make test_arkzkey    # For the arkzkey feature
cargo make test_stateless  # For the stateless feature
```

## Advanced: Custom Circuit Compilation

The [`circom-rln`](https://github.com/rate-limiting-nullifier/circom-rln) repository, which contains the RLN circuit implementation, is used to produce the pre-compiled RLN circuit shipped with zerokit RLN. If you want to compile your own RLN circuit, you can follow the instructions below.

### 1. Compile ZK Circuits for getting the zkey and verification key files

The `build-circuits.sh` script used below generates not only the zkey and verification key files for the RLN circuit, but also the execution wasm file used for witness calculation. However, the wasm file is not needed by the `rln` module, because the current implementation uses the iden3 graph file for witness calculation. This graph file is generated by the `circom-witnesscalc` tool in [step 2](#2-generate-witness-calculation-graph).

To customize the circuit parameters, modify `circom-rln/circuits/rln.circom`:

```circom
pragma circom 2.1.0;
include "./rln.circom";
component main { public [x, externalNullifier] } = RLN(N, M);
```

Where:

- `N`: Merkle tree height, determining the maximum membership capacity (2^N members).
- `M`: Bit size for range checks, setting an upper bound for the number of messages per epoch (2^M messages).

> [!NOTE]
> However, if `N` is too big, this might require a larger Powers of Tau ceremony than the one hardcoded in `./scripts/build-circuits.sh`, which is `2^14`. \
> In such case, we refer to the official [Circom documentation](https://docs.circom.io/getting-started/proving-circuits/#powers-of-tau) for instructions on how to run an appropriate Powers of Tau ceremony and Phase 2 in order to compile the desired circuit. \
> Additionally, while `M` sets an upper bound on the number of messages per epoch (`2^M`), you can configure a lower message limit for your use case, as long as it satisfies `user_message_limit ≤ 2^M`. \
> Currently, the `rln` module comes with a [pre-compiled](https://github.com/vacp2p/zerokit/tree/master/rln/resources) RLN circuit with a Merkle tree of height `20` and a bit size of `16`, allowing up to `2^20` registered members and a `2^16` message limit per epoch.

#### Install circom compiler

You can follow the instructions below or refer to the [installing Circom](https://docs.circom.io/getting-started/installation/#installing-circom) guide for more details, but make sure to use the specific version `v2.1.0`.

```sh
# Clone the circom repository
git clone https://github.com/iden3/circom.git

# Checkout the specific version
cd circom && git checkout v2.1.0

# Build the circom compiler
cargo build --release

# Install the circom binary globally
cargo install --path circom

# Check the circom version to ensure it's v2.1.0
circom --version
```

#### Generate the zkey and verification key files example

```sh
# Clone the circom-rln repository
git clone https://github.com/rate-limiting-nullifier/circom-rln

# Install dependencies
cd circom-rln && npm install

# Build circuits
./scripts/build-circuits.sh rln

# Use the generated zkey file in subsequent steps
cp zkeyFiles/rln/final.zkey <path_to_rln_final.zkey>
```

### 2. Generate Witness Calculation Graph

The execution graph file used for witness calculation can be compiled following the instructions in the [circom-witnesscalc](https://github.com/iden3/circom-witnesscalc) repository. As mentioned in step 1, we should use the `rln.circom` file from the `circom-rln` repository.

```sh
# Clone the circom-witnesscalc repository
git clone https://github.com/iden3/circom-witnesscalc

# Load the submodules
cd circom-witnesscalc && git submodule update --init --recursive

# Build the circom-witnesscalc tool
cargo build

# Generate the witness calculation graph
cargo run --package circom_witnesscalc --bin build-circuit ../circom-rln/circuits/rln.circom <path_to_graph.bin>
```

The `rln` module comes with [pre-compiled](https://github.com/vacp2p/zerokit/tree/master/rln/resources) execution graph files for the RLN circuit.

### 3. Generate Arkzkey Representation for zkey and verification key files

For faster loading, compile the zkey file into the arkzkey format using [ark-zkey](https://github.com/seemenkina/ark-zkey). This is a fork of the [original](https://github.com/zkmopro/ark-zkey) repository with uncompressed zkey support.

```sh
# Clone the ark-zkey repository
git clone https://github.com/seemenkina/ark-zkey.git

# Build the ark-zkey tool
cd ark-zkey && cargo build

# Generate the arkzkey representation for the zkey file
cargo run --bin arkzkey-util <path_to_rln_final.zkey>
```

Currently, the `rln` module comes with [pre-compiled](https://github.com/vacp2p/zerokit/tree/master/rln/resources) arkzkey keys for the RLN circuit.

## Get involved

Zerokit RLN public and FFI APIs allow interaction with many more features than what is briefly showcased above.

We invite you to check our API documentation by running

```sh
cargo doc --no-deps
```

and to look at the unit tests for a hint on how to interface with and use them.

## Detailed Protocol Flow

1. **Identity Creation**: Generate a secret key and commitment
2. **Rate Commitment**: Add the commitment to a Merkle tree
3. **External Nullifier Setup**: Combine the epoch and an application identifier
4. **Proof Generation**: Create a zkSNARK proof that:
   - Proves membership in the Merkle tree
   - Ensures rate-limiting constraints are satisfied
   - Generates a nullifier to prevent double-usage (see the sketch after this list)
5. **Proof Verification**: Verify the proof without revealing the prover's identity

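To make step 4's rate-limiting property concrete: each proof publishes a Shamir share `(share_x, share_y)` of a line `y = a_0 + a_1 * x`, where `a_0` is derived from the identity secret. Two proofs that reuse the same nullifier (e.g. the same `message_id` in the same epoch) expose two points on the same line, so anyone can interpolate `a_0`. The helper below is a hypothetical sketch of that interpolation, not a zerokit API (zerokit exposes its own recovery routines); it only shows the underlying field arithmetic:

```rust
use ark_ff::Field;
use rln::circuit::Fr;

/// Hypothetical helper: recover the line's constant term a_0 from two shares
/// (x1, y1) and (x2, y2) published under the same nullifier.
/// Returns None if x1 == x2 (i.e. the same share was supplied twice).
fn recover_secret_term(x1: Fr, y1: Fr, x2: Fr, y2: Fr) -> Option<Fr> {
    // Slope of the line: a_1 = (y1 - y2) / (x1 - x2)
    let slope = (y1 - y2) * (x1 - x2).inverse()?;
    // Constant term: a_0 = y1 - a_1 * x1
    Some(y1 - slope * x1)
}
```
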
## Getting Involved

- Check the [unit tests](https://github.com/vacp2p/zerokit/tree/master/rln/tests) for more usage examples
- [RFC specification](https://rfc.vac.dev/spec/32/) for the Rate-Limiting Nullifier protocol
- [GitHub repository](https://github.com/vacp2p/zerokit) for the latest updates

25 rln/benches/circuit_loading_arkzkey_benchmark.rs Normal file
@@ -0,0 +1,25 @@
use criterion::{criterion_group, criterion_main, Criterion};
use rln::circuit::{read_arkzkey_from_bytes_uncompressed, ARKZKEY_BYTES};

pub fn uncompressed_bench(c: &mut Criterion) {
    let arkzkey = ARKZKEY_BYTES.to_vec();
    let size = arkzkey.len() as f32;
    println!(
        "Size of uncompressed arkzkey: {:.2?} MB",
        size / 1024.0 / 1024.0
    );

    c.bench_function("arkzkey::arkzkey_from_raw_uncompressed", |b| {
        b.iter(|| {
            let r = read_arkzkey_from_bytes_uncompressed(&arkzkey);
            assert!(r.is_ok());
        })
    });
}

criterion_group! {
    name = benches;
    config = Criterion::default().sample_size(10);
    targets = uncompressed_bench
}
criterion_main!(benches);

24 rln/benches/circuit_loading_benchmark.rs Normal file
@@ -0,0 +1,24 @@
use criterion::{criterion_group, criterion_main, Criterion};
use rln::circuit::zkey::read_zkey;
use std::io::Cursor;

pub fn zkey_load_benchmark(c: &mut Criterion) {
    let zkey = rln::circuit::ZKEY_BYTES.to_vec();
    let size = zkey.len() as f32;
    println!("Size of zkey: {:.2?} MB", size / 1024.0 / 1024.0);

    c.bench_function("zkey::zkey_from_raw", |b| {
        b.iter(|| {
            let mut reader = Cursor::new(zkey.clone());
            let r = read_zkey(&mut reader);
            assert!(r.is_ok());
        })
    });
}

criterion_group! {
    name = benches;
    config = Criterion::default().sample_size(10);
    targets = zkey_load_benchmark
}
criterion_main!(benches);

@@ -1,7 +1,6 @@
use criterion::{criterion_group, criterion_main, Criterion};

use rln::{circuit::Fr, pm_tree_adapter::PmTree};
use utils::ZerokitMerkleTree;

pub fn pmtree_benchmark(c: &mut Criterion) {
    let mut tree = PmTree::default(2).unwrap();
@@ -22,7 +21,7 @@ pub fn pmtree_benchmark(c: &mut Criterion) {

    c.bench_function("Pmtree::override_range", |b| {
        b.iter(|| {
            tree.override_range(0, leaves.clone().into_iter(), [0, 1, 2, 3].into_iter())
                .unwrap();
        })
    });
@@ -38,6 +37,19 @@ pub fn pmtree_benchmark(c: &mut Criterion) {
            tree.get(0).unwrap();
        })
    });

    // Check the intermediate node getter, which requires additional computation of the sub-root index
    c.bench_function("Pmtree::get_subtree_root", |b| {
        b.iter(|| {
            tree.get_subtree_root(1, 0).unwrap();
        })
    });

    c.bench_function("Pmtree::get_empty_leaves_indices", |b| {
        b.iter(|| {
            tree.get_empty_leaves_indices();
        })
    });
}

criterion_group!(benches, pmtree_benchmark);

79 rln/benches/poseidon_tree_benchmark.rs Normal file
@@ -0,0 +1,79 @@
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
use rln::{
    circuit::{Fr, TEST_TREE_HEIGHT},
    hashers::PoseidonHash,
};
use utils::{FullMerkleTree, OptimalMerkleTree, ZerokitMerkleTree};

pub fn get_leaves(n: u32) -> Vec<Fr> {
    (0..n).map(Fr::from).collect()
}

pub fn optimal_merkle_tree_poseidon_benchmark(c: &mut Criterion) {
    c.bench_function("OptimalMerkleTree::<Poseidon>::full_height_gen", |b| {
        b.iter(|| {
            OptimalMerkleTree::<PoseidonHash>::default(TEST_TREE_HEIGHT).unwrap();
        })
    });

    let mut group = c.benchmark_group("Set");
    for &n in [1u32, 10, 100].iter() {
        let leaves = get_leaves(n);

        let mut tree = OptimalMerkleTree::<PoseidonHash>::default(TEST_TREE_HEIGHT).unwrap();
        group.bench_function(
            BenchmarkId::new("OptimalMerkleTree::<Poseidon>::set", n),
            |b| {
                b.iter(|| {
                    for (i, l) in leaves.iter().enumerate() {
                        let _ = tree.set(i, *l);
                    }
                })
            },
        );

        group.bench_function(
            BenchmarkId::new("OptimalMerkleTree::<Poseidon>::set_range", n),
            |b| b.iter(|| tree.set_range(0, leaves.iter().cloned())),
        );
    }
    group.finish();
}

pub fn full_merkle_tree_poseidon_benchmark(c: &mut Criterion) {
    c.bench_function("FullMerkleTree::<Poseidon>::full_height_gen", |b| {
        b.iter(|| {
            FullMerkleTree::<PoseidonHash>::default(TEST_TREE_HEIGHT).unwrap();
        })
    });

    let mut group = c.benchmark_group("Set");
    for &n in [1u32, 10, 100].iter() {
        let leaves = get_leaves(n);

        let mut tree = FullMerkleTree::<PoseidonHash>::default(TEST_TREE_HEIGHT).unwrap();
        group.bench_function(
            BenchmarkId::new("FullMerkleTree::<Poseidon>::set", n),
            |b| {
                b.iter(|| {
                    for (i, l) in leaves.iter().enumerate() {
                        let _ = tree.set(i, *l);
                    }
                })
            },
        );

        group.bench_function(
            BenchmarkId::new("FullMerkleTree::<Poseidon>::set_range", n),
            |b| b.iter(|| tree.set_range(0, leaves.iter().cloned())),
        );
    }
    group.finish();
}

criterion_group!(
    benches,
    optimal_merkle_tree_poseidon_benchmark,
    full_merkle_tree_poseidon_benchmark
);
criterion_main!(benches);

BIN rln/resources/tree_height_20/graph.bin Normal file (binary file not shown)
BIN rln/resources/tree_height_20/rln_final.arkzkey Normal file (binary file not shown)
@@ -1,114 +0,0 @@
{
  "protocol": "groth16",
  "curve": "bn128",
  "nPublic": 5,
  "vk_alpha_1": [
    "20491192805390485299153009773594534940189261866228447918068658471970481763042",
    "9383485363053290200918347156157836566562967994039712273449902621266178545958",
    "1"
  ],
  "vk_beta_2": [
    [
      "6375614351688725206403948262868962793625744043794305715222011528459656738731",
      "4252822878758300859123897981450591353533073413197771768651442665752259397132"
    ],
    [
      "10505242626370262277552901082094356697409835680220590971873171140371331206856",
      "21847035105528745403288232691147584728191162732299865338377159692350059136679"
    ],
    [
      "1",
      "0"
    ]
  ],
  "vk_gamma_2": [
    [
      "10857046999023057135944570762232829481370756359578518086990519993285655852781",
      "11559732032986387107991004021392285783925812861821192530917403151452391805634"
    ],
    [
      "8495653923123431417604973247489272438418190587263600148770280649306958101930",
      "4082367875863433681332203403145435568316851327593401208105741076214120093531"
    ],
    [
      "1",
      "0"
    ]
  ],
  "vk_delta_2": [
    [
      "17077735495685170943380938230836408503627170115414840315502244846025577589191",
      "14030085636943255545683322474441991939484590437387381169642530788494152024614"
    ],
    [
      "11568745146423307387256571230823432454624378106569286849514884592874522611163",
      "1838524899938769516485895655063198583192139511330418290063560641219523306282"
    ],
    [
      "1",
      "0"
    ]
  ],
  "vk_alphabeta_12": [
    [
      [
        "2029413683389138792403550203267699914886160938906632433982220835551125967885",
        "21072700047562757817161031222997517981543347628379360635925549008442030252106"
      ],
      [
        "5940354580057074848093997050200682056184807770593307860589430076672439820312",
        "12156638873931618554171829126792193045421052652279363021382169897324752428276"
      ],
      [
        "7898200236362823042373859371574133993780991612861777490112507062703164551277",
        "7074218545237549455313236346927434013100842096812539264420499035217050630853"
      ]
    ],
    [
      [
        "7077479683546002997211712695946002074877511277312570035766170199895071832130",
        "10093483419865920389913245021038182291233451549023025229112148274109565435465"
      ],
      [
        "4595479056700221319381530156280926371456704509942304414423590385166031118820",
        "19831328484489333784475432780421641293929726139240675179672856274388269393268"
      ],
      [
        "11934129596455521040620786944827826205713621633706285934057045369193958244500",
        "8037395052364110730298837004334506829870972346962140206007064471173334027475"
      ]
    ]
  ],
  "IC": [
    [
      "4920513730204767532050733107749276406754520419375654722016092399980613788208",
      "10950491564509418434657706642388934308456795265036074733953533582377584967294",
      "1"
    ],
    [
      "6815064660695497986531118446154820702646540722664044216580897159556261271171",
      "17838140936832571103329556013529166877877534025488014783346458943575275015438",
      "1"
    ],
    [
      "16364982450206976302246609763791333525052810246590359380676749324389440643932",
      "17092624338100676284548565502349491320314889021833923882585524649862570629227",
      "1"
    ],
    [
      "3679639231485547795420532910726924727560917141402837495597760107842698404034",
      "16213191511474848247596810551723578773353083440353745908057321946068926848382",
      "1"
    ],
    [
      "9215428431027260354679105025212521481930206886203677270216204485256690813172",
      "934602510541226149881779979217731465262250233587980565969044391353665291792",
      "1"
    ],
    [
      "8935861545794299876685457331391349387048184820319250771243971382360441890897",
      "4993459033694759724715904486381952906869986989682015547152342336961693234616",
      "1"
    ]
  ]
}

Binary file not shown.
@@ -1,114 +0,0 @@
{
  "protocol": "groth16",
  "curve": "bn128",
  "nPublic": 5,
  "vk_alpha_1": [
    "20491192805390485299153009773594534940189261866228447918068658471970481763042",
    "9383485363053290200918347156157836566562967994039712273449902621266178545958",
    "1"
  ],
  "vk_beta_2": [
    [
      "6375614351688725206403948262868962793625744043794305715222011528459656738731",
      "4252822878758300859123897981450591353533073413197771768651442665752259397132"
    ],
    [
      "10505242626370262277552901082094356697409835680220590971873171140371331206856",
      "21847035105528745403288232691147584728191162732299865338377159692350059136679"
    ],
    [
      "1",
      "0"
    ]
  ],
  "vk_gamma_2": [
    [
      "10857046999023057135944570762232829481370756359578518086990519993285655852781",
      "11559732032986387107991004021392285783925812861821192530917403151452391805634"
    ],
    [
      "8495653923123431417604973247489272438418190587263600148770280649306958101930",
      "4082367875863433681332203403145435568316851327593401208105741076214120093531"
    ],
    [
      "1",
      "0"
    ]
  ],
  "vk_delta_2": [
    [
      "3689226096868373144622340732612563195789744807442014147637039988348252818659",
      "18947459102520510468597269280688700807407684209892273827108603062925288762423"
    ],
    [
      "5816405977664254142436796931495067997250259145480168934320978750042633353708",
      "14555486789839131710516067578112557185806110684461247253491378577062852578892"
    ],
    [
      "1",
      "0"
    ]
  ],
  "vk_alphabeta_12": [
    [
      [
        "2029413683389138792403550203267699914886160938906632433982220835551125967885",
        "21072700047562757817161031222997517981543347628379360635925549008442030252106"
      ],
      [
        "5940354580057074848093997050200682056184807770593307860589430076672439820312",
        "12156638873931618554171829126792193045421052652279363021382169897324752428276"
      ],
      [
        "7898200236362823042373859371574133993780991612861777490112507062703164551277",
        "7074218545237549455313236346927434013100842096812539264420499035217050630853"
      ]
    ],
    [
      [
        "7077479683546002997211712695946002074877511277312570035766170199895071832130",
        "10093483419865920389913245021038182291233451549023025229112148274109565435465"
      ],
      [
        "4595479056700221319381530156280926371456704509942304414423590385166031118820",
        "19831328484489333784475432780421641293929726139240675179672856274388269393268"
      ],
      [
        "11934129596455521040620786944827826205713621633706285934057045369193958244500",
        "8037395052364110730298837004334506829870972346962140206007064471173334027475"
      ]
    ]
  ],
  "IC": [
    [
      "5412646265162057015134786739992128493053406364679846617542694915593022919217",
      "9665511386935901867415947590751330959748921059696950821222365265700369811120",
      "1"
    ],
    [
      "4294362651275803035824711662252687124584574009834787359330648404293309808795",
      "1861758671717754835450145961645465880215655915164196594175485865489885224285",
      "1"
    ],
    [
      "1911114017568107170522785254288953144010421698038439931935418407428234018676",
      "13761363892532562822351086117281964648116890138564516558345965908415019790129",
      "1"
    ],
    [
      "16312980235585837964428386585067529342038135099260965575497230302984635878053",
      "20286500347141875536561618770383759234192052027362539966911091298688849002783",
      "1"
    ],
    [
      "21038649368092225315431823433752123495654049075935052064397443455654061176031",
      "6976971039866104284556300526186000690370678593992968176463280189048347216392",
      "1"
    ],
    [
      "971745799362951123575710699973701411260115357326598060711339429906895409324",
      "12959821343398475313407440786226277845673045139874184400082186049649123071798",
      "1"
    ]
  ]
}

@@ -1,267 +0,0 @@
|
||||
// This crate provides interfaces for the zero-knowledge circuit and keys
|
||||
|
||||
use ark_bn254::{
|
||||
Bn254, Fq as ArkFq, Fq2 as ArkFq2, Fr as ArkFr, G1Affine as ArkG1Affine,
|
||||
G1Projective as ArkG1Projective, G2Affine as ArkG2Affine, G2Projective as ArkG2Projective,
|
||||
};
|
||||
use ark_circom::read_zkey;
|
||||
use ark_groth16::{ProvingKey, VerifyingKey};
|
||||
use ark_relations::r1cs::ConstraintMatrices;
|
||||
use cfg_if::cfg_if;
|
||||
use color_eyre::{Report, Result};
|
||||
use num_bigint::BigUint;
|
||||
use serde_json::Value;
|
||||
use std::io::Cursor;
|
||||
use std::str::FromStr;
|
||||
|
||||
cfg_if! {
|
||||
if #[cfg(not(target_arch = "wasm32"))] {
|
||||
use ark_circom::{WitnessCalculator};
|
||||
use once_cell::sync::OnceCell;
|
||||
use std::sync::Mutex;
|
||||
use wasmer::{Module, Store};
|
||||
use include_dir::{include_dir, Dir};
|
||||
use std::path::Path;
|
||||
}
|
||||
}
|
||||
|
||||
const ZKEY_FILENAME: &str = "rln_final.zkey";
|
||||
const VK_FILENAME: &str = "verification_key.json";
|
||||
const WASM_FILENAME: &str = "rln.wasm";
|
||||
|
||||
// These parameters are used for tests
|
||||
// Note that the circuit and keys in TEST_RESOURCES_FOLDER are compiled for Merkle trees of height 20 & 32
|
||||
// Changing these parameters to other values than these defaults will cause zkSNARK proof verification to fail
|
||||
pub const TEST_PARAMETERS_INDEX: usize = 0;
|
||||
pub const TEST_TREE_HEIGHT: usize = [20, 32][TEST_PARAMETERS_INDEX];
|
||||
pub const TEST_RESOURCES_FOLDER: &str = ["tree_height_20", "tree_height_32"][TEST_PARAMETERS_INDEX];
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
static RESOURCES_DIR: Dir<'_> = include_dir!("$CARGO_MANIFEST_DIR/resources");
|
||||
|
||||
// The following types define the pairing friendly elliptic curve, the underlying finite fields and groups default to this module
|
||||
// Note that proofs are serialized assuming Fr to be 4x8 = 32 bytes in size. Hence, changing to a curve with different encoding will make proof verification to fail
|
||||
pub type Curve = Bn254;
|
||||
pub type Fr = ArkFr;
|
||||
pub type Fq = ArkFq;
|
||||
pub type Fq2 = ArkFq2;
|
||||
pub type G1Affine = ArkG1Affine;
|
||||
pub type G1Projective = ArkG1Projective;
|
||||
pub type G2Affine = ArkG2Affine;
|
||||
pub type G2Projective = ArkG2Projective;
|
||||
|
||||
// Loads the proving key using a bytes vector
|
||||
pub fn zkey_from_raw(zkey_data: &Vec<u8>) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
|
||||
if !zkey_data.is_empty() {
|
||||
let mut c = Cursor::new(zkey_data);
|
||||
let proving_key_and_matrices = read_zkey(&mut c)?;
|
||||
Ok(proving_key_and_matrices)
|
||||
} else {
|
||||
Err(Report::msg("No proving key found!"))
|
||||
}
|
||||
}
|
||||
|
||||
// Loads the proving key
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub fn zkey_from_folder(
|
||||
resources_folder: &str,
|
||||
) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
|
||||
let zkey = RESOURCES_DIR.get_file(Path::new(resources_folder).join(ZKEY_FILENAME));
|
||||
if let Some(zkey) = zkey {
|
||||
let mut c = Cursor::new(zkey.contents());
|
||||
let proving_key_and_matrices = read_zkey(&mut c)?;
|
||||
Ok(proving_key_and_matrices)
|
||||
} else {
|
||||
Err(Report::msg("No proving key found!"))
|
||||
}
|
||||
}
|
||||
|
||||
// Loads the verification key from a bytes vector
|
||||
pub fn vk_from_raw(vk_data: &Vec<u8>, zkey_data: &Vec<u8>) -> Result<VerifyingKey<Curve>> {
|
||||
let verifying_key: VerifyingKey<Curve>;
|
||||
|
||||
if !vk_data.is_empty() {
|
||||
verifying_key = vk_from_vector(vk_data)?;
|
||||
Ok(verifying_key)
|
||||
} else if !zkey_data.is_empty() {
|
||||
let (proving_key, _matrices) = zkey_from_raw(zkey_data)?;
|
||||
verifying_key = proving_key.vk;
|
||||
Ok(verifying_key)
|
||||
} else {
|
||||
Err(Report::msg("No proving/verification key found!"))
|
||||
}
|
||||
}
|
||||
|
||||
// Loads the verification key
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub fn vk_from_folder(resources_folder: &str) -> Result<VerifyingKey<Curve>> {
|
||||
let vk = RESOURCES_DIR.get_file(Path::new(resources_folder).join(VK_FILENAME));
|
||||
let zkey = RESOURCES_DIR.get_file(Path::new(resources_folder).join(ZKEY_FILENAME));
|
||||
|
||||
let verifying_key: VerifyingKey<Curve>;
|
||||
if let Some(vk) = vk {
|
||||
verifying_key = vk_from_json(vk.contents_utf8().ok_or(Report::msg(
|
||||
"Could not read verification key from JSON file!",
|
||||
))?)?;
|
||||
Ok(verifying_key)
|
||||
} else if let Some(_zkey) = zkey {
|
||||
let (proving_key, _matrices) = zkey_from_folder(resources_folder)?;
|
||||
verifying_key = proving_key.vk;
|
||||
Ok(verifying_key)
|
||||
} else {
|
||||
Err(Report::msg("No proving/verification key found!"))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
static WITNESS_CALCULATOR: OnceCell<Mutex<WitnessCalculator>> = OnceCell::new();
|
||||
|
||||
// Initializes the witness calculator using a bytes vector
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub fn circom_from_raw(wasm_buffer: Vec<u8>) -> Result<&'static Mutex<WitnessCalculator>> {
|
||||
WITNESS_CALCULATOR.get_or_try_init(|| {
|
||||
let store = Store::default();
|
||||
let module = Module::new(&store, wasm_buffer)?;
|
||||
let result = WitnessCalculator::from_module(module)?;
|
||||
Ok::<Mutex<WitnessCalculator>, Report>(Mutex::new(result))
|
||||
})
|
||||
}
|
||||
|
||||
// Initializes the witness calculator
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub fn circom_from_folder(resources_folder: &str) -> Result<&'static Mutex<WitnessCalculator>> {
|
||||
// We read the wasm file
|
||||
let wasm = RESOURCES_DIR.get_file(Path::new(resources_folder).join(WASM_FILENAME));
|
||||
|
||||
if let Some(wasm) = wasm {
|
||||
let wasm_buffer = wasm.contents();
|
||||
circom_from_raw(wasm_buffer.to_vec())
|
||||
} else {
|
||||
Err(Report::msg("No wasm file found!"))
|
||||
}
|
||||
}
|
||||
|
||||
// The following function implementations are taken/adapted from https://github.com/gakonst/ark-circom/blob/1732e15d6313fe176b0b1abb858ac9e095d0dbd7/src/zkey.rs

// Utilities to convert a JSON verification key into a groth16::VerifyingKey
fn fq_from_str(s: &str) -> Result<Fq> {
    Ok(Fq::try_from(BigUint::from_str(s)?)?)
}

// Extracts the element in G1 corresponding to its JSON serialization
fn json_to_g1(json: &Value, key: &str) -> Result<G1Affine> {
    let els: Vec<String> = json
        .get(key)
        .ok_or(Report::msg("no json value"))?
        .as_array()
        .ok_or(Report::msg("value not an array"))?
        .iter()
        .map(|i| i.as_str().ok_or(Report::msg("element is not a string")))
        .map(|x| x.map(|v| v.to_owned()))
        .collect::<Result<Vec<String>>>()?;

    Ok(G1Affine::from(G1Projective::new(
        fq_from_str(&els[0])?,
        fq_from_str(&els[1])?,
        fq_from_str(&els[2])?,
    )))
}

// Extracts the vector of G1 elements corresponding to its JSON serialization
fn json_to_g1_vec(json: &Value, key: &str) -> Result<Vec<G1Affine>> {
    let els: Vec<Vec<String>> = json
        .get(key)
        .ok_or(Report::msg("no json value"))?
        .as_array()
        .ok_or(Report::msg("value not an array"))?
        .iter()
        .map(|i| {
            i.as_array()
                .ok_or(Report::msg("element is not an array"))
                .and_then(|array| {
                    array
                        .iter()
                        .map(|x| x.as_str().ok_or(Report::msg("element is not a string")))
                        .map(|x| x.map(|v| v.to_owned()))
                        .collect::<Result<Vec<String>>>()
                })
        })
        .collect::<Result<Vec<Vec<String>>>>()?;

    let mut res = vec![];
    for coords in els {
        res.push(G1Affine::from(G1Projective::new(
            fq_from_str(&coords[0])?,
            fq_from_str(&coords[1])?,
            fq_from_str(&coords[2])?,
        )))
    }

    Ok(res)
}

// Extracts the element in G2 corresponding to its JSON serialization
fn json_to_g2(json: &Value, key: &str) -> Result<G2Affine> {
    let els: Vec<Vec<String>> = json
        .get(key)
        .ok_or(Report::msg("no json value"))?
        .as_array()
        .ok_or(Report::msg("value not an array"))?
        .iter()
        .map(|i| {
            i.as_array()
                .ok_or(Report::msg("element is not an array"))
                .and_then(|array| {
                    array
                        .iter()
                        .map(|x| x.as_str().ok_or(Report::msg("element is not a string")))
                        .map(|x| x.map(|v| v.to_owned()))
                        .collect::<Result<Vec<String>>>()
                })
        })
        .collect::<Result<Vec<Vec<String>>>>()?;

    let x = Fq2::new(fq_from_str(&els[0][0])?, fq_from_str(&els[0][1])?);
    let y = Fq2::new(fq_from_str(&els[1][0])?, fq_from_str(&els[1][1])?);
    let z = Fq2::new(fq_from_str(&els[2][0])?, fq_from_str(&els[2][1])?);
    Ok(G2Affine::from(G2Projective::new(x, y, z)))
}

// Converts JSON to a VerifyingKey
fn to_verifying_key(json: serde_json::Value) -> Result<VerifyingKey<Curve>> {
    Ok(VerifyingKey {
        alpha_g1: json_to_g1(&json, "vk_alpha_1")?,
        beta_g2: json_to_g2(&json, "vk_beta_2")?,
        gamma_g2: json_to_g2(&json, "vk_gamma_2")?,
        delta_g2: json_to_g2(&json, "vk_delta_2")?,
        gamma_abc_g1: json_to_g1_vec(&json, "IC")?,
    })
}

// Computes the verification key from its JSON serialization
fn vk_from_json(vk: &str) -> Result<VerifyingKey<Curve>> {
    let json: Value = serde_json::from_str(vk)?;
    to_verifying_key(json)
}

// Computes the verification key from a bytes vector containing its JSON serialization
fn vk_from_vector(vk: &[u8]) -> Result<VerifyingKey<Curve>> {
    let json = String::from_utf8(vk.to_vec())?;
    let json: Value = serde_json::from_str(&json)?;

    to_verifying_key(json)
}

// Checks the verification key to be correct with respect to the proving key
#[cfg(not(target_arch = "wasm32"))]
pub fn check_vk_from_zkey(
    resources_folder: &str,
    verifying_key: VerifyingKey<Curve>,
) -> Result<()> {
    let (proving_key, _matrices) = zkey_from_folder(resources_folder)?;
    if proving_key.vk == verifying_key {
        Ok(())
    } else {
        Err(Report::msg("verifying_keys are not equal"))
    }
}
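For reference, the parsers above imply the field layout of a snarkjs-style verification_key.json: G1 points are arrays of three decimal strings (projective coordinates), G2 points are three pairs of strings, and "IC" is an array of G1 points. A schematic example with coordinate values deliberately elided:

{
  "vk_alpha_1": ["<x>", "<y>", "1"],
  "vk_beta_2":  [["<x0>", "<x1>"], ["<y0>", "<y1>"], ["1", "0"]],
  "vk_gamma_2": [["<x0>", "<x1>"], ["<y0>", "<y1>"], ["1", "0"]],
  "vk_delta_2": [["<x0>", "<x1>"], ["<y0>", "<y1>"], ["1", "0"]],
  "IC": [["<x>", "<y>", "1"], ["<x>", "<y>", "1"]]
}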
73  rln/src/circuit/iden3calc.rs  Normal file
@@ -0,0 +1,73 @@
// This file is based on the code by iden3. Its preimage can be found here:
// https://github.com/iden3/circom-witnesscalc/blob/5cb365b6e4d9052ecc69d4567fcf5bc061c20e94/src/lib.rs

pub mod graph;
pub mod proto;
pub mod storage;

use ruint::aliases::U256;
use std::collections::HashMap;
use storage::deserialize_witnesscalc_graph;

use crate::circuit::Fr;
use graph::{fr_to_u256, Node};

pub type InputSignalsInfo = HashMap<String, (usize, usize)>;

pub fn calc_witness<I: IntoIterator<Item = (String, Vec<Fr>)>>(
    inputs: I,
    graph_data: &[u8],
) -> Vec<Fr> {
    let inputs: HashMap<String, Vec<U256>> = inputs
        .into_iter()
        .map(|(key, value)| (key, value.iter().map(fr_to_u256).collect()))
        .collect();

    let (nodes, signals, input_mapping): (Vec<Node>, Vec<usize>, InputSignalsInfo) =
        deserialize_witnesscalc_graph(std::io::Cursor::new(graph_data)).unwrap();

    let mut inputs_buffer = get_inputs_buffer(get_inputs_size(&nodes));
    populate_inputs(&inputs, &input_mapping, &mut inputs_buffer);

    graph::evaluate(&nodes, inputs_buffer.as_slice(), &signals)
}

fn get_inputs_size(nodes: &[Node]) -> usize {
    let mut start = false;
    let mut max_index = 0usize;
    for &node in nodes.iter() {
        if let Node::Input(i) = node {
            if i > max_index {
                max_index = i;
            }
            start = true
        } else if start {
            break;
        }
    }
    max_index + 1
}

fn populate_inputs(
    input_list: &HashMap<String, Vec<U256>>,
    inputs_info: &InputSignalsInfo,
    input_buffer: &mut [U256],
) {
    for (key, value) in input_list {
        let (offset, len) = inputs_info[key];
        if len != value.len() {
            panic!("Invalid input length for {}", key);
        }

        for (i, v) in value.iter().enumerate() {
            input_buffer[offset + i] = *v;
        }
    }
}

/// Allocates inputs vec with position 0 set to 1
fn get_inputs_buffer(size: usize) -> Vec<U256> {
    let mut inputs = vec![U256::ZERO; size];
    inputs[0] = U256::from(1);
    inputs
}
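A minimal sketch of driving `calc_witness`. The signal names below are hypothetical; the real names and arities come from the `InputSignalsInfo` mapping embedded in the graph file itself:

// Sketch only: "x" and "y" are placeholder signal names.
fn witness_from_graph(graph_data: &[u8]) -> Vec<Fr> {
    let inputs = vec![
        ("x".to_string(), vec![Fr::from(3u64)]),
        ("y".to_string(), vec![Fr::from(5u64)]),
    ];
    calc_witness(inputs, graph_data)
}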
957  rln/src/circuit/iden3calc/graph.rs  Normal file
@@ -0,0 +1,957 @@
// This file is based on the code by iden3. Its preimage can be found here:
// https://github.com/iden3/circom-witnesscalc/blob/5cb365b6e4d9052ecc69d4567fcf5bc061c20e94/src/graph.rs

use ark_ff::{BigInt, BigInteger, One, PrimeField, Zero};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress, Validate};
use rand::Rng;
use ruint::{aliases::U256, uint};
use serde::{Deserialize, Serialize};
use std::{
    cmp::Ordering,
    collections::HashMap,
    error::Error,
    ops::{BitAnd, BitOr, BitXor, Deref, Shl, Shr},
};

use crate::circuit::iden3calc::proto;
use crate::circuit::Fr;

pub const M: U256 =
    uint!(21888242871839275222246405745257275088548364400416034343698204186575808495617_U256);

fn ark_se<S, A: CanonicalSerialize>(a: &A, s: S) -> Result<S::Ok, S::Error>
where
    S: serde::Serializer,
{
    let mut bytes = vec![];
    a.serialize_with_mode(&mut bytes, Compress::Yes)
        .map_err(serde::ser::Error::custom)?;
    s.serialize_bytes(&bytes)
}

fn ark_de<'de, D, A: CanonicalDeserialize>(data: D) -> Result<A, D::Error>
where
    D: serde::de::Deserializer<'de>,
{
    let s: Vec<u8> = serde::de::Deserialize::deserialize(data)?;
    let a = A::deserialize_with_mode(s.as_slice(), Compress::Yes, Validate::Yes);
    a.map_err(serde::de::Error::custom)
}

#[inline(always)]
pub fn fr_to_u256(x: &Fr) -> U256 {
    U256::from_limbs(x.into_bigint().0)
}

#[inline(always)]
pub fn u256_to_fr(x: &U256) -> Fr {
    Fr::from_bigint(BigInt::new(x.into_limbs())).expect("Failed to convert U256 to Fr")
}
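The two converters above assume the value is a canonical field element (strictly less than M); for such values they are inverses of each other. A small check, as a sketch:

// Round-trip holds for canonical values below the modulus M.
let x = Fr::from(123456789u64);
assert_eq!(u256_to_fr(&fr_to_u256(&x)), x);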
#[derive(Hash, PartialEq, Eq, Debug, Clone, Copy, Serialize, Deserialize)]
pub enum Operation {
    Mul,
    Div,
    Add,
    Sub,
    Pow,
    Idiv,
    Mod,
    Eq,
    Neq,
    Lt,
    Gt,
    Leq,
    Geq,
    Land,
    Lor,
    Shl,
    Shr,
    Bor,
    Band,
    Bxor,
}

impl Operation {
    // TODO: rewrite to &U256 type
    pub fn eval(&self, a: U256, b: U256) -> U256 {
        use Operation::*;
        match self {
            Mul => a.mul_mod(b, M),
            Div => {
                if b == U256::ZERO {
                    // as we are simulating a circuit execution with signals
                    // values all equal to 0, just return 0 here in case of
                    // division by zero
                    U256::ZERO
                } else {
                    a.mul_mod(b.inv_mod(M).unwrap(), M)
                }
            }
            Add => a.add_mod(b, M),
            Sub => a.add_mod(M - b, M),
            Pow => a.pow_mod(b, M),
            Mod => a.div_rem(b).1,
            Eq => U256::from(a == b),
            Neq => U256::from(a != b),
            Lt => u_lt(&a, &b),
            Gt => u_gt(&a, &b),
            Leq => u_lte(&a, &b),
            Geq => u_gte(&a, &b),
            Land => U256::from(a != U256::ZERO && b != U256::ZERO),
            Lor => U256::from(a != U256::ZERO || b != U256::ZERO),
            Shl => compute_shl_uint(a, b),
            Shr => compute_shr_uint(a, b),
            // TODO: test the corner case where it is possible to get a number
            // bigger than the modulus
            Bor => a.bitor(b),
            Band => a.bitand(b),
            // TODO: test the corner case where it is possible to get a number
            // bigger than the modulus
            Bxor => a.bitxor(b),
            Idiv => a / b,
        }
    }

    pub fn eval_fr(&self, a: Fr, b: Fr) -> Fr {
        use Operation::*;
        match self {
            Mul => a * b,
            // We always should return something on the circuit execution.
            // So in case of division by 0 we would return 0. And the proof
            // should be invalid in the end.
            Div => {
                if b.is_zero() {
                    Fr::zero()
                } else {
                    a / b
                }
            }
            Add => a + b,
            Sub => a - b,
            Idiv => {
                if b.is_zero() {
                    Fr::zero()
                } else {
                    let a_u256 = fr_to_u256(&a);
                    let b_u256 = fr_to_u256(&b);
                    u256_to_fr(&(a_u256 / b_u256))
                }
            }
            Mod => {
                if b.is_zero() {
                    Fr::zero()
                } else {
                    let a_u256 = fr_to_u256(&a);
                    let b_u256 = fr_to_u256(&b);
                    u256_to_fr(&(a_u256 % b_u256))
                }
            }
            Eq => match a.cmp(&b) {
                Ordering::Equal => Fr::one(),
                _ => Fr::zero(),
            },
            Neq => match a.cmp(&b) {
                Ordering::Equal => Fr::zero(),
                _ => Fr::one(),
            },
            Lt => u256_to_fr(&u_lt(&fr_to_u256(&a), &fr_to_u256(&b))),
            Gt => u256_to_fr(&u_gt(&fr_to_u256(&a), &fr_to_u256(&b))),
            Leq => u256_to_fr(&u_lte(&fr_to_u256(&a), &fr_to_u256(&b))),
            Geq => u256_to_fr(&u_gte(&fr_to_u256(&a), &fr_to_u256(&b))),
            Land => {
                if a.is_zero() || b.is_zero() {
                    Fr::zero()
                } else {
                    Fr::one()
                }
            }
            Lor => {
                if a.is_zero() && b.is_zero() {
                    Fr::zero()
                } else {
                    Fr::one()
                }
            }
            Shl => shl(a, b),
            Shr => shr(a, b),
            Bor => bit_or(a, b),
            Band => bit_and(a, b),
            Bxor => bit_xor(a, b),
            // TODO: implement other operators
            _ => unimplemented!("operator {:?} not implemented for Montgomery", self),
        }
    }
}
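Note how `eval` keeps every result reduced modulo M: subtraction is implemented as `a.add_mod(M - b, M)`, so `Sub` wraps around instead of underflowing. A small worked check, as a sketch:

// 1 - 2 mod M wraps to M - 1, the field's representation of -1.
let a = U256::from(1u64);
let b = U256::from(2u64);
assert_eq!(Operation::Sub.eval(a, b), M - U256::from(1u64));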
impl From<&Operation> for proto::DuoOp {
    fn from(v: &Operation) -> Self {
        match v {
            Operation::Mul => proto::DuoOp::Mul,
            Operation::Div => proto::DuoOp::Div,
            Operation::Add => proto::DuoOp::Add,
            Operation::Sub => proto::DuoOp::Sub,
            Operation::Pow => proto::DuoOp::Pow,
            Operation::Idiv => proto::DuoOp::Idiv,
            Operation::Mod => proto::DuoOp::Mod,
            Operation::Eq => proto::DuoOp::Eq,
            Operation::Neq => proto::DuoOp::Neq,
            Operation::Lt => proto::DuoOp::Lt,
            Operation::Gt => proto::DuoOp::Gt,
            Operation::Leq => proto::DuoOp::Leq,
            Operation::Geq => proto::DuoOp::Geq,
            Operation::Land => proto::DuoOp::Land,
            Operation::Lor => proto::DuoOp::Lor,
            Operation::Shl => proto::DuoOp::Shl,
            Operation::Shr => proto::DuoOp::Shr,
            Operation::Bor => proto::DuoOp::Bor,
            Operation::Band => proto::DuoOp::Band,
            Operation::Bxor => proto::DuoOp::Bxor,
        }
    }
}
#[derive(Hash, PartialEq, Eq, Debug, Clone, Copy, Serialize, Deserialize)]
pub enum UnoOperation {
    Neg,
    Id, // identity - just return self
}

impl UnoOperation {
    pub fn eval(&self, a: U256) -> U256 {
        match self {
            UnoOperation::Neg => {
                if a == U256::ZERO {
                    U256::ZERO
                } else {
                    M - a
                }
            }
            UnoOperation::Id => a,
        }
    }

    pub fn eval_fr(&self, a: Fr) -> Fr {
        match self {
            UnoOperation::Neg => {
                if a.is_zero() {
                    Fr::zero()
                } else {
                    let mut x = Fr::MODULUS;
                    x.sub_with_borrow(&a.into_bigint());
                    Fr::from_bigint(x).unwrap()
                }
            }
            _ => unimplemented!("uno operator {:?} not implemented for Montgomery", self),
        }
    }
}

impl From<&UnoOperation> for proto::UnoOp {
    fn from(v: &UnoOperation) -> Self {
        match v {
            UnoOperation::Neg => proto::UnoOp::Neg,
            UnoOperation::Id => proto::UnoOp::Id,
        }
    }
}
#[derive(Hash, PartialEq, Eq, Debug, Clone, Copy, Serialize, Deserialize)]
pub enum TresOperation {
    TernCond,
}

impl TresOperation {
    pub fn eval(&self, a: U256, b: U256, c: U256) -> U256 {
        match self {
            TresOperation::TernCond => {
                if a == U256::ZERO {
                    c
                } else {
                    b
                }
            }
        }
    }

    pub fn eval_fr(&self, a: Fr, b: Fr, c: Fr) -> Fr {
        match self {
            TresOperation::TernCond => {
                if a.is_zero() {
                    c
                } else {
                    b
                }
            }
        }
    }
}

impl From<&TresOperation> for proto::TresOp {
    fn from(v: &TresOperation) -> Self {
        match v {
            TresOperation::TernCond => proto::TresOp::TernCond,
        }
    }
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum Node {
    Input(usize),
    Constant(U256),
    #[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
    MontConstant(Fr),
    UnoOp(UnoOperation, usize),
    Op(Operation, usize, usize),
    TresOp(TresOperation, usize, usize, usize),
}

// TODO remove pub from Vec<Node>
#[derive(Default)]
pub struct Nodes(pub Vec<Node>);

impl Nodes {
    pub fn new() -> Self {
        Nodes(Vec::new())
    }

    pub fn to_const(&self, idx: NodeIdx) -> Result<U256, NodeConstErr> {
        let me = self.0.get(idx.0).ok_or(NodeConstErr::EmptyNode(idx))?;
        match me {
            Node::Constant(v) => Ok(*v),
            Node::UnoOp(op, a) => Ok(op.eval(self.to_const(NodeIdx(*a))?)),
            Node::Op(op, a, b) => {
                Ok(op.eval(self.to_const(NodeIdx(*a))?, self.to_const(NodeIdx(*b))?))
            }
            Node::TresOp(op, a, b, c) => Ok(op.eval(
                self.to_const(NodeIdx(*a))?,
                self.to_const(NodeIdx(*b))?,
                self.to_const(NodeIdx(*c))?,
            )),
            Node::Input(_) => Err(NodeConstErr::InputSignal),
            Node::MontConstant(_) => {
                panic!("MontConstant should not be used here")
            }
        }
    }

    pub fn push(&mut self, n: Node) -> NodeIdx {
        self.0.push(n);
        NodeIdx(self.0.len() - 1)
    }

    pub fn get(&self, idx: NodeIdx) -> Option<&Node> {
        self.0.get(idx.0)
    }
}

impl Deref for Nodes {
    type Target = Vec<Node>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

#[derive(Debug, Copy, Clone)]
pub struct NodeIdx(pub usize);

impl From<usize> for NodeIdx {
    fn from(v: usize) -> Self {
        NodeIdx(v)
    }
}

#[derive(Debug)]
pub enum NodeConstErr {
    EmptyNode(NodeIdx),
    InputSignal,
}

impl std::fmt::Display for NodeConstErr {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            NodeConstErr::EmptyNode(idx) => {
                write!(f, "empty node at index {}", idx.0)
            }
            NodeConstErr::InputSignal => {
                write!(f, "input signal is not a constant")
            }
        }
    }
}

impl Error for NodeConstErr {}

fn compute_shl_uint(a: U256, b: U256) -> U256 {
    debug_assert!(b.lt(&U256::from(256)));
    let ls_limb = b.as_limbs()[0];
    a.shl(ls_limb as usize)
}

fn compute_shr_uint(a: U256, b: U256) -> U256 {
    debug_assert!(b.lt(&U256::from(256)));
    let ls_limb = b.as_limbs()[0];
    a.shr(ls_limb as usize)
}

/// All references must be backwards.
fn assert_valid(nodes: &[Node]) {
    for (i, &node) in nodes.iter().enumerate() {
        if let Node::Op(_, a, b) = node {
            assert!(a < i);
            assert!(b < i);
        } else if let Node::UnoOp(_, a) = node {
            assert!(a < i);
        } else if let Node::TresOp(_, a, b, c) = node {
            assert!(a < i);
            assert!(b < i);
            assert!(c < i);
        }
    }
}
pub fn optimize(nodes: &mut Vec<Node>, outputs: &mut [usize]) {
    tree_shake(nodes, outputs);
    propagate(nodes);
    value_numbering(nodes, outputs);
    constants(nodes);
    tree_shake(nodes, outputs);
    montgomery_form(nodes);
}

pub fn evaluate(nodes: &[Node], inputs: &[U256], outputs: &[usize]) -> Vec<Fr> {
    // assert_valid(nodes);

    // Evaluate the graph.
    let mut values = Vec::with_capacity(nodes.len());
    for &node in nodes.iter() {
        let value = match node {
            Node::Constant(c) => u256_to_fr(&c),
            Node::MontConstant(c) => c,
            Node::Input(i) => u256_to_fr(&inputs[i]),
            Node::Op(op, a, b) => op.eval_fr(values[a], values[b]),
            Node::UnoOp(op, a) => op.eval_fr(values[a]),
            Node::TresOp(op, a, b, c) => op.eval_fr(values[a], values[b], values[c]),
        };
        values.push(value);
    }

    // Convert from Montgomery form and return the outputs.
    let mut out = vec![Fr::from(0); outputs.len()];
    for i in 0..outputs.len() {
        out[i] = values[outputs[i]];
    }

    out
}
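`optimize` is a fixed pipeline: dead-node elimination, constant propagation, global value numbering, probabilistic constant detection, another shake, then conversion of surviving constants to Montgomery form. `evaluate` then walks the node list once, each node reading earlier values by index. A minimal end-to-end sketch on a hand-built graph:

// A 3-node graph computing inputs[0] * inputs[1].
let nodes = vec![Node::Input(0), Node::Input(1), Node::Op(Operation::Mul, 0, 1)];
let inputs = [U256::from(3u64), U256::from(5u64)];
let outputs = [2usize];
let result = evaluate(&nodes, &inputs, &outputs);
assert_eq!(result[0], Fr::from(15u64));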
/// Constant propagation
pub fn propagate(nodes: &mut [Node]) {
    assert_valid(nodes);
    for i in 0..nodes.len() {
        if let Node::Op(op, a, b) = nodes[i] {
            if let (Node::Constant(va), Node::Constant(vb)) = (nodes[a], nodes[b]) {
                nodes[i] = Node::Constant(op.eval(va, vb));
            } else if a == b {
                // Not constant but equal
                use Operation::*;
                if let Some(c) = match op {
                    Eq | Leq | Geq => Some(true),
                    Neq | Lt | Gt => Some(false),
                    _ => None,
                } {
                    nodes[i] = Node::Constant(U256::from(c));
                }
            }
        } else if let Node::UnoOp(op, a) = nodes[i] {
            if let Node::Constant(va) = nodes[a] {
                nodes[i] = Node::Constant(op.eval(va));
            }
        } else if let Node::TresOp(op, a, b, c) = nodes[i] {
            if let (Node::Constant(va), Node::Constant(vb), Node::Constant(vc)) =
                (nodes[a], nodes[b], nodes[c])
            {
                nodes[i] = Node::Constant(op.eval(va, vb, vc));
            }
        }
    }
}
/// Remove unused nodes
pub fn tree_shake(nodes: &mut Vec<Node>, outputs: &mut [usize]) {
    assert_valid(nodes);

    // Mark all nodes that are used.
    let mut used = vec![false; nodes.len()];
    for &i in outputs.iter() {
        used[i] = true;
    }

    // Work backwards from end as all references are backwards.
    for i in (0..nodes.len()).rev() {
        if used[i] {
            if let Node::Op(_, a, b) = nodes[i] {
                used[a] = true;
                used[b] = true;
            }
            if let Node::UnoOp(_, a) = nodes[i] {
                used[a] = true;
            }
            if let Node::TresOp(_, a, b, c) = nodes[i] {
                used[a] = true;
                used[b] = true;
                used[c] = true;
            }
        }
    }

    // Remove unused nodes
    let n = nodes.len();
    let mut retain = used.iter();
    nodes.retain(|_| *retain.next().unwrap());

    // Renumber references.
    let mut renumber = vec![None; n];
    let mut index = 0;
    for (i, &used) in used.iter().enumerate() {
        if used {
            renumber[i] = Some(index);
            index += 1;
        }
    }
    assert_eq!(index, nodes.len());
    for (&used, renumber) in used.iter().zip(renumber.iter()) {
        assert_eq!(used, renumber.is_some());
    }

    // Renumber references.
    for node in nodes.iter_mut() {
        if let Node::Op(_, a, b) = node {
            *a = renumber[*a].unwrap();
            *b = renumber[*b].unwrap();
        }
        if let Node::UnoOp(_, a) = node {
            *a = renumber[*a].unwrap();
        }
        if let Node::TresOp(_, a, b, c) = node {
            *a = renumber[*a].unwrap();
            *b = renumber[*b].unwrap();
            *c = renumber[*c].unwrap();
        }
    }
    for output in outputs.iter_mut() {
        *output = renumber[*output].unwrap();
    }
}
/// Randomly evaluate the graph
fn random_eval(nodes: &mut [Node]) -> Vec<U256> {
    let mut rng = rand::thread_rng();
    let mut values = Vec::with_capacity(nodes.len());
    let mut inputs = HashMap::new();
    let mut prfs = HashMap::new();
    let mut prfs_uno = HashMap::new();
    let mut prfs_tres = HashMap::new();
    for node in nodes.iter() {
        use Operation::*;
        let value = match node {
            // Constants evaluate to themselves
            Node::Constant(c) => *c,

            Node::MontConstant(_) => unimplemented!("should not be used"),

            // Algebraic ops are evaluated directly.
            // Since the field is large, by Schwartz-Zippel, if
            // two values are the same then they are likely algebraically equal.
            Node::Op(op @ (Add | Sub | Mul), a, b) => op.eval(values[*a], values[*b]),

            // Input and non-algebraic ops are random functions
            // TODO: https://github.com/recmo/uint/issues/95 and use .gen_range(..M)
            Node::Input(i) => *inputs.entry(*i).or_insert_with(|| rng.gen::<U256>() % M),
            Node::Op(op, a, b) => *prfs
                .entry((*op, values[*a], values[*b]))
                .or_insert_with(|| rng.gen::<U256>() % M),
            Node::UnoOp(op, a) => *prfs_uno
                .entry((*op, values[*a]))
                .or_insert_with(|| rng.gen::<U256>() % M),
            Node::TresOp(op, a, b, c) => *prfs_tres
                .entry((*op, values[*a], values[*b], values[*c]))
                .or_insert_with(|| rng.gen::<U256>() % M),
        };
        values.push(value);
    }
    values
}

/// Value numbering
pub fn value_numbering(nodes: &mut [Node], outputs: &mut [usize]) {
    assert_valid(nodes);

    // Evaluate the graph in random field elements.
    let values = random_eval(nodes);

    // Find all nodes with the same value.
    let mut value_map = HashMap::new();
    for (i, &value) in values.iter().enumerate() {
        value_map.entry(value).or_insert_with(Vec::new).push(i);
    }

    // For nodes that are the same, pick the first index.
    let renumber: Vec<_> = values.into_iter().map(|v| value_map[&v][0]).collect();

    // Renumber references.
    for node in nodes.iter_mut() {
        if let Node::Op(_, a, b) = node {
            *a = renumber[*a];
            *b = renumber[*b];
        }
        if let Node::UnoOp(_, a) = node {
            *a = renumber[*a];
        }
        if let Node::TresOp(_, a, b, c) = node {
            *a = renumber[*a];
            *b = renumber[*b];
            *c = renumber[*c];
        }
    }
    for output in outputs.iter_mut() {
        *output = renumber[*output];
    }
}

/// Probabilistic constant determination
pub fn constants(nodes: &mut [Node]) {
    assert_valid(nodes);

    // Evaluate the graph in random field elements.
    let values_a = random_eval(nodes);
    let values_b = random_eval(nodes);

    // Find all nodes with the same value.
    for i in 0..nodes.len() {
        if let Node::Constant(_) = nodes[i] {
            continue;
        }
        if values_a[i] == values_b[i] {
            nodes[i] = Node::Constant(values_a[i]);
        }
    }
}

/// Convert to Montgomery form
pub fn montgomery_form(nodes: &mut [Node]) {
    for node in nodes.iter_mut() {
        use Node::*;
        use Operation::*;
        match node {
            Constant(c) => *node = MontConstant(u256_to_fr(c)),
            MontConstant(..) => (),
            Input(..) => (),
            Op(
                Mul | Div | Add | Sub | Idiv | Mod | Eq | Neq | Lt | Gt | Leq | Geq | Land | Lor
                | Shl | Shr | Bor | Band | Bxor,
                ..,
            ) => (),
            Op(op @ Pow, ..) => unimplemented!("Operators Montgomery form: {:?}", op),
            UnoOp(UnoOperation::Neg, ..) => (),
            UnoOp(op, ..) => unimplemented!("Uno Operators Montgomery form: {:?}", op),
            TresOp(TresOperation::TernCond, ..) => (),
        }
    }
}
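`value_numbering` and `constants` are probabilistic: nodes taking equal values under random evaluation (or equal values across two independent random evaluations) are treated as algebraically equal, which over this ~254-bit field is sound except with negligible probability by the Schwartz-Zippel argument. A sketch of the visible effect, where a duplicated subexpression collapses to one node:

// Two identical Mul nodes: value numbering merges them and tree shaking
// then drops the duplicate, so both outputs end up pointing at one node.
let mut nodes = vec![
    Node::Input(0),
    Node::Op(Operation::Mul, 0, 0),
    Node::Op(Operation::Mul, 0, 0),
];
let mut outputs = vec![1, 2];
optimize(&mut nodes, &mut outputs);
assert_eq!(outputs[0], outputs[1]);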
fn shl(a: Fr, b: Fr) -> Fr {
    if b.is_zero() {
        return a;
    }

    if b.cmp(&Fr::from(Fr::MODULUS_BIT_SIZE)).is_ge() {
        return Fr::zero();
    }

    let n = b.into_bigint().0[0] as u32;
    let a = a.into_bigint();
    Fr::from_bigint(a << n).unwrap()
}

fn shr(a: Fr, b: Fr) -> Fr {
    if b.is_zero() {
        return a;
    }

    match b.cmp(&Fr::from(254u64)) {
        Ordering::Equal => return Fr::zero(),
        Ordering::Greater => return Fr::zero(),
        _ => (),
    };

    let mut n = b.into_bigint().to_bytes_le()[0];
    let mut result = a.into_bigint();
    let c = result.as_mut();
    while n >= 64 {
        for i in 0..3 {
            c[i as usize] = c[(i + 1) as usize];
        }
        c[3] = 0;
        n -= 64;
    }

    if n == 0 {
        return Fr::from_bigint(result).unwrap();
    }

    let mask: u64 = (1 << n) - 1;
    let mut carrier: u64 = c[3] & mask;
    c[3] >>= n;
    for i in (0..3).rev() {
        let new_carrier = c[i] & mask;
        c[i] = (c[i] >> n) | (carrier << (64 - n));
        carrier = new_carrier;
    }
    Fr::from_bigint(result).unwrap()
}
fn bit_and(a: Fr, b: Fr) -> Fr {
    let a = a.into_bigint();
    let b = b.into_bigint();
    let c: [u64; 4] = [
        a.0[0] & b.0[0],
        a.0[1] & b.0[1],
        a.0[2] & b.0[2],
        a.0[3] & b.0[3],
    ];
    let mut d: BigInt<4> = BigInt::new(c);
    if d > Fr::MODULUS {
        d.sub_with_borrow(&Fr::MODULUS);
    }

    Fr::from_bigint(d).unwrap()
}

fn bit_or(a: Fr, b: Fr) -> Fr {
    let a = a.into_bigint();
    let b = b.into_bigint();
    let c: [u64; 4] = [
        a.0[0] | b.0[0],
        a.0[1] | b.0[1],
        a.0[2] | b.0[2],
        a.0[3] | b.0[3],
    ];
    let mut d: BigInt<4> = BigInt::new(c);
    if d > Fr::MODULUS {
        d.sub_with_borrow(&Fr::MODULUS);
    }

    Fr::from_bigint(d).unwrap()
}

fn bit_xor(a: Fr, b: Fr) -> Fr {
    let a = a.into_bigint();
    let b = b.into_bigint();
    let c: [u64; 4] = [
        a.0[0] ^ b.0[0],
        a.0[1] ^ b.0[1],
        a.0[2] ^ b.0[2],
        a.0[3] ^ b.0[3],
    ];
    let mut d: BigInt<4> = BigInt::new(c);
    if d > Fr::MODULUS {
        d.sub_with_borrow(&Fr::MODULUS);
    }

    Fr::from_bigint(d).unwrap()
}

// M / 2
const HALF_M: U256 =
    uint!(10944121435919637611123202872628637544274182200208017171849102093287904247808_U256);

fn u_gte(a: &U256, b: &U256) -> U256 {
    let a_neg = &HALF_M < a;
    let b_neg = &HALF_M < b;

    match (a_neg, b_neg) {
        (false, false) => U256::from(a >= b),
        (true, false) => uint!(0_U256),
        (false, true) => uint!(1_U256),
        (true, true) => U256::from(a >= b),
    }
}

fn u_lte(a: &U256, b: &U256) -> U256 {
    let a_neg = &HALF_M < a;
    let b_neg = &HALF_M < b;

    match (a_neg, b_neg) {
        (false, false) => U256::from(a <= b),
        (true, false) => uint!(1_U256),
        (false, true) => uint!(0_U256),
        (true, true) => U256::from(a <= b),
    }
}

fn u_gt(a: &U256, b: &U256) -> U256 {
    let a_neg = &HALF_M < a;
    let b_neg = &HALF_M < b;

    match (a_neg, b_neg) {
        (false, false) => U256::from(a > b),
        (true, false) => uint!(0_U256),
        (false, true) => uint!(1_U256),
        (true, true) => U256::from(a > b),
    }
}

fn u_lt(a: &U256, b: &U256) -> U256 {
    let a_neg = &HALF_M < a;
    let b_neg = &HALF_M < b;

    match (a_neg, b_neg) {
        (false, false) => U256::from(a < b),
        (true, false) => uint!(1_U256),
        (false, true) => uint!(0_U256),
        (true, true) => U256::from(a < b),
    }
}
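The four comparison helpers implement the circom convention that field elements above M/2 (i.e., above `HALF_M`) represent negative numbers: each operand is first classified by sign, then compared. A small check, as a sketch (the helpers are private, so this belongs inside this module or its tests):

// M - 1 encodes -1, so -1 < 3 must hold...
let minus_one = M - U256::from(1u64);
assert_eq!(u_lt(&minus_one, &U256::from(3u64)), uint!(1_U256));
// ...and 3 < -1 must not.
assert_eq!(u_lt(&U256::from(3u64), &minus_one), uint!(0_U256));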
#[cfg(test)]
mod tests {
    use super::*;
    use ruint::uint;
    use std::ops::Div;
    use std::str::FromStr;

    #[test]
    fn test_ok() {
        let a = Fr::from(4u64);
        let b = Fr::from(2u64);
        let c = shl(a, b);
        assert_eq!(c.cmp(&Fr::from(16u64)), Ordering::Equal)
    }

    #[test]
    fn test_div() {
        assert_eq!(
            Operation::Div.eval_fr(Fr::from(2u64), Fr::from(3u64)),
            Fr::from_str(
                "7296080957279758407415468581752425029516121466805344781232734728858602831873"
            )
            .unwrap()
        );

        assert_eq!(
            Operation::Div.eval_fr(Fr::from(6u64), Fr::from(2u64)),
            Fr::from_str("3").unwrap()
        );

        assert_eq!(
            Operation::Div.eval_fr(Fr::from(7u64), Fr::from(2u64)),
            Fr::from_str(
                "10944121435919637611123202872628637544274182200208017171849102093287904247812"
            )
            .unwrap()
        );
    }

    #[test]
    fn test_idiv() {
        assert_eq!(
            Operation::Idiv.eval_fr(Fr::from(2u64), Fr::from(3u64)),
            Fr::from_str("0").unwrap()
        );

        assert_eq!(
            Operation::Idiv.eval_fr(Fr::from(6u64), Fr::from(2u64)),
            Fr::from_str("3").unwrap()
        );

        assert_eq!(
            Operation::Idiv.eval_fr(Fr::from(7u64), Fr::from(2u64)),
            Fr::from_str("3").unwrap()
        );
    }

    #[test]
    fn test_fr_mod() {
        assert_eq!(
            Operation::Mod.eval_fr(Fr::from(7u64), Fr::from(2u64)),
            Fr::from_str("1").unwrap()
        );

        assert_eq!(
            Operation::Mod.eval_fr(Fr::from(7u64), Fr::from(9u64)),
            Fr::from_str("7").unwrap()
        );
    }

    #[test]
    fn test_u_gte() {
        let result = u_gte(&uint!(10_U256), &uint!(3_U256));
        assert_eq!(result, uint!(1_U256));

        let result = u_gte(&uint!(3_U256), &uint!(3_U256));
        assert_eq!(result, uint!(1_U256));

        let result = u_gte(&uint!(2_U256), &uint!(3_U256));
        assert_eq!(result, uint!(0_U256));

        // -1 >= 3 => 0
        let result = u_gte(
            &uint!(
                21888242871839275222246405745257275088548364400416034343698204186575808495616_U256
            ),
            &uint!(3_U256),
        );
        assert_eq!(result, uint!(0_U256));

        // -1 >= -2 => 1
        let result = u_gte(
            &uint!(
                21888242871839275222246405745257275088548364400416034343698204186575808495616_U256
            ),
            &uint!(
                21888242871839275222246405745257275088548364400416034343698204186575808495615_U256
            ),
        );
        assert_eq!(result, uint!(1_U256));

        // -2 >= -1 => 0
        let result = u_gte(
            &uint!(
                21888242871839275222246405745257275088548364400416034343698204186575808495615_U256
            ),
            &uint!(
                21888242871839275222246405745257275088548364400416034343698204186575808495616_U256
            ),
        );
        assert_eq!(result, uint!(0_U256));

        // -2 == -2 => 1
        let result = u_gte(
            &uint!(
                21888242871839275222246405745257275088548364400416034343698204186575808495615_U256
            ),
            &uint!(
                21888242871839275222246405745257275088548364400416034343698204186575808495615_U256
            ),
        );
        assert_eq!(result, uint!(1_U256));
    }

    #[test]
    fn test_x() {
        let x = M.div(uint!(2_U256));

        println!("x: {:?}", x.as_limbs());
        println!("x: {}", M);
    }

    #[test]
    fn test_2() {
        let nodes: Vec<Node> = vec![];
        // let node = nodes[0];
        let node = nodes.get(0);
        println!("{:?}", node);
    }
}
117  rln/src/circuit/iden3calc/proto.rs  Normal file
@@ -0,0 +1,117 @@
// This file has been generated by prost-build during compilation of the code by iden3
// and modified manually. The *.proto file used to generate this one can be found here:
// https://github.com/iden3/circom-witnesscalc/blob/5cb365b6e4d9052ecc69d4567fcf5bc061c20e94/protos/messages.proto

use std::collections::HashMap;

#[derive(Clone, PartialEq, ::prost::Message)]
pub struct BigUInt {
    #[prost(bytes = "vec", tag = "1")]
    pub value_le: Vec<u8>,
}
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
pub struct InputNode {
    #[prost(uint32, tag = "1")]
    pub idx: u32,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct ConstantNode {
    #[prost(message, optional, tag = "1")]
    pub value: Option<BigUInt>,
}
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
pub struct UnoOpNode {
    #[prost(enumeration = "UnoOp", tag = "1")]
    pub op: i32,
    #[prost(uint32, tag = "2")]
    pub a_idx: u32,
}
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
pub struct DuoOpNode {
    #[prost(enumeration = "DuoOp", tag = "1")]
    pub op: i32,
    #[prost(uint32, tag = "2")]
    pub a_idx: u32,
    #[prost(uint32, tag = "3")]
    pub b_idx: u32,
}
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
pub struct TresOpNode {
    #[prost(enumeration = "TresOp", tag = "1")]
    pub op: i32,
    #[prost(uint32, tag = "2")]
    pub a_idx: u32,
    #[prost(uint32, tag = "3")]
    pub b_idx: u32,
    #[prost(uint32, tag = "4")]
    pub c_idx: u32,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct Node {
    #[prost(oneof = "node::Node", tags = "1, 2, 3, 4, 5")]
    pub node: Option<node::Node>,
}
/// Nested message and enum types in `Node`.
pub mod node {
    #[derive(Clone, PartialEq, ::prost::Oneof)]
    pub enum Node {
        #[prost(message, tag = "1")]
        Input(super::InputNode),
        #[prost(message, tag = "2")]
        Constant(super::ConstantNode),
        #[prost(message, tag = "3")]
        UnoOp(super::UnoOpNode),
        #[prost(message, tag = "4")]
        DuoOp(super::DuoOpNode),
        #[prost(message, tag = "5")]
        TresOp(super::TresOpNode),
    }
}
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
pub struct SignalDescription {
    #[prost(uint32, tag = "1")]
    pub offset: u32,
    #[prost(uint32, tag = "2")]
    pub len: u32,
}
#[derive(Clone, PartialEq, ::prost::Message)]
pub struct GraphMetadata {
    #[prost(uint32, repeated, tag = "1")]
    pub witness_signals: Vec<u32>,
    #[prost(map = "string, message", tag = "2")]
    pub inputs: HashMap<String, SignalDescription>,
}
#[derive(Clone, Copy, Debug, PartialEq, ::prost::Enumeration)]
pub enum DuoOp {
    Mul = 0,
    Div = 1,
    Add = 2,
    Sub = 3,
    Pow = 4,
    Idiv = 5,
    Mod = 6,
    Eq = 7,
    Neq = 8,
    Lt = 9,
    Gt = 10,
    Leq = 11,
    Geq = 12,
    Land = 13,
    Lor = 14,
    Shl = 15,
    Shr = 16,
    Bor = 17,
    Band = 18,
    Bxor = 19,
}

#[derive(Clone, Copy, Debug, PartialEq, ::prost::Enumeration)]
pub enum UnoOp {
    Neg = 0,
    Id = 1,
}

#[derive(Clone, Copy, Debug, PartialEq, ::prost::Enumeration)]
pub enum TresOp {
    TernCond = 0,
}
497  rln/src/circuit/iden3calc/storage.rs  Normal file
@@ -0,0 +1,497 @@
// This file is based on the code by iden3. Its preimage can be found here:
// https://github.com/iden3/circom-witnesscalc/blob/5cb365b6e4d9052ecc69d4567fcf5bc061c20e94/src/storage.rs

use ark_bn254::Fr;
use ark_ff::PrimeField;
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use prost::Message;
use std::io::{Read, Write};

use crate::circuit::iden3calc::{
    graph,
    graph::{Operation, TresOperation, UnoOperation},
    proto, InputSignalsInfo,
};

// format of the wtns.graph file:
// + magic line: wtns.graph.001
// + 8 bytes unsigned LE 64-bit integer: number of nodes
//   (the code below writes and reads this count with write_u64/read_u64)
// + series of protobuf serialized nodes. Each node prefixed by varint length
// + protobuf serialized GraphMetadata
// + 8 bytes unsigned LE 64-bit integer: offset of GraphMetadata message

const WITNESSCALC_GRAPH_MAGIC: &[u8] = b"wtns.graph.001";

const MAX_VARINT_LENGTH: usize = 10;
impl From<proto::Node> for graph::Node {
    fn from(value: proto::Node) -> Self {
        match value.node.unwrap() {
            proto::node::Node::Input(input_node) => graph::Node::Input(input_node.idx as usize),
            proto::node::Node::Constant(constant_node) => {
                let i = constant_node.value.unwrap();
                graph::Node::MontConstant(Fr::from_le_bytes_mod_order(i.value_le.as_slice()))
            }
            proto::node::Node::UnoOp(uno_op_node) => {
                let op = proto::UnoOp::try_from(uno_op_node.op).unwrap();
                graph::Node::UnoOp(op.into(), uno_op_node.a_idx as usize)
            }
            proto::node::Node::DuoOp(duo_op_node) => {
                let op = proto::DuoOp::try_from(duo_op_node.op).unwrap();
                graph::Node::Op(
                    op.into(),
                    duo_op_node.a_idx as usize,
                    duo_op_node.b_idx as usize,
                )
            }
            proto::node::Node::TresOp(tres_op_node) => {
                let op = proto::TresOp::try_from(tres_op_node.op).unwrap();
                graph::Node::TresOp(
                    op.into(),
                    tres_op_node.a_idx as usize,
                    tres_op_node.b_idx as usize,
                    tres_op_node.c_idx as usize,
                )
            }
        }
    }
}

impl From<&graph::Node> for proto::node::Node {
    fn from(node: &graph::Node) -> Self {
        match node {
            graph::Node::Input(i) => proto::node::Node::Input(proto::InputNode { idx: *i as u32 }),
            graph::Node::Constant(_) => {
                panic!("We are not supposed to write Constant to the witnesscalc graph. All Constant should be converted to MontConstant.");
            }
            graph::Node::UnoOp(op, a) => {
                let op = proto::UnoOp::from(op);
                proto::node::Node::UnoOp(proto::UnoOpNode {
                    op: op as i32,
                    a_idx: *a as u32,
                })
            }
            graph::Node::Op(op, a, b) => proto::node::Node::DuoOp(proto::DuoOpNode {
                op: proto::DuoOp::from(op) as i32,
                a_idx: *a as u32,
                b_idx: *b as u32,
            }),
            graph::Node::TresOp(op, a, b, c) => proto::node::Node::TresOp(proto::TresOpNode {
                op: proto::TresOp::from(op) as i32,
                a_idx: *a as u32,
                b_idx: *b as u32,
                c_idx: *c as u32,
            }),
            graph::Node::MontConstant(c) => {
                let bi = Into::<num_bigint::BigUint>::into(*c);
                let i = proto::BigUInt {
                    value_le: bi.to_bytes_le(),
                };
                proto::node::Node::Constant(proto::ConstantNode { value: Some(i) })
            }
        }
    }
}

impl From<proto::UnoOp> for UnoOperation {
    fn from(value: proto::UnoOp) -> Self {
        match value {
            proto::UnoOp::Neg => UnoOperation::Neg,
            proto::UnoOp::Id => UnoOperation::Id,
        }
    }
}

impl From<proto::DuoOp> for Operation {
    fn from(value: proto::DuoOp) -> Self {
        match value {
            proto::DuoOp::Mul => Operation::Mul,
            proto::DuoOp::Div => Operation::Div,
            proto::DuoOp::Add => Operation::Add,
            proto::DuoOp::Sub => Operation::Sub,
            proto::DuoOp::Pow => Operation::Pow,
            proto::DuoOp::Idiv => Operation::Idiv,
            proto::DuoOp::Mod => Operation::Mod,
            proto::DuoOp::Eq => Operation::Eq,
            proto::DuoOp::Neq => Operation::Neq,
            proto::DuoOp::Lt => Operation::Lt,
            proto::DuoOp::Gt => Operation::Gt,
            proto::DuoOp::Leq => Operation::Leq,
            proto::DuoOp::Geq => Operation::Geq,
            proto::DuoOp::Land => Operation::Land,
            proto::DuoOp::Lor => Operation::Lor,
            proto::DuoOp::Shl => Operation::Shl,
            proto::DuoOp::Shr => Operation::Shr,
            proto::DuoOp::Bor => Operation::Bor,
            proto::DuoOp::Band => Operation::Band,
            proto::DuoOp::Bxor => Operation::Bxor,
        }
    }
}

impl From<proto::TresOp> for graph::TresOperation {
    fn from(value: proto::TresOp) -> Self {
        match value {
            proto::TresOp::TernCond => TresOperation::TernCond,
        }
    }
}
pub fn serialize_witnesscalc_graph<T: Write>(
    mut w: T,
    nodes: &Vec<graph::Node>,
    witness_signals: &[usize],
    input_signals: &InputSignalsInfo,
) -> std::io::Result<()> {
    let mut ptr = 0usize;
    w.write_all(WITNESSCALC_GRAPH_MAGIC).unwrap();
    ptr += WITNESSCALC_GRAPH_MAGIC.len();

    w.write_u64::<LittleEndian>(nodes.len() as u64)?;
    ptr += 8;

    let metadata = proto::GraphMetadata {
        witness_signals: witness_signals
            .iter()
            .map(|x| *x as u32)
            .collect::<Vec<u32>>(),
        inputs: input_signals
            .iter()
            .map(|(k, v)| {
                let sig = proto::SignalDescription {
                    offset: v.0 as u32,
                    len: v.1 as u32,
                };
                (k.clone(), sig)
            })
            .collect(),
    };

    // capacity of buf should be enough to hold the largest message + 10 bytes
    // of varint length
    let mut buf = Vec::with_capacity(metadata.encoded_len() + MAX_VARINT_LENGTH);

    for node in nodes {
        let node_pb = proto::Node {
            node: Some(proto::node::Node::from(node)),
        };

        assert_eq!(buf.len(), 0);
        node_pb.encode_length_delimited(&mut buf)?;
        ptr += buf.len();

        w.write_all(&buf)?;
        buf.clear();
    }

    metadata.encode_length_delimited(&mut buf)?;
    w.write_all(&buf)?;
    buf.clear();

    w.write_u64::<LittleEndian>(ptr as u64)?;

    Ok(())
}
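A minimal round-trip sketch for the serializer above, mirroring `test_deserialize_inputs` further down: write a node list plus metadata into a buffer, then read it back with `deserialize_witnesscalc_graph`:

let nodes = vec![graph::Node::Input(0)];
let witness_signals = vec![0usize];
let input_signals: InputSignalsInfo = std::collections::HashMap::new();

let mut buf = Vec::new();
serialize_witnesscalc_graph(&mut buf, &nodes, &witness_signals, &input_signals).unwrap();
let (nodes_back, signals_back, inputs_back) =
    deserialize_witnesscalc_graph(std::io::Cursor::new(&buf)).unwrap();
assert_eq!(nodes, nodes_back);
assert_eq!(witness_signals, signals_back);
assert_eq!(input_signals, inputs_back);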
fn read_message_length<R: Read>(rw: &mut WriteBackReader<R>) -> std::io::Result<usize> {
    let mut buf = [0u8; MAX_VARINT_LENGTH];
    let bytes_read = rw.read(&mut buf)?;
    if bytes_read == 0 {
        return Err(std::io::Error::new(
            std::io::ErrorKind::UnexpectedEof,
            "Unexpected EOF",
        ));
    }

    let len_delimiter = prost::decode_length_delimiter(buf.as_ref())?;

    let lnln = prost::length_delimiter_len(len_delimiter);

    if lnln < bytes_read {
        rw.write_all(&buf[lnln..bytes_read])?;
    }

    Ok(len_delimiter)
}

fn read_message<R: Read, M: Message + std::default::Default>(
    rw: &mut WriteBackReader<R>,
) -> std::io::Result<M> {
    let ln = read_message_length(rw)?;
    let mut buf = vec![0u8; ln];
    let bytes_read = rw.read(&mut buf)?;
    if bytes_read != ln {
        return Err(std::io::Error::new(
            std::io::ErrorKind::UnexpectedEof,
            "Unexpected EOF",
        ));
    }

    let msg = prost::Message::decode(&buf[..])?;

    Ok(msg)
}

pub fn deserialize_witnesscalc_graph(
    r: impl Read,
) -> std::io::Result<(Vec<graph::Node>, Vec<usize>, InputSignalsInfo)> {
    let mut br = WriteBackReader::new(r);
    let mut magic = [0u8; WITNESSCALC_GRAPH_MAGIC.len()];

    br.read_exact(&mut magic)?;

    if !magic.eq(WITNESSCALC_GRAPH_MAGIC) {
        return Err(std::io::Error::new(
            std::io::ErrorKind::InvalidData,
            "Invalid magic",
        ));
    }

    let nodes_num = br.read_u64::<LittleEndian>()?;
    let mut nodes = Vec::with_capacity(nodes_num as usize);
    for _ in 0..nodes_num {
        let n: proto::Node = read_message(&mut br)?;
        let n2: graph::Node = n.into();
        nodes.push(n2);
    }

    let md: proto::GraphMetadata = read_message(&mut br)?;

    let witness_signals = md
        .witness_signals
        .iter()
        .map(|x| *x as usize)
        .collect::<Vec<usize>>();

    let input_signals = md
        .inputs
        .iter()
        .map(|(k, v)| (k.clone(), (v.offset as usize, v.len as usize)))
        .collect::<InputSignalsInfo>();

    Ok((nodes, witness_signals, input_signals))
}
struct WriteBackReader<R: Read> {
    reader: R,
    buffer: Vec<u8>,
}

impl<R: Read> WriteBackReader<R> {
    fn new(reader: R) -> Self {
        WriteBackReader {
            reader,
            buffer: Vec::new(),
        }
    }
}

impl<R: Read> Read for WriteBackReader<R> {
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        if buf.is_empty() {
            return Ok(0);
        }

        let mut n = 0usize;

        if !self.buffer.is_empty() {
            n = std::cmp::min(buf.len(), self.buffer.len());
            self.buffer[self.buffer.len() - n..]
                .iter()
                .rev()
                .enumerate()
                .for_each(|(i, x)| {
                    buf[i] = *x;
                });
            self.buffer.truncate(self.buffer.len() - n);
        }

        while n < buf.len() {
            let m = self.reader.read(&mut buf[n..])?;
            if m == 0 {
                break;
            }
            n += m;
        }

        Ok(n)
    }
}

impl<R: Read> Write for WriteBackReader<R> {
    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
        self.buffer.reserve(buf.len());
        self.buffer.extend(buf.iter().rev());
        Ok(buf.len())
    }

    fn flush(&mut self) -> std::io::Result<()> {
        Ok(())
    }
}
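`WriteBackReader` is a small push-back reader: bytes written to it are stashed (stored reversed, so the push-back buffer is served tail-first) and returned by subsequent reads before anything further from the underlying reader, with the most recent write served first. `read_message_length` relies on this to return the bytes it over-read past a varint length prefix. A sketch:

let data = [10u8, 20, 30];
let mut r = WriteBackReader::new(std::io::Cursor::new(&data));
let mut buf = [0u8; 2];
r.read(&mut buf).unwrap(); // buf == [10, 20]
r.write_all(&buf[1..]).unwrap(); // push 20 back
let mut rest = [0u8; 2];
r.read(&mut rest).unwrap(); // pushed-back 20, then 30 from the cursor
assert_eq!(rest, [20, 30]);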
#[cfg(test)]
mod tests {
    use super::*;
    use byteorder::ByteOrder;
    use core::str::FromStr;
    use graph::{Operation, TresOperation, UnoOperation};
    use std::collections::HashMap;

    #[test]
    fn test_read_message() {
        let mut buf = Vec::new();
        let n1 = proto::Node {
            node: Some(proto::node::Node::Input(proto::InputNode { idx: 1 })),
        };
        n1.encode_length_delimited(&mut buf).unwrap();

        let n2 = proto::Node {
            node: Some(proto::node::Node::Input(proto::InputNode { idx: 2 })),
        };
        n2.encode_length_delimited(&mut buf).unwrap();

        let mut reader = std::io::Cursor::new(&buf);

        let mut rw = WriteBackReader::new(&mut reader);

        let got_n1: proto::Node = read_message(&mut rw).unwrap();
        assert!(n1.eq(&got_n1));

        let got_n2: proto::Node = read_message(&mut rw).unwrap();
        assert!(n2.eq(&got_n2));

        assert_eq!(reader.position(), buf.len() as u64);
    }

    #[test]
    fn test_read_message_variant() {
        let nodes = vec![
            proto::Node {
                node: Some(proto::node::Node::from(&graph::Node::Input(0))),
            },
            proto::Node {
                node: Some(proto::node::Node::from(&graph::Node::MontConstant(
                    Fr::from_str("1").unwrap(),
                ))),
            },
            proto::Node {
                node: Some(proto::node::Node::from(&graph::Node::UnoOp(
                    UnoOperation::Id,
                    4,
                ))),
            },
            proto::Node {
                node: Some(proto::node::Node::from(&graph::Node::Op(
                    Operation::Mul,
                    5,
                    6,
                ))),
            },
            proto::Node {
                node: Some(proto::node::Node::from(&graph::Node::TresOp(
                    TresOperation::TernCond,
                    7,
                    8,
                    9,
                ))),
            },
        ];

        let mut buf = Vec::new();
        for n in &nodes {
            n.encode_length_delimited(&mut buf).unwrap();
        }

        let mut nodes_got: Vec<proto::Node> = Vec::new();
        let mut reader = std::io::Cursor::new(&buf);
        let mut rw = WriteBackReader::new(&mut reader);
        for _ in 0..nodes.len() {
            nodes_got.push(read_message(&mut rw).unwrap());
        }

        assert_eq!(nodes, nodes_got);
    }

    #[test]
    fn test_write_back_reader() {
        let data = [1u8, 2, 3, 4, 5, 6];
        let mut r = WriteBackReader::new(std::io::Cursor::new(&data));

        let buf = &mut [0u8; 5];
        r.read(buf).unwrap();
        assert_eq!(buf, &[1, 2, 3, 4, 5]);

        // return [4, 5] to reader
        r.write(&buf[3..]).unwrap();
        // return [2, 3] to reader
        r.write(&buf[1..3]).unwrap();

        buf.fill(0);

        // read 3 bytes, expect [2, 3, 4] after returns
        let mut n = r.read(&mut buf[..3]).unwrap();
        assert_eq!(n, 3);
        assert_eq!(buf, &[2, 3, 4, 0, 0]);

        buf.fill(0);

        // read everything left in reader
        n = r.read(buf).unwrap();
        assert_eq!(n, 2);
        assert_eq!(buf, &[5, 6, 0, 0, 0]);
    }

    #[test]
    fn test_deserialize_inputs() {
        let nodes = vec![
            graph::Node::Input(0),
            graph::Node::MontConstant(Fr::from_str("1").unwrap()),
            graph::Node::UnoOp(UnoOperation::Id, 4),
            graph::Node::Op(Operation::Mul, 5, 6),
            graph::Node::TresOp(TresOperation::TernCond, 7, 8, 9),
        ];

        let witness_signals = vec![4, 1];

        let mut input_signals: InputSignalsInfo = HashMap::new();
        input_signals.insert("sig1".to_string(), (1, 3));
        input_signals.insert("sig2".to_string(), (5, 1));

        let mut tmp = Vec::new();
        serialize_witnesscalc_graph(&mut tmp, &nodes, &witness_signals, &input_signals).unwrap();

        let mut reader = std::io::Cursor::new(&tmp);

        let (nodes_res, witness_signals_res, input_signals_res) =
            deserialize_witnesscalc_graph(&mut reader).unwrap();

        assert_eq!(nodes, nodes_res);
        assert_eq!(input_signals, input_signals_res);
        assert_eq!(witness_signals, witness_signals_res);

        let metadata_start = LittleEndian::read_u64(&tmp[tmp.len() - 8..]);

        let mt_reader = std::io::Cursor::new(&tmp[metadata_start as usize..]);
        let mut rw = WriteBackReader::new(mt_reader);
        let metadata: proto::GraphMetadata = read_message(&mut rw).unwrap();

        let metadata_want = proto::GraphMetadata {
            witness_signals: vec![4, 1],
            inputs: input_signals
                .iter()
                .map(|(k, v)| {
                    (
                        k.clone(),
                        proto::SignalDescription {
                            offset: v.0 as u32,
                            len: v.1 as u32,
                        },
                    )
                })
                .collect(),
        };

        assert_eq!(metadata, metadata_want);
    }
}
161  rln/src/circuit/mod.rs  Normal file
@@ -0,0 +1,161 @@
// This crate provides interfaces for the zero-knowledge circuit and keys

pub mod iden3calc;
pub mod qap;
pub mod zkey;

use ::lazy_static::lazy_static;
use ark_bn254::{
    Bn254, Fq as ArkFq, Fq2 as ArkFq2, Fr as ArkFr, G1Affine as ArkG1Affine,
    G1Projective as ArkG1Projective, G2Affine as ArkG2Affine, G2Projective as ArkG2Projective,
};
use ark_groth16::ProvingKey;
use ark_relations::r1cs::ConstraintMatrices;
use cfg_if::cfg_if;
use color_eyre::{Report, Result};

use crate::circuit::iden3calc::calc_witness;

#[cfg(feature = "arkzkey")]
use {
    ark_ff::Field, ark_serialize::CanonicalDeserialize, ark_serialize::CanonicalSerialize,
    color_eyre::eyre::WrapErr,
};

#[cfg(not(feature = "arkzkey"))]
use {crate::circuit::zkey::read_zkey, std::io::Cursor};

#[cfg(feature = "arkzkey")]
pub const ARKZKEY_BYTES: &[u8] = include_bytes!("../../resources/tree_height_20/rln_final.arkzkey");

pub const ZKEY_BYTES: &[u8] = include_bytes!("../../resources/tree_height_20/rln_final.zkey");

#[cfg(not(target_arch = "wasm32"))]
const GRAPH_BYTES: &[u8] = include_bytes!("../../resources/tree_height_20/graph.bin");

lazy_static! {
    static ref ZKEY: (ProvingKey<Curve>, ConstraintMatrices<Fr>) = {
        cfg_if! {
            if #[cfg(feature = "arkzkey")] {
                read_arkzkey_from_bytes_uncompressed(ARKZKEY_BYTES).expect("Failed to read arkzkey")
            } else {
                let mut reader = Cursor::new(ZKEY_BYTES);
                read_zkey(&mut reader).expect("Failed to read zkey")
            }
        }
    };
}

pub const TEST_TREE_HEIGHT: usize = 20;
// The following types define the pairing-friendly elliptic curve; the underlying finite fields and groups default to this module
// Note that proofs are serialized assuming Fr to be 4x8 = 32 bytes in size. Hence, changing to a curve with a different encoding will make proof verification fail
pub type Curve = Bn254;
pub type Fr = ArkFr;
pub type Fq = ArkFq;
pub type Fq2 = ArkFq2;
pub type G1Affine = ArkG1Affine;
pub type G1Projective = ArkG1Projective;
pub type G2Affine = ArkG2Affine;
pub type G2Projective = ArkG2Projective;

// Loads the proving key using a bytes vector
pub fn zkey_from_raw(zkey_data: &[u8]) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
    if zkey_data.is_empty() {
        return Err(Report::msg("No proving key found!"));
    }

    let proving_key_and_matrices = match () {
        #[cfg(feature = "arkzkey")]
        () => read_arkzkey_from_bytes_uncompressed(zkey_data)?,
        #[cfg(not(feature = "arkzkey"))]
        () => {
            let mut reader = Cursor::new(zkey_data);
            read_zkey(&mut reader)?
        }
    };

    Ok(proving_key_and_matrices)
}

// Loads the proving key
#[cfg(not(target_arch = "wasm32"))]
pub fn zkey_from_folder() -> &'static (ProvingKey<Curve>, ConstraintMatrices<Fr>) {
    &ZKEY
}

pub fn calculate_rln_witness<I: IntoIterator<Item = (String, Vec<Fr>)>>(
    inputs: I,
    graph_data: &[u8],
) -> Vec<Fr> {
    calc_witness(inputs, graph_data)
}

#[cfg(not(target_arch = "wasm32"))]
pub fn graph_from_folder() -> &'static [u8] {
    GRAPH_BYTES
}
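A minimal sketch tying the pieces above together on a native (non-wasm) build; the input signal name below is hypothetical and the real names must match the mapping baked into graph.bin:

#[cfg(not(target_arch = "wasm32"))]
fn example_witness() -> Vec<Fr> {
    let graph_data = graph_from_folder();
    // "identitySecret" is a placeholder; the RLN circuit defines the real signals.
    let inputs = vec![("identitySecret".to_string(), vec![Fr::from(1u64)])];
    calculate_rln_witness(inputs, graph_data)
}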
////////////////////////////////////////////////////////
|
||||
// Functions and structs from [arkz-key](https://github.com/zkmopro/ark-zkey/blob/main/src/lib.rs#L106)
|
||||
// without print and allow to choose between compressed and uncompressed arkzkey
|
||||
////////////////////////////////////////////////////////
|
||||
|
||||
#[cfg(feature = "arkzkey")]
|
||||
#[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug, PartialEq)]
|
||||
pub struct SerializableProvingKey(pub ProvingKey<Bn254>);
|
||||
|
||||
#[cfg(feature = "arkzkey")]
|
||||
#[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug, PartialEq)]
|
||||
pub struct SerializableConstraintMatrices<F: Field> {
|
||||
pub num_instance_variables: usize,
|
||||
pub num_witness_variables: usize,
|
||||
pub num_constraints: usize,
|
||||
pub a_num_non_zero: usize,
|
||||
pub b_num_non_zero: usize,
|
||||
pub c_num_non_zero: usize,
|
||||
pub a: SerializableMatrix<F>,
|
||||
pub b: SerializableMatrix<F>,
|
||||
pub c: SerializableMatrix<F>,
|
||||
}
|
||||
|
||||
#[cfg(feature = "arkzkey")]
|
||||
#[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug, PartialEq)]
|
||||
pub struct SerializableMatrix<F: Field> {
|
||||
pub data: Vec<Vec<(F, usize)>>,
|
||||
}
|
||||
|
||||
#[cfg(feature = "arkzkey")]
|
||||
pub fn read_arkzkey_from_bytes_uncompressed(
|
||||
arkzkey_data: &[u8],
|
||||
) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
|
||||
if arkzkey_data.is_empty() {
|
||||
return Err(Report::msg("No proving key found!"));
|
||||
}
|
||||
|
||||
let mut cursor = std::io::Cursor::new(arkzkey_data);
|
||||
|
||||
let serialized_proving_key =
|
||||
SerializableProvingKey::deserialize_uncompressed_unchecked(&mut cursor)
|
||||
.wrap_err("Failed to deserialize proving key")?;
|
||||
|
||||
let serialized_constraint_matrices =
|
||||
SerializableConstraintMatrices::deserialize_uncompressed_unchecked(&mut cursor)
|
||||
.wrap_err("Failed to deserialize constraint matrices")?;
|
||||
|
||||
// Get on right form for API
|
||||
let proving_key: ProvingKey<Bn254> = serialized_proving_key.0;
|
||||
let constraint_matrices: ConstraintMatrices<ark_bn254::Fr> = ConstraintMatrices {
|
||||
num_instance_variables: serialized_constraint_matrices.num_instance_variables,
|
||||
num_witness_variables: serialized_constraint_matrices.num_witness_variables,
|
||||
num_constraints: serialized_constraint_matrices.num_constraints,
|
||||
a_num_non_zero: serialized_constraint_matrices.a_num_non_zero,
|
||||
b_num_non_zero: serialized_constraint_matrices.b_num_non_zero,
|
||||
c_num_non_zero: serialized_constraint_matrices.c_num_non_zero,
|
||||
a: serialized_constraint_matrices.a.data,
|
||||
b: serialized_constraint_matrices.b.data,
|
||||
c: serialized_constraint_matrices.c.data,
|
||||
};
|
||||
|
||||
Ok((proving_key, constraint_matrices))
|
||||
}
|
||||
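For orientation, a minimal usage sketch of the two loaders above. This is not part of the commit; it assumes the module is reachable as rln::circuit and that the embedded ZKEY_BYTES are valid.

// Hedged usage sketch, assuming `rln::circuit` re-exports the items above.
use rln::circuit::{zkey_from_folder, zkey_from_raw, ZKEY_BYTES};

fn load_keys() -> color_eyre::Result<()> {
    // Embedded key: parsed once via lazy_static, returned as a &'static reference.
    let embedded = zkey_from_folder();
    println!("constraints: {}", embedded.1.num_constraints);

    // Caller-supplied bytes: parsed on every call, returns an owned value.
    let (_proving_key, matrices) = zkey_from_raw(ZKEY_BYTES)?;
    println!("constraints: {}", matrices.num_constraints);
    Ok(())
}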
114
rln/src/circuit/qap.rs
Normal file
@@ -0,0 +1,114 @@
// This file is based on the code by arkworks. Its preimage can be found here:
// https://github.com/arkworks-rs/circom-compat/blob/3c95ed98e23a408b4d99a53e483a9bba39685a4e/src/circom/qap.rs

use ark_ff::PrimeField;
use ark_groth16::r1cs_to_qap::{evaluate_constraint, LibsnarkReduction, R1CSToQAP};
use ark_poly::EvaluationDomain;
use ark_relations::r1cs::{ConstraintMatrices, ConstraintSystemRef, SynthesisError};
use ark_std::{cfg_into_iter, cfg_iter, cfg_iter_mut, vec};

/// Implements the witness map used by snarkjs. The arkworks witness map calculates the
/// coefficients of H through computing (AB-C)/Z in the evaluation domain and going back to the
/// coefficients domain. snarkjs instead precomputes the Lagrange form of the powers of tau bases
/// in a domain twice as large, and the witness map is computed as the odd coefficients of (AB-C)
/// in that domain. This serves as HZ when computing the C proof element.
pub struct CircomReduction;

impl R1CSToQAP for CircomReduction {
    #[allow(clippy::type_complexity)]
    fn instance_map_with_evaluation<F: PrimeField, D: EvaluationDomain<F>>(
        cs: ConstraintSystemRef<F>,
        t: &F,
    ) -> Result<(Vec<F>, Vec<F>, Vec<F>, F, usize, usize), SynthesisError> {
        LibsnarkReduction::instance_map_with_evaluation::<F, D>(cs, t)
    }

    fn witness_map_from_matrices<F: PrimeField, D: EvaluationDomain<F>>(
        matrices: &ConstraintMatrices<F>,
        num_inputs: usize,
        num_constraints: usize,
        full_assignment: &[F],
    ) -> Result<Vec<F>, SynthesisError> {
        let zero = F::zero();
        let domain =
            D::new(num_constraints + num_inputs).ok_or(SynthesisError::PolynomialDegreeTooLarge)?;
        let domain_size = domain.size();

        let mut a = vec![zero; domain_size];
        let mut b = vec![zero; domain_size];

        #[allow(unexpected_cfgs)]
        cfg_iter_mut!(a[..num_constraints])
            .zip(cfg_iter_mut!(b[..num_constraints]))
            .zip(cfg_iter!(&matrices.a))
            .zip(cfg_iter!(&matrices.b))
            .for_each(|(((a, b), at_i), bt_i)| {
                *a = evaluate_constraint(at_i, full_assignment);
                *b = evaluate_constraint(bt_i, full_assignment);
            });

        {
            let start = num_constraints;
            let end = start + num_inputs;
            a[start..end].clone_from_slice(&full_assignment[..num_inputs]);
        }

        let mut c = vec![zero; domain_size];
        #[allow(unexpected_cfgs)]
        cfg_iter_mut!(c[..num_constraints])
            .zip(&a)
            .zip(&b)
            .for_each(|((c_i, &a), &b)| {
                *c_i = a * b;
            });

        domain.ifft_in_place(&mut a);
        domain.ifft_in_place(&mut b);

        let root_of_unity = {
            let domain_size_double = 2 * domain_size;
            let domain_double =
                D::new(domain_size_double).ok_or(SynthesisError::PolynomialDegreeTooLarge)?;
            domain_double.element(1)
        };
        D::distribute_powers_and_mul_by_const(&mut a, root_of_unity, F::one());
        D::distribute_powers_and_mul_by_const(&mut b, root_of_unity, F::one());

        domain.fft_in_place(&mut a);
        domain.fft_in_place(&mut b);

        let mut ab = domain.mul_polynomials_in_evaluation_domain(&a, &b);
        drop(a);
        drop(b);

        domain.ifft_in_place(&mut c);
        D::distribute_powers_and_mul_by_const(&mut c, root_of_unity, F::one());
        domain.fft_in_place(&mut c);

        #[allow(unexpected_cfgs)]
        cfg_iter_mut!(ab)
            .zip(c)
            .for_each(|(ab_i, c_i)| *ab_i -= &c_i);

        Ok(ab)
    }

    fn h_query_scalars<F: PrimeField, D: EvaluationDomain<F>>(
        max_power: usize,
        t: F,
        _: F,
        delta_inverse: F,
    ) -> Result<Vec<F>, SynthesisError> {
        // the usual H query has domain-1 powers. Z has domain powers. So HZ has 2*domain-1 powers.
        #[allow(unexpected_cfgs)]
        let mut scalars = cfg_into_iter!(0..2 * max_power + 1)
            .map(|i| delta_inverse * t.pow([i as u64]))
            .collect::<Vec<_>>();
        let domain_size = scalars.len();
        let domain = D::new(domain_size).ok_or(SynthesisError::PolynomialDegreeTooLarge)?;
        // generate the Lagrange coefficients
        domain.ifft_in_place(&mut scalars);
        #[allow(unexpected_cfgs)]
        Ok(cfg_into_iter!(scalars).skip(1).step_by(2).collect())
    }
}
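In symbols, the reduction described by the doc comment above rests on the standard QAP identity (a sketch, assuming the usual Groth16 setup over an evaluation domain of size n):

% Over the evaluation domain H of size n, with vanishing polynomial Z(X):
A(X)\,B(X) - C(X) = H(X)\,Z(X), \qquad Z(X) = X^{n} - 1

snarkjs evaluates A, B and C over a domain of size 2n and keeps the odd-indexed coefficients of (AB - C); those are exactly the coefficients of H(X)Z(X) that feed the C element of the proof.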
371
rln/src/circuit/zkey.rs
Normal file
@@ -0,0 +1,371 @@
// This file is based on the code by arkworks. Its preimage can be found here:
// https://github.com/arkworks-rs/circom-compat/blob/3c95ed98e23a408b4d99a53e483a9bba39685a4e/src/zkey.rs

//! ZKey Parsing
//!
//! Each ZKey file is broken into sections:
//!  Header(1)
//!       Prover Type 1 Groth
//!  HeaderGroth(2)
//!       n8q
//!       q
//!       n8r
//!       r
//!       NVars
//!       NPub
//!       DomainSize (multiple of 2)
//!       alpha1
//!       beta1
//!       delta1
//!       beta2
//!       gamma2
//!       delta2
//!  IC(3)
//!  Coefs(4)
//!  PointsA(5)
//!  PointsB1(6)
//!  PointsB2(7)
//!  PointsC(8)
//!  PointsH(9)
//!  Contributions(10)
use ark_ff::{BigInteger256, PrimeField};
use ark_relations::r1cs::ConstraintMatrices;
use ark_serialize::{CanonicalDeserialize, SerializationError};
use ark_std::log2;
use byteorder::{LittleEndian, ReadBytesExt};

use std::{
    collections::HashMap,
    io::{Read, Seek, SeekFrom},
};

use ark_bn254::{Bn254, Fq, Fq2, Fr, G1Affine, G2Affine};
use ark_groth16::{ProvingKey, VerifyingKey};
use num_traits::Zero;

type IoResult<T> = Result<T, SerializationError>;

#[derive(Clone, Debug)]
struct Section {
    position: u64,
    #[allow(dead_code)]
    size: usize,
}

/// Reads a SnarkJS ZKey file into an Arkworks ProvingKey.
pub fn read_zkey<R: Read + Seek>(
    reader: &mut R,
) -> IoResult<(ProvingKey<Bn254>, ConstraintMatrices<Fr>)> {
    let mut binfile = BinFile::new(reader)?;
    let proving_key = binfile.proving_key()?;
    let matrices = binfile.matrices()?;
    Ok((proving_key, matrices))
}

#[derive(Debug)]
struct BinFile<'a, R> {
    #[allow(dead_code)]
    ftype: String,
    #[allow(dead_code)]
    version: u32,
    sections: HashMap<u32, Vec<Section>>,
    reader: &'a mut R,
}

impl<'a, R: Read + Seek> BinFile<'a, R> {
    fn new(reader: &'a mut R) -> IoResult<Self> {
        let mut magic = [0u8; 4];
        reader.read_exact(&mut magic)?;

        let version = reader.read_u32::<LittleEndian>()?;

        let num_sections = reader.read_u32::<LittleEndian>()?;

        let mut sections = HashMap::new();
        for _ in 0..num_sections {
            let section_id = reader.read_u32::<LittleEndian>()?;
            let section_length = reader.read_u64::<LittleEndian>()?;

            let section = sections.entry(section_id).or_insert_with(Vec::new);
            section.push(Section {
                position: reader.stream_position()?,
                size: section_length as usize,
            });

            reader.seek(SeekFrom::Current(section_length as i64))?;
        }

        Ok(Self {
            ftype: std::str::from_utf8(&magic[..]).unwrap().to_string(),
            version,
            sections,
            reader,
        })
    }

    fn proving_key(&mut self) -> IoResult<ProvingKey<Bn254>> {
        let header = self.groth_header()?;
        let ic = self.ic(header.n_public)?;

        let a_query = self.a_query(header.n_vars)?;
        let b_g1_query = self.b_g1_query(header.n_vars)?;
        let b_g2_query = self.b_g2_query(header.n_vars)?;
        let l_query = self.l_query(header.n_vars - header.n_public - 1)?;
        let h_query = self.h_query(header.domain_size as usize)?;

        let vk = VerifyingKey::<Bn254> {
            alpha_g1: header.verifying_key.alpha_g1,
            beta_g2: header.verifying_key.beta_g2,
            gamma_g2: header.verifying_key.gamma_g2,
            delta_g2: header.verifying_key.delta_g2,
            gamma_abc_g1: ic,
        };

        let pk = ProvingKey::<Bn254> {
            vk,
            beta_g1: header.verifying_key.beta_g1,
            delta_g1: header.verifying_key.delta_g1,
            a_query,
            b_g1_query,
            b_g2_query,
            h_query,
            l_query,
        };

        Ok(pk)
    }

    fn get_section(&self, id: u32) -> Section {
        self.sections.get(&id).unwrap()[0].clone()
    }

    fn groth_header(&mut self) -> IoResult<HeaderGroth> {
        let section = self.get_section(2);
        let header = HeaderGroth::new(&mut self.reader, &section)?;
        Ok(header)
    }

    fn ic(&mut self, n_public: usize) -> IoResult<Vec<G1Affine>> {
        // the range is non-inclusive, so we do +1 to get all inputs
        self.g1_section(n_public + 1, 3)
    }

    /// Returns the [`ConstraintMatrices`] corresponding to the zkey
    pub fn matrices(&mut self) -> IoResult<ConstraintMatrices<Fr>> {
        let header = self.groth_header()?;

        let section = self.get_section(4);
        self.reader.seek(SeekFrom::Start(section.position))?;
        let num_coeffs: u32 = self.reader.read_u32::<LittleEndian>()?;

        // instantiate AB
        let mut matrices = vec![vec![vec![]; header.domain_size as usize]; 2];
        let mut max_constraint_index = 0;
        for _ in 0..num_coeffs {
            let matrix: u32 = self.reader.read_u32::<LittleEndian>()?;
            let constraint: u32 = self.reader.read_u32::<LittleEndian>()?;
            let signal: u32 = self.reader.read_u32::<LittleEndian>()?;

            let value: Fr = deserialize_field_fr(&mut self.reader)?;
            max_constraint_index = std::cmp::max(max_constraint_index, constraint);
            matrices[matrix as usize][constraint as usize].push((value, signal as usize));
        }

        let num_constraints = max_constraint_index as usize - header.n_public;
        // Remove the public input constraints; Arkworks adds them later
        matrices.iter_mut().for_each(|m| {
            m.truncate(num_constraints);
        });
        // This is taken from Arkworks' to_matrices() function
        let a = matrices[0].clone();
        let b = matrices[1].clone();
        let a_num_non_zero: usize = a.iter().map(|lc| lc.len()).sum();
        let b_num_non_zero: usize = b.iter().map(|lc| lc.len()).sum();
        let matrices = ConstraintMatrices {
            num_instance_variables: header.n_public + 1,
            num_witness_variables: header.n_vars - header.n_public,
            num_constraints,

            a_num_non_zero,
            b_num_non_zero,
            c_num_non_zero: 0,

            a,
            b,
            c: vec![],
        };

        Ok(matrices)
    }

    fn a_query(&mut self, n_vars: usize) -> IoResult<Vec<G1Affine>> {
        self.g1_section(n_vars, 5)
    }

    fn b_g1_query(&mut self, n_vars: usize) -> IoResult<Vec<G1Affine>> {
        self.g1_section(n_vars, 6)
    }

    fn b_g2_query(&mut self, n_vars: usize) -> IoResult<Vec<G2Affine>> {
        self.g2_section(n_vars, 7)
    }

    fn l_query(&mut self, n_vars: usize) -> IoResult<Vec<G1Affine>> {
        self.g1_section(n_vars, 8)
    }

    fn h_query(&mut self, n_vars: usize) -> IoResult<Vec<G1Affine>> {
        self.g1_section(n_vars, 9)
    }

    fn g1_section(&mut self, num: usize, section_id: usize) -> IoResult<Vec<G1Affine>> {
        let section = self.get_section(section_id as u32);
        self.reader.seek(SeekFrom::Start(section.position))?;
        deserialize_g1_vec(self.reader, num as u32)
    }

    fn g2_section(&mut self, num: usize, section_id: usize) -> IoResult<Vec<G2Affine>> {
        let section = self.get_section(section_id as u32);
        self.reader.seek(SeekFrom::Start(section.position))?;
        deserialize_g2_vec(self.reader, num as u32)
    }
}

#[derive(Default, Clone, Debug, CanonicalDeserialize)]
pub struct ZVerifyingKey {
    alpha_g1: G1Affine,
    beta_g1: G1Affine,
    beta_g2: G2Affine,
    gamma_g2: G2Affine,
    delta_g1: G1Affine,
    delta_g2: G2Affine,
}

impl ZVerifyingKey {
    fn new<R: Read>(reader: &mut R) -> IoResult<Self> {
        let alpha_g1 = deserialize_g1(reader)?;
        let beta_g1 = deserialize_g1(reader)?;
        let beta_g2 = deserialize_g2(reader)?;
        let gamma_g2 = deserialize_g2(reader)?;
        let delta_g1 = deserialize_g1(reader)?;
        let delta_g2 = deserialize_g2(reader)?;

        Ok(Self {
            alpha_g1,
            beta_g1,
            beta_g2,
            gamma_g2,
            delta_g1,
            delta_g2,
        })
    }
}

#[derive(Clone, Debug)]
struct HeaderGroth {
    #[allow(dead_code)]
    n8q: u32,
    #[allow(dead_code)]
    q: BigInteger256,
    #[allow(dead_code)]
    n8r: u32,
    #[allow(dead_code)]
    r: BigInteger256,

    n_vars: usize,
    n_public: usize,

    domain_size: u32,
    #[allow(dead_code)]
    power: u32,

    verifying_key: ZVerifyingKey,
}

impl HeaderGroth {
    fn new<R: Read + Seek>(reader: &mut R, section: &Section) -> IoResult<Self> {
        reader.seek(SeekFrom::Start(section.position))?;
        Self::read(reader)
    }

    fn read<R: Read>(mut reader: &mut R) -> IoResult<Self> {
        // TODO: Impl From<u32> in Arkworks
        let n8q: u32 = u32::deserialize_uncompressed(&mut reader)?;
        // base field modulus q of Bn254
        let q = BigInteger256::deserialize_uncompressed(&mut reader)?;

        let n8r: u32 = u32::deserialize_uncompressed(&mut reader)?;
        // scalar field modulus r of Bn254 (the group order)
        let r = BigInteger256::deserialize_uncompressed(&mut reader)?;

        let n_vars = u32::deserialize_uncompressed(&mut reader)? as usize;
        let n_public = u32::deserialize_uncompressed(&mut reader)? as usize;

        let domain_size: u32 = u32::deserialize_uncompressed(&mut reader)?;
        let power = log2(domain_size as usize);

        let verifying_key = ZVerifyingKey::new(&mut reader)?;

        Ok(Self {
            n8q,
            q,
            n8r,
            r,
            n_vars,
            n_public,
            domain_size,
            power,
            verifying_key,
        })
    }
}

// need to divide by R, since snarkjs outputs the zkey with coefficients
// multiplied by R^2
fn deserialize_field_fr<R: Read>(reader: &mut R) -> IoResult<Fr> {
    let bigint = BigInteger256::deserialize_uncompressed(reader)?;
    Ok(Fr::new_unchecked(Fr::new_unchecked(bigint).into_bigint()))
}

// skips the multiplication by R because Circom points are already in Montgomery form
fn deserialize_field<R: Read>(reader: &mut R) -> IoResult<Fq> {
    let bigint = BigInteger256::deserialize_uncompressed(reader)?;
    // if you use Fq::new, it multiplies by R
    Ok(Fq::new_unchecked(bigint))
}

pub fn deserialize_field2<R: Read>(reader: &mut R) -> IoResult<Fq2> {
    let c0 = deserialize_field(reader)?;
    let c1 = deserialize_field(reader)?;
    Ok(Fq2::new(c0, c1))
}

fn deserialize_g1<R: Read>(reader: &mut R) -> IoResult<G1Affine> {
    let x = deserialize_field(reader)?;
    let y = deserialize_field(reader)?;
    let infinity = x.is_zero() && y.is_zero();
    if infinity {
        Ok(G1Affine::identity())
    } else {
        Ok(G1Affine::new(x, y))
    }
}

fn deserialize_g2<R: Read>(reader: &mut R) -> IoResult<G2Affine> {
    let f1 = deserialize_field2(reader)?;
    let f2 = deserialize_field2(reader)?;
    let infinity = f1.is_zero() && f2.is_zero();
    if infinity {
        Ok(G2Affine::identity())
    } else {
        Ok(G2Affine::new(f1, f2))
    }
}

fn deserialize_g1_vec<R: Read>(reader: &mut R, n_vars: u32) -> IoResult<Vec<G1Affine>> {
    (0..n_vars).map(|_| deserialize_g1(reader)).collect()
}

fn deserialize_g2_vec<R: Read>(reader: &mut R, n_vars: u32) -> IoResult<Vec<G2Affine>> {
    (0..n_vars).map(|_| deserialize_g2(reader)).collect()
}
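A minimal sketch of calling read_zkey on a file from disk (not part of the commit; the path is hypothetical, and any snarkjs-produced Groth16 .zkey should parse):

use std::fs::File;
use std::io::BufReader;

fn load_from_disk() {
    // Hypothetical path; BufReader keeps the Seek impl the parser needs.
    let file = File::open("rln_final.zkey").expect("zkey file exists");
    let mut reader = BufReader::new(file);
    let (proving_key, matrices) = read_zkey(&mut reader).expect("valid zkey");
    println!(
        "vars: {}, constraints: {}",
        matrices.num_witness_variables, matrices.num_constraints
    );
    let _ = proving_key;
}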
@@ -8,6 +8,7 @@ use crate::public::{hash as public_hash, poseidon_hash as public_poseidon_hash,
// First argument to the macro is context,
// second is the actual method on `RLN`
// rest are all other arguments to the method
+#[cfg(not(feature = "stateless"))]
macro_rules! call {
    ($instance:expr, $method:ident $(, $arg:expr)*) => {
        {
@@ -143,24 +144,24 @@ impl ProcessArg for *const Buffer {
    }
}

-impl<'a> ProcessArg for *const RLN<'a> {
-    type ReturnType = &'a RLN<'a>;
+impl ProcessArg for *const RLN {
+    type ReturnType = &'static RLN;
    fn process(self) -> Self::ReturnType {
        unsafe { &*self }
    }
}

-impl<'a> ProcessArg for *mut RLN<'a> {
-    type ReturnType = &'a mut RLN<'a>;
+impl ProcessArg for *mut RLN {
+    type ReturnType = &'static mut RLN;
    fn process(self) -> Self::ReturnType {
        unsafe { &mut *self }
    }
}

/// Buffer struct is taken from
/// <https://github.com/celo-org/celo-threshold-bls-rs/blob/master/crates/threshold-bls-ffi/src/ffi.rs>
///
/// Also heavily inspired by <https://github.com/kilic/rln/blob/master/src/ffi.rs>

#[repr(C)]
#[derive(Clone, Debug, PartialEq)]
@@ -192,6 +193,7 @@ impl<'a> From<&Buffer> for &'a [u8] {
////////////////////////////////////////////////////////

#[allow(clippy::not_unsafe_ptr_arg_deref)]
+#[cfg(not(feature = "stateless"))]
#[no_mangle]
pub extern "C" fn new(tree_height: usize, input_buffer: *const Buffer, ctx: *mut *mut RLN) -> bool {
    match RLN::new(tree_height, input_buffer.process()) {
@@ -207,20 +209,35 @@ pub extern "C" fn new(tree_height: usize, input_buffer: *const Buffer, ctx: *mut
}

+#[allow(clippy::not_unsafe_ptr_arg_deref)]
+#[cfg(feature = "stateless")]
+#[no_mangle]
+pub extern "C" fn new(ctx: *mut *mut RLN) -> bool {
+    match RLN::new() {
+        Ok(rln) => {
+            unsafe { *ctx = Box::into_raw(Box::new(rln)) };
+            true
+        }
+        Err(err) => {
+            eprintln!("could not instantiate rln: {err}");
+            false
+        }
+    }
+}
+
#[allow(clippy::not_unsafe_ptr_arg_deref)]
+#[cfg(not(feature = "stateless"))]
#[no_mangle]
pub extern "C" fn new_with_params(
    tree_height: usize,
    circom_buffer: *const Buffer,
    zkey_buffer: *const Buffer,
    vk_buffer: *const Buffer,
+    graph_data: *const Buffer,
    tree_config: *const Buffer,
    ctx: *mut *mut RLN,
) -> bool {
    match RLN::new_with_params(
        tree_height,
        circom_buffer.process().to_vec(),
        zkey_buffer.process().to_vec(),
        vk_buffer.process().to_vec(),
+        graph_data.process().to_vec(),
        tree_config.process(),
    ) {
        Ok(rln) => {
@@ -234,47 +251,77 @@ pub extern "C" fn new_with_params(
    }
}

+#[allow(clippy::not_unsafe_ptr_arg_deref)]
+#[cfg(feature = "stateless")]
+#[no_mangle]
+pub extern "C" fn new_with_params(
+    zkey_buffer: *const Buffer,
+    graph_buffer: *const Buffer,
+    ctx: *mut *mut RLN,
+) -> bool {
+    match RLN::new_with_params(
+        zkey_buffer.process().to_vec(),
+        graph_buffer.process().to_vec(),
+    ) {
+        Ok(rln) => {
+            unsafe { *ctx = Box::into_raw(Box::new(rln)) };
+            true
+        }
+        Err(err) => {
+            eprintln!("could not instantiate rln: {err}");
+            false
+        }
+    }
+}
+
////////////////////////////////////////////////////////
// Merkle tree APIs
////////////////////////////////////////////////////////
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
+#[cfg(not(feature = "stateless"))]
pub extern "C" fn set_tree(ctx: *mut RLN, tree_height: usize) -> bool {
    call!(ctx, set_tree, tree_height)
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
+#[cfg(not(feature = "stateless"))]
pub extern "C" fn delete_leaf(ctx: *mut RLN, index: usize) -> bool {
    call!(ctx, delete_leaf, index)
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
+#[cfg(not(feature = "stateless"))]
pub extern "C" fn set_leaf(ctx: *mut RLN, index: usize, input_buffer: *const Buffer) -> bool {
    call!(ctx, set_leaf, index, input_buffer)
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
+#[cfg(not(feature = "stateless"))]
pub extern "C" fn get_leaf(ctx: *mut RLN, index: usize, output_buffer: *mut Buffer) -> bool {
    call_with_output_arg!(ctx, get_leaf, output_buffer, index)
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
+#[cfg(not(feature = "stateless"))]
pub extern "C" fn leaves_set(ctx: *mut RLN) -> usize {
    ctx.process().leaves_set()
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
+#[cfg(not(feature = "stateless"))]
pub extern "C" fn set_next_leaf(ctx: *mut RLN, input_buffer: *const Buffer) -> bool {
    call!(ctx, set_next_leaf, input_buffer)
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
+#[cfg(not(feature = "stateless"))]
pub extern "C" fn set_leaves_from(
    ctx: *mut RLN,
    index: usize,
@@ -285,12 +332,14 @@ pub extern "C" fn set_leaves_from(

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
+#[cfg(not(feature = "stateless"))]
pub extern "C" fn init_tree_with_leaves(ctx: *mut RLN, input_buffer: *const Buffer) -> bool {
    call!(ctx, init_tree_with_leaves, input_buffer)
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
+#[cfg(not(feature = "stateless"))]
pub extern "C" fn atomic_operation(
    ctx: *mut RLN,
    index: usize,
@@ -302,6 +351,7 @@ pub extern "C" fn atomic_operation(

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
+#[cfg(not(feature = "stateless"))]
pub extern "C" fn seq_atomic_operation(
    ctx: *mut RLN,
    leaves_buffer: *const Buffer,
@@ -318,12 +368,14 @@ pub extern "C" fn seq_atomic_operation(

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
+#[cfg(not(feature = "stateless"))]
pub extern "C" fn get_root(ctx: *const RLN, output_buffer: *mut Buffer) -> bool {
    call_with_output_arg!(ctx, get_root, output_buffer)
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
+#[cfg(not(feature = "stateless"))]
pub extern "C" fn get_proof(ctx: *const RLN, index: usize, output_buffer: *mut Buffer) -> bool {
    call_with_output_arg!(ctx, get_proof, output_buffer, index)
}
@@ -353,6 +405,7 @@ pub extern "C" fn verify(

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
+#[cfg(not(feature = "stateless"))]
pub extern "C" fn generate_rln_proof(
    ctx: *mut RLN,
    input_buffer: *const Buffer,
@@ -363,6 +416,22 @@ pub extern "C" fn generate_rln_proof(

+#[allow(clippy::not_unsafe_ptr_arg_deref)]
+#[no_mangle]
+pub extern "C" fn generate_rln_proof_with_witness(
+    ctx: *mut RLN,
+    input_buffer: *const Buffer,
+    output_buffer: *mut Buffer,
+) -> bool {
+    call_with_output_arg!(
+        ctx,
+        generate_rln_proof_with_witness,
+        output_buffer,
+        input_buffer
+    )
+}
+
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
+#[cfg(not(feature = "stateless"))]
pub extern "C" fn verify_rln_proof(
    ctx: *const RLN,
    proof_buffer: *const Buffer,
@@ -446,18 +515,21 @@ pub extern "C" fn recover_id_secret(

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
+#[cfg(not(feature = "stateless"))]
pub extern "C" fn set_metadata(ctx: *mut RLN, input_buffer: *const Buffer) -> bool {
    call!(ctx, set_metadata, input_buffer)
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
+#[cfg(not(feature = "stateless"))]
pub extern "C" fn get_metadata(ctx: *const RLN, output_buffer: *mut Buffer) -> bool {
    call_with_output_arg!(ctx, get_metadata, output_buffer)
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
+#[cfg(not(feature = "stateless"))]
pub extern "C" fn flush(ctx: *mut RLN) -> bool {
    call!(ctx, flush)
}
@@ -23,7 +23,7 @@ static POSEIDON: Lazy<Poseidon<Fr>> = Lazy::new(|| Poseidon::<Fr>::from(&ROUND_P

pub fn poseidon_hash(input: &[Fr]) -> Fr {
    POSEIDON
-        .hash(input.to_vec())
+        .hash(input)
        .expect("hash with fixed input size can't fail")
}
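The change above lets callers hash a borrowed slice without building a Vec first. A small sketch (the re-export paths rln::hashers and rln::circuit are assumed):

use rln::circuit::Fr;
use rln::hashers::poseidon_hash;

fn hash_two_elements() {
    let a = Fr::from(1u64);
    let b = Fr::from(2u64);
    // Borrows the inputs; no allocation at the call site.
    let h = poseidon_hash(&[a, b]);
    println!("{h}");
}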
@@ -1,6 +1,6 @@
#![allow(dead_code)]

pub mod circuit;
+#[cfg(not(target_arch = "wasm32"))]
+pub mod ffi;
pub mod hashers;
#[cfg(feature = "pmtree-ft")]
pub mod pm_tree_adapter;
@@ -10,6 +10,3 @@ pub mod public;
#[cfg(test)]
pub mod public_api_tests;
pub mod utils;
-
-#[cfg(not(target_arch = "wasm32"))]
-pub mod ffi;
@@ -5,6 +5,7 @@ use std::str::FromStr;
use color_eyre::{Report, Result};
use serde_json::Value;

+use utils::pmtree::tree::Key;
use utils::pmtree::{Database, Hasher};
use utils::*;

@@ -16,6 +17,9 @@ const METADATA_KEY: [u8; 8] = *b"metadata";

pub struct PmTree {
    tree: pmtree::MerkleTree<SledDB, PoseidonHash>,
+    /// Tracks the leaves up to next_index:
+    /// 0 if the leaf is empty, 1 otherwise.
+    cached_leaves_indices: Vec<u8>,
    // metadata that an application may use to store additional information
    metadata: Vec<u8>,
}
@@ -143,6 +147,7 @@ impl ZerokitMerkleTree for PmTree {

        Ok(PmTree {
            tree,
+            cached_leaves_indices: vec![0; 1 << depth],
            metadata: Vec::new(),
        })
    }
@@ -155,7 +160,7 @@
        self.tree.capacity()
    }

-    fn leaves_set(&mut self) -> usize {
+    fn leaves_set(&self) -> usize {
        self.tree.leaves_set()
    }
@@ -170,7 +175,9 @@
    fn set(&mut self, index: usize, leaf: FrOf<Self::Hasher>) -> Result<()> {
        self.tree
            .set(index, leaf)
-            .map_err(|e| Report::msg(e.to_string()))
+            .map_err(|e| Report::msg(e.to_string()))?;
+        self.cached_leaves_indices[index] = 1;
+        Ok(())
    }
@@ -178,15 +185,51 @@ fn set_range<I: IntoIterator<Item = FrOf<Self::Hasher>>>(
        start: usize,
        values: I,
    ) -> Result<()> {
+        let v = values.into_iter().collect::<Vec<_>>();
        self.tree
-            .set_range(start, values)
-            .map_err(|e| Report::msg(e.to_string()))
+            .set_range(start, v.clone().into_iter())
+            .map_err(|e| Report::msg(e.to_string()))?;
+        for i in start..v.len() {
+            self.cached_leaves_indices[i] = 1
+        }
+        Ok(())
    }

    fn get(&self, index: usize) -> Result<FrOf<Self::Hasher>> {
        self.tree.get(index).map_err(|e| Report::msg(e.to_string()))
    }

+    fn get_subtree_root(&self, n: usize, index: usize) -> Result<FrOf<Self::Hasher>> {
+        if n > self.depth() {
+            return Err(Report::msg("level exceeds depth size"));
+        }
+        if index >= self.capacity() {
+            return Err(Report::msg("index exceeds set size"));
+        }
+        if n == 0 {
+            Ok(self.root())
+        } else if n == self.depth() {
+            self.get(index)
+        } else {
+            let node = self
+                .tree
+                .get_elem(Key::new(n, index >> (self.depth() - n)))
+                .unwrap();
+            Ok(node)
+        }
+    }
+
+    fn get_empty_leaves_indices(&self) -> Vec<usize> {
+        let next_idx = self.leaves_set();
+        self.cached_leaves_indices
+            .iter()
+            .take(next_idx)
+            .enumerate()
+            .filter(|&(_, &v)| v == 0u8)
+            .map(|(idx, _)| idx)
+            .collect()
+    }
+
    fn override_range<I: IntoIterator<Item = FrOf<Self::Hasher>>, J: IntoIterator<Item = usize>>(
        &mut self,
        start: usize,
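The bit-shift in get_subtree_root above maps a leaf index to its ancestor at level n. A toy check of that arithmetic (standalone sketch, not part of the commit):

// Sketch: which level-n node contains leaf `leaf_index`, for a tree of the given depth.
fn subtree_node_index(depth: usize, n: usize, leaf_index: usize) -> usize {
    leaf_index >> (depth - n)
}

fn main() {
    // With depth 20, leaves 0..2^18 all fall under node 0 at level 2.
    assert_eq!(subtree_node_index(20, 2, 123_456), 0);
    // Leaf 7's ancestor at level 19 (one level above the leaves) is node 3.
    assert_eq!(subtree_node_index(20, 19, 7), 3);
}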
@@ -201,7 +244,7 @@ impl ZerokitMerkleTree for PmTree {
            (0, 0) => Err(Report::msg("no leaves or indices to be removed")),
            (1, 0) => self.set(start, leaves[0]),
            (0, 1) => self.delete(indices[0]),
-            (_, 0) => self.set_range_with_leaves(start, leaves),
+            (_, 0) => self.set_range(start, leaves.into_iter()),
            (0, _) => self.remove_indices(&indices),
            (_, _) => self.remove_indices_and_set_leaves(start, leaves, &indices),
        }
@@ -216,7 +259,9 @@
    fn delete(&mut self, index: usize) -> Result<()> {
        self.tree
            .delete(index)
-            .map_err(|e| Report::msg(e.to_string()))
+            .map_err(|e| Report::msg(e.to_string()))?;
+        self.cached_leaves_indices[index] = 0;
+        Ok(())
    }
@@ -246,7 +291,8 @@ fn proof(&self, index: usize) -> Result<Self::Proof> {
        let data = self.tree.db.get(METADATA_KEY)?;

        if data.is_none() {
-            return Err(Report::msg("metadata does not exist"));
+            // send empty metadata
+            return Ok(Vec::new());
        }
        Ok(data.unwrap())
    }
@@ -260,12 +306,6 @@ type PmTreeHasher = <PmTree as ZerokitMerkleTree>::Hasher;
type FrOfPmTreeHasher = FrOf<PmTreeHasher>;

impl PmTree {
-    fn set_range_with_leaves(&mut self, start: usize, leaves: Vec<FrOfPmTreeHasher>) -> Result<()> {
-        self.tree
-            .set_range(start, leaves)
-            .map_err(|e| Report::msg(e.to_string()))
-    }
-
    fn remove_indices(&mut self, indices: &[usize]) -> Result<()> {
        let start = indices[0];
        let end = indices.last().unwrap() + 1;
@@ -274,7 +314,12 @@ impl PmTree {

        self.tree
            .set_range(start, new_leaves)
-            .map_err(|e| Report::msg(e.to_string()))
+            .map_err(|e| Report::msg(e.to_string()))?;
+
+        for i in start..end {
+            self.cached_leaves_indices[i] = 0
+        }
+        Ok(())
    }

    fn remove_indices_and_set_leaves(
@@ -300,8 +345,17 @@ impl PmTree {
        }

        self.tree
-            .set_range(min_index, set_values)
-            .map_err(|e| Report::msg(e.to_string()))
+            .set_range(start, set_values)
+            .map_err(|e| Report::msg(e.to_string()))?;
+
+        for i in indices {
+            self.cached_leaves_indices[*i] = 0;
+        }
+
+        for i in start..(max_index - min_index) {
+            self.cached_leaves_indices[i] = 1
+        }
+        Ok(())
    }
}
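To make the cached_leaves_indices bookkeeping concrete, here is a toy model of the cache alone (no tree), mirroring what get_empty_leaves_indices computes. This is a standalone sketch, not part of the commit:

// Toy model of the cache: 1 = occupied, 0 = empty, considered up to next_index.
fn empty_indices(cache: &[u8], next_index: usize) -> Vec<usize> {
    cache
        .iter()
        .take(next_index)
        .enumerate()
        .filter(|&(_, &v)| v == 0)
        .map(|(i, _)| i)
        .collect()
}

fn main() {
    let cache = vec![1, 0, 1, 0, 0, 1];
    assert_eq!(empty_indices(&cache, 6), vec![1, 3, 4]);
}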
@@ -1,42 +1,45 @@
// This crate collects all the underlying primitives used to implement RLN

-use ark_circom::{CircomReduction, WitnessCalculator};
+use ark_bn254::Fr;
use ark_groth16::{prepare_verifying_key, Groth16, Proof as ArkProof, ProvingKey, VerifyingKey};
-use ark_relations::r1cs::ConstraintMatrices;
-use ark_relations::r1cs::SynthesisError;
+use ark_relations::r1cs::{ConstraintMatrices, SynthesisError};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_std::{rand::thread_rng, UniformRand};
use color_eyre::{Report, Result};
use num_bigint::BigInt;
use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha20Rng;
-#[cfg(not(target_arch = "wasm32"))]
-use std::sync::Mutex;
+#[cfg(debug_assertions)]
+use serde::{Deserialize, Serialize};
+#[cfg(test)]
use std::time::Instant;
use thiserror::Error;
use tiny_keccak::{Hasher as _, Keccak};

-use crate::circuit::{Curve, Fr};
-use crate::hashers::hash_to_field;
-use crate::hashers::poseidon_hash;
+use crate::circuit::{calculate_rln_witness, qap::CircomReduction, Curve};
+use crate::hashers::{hash_to_field, poseidon_hash};
use crate::poseidon_tree::*;
use crate::public::RLN_IDENTIFIER;
use crate::utils::*;
use cfg_if::cfg_if;
use utils::{ZerokitMerkleProof, ZerokitMerkleTree};

///////////////////////////////////////////////////////
// RLN Witness data structure and utility functions
///////////////////////////////////////////////////////

-#[derive(Debug, PartialEq)]
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub struct RLNWitnessInput {
+    #[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
    identity_secret: Fr,
+    #[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
    user_message_limit: Fr,
+    #[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
    message_id: Fr,
+    #[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
    path_elements: Vec<Fr>,
    identity_path_index: Vec<u8>,
+    #[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
    x: Fr,
+    #[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
    external_nullifier: Fr,
}
@@ -95,18 +98,26 @@ pub fn deserialize_identity_tuple(serialized: Vec<u8>) -> (Fr, Fr, Fr, Fr) {
/// # Errors
///
/// Returns an error if `rln_witness.message_id` is not within `rln_witness.user_message_limit`.
+/// input data is [ identity_secret<32> | user_message_limit<32> | message_id<32> | path_elements[<32>] | identity_path_index<8> | x<32> | external_nullifier<32> ]
pub fn serialize_witness(rln_witness: &RLNWitnessInput) -> Result<Vec<u8>> {
-    // Check if message_id is within user_message_limit
    message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;

-    let mut serialized: Vec<u8> = Vec::new();
-
-    serialized.append(&mut fr_to_bytes_le(&rln_witness.identity_secret));
-    serialized.append(&mut fr_to_bytes_le(&rln_witness.user_message_limit));
-    serialized.append(&mut fr_to_bytes_le(&rln_witness.message_id));
-    serialized.append(&mut vec_fr_to_bytes_le(&rln_witness.path_elements)?);
-    serialized.append(&mut vec_u8_to_bytes_le(&rln_witness.identity_path_index)?);
-    serialized.append(&mut fr_to_bytes_le(&rln_witness.x));
-    serialized.append(&mut fr_to_bytes_le(&rln_witness.external_nullifier));
+    // Calculate capacity for Vec:
+    // - 5 fixed field elements: identity_secret, user_message_limit, message_id, x, external_nullifier
+    // - variable number of path elements
+    // - identity_path_index (variable size)
+    let mut serialized: Vec<u8> = Vec::with_capacity(
+        fr_byte_size() * (5 + rln_witness.path_elements.len())
+            + rln_witness.identity_path_index.len(),
+    );
+    serialized.extend_from_slice(&fr_to_bytes_le(&rln_witness.identity_secret));
+    serialized.extend_from_slice(&fr_to_bytes_le(&rln_witness.user_message_limit));
+    serialized.extend_from_slice(&fr_to_bytes_le(&rln_witness.message_id));
+    serialized.extend_from_slice(&vec_fr_to_bytes_le(&rln_witness.path_elements)?);
+    serialized.extend_from_slice(&vec_u8_to_bytes_le(&rln_witness.identity_path_index)?);
+    serialized.extend_from_slice(&fr_to_bytes_le(&rln_witness.x));
+    serialized.extend_from_slice(&fr_to_bytes_le(&rln_witness.external_nullifier));

    Ok(serialized)
}
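For the default tree height of 20, the serialized witness length implied by the layout comment above works out as follows. This is a sketch, assuming (as elsewhere in this crate) that vec_fr_to_bytes_le and vec_u8_to_bytes_le prepend an 8-byte little-endian element count:

// Sketch: expected byte length of serialize_witness output for tree height 20.
const FR_BYTES: usize = 32;
const TREE_HEIGHT: usize = 20;

fn expected_witness_len() -> usize {
    // identity_secret, user_message_limit, message_id, x, external_nullifier
    let fixed_frs = 5 * FR_BYTES;
    // length prefix + 20 field elements
    let path_elements = 8 + TREE_HEIGHT * FR_BYTES;
    // length prefix + 20 single-byte path indices
    let path_index = 8 + TREE_HEIGHT;
    fixed_frs + path_elements + path_index
}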
@@ -162,7 +173,7 @@ pub fn deserialize_witness(serialized: &[u8]) -> Result<(RLNWitnessInput, usize)

// This function deserializes input for kilic's rln generate_proof public API
// https://github.com/kilic/rln/blob/7ac74183f8b69b399e3bc96c1ae8ab61c026dc43/src/public.rs#L148
-// input_data is [ identity_secret<32> | id_index<8> | user_message_limit<32> | message_id<32> | signal_len<8> | signal<var> ]
+// input_data is [ identity_secret<32> | id_index<8> | user_message_limit<32> | message_id<32> | external_nullifier<32> | signal_len<8> | signal<var> ]
// return value is a rln witness populated according to this information
pub fn proof_inputs_to_rln_witness(
    tree: &mut PoseidonTree,
@@ -214,56 +225,7 @@ pub fn proof_inputs_to_rln_witness(
    ))
}

-/// Returns `RLNWitnessInput` given a file with JSON serialized values.
-///
-/// # Errors
-///
-/// Returns an error if `message_id` is not within `user_message_limit`.
-pub fn rln_witness_from_json(input_json_str: &str) -> Result<RLNWitnessInput> {
-    let input_json: serde_json::Value =
-        serde_json::from_str(input_json_str).expect("JSON was not well-formatted");
-
-    let user_message_limit = str_to_fr(&input_json["userMessageLimit"].to_string(), 10)?;
-
-    let message_id = str_to_fr(&input_json["messageId"].to_string(), 10)?;
-
-    message_id_range_check(&message_id, &user_message_limit)?;
-
-    let identity_secret = str_to_fr(&input_json["identitySecret"].to_string(), 10)?;
-
-    let path_elements = input_json["pathElements"]
-        .as_array()
-        .ok_or(Report::msg("not an array"))?
-        .iter()
-        .map(|v| str_to_fr(&v.to_string(), 10))
-        .collect::<Result<_>>()?;
-
-    let identity_path_index_array = input_json["identityPathIndex"]
-        .as_array()
-        .ok_or(Report::msg("not an array"))?;
-
-    let mut identity_path_index: Vec<u8> = vec![];
-
-    for v in identity_path_index_array {
-        identity_path_index.push(v.as_u64().ok_or(Report::msg("not a u64 value"))? as u8);
-    }
-
-    let x = str_to_fr(&input_json["x"].to_string(), 10)?;
-
-    let external_nullifier = str_to_fr(&input_json["externalNullifier"].to_string(), 10)?;
-
-    Ok(RLNWitnessInput {
-        identity_secret,
-        path_elements,
-        identity_path_index,
-        x,
-        external_nullifier,
-        user_message_limit,
-        message_id,
-    })
-}
-
-/// Creates `RLNWitnessInput` from its fields.
+/// Creates [`RLNWitnessInput`] from its fields.
///
/// # Errors
///
@@ -350,14 +312,17 @@ pub fn proof_values_from_witness(rln_witness: &RLNWitnessInput) -> Result<RLNPro
    })
}

+/// input_data is [ root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]
pub fn serialize_proof_values(rln_proof_values: &RLNProofValues) -> Vec<u8> {
-    let mut serialized: Vec<u8> = Vec::new();
+    // Calculate capacity for Vec:
+    // 5 field elements: root, external_nullifier, x, y, nullifier
+    let mut serialized = Vec::with_capacity(fr_byte_size() * 5);

-    serialized.append(&mut fr_to_bytes_le(&rln_proof_values.root));
-    serialized.append(&mut fr_to_bytes_le(&rln_proof_values.external_nullifier));
-    serialized.append(&mut fr_to_bytes_le(&rln_proof_values.x));
-    serialized.append(&mut fr_to_bytes_le(&rln_proof_values.y));
-    serialized.append(&mut fr_to_bytes_le(&rln_proof_values.nullifier));
+    serialized.extend_from_slice(&fr_to_bytes_le(&rln_proof_values.root));
+    serialized.extend_from_slice(&fr_to_bytes_le(&rln_proof_values.external_nullifier));
+    serialized.extend_from_slice(&fr_to_bytes_le(&rln_proof_values.x));
+    serialized.extend_from_slice(&fr_to_bytes_le(&rln_proof_values.y));
+    serialized.extend_from_slice(&fr_to_bytes_le(&rln_proof_values.nullifier));

    serialized
}
@@ -394,30 +359,43 @@ pub fn deserialize_proof_values(serialized: &[u8]) -> (RLNProofValues, usize) {
    )
}

+// input_data is [ identity_secret<32> | id_index<8> | user_message_limit<32> | message_id<32> | external_nullifier<32> | signal_len<8> | signal<var> ]
pub fn prepare_prove_input(
    identity_secret: Fr,
    id_index: usize,
    user_message_limit: Fr,
    message_id: Fr,
    external_nullifier: Fr,
    signal: &[u8],
) -> Vec<u8> {
-    let mut serialized: Vec<u8> = Vec::new();
+    // Calculate capacity for Vec:
+    // - 4 field elements: identity_secret, user_message_limit, message_id, external_nullifier
+    // - 16 bytes for two normalized usize values (id_index<8> + signal_len<8>)
+    // - variable length signal data
+    let mut serialized = Vec::with_capacity(fr_byte_size() * 4 + 16 + signal.len());

-    serialized.append(&mut fr_to_bytes_le(&identity_secret));
-    serialized.append(&mut normalize_usize(id_index));
-    serialized.append(&mut fr_to_bytes_le(&external_nullifier));
-    serialized.append(&mut normalize_usize(signal.len()));
-    serialized.append(&mut signal.to_vec());
+    serialized.extend_from_slice(&fr_to_bytes_le(&identity_secret));
+    serialized.extend_from_slice(&normalize_usize(id_index));
+    serialized.extend_from_slice(&fr_to_bytes_le(&user_message_limit));
+    serialized.extend_from_slice(&fr_to_bytes_le(&message_id));
+    serialized.extend_from_slice(&fr_to_bytes_le(&external_nullifier));
+    serialized.extend_from_slice(&normalize_usize(signal.len()));
+    serialized.extend_from_slice(signal);

    serialized
}

-#[allow(clippy::redundant_clone)]
// input_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> | signal_len<8> | signal<var> ]
pub fn prepare_verify_input(proof_data: Vec<u8>, signal: &[u8]) -> Vec<u8> {
-    let mut serialized: Vec<u8> = Vec::new();
+    // Calculate capacity for Vec:
+    // - proof_data contains the proof and proof values (proof<128> + root<32> + external_nullifier<32> + x<32> + y<32> + nullifier<32>)
+    // - 8 bytes for the normalized signal length value (signal_len<8>)
+    // - variable length signal data
+    let mut serialized = Vec::with_capacity(proof_data.len() + 8 + signal.len());

-    serialized.append(&mut proof_data.clone());
-    serialized.append(&mut normalize_usize(signal.len()));
-    serialized.append(&mut signal.to_vec());
+    serialized.extend(proof_data);
+    serialized.extend_from_slice(&normalize_usize(signal.len()));
+    serialized.extend_from_slice(signal);

    serialized
}
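The layout comments above pin down the expected sizes. A sketch of the arithmetic for the verify input (a 128-byte proof, five 32-byte field values, an 8-byte normalized length, then the signal):

// Sketch: total length of prepare_verify_input output for a given signal length.
fn expected_verify_input_len(signal_len: usize) -> usize {
    128 + 5 * 32 + 8 + signal_len
}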
@@ -585,13 +563,13 @@ pub fn generate_proof_with_witness(
    proving_key: &(ProvingKey<Curve>, ConstraintMatrices<Fr>),
) -> Result<ArkProof<Curve>, ProofError> {
    // If in test mode, we measure and later print the time taken to compute the witness
-    #[cfg(debug_assertions)]
+    #[cfg(test)]
    let now = Instant::now();

    let full_assignment =
        calculate_witness_element::<Curve>(witness).map_err(ProofError::WitnessError)?;

-    #[cfg(debug_assertions)]
+    #[cfg(test)]
    println!("witness generation took: {:.2?}", now.elapsed());

    // Random Values
@@ -600,7 +578,7 @@
    let s = Fr::rand(&mut rng);

    // If in test mode, we measure and later print the time taken to compute the proof
-    #[cfg(debug_assertions)]
+    #[cfg(test)]
    let now = Instant::now();

    let proof = Groth16::<_, CircomReduction>::create_proof_with_reduction_and_matrices(
@@ -613,7 +591,7 @@
        full_assignment.as_slice(),
    )?;

-    #[cfg(debug_assertions)]
+    #[cfg(test)]
    println!("proof generation took: {:.2?}", now.elapsed());

    Ok(proof)
@@ -626,40 +604,23 @@
/// Returns an error if `rln_witness.message_id` is not within `rln_witness.user_message_limit`.
pub fn inputs_for_witness_calculation(
    rln_witness: &RLNWitnessInput,
-) -> Result<[(&str, Vec<BigInt>); 7]> {
+) -> Result<[(&str, Vec<Fr>); 7]> {
    message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;

-    // We convert the path indexes to field elements
-    // TODO: check if necessary
-    let mut path_elements = Vec::new();
-
-    for v in rln_witness.path_elements.iter() {
-        path_elements.push(to_bigint(v)?);
-    }
-
-    let mut identity_path_index = Vec::new();
+    let mut identity_path_index = Vec::with_capacity(rln_witness.identity_path_index.len());
    rln_witness
        .identity_path_index
        .iter()
-        .for_each(|v| identity_path_index.push(BigInt::from(*v)));
+        .for_each(|v| identity_path_index.push(Fr::from(*v)));

    Ok([
-        (
-            "identitySecret",
-            vec![to_bigint(&rln_witness.identity_secret)?],
-        ),
-        (
-            "userMessageLimit",
-            vec![to_bigint(&rln_witness.user_message_limit)?],
-        ),
-        ("messageId", vec![to_bigint(&rln_witness.message_id)?]),
-        ("pathElements", path_elements),
+        ("identitySecret", vec![rln_witness.identity_secret]),
+        ("userMessageLimit", vec![rln_witness.user_message_limit]),
+        ("messageId", vec![rln_witness.message_id]),
+        ("pathElements", rln_witness.path_elements.clone()),
        ("identityPathIndex", identity_path_index),
-        ("x", vec![to_bigint(&rln_witness.x)?]),
-        (
-            "externalNullifier",
-            vec![to_bigint(&rln_witness.external_nullifier)?],
-        ),
+        ("x", vec![rln_witness.x]),
+        ("externalNullifier", vec![rln_witness.external_nullifier]),
    ])
}
@@ -669,34 +630,20 @@
///
/// Returns a [`ProofError`] if proving fails.
pub fn generate_proof(
-    #[cfg(not(target_arch = "wasm32"))] witness_calculator: &Mutex<WitnessCalculator>,
-    #[cfg(target_arch = "wasm32")] witness_calculator: &mut WitnessCalculator,
    proving_key: &(ProvingKey<Curve>, ConstraintMatrices<Fr>),
    rln_witness: &RLNWitnessInput,
+    graph_data: &[u8],
) -> Result<ArkProof<Curve>, ProofError> {
    let inputs = inputs_for_witness_calculation(rln_witness)?
        .into_iter()
        .map(|(name, values)| (name.to_string(), values));

    // If in test mode, we measure and later print the time taken to compute the witness
-    #[cfg(debug_assertions)]
+    #[cfg(test)]
    let now = Instant::now();
+    let full_assignment = calculate_rln_witness(inputs, graph_data);

-    cfg_if! {
-        if #[cfg(target_arch = "wasm32")] {
-            let full_assignment = witness_calculator
-                .calculate_witness_element::<Curve, _>(inputs, false)
-                .map_err(ProofError::WitnessError)?;
-        } else {
-            let full_assignment = witness_calculator
-                .lock()
-                .expect("witness_calculator mutex should not get poisoned")
-                .calculate_witness_element::<Curve, _>(inputs, false)
-                .map_err(ProofError::WitnessError)?;
-        }
-    }

-    #[cfg(debug_assertions)]
+    #[cfg(test)]
    println!("witness generation took: {:.2?}", now.elapsed());

    // Random Values
@@ -705,7 +652,7 @@ pub fn generate_proof(
    let s = Fr::rand(&mut rng);

    // If in test mode, we measure and later print the time taken to compute the proof
-    #[cfg(debug_assertions)]
+    #[cfg(test)]
    let now = Instant::now();
    let proof = Groth16::<_, CircomReduction>::create_proof_with_reduction_and_matrices(
        &proving_key.0,
@@ -717,7 +664,7 @@ pub fn generate_proof(
        full_assignment.as_slice(),
    )?;

-    #[cfg(debug_assertions)]
+    #[cfg(test)]
    println!("proof generation took: {:.2?}", now.elapsed());

    Ok(proof)
@@ -748,26 +695,69 @@ pub fn verify_proof(
    //let pr: ArkProof<Curve> = (*proof).into();

    // If in test mode, we measure and later print the time taken to verify the proof
-    #[cfg(debug_assertions)]
+    #[cfg(test)]
    let now = Instant::now();

    let verified = Groth16::<_, CircomReduction>::verify_proof(&pvk, proof, &inputs)?;

-    #[cfg(debug_assertions)]
+    #[cfg(test)]
    println!("verify took: {:.2?}", now.elapsed());

    Ok(verified)
}

-/// Get CIRCOM JSON inputs
-///
-/// Returns a JSON object containing the inputs necessary to calculate
-/// the witness with CIRCOM on javascript
+// auxiliary function for serializing Fr to JSON using ark-serialize
+fn ark_se<S, A: CanonicalSerialize>(a: &A, s: S) -> Result<S::Ok, S::Error>
+where
+    S: serde::Serializer,
+{
+    let mut bytes = vec![];
+    a.serialize_compressed(&mut bytes)
+        .map_err(serde::ser::Error::custom)?;
+    s.serialize_bytes(&bytes)
+}
+
+// auxiliary function for deserializing Fr from JSON using ark-serialize
+fn ark_de<'de, D, A: CanonicalDeserialize>(data: D) -> Result<A, D::Error>
+where
+    D: serde::de::Deserializer<'de>,
+{
+    let s: Vec<u8> = serde::de::Deserialize::deserialize(data)?;
+    let a = A::deserialize_compressed_unchecked(s.as_slice());
+    a.map_err(serde::de::Error::custom)
+}
+
+/// Converts a JSON value into an [`RLNWitnessInput`] object.
+///
+/// # Errors
+///
+/// Returns an error if `rln_witness.message_id` is not within `rln_witness.user_message_limit`.
-pub fn get_json_inputs(rln_witness: &RLNWitnessInput) -> Result<serde_json::Value> {
+pub fn rln_witness_from_json(input_json: serde_json::Value) -> Result<RLNWitnessInput> {
+    let rln_witness: RLNWitnessInput = serde_json::from_value(input_json).unwrap();
    message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;

+    Ok(rln_witness)
+}
+
+/// Converts an [`RLNWitnessInput`] object to the corresponding JSON serialization.
+///
+/// # Errors
+///
+/// Returns an error if `message_id` is not within `user_message_limit`.
+pub fn rln_witness_to_json(rln_witness: &RLNWitnessInput) -> Result<serde_json::Value> {
+    message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;
+
+    let rln_witness_json = serde_json::to_value(rln_witness)?;
+    Ok(rln_witness_json)
+}
+
+/// Converts an [`RLNWitnessInput`] object to the corresponding JSON serialization.
+/// Before serialization, the data is translated into big ints for further calculation in the witness calculator.
+///
+/// # Errors
+///
+/// Returns an error if `message_id` is not within `user_message_limit`.
+pub fn rln_witness_to_bigint_json(rln_witness: &RLNWitnessInput) -> Result<serde_json::Value> {
+    message_id_range_check(&rln_witness.message_id, &rln_witness.user_message_limit)?;
+
+    let mut path_elements = Vec::new();
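A hedged round-trip sketch of the two JSON helpers introduced above (names as in this diff; RLNWitnessInput derives PartialEq, so the assertion is well-defined):

fn json_roundtrip(witness: &RLNWitnessInput) -> color_eyre::Result<()> {
    // Serialize to JSON via the ark_se field serializers, then parse back.
    let json = rln_witness_to_json(witness)?;
    let recovered = rln_witness_from_json(json)?;
    assert_eq!(*witness, recovered);
    Ok(())
}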
@@ -1,33 +1,31 @@
-use crate::circuit::{vk_from_raw, zkey_from_raw, Curve, Fr};
+/// This is the main public API for the RLN module. It is used by the FFI, and should be
+/// used by tests etc. as well.
+#[cfg(not(feature = "stateless"))]
+use {
+    crate::{circuit::TEST_TREE_HEIGHT, poseidon_tree::PoseidonTree},
+    serde_json::{json, Value},
+    std::str::FromStr,
+    utils::{Hasher, ZerokitMerkleProof, ZerokitMerkleTree},
+};
+
+use crate::circuit::{zkey_from_raw, Curve, Fr};
use crate::hashers::{hash_to_field, poseidon_hash as utils_poseidon_hash};
-use crate::poseidon_tree::PoseidonTree;
use crate::protocol::*;
use crate::utils::*;
-/// This is the main public API for RLN module. It is used by the FFI, and should be
-/// used by tests etc as well
-use ark_groth16::Proof as ArkProof;
-use ark_groth16::{ProvingKey, VerifyingKey};
+#[cfg(not(target_arch = "wasm32"))]
+use {
+    crate::circuit::{graph_from_folder, zkey_from_folder},
+    std::default::Default,
+};
+
+use ark_groth16::{Proof as ArkProof, ProvingKey, VerifyingKey};
use ark_relations::r1cs::ConstraintMatrices;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Read, Write};
use cfg_if::cfg_if;
use color_eyre::{Report, Result};
-use num_bigint::BigInt;
use std::io::Cursor;
-use utils::{ZerokitMerkleProof, ZerokitMerkleTree};
-
-cfg_if! {
-    if #[cfg(not(target_arch = "wasm32"))] {
-        use std::default::Default;
-        use std::sync::Mutex;
-        use crate::circuit::{circom_from_folder, vk_from_folder, circom_from_raw, zkey_from_folder, TEST_RESOURCES_FOLDER, TEST_TREE_HEIGHT};
-        use ark_circom::WitnessCalculator;
-        use serde_json::{json, Value};
-        use utils::{Hasher};
-        use std::str::FromStr;
-    } else {
-        use std::marker::*;
-    }
-}
+#[cfg(target_arch = "wasm32")]
+use num_bigint::BigInt;

/// The application-specific RLN identifier.
///
@@ -39,53 +37,44 @@ pub const RLN_IDENTIFIER: &[u8] = b"zerokit/rln/010203040506070809";
|
||||
/// It implements the methods required to update the internal Merkle Tree, generate and verify RLN ZK proofs.
|
||||
///
|
||||
/// I/O is mostly done using writers and readers implementing `std::io::Write` and `std::io::Read`, respectively.
|
||||
pub struct RLN<'a> {
|
||||
pub struct RLN {
|
||||
proving_key: (ProvingKey<Curve>, ConstraintMatrices<Fr>),
|
||||
pub(crate) verification_key: VerifyingKey<Curve>,
|
||||
pub(crate) tree: PoseidonTree,
|
||||
|
||||
// The witness calculator can't be loaded in zerokit. Since this struct
|
||||
// contains a lifetime, a PhantomData is necessary to avoid a compiler
|
||||
// error since the lifetime is not being used
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub(crate) witness_calculator: &'a Mutex<WitnessCalculator>,
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
_marker: PhantomData<&'a ()>,
|
||||
pub(crate) graph_data: Vec<u8>,
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub(crate) tree: PoseidonTree,
|
||||
}
|
||||
|
||||
impl RLN<'_> {
|
||||
impl RLN {
|
||||
/// Creates a new RLN object by loading circuit resources from a folder.
|
||||
///
|
||||
/// Input parameters are
|
||||
/// - `tree_height`: the height of the internal Merkle tree
|
||||
/// - `input_data`: a reader for the string path of the resource folder containing the ZK circuit (`rln.wasm`), the proving key (`rln_final.zkey`) and the verification key (`verification_key.json`).
|
||||
/// - `input_data`: include `tree_config` a reader for a string containing a json with the merkle tree configuration
|
||||
///
|
||||
/// Example:
|
||||
/// ```
|
||||
/// use std::io::Cursor;
|
||||
///
|
||||
/// let tree_height = 20;
|
||||
/// let resources = Cursor::new(json!({"resources_folder": "tree_height_20"});
|
||||
/// let input = Cursor::new(json!({}).to_string());
|
||||
///
|
||||
/// // We create a new RLN instance
|
||||
/// let mut rln = RLN::new(tree_height, resources);
|
||||
/// let mut rln = RLN::new(tree_height, input);
|
||||
/// ```
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub fn new<R: Read>(tree_height: usize, mut input_data: R) -> Result<RLN<'static>> {
|
||||
#[cfg(all(not(target_arch = "wasm32"), not(feature = "stateless")))]
|
||||
pub fn new<R: Read>(tree_height: usize, mut input_data: R) -> Result<RLN> {
|
||||
// We read input
|
||||
let mut input: Vec<u8> = Vec::new();
|
||||
input_data.read_to_end(&mut input)?;
|
||||
|
||||
let rln_config: Value = serde_json::from_str(&String::from_utf8(input)?)?;
|
||||
let resources_folder = rln_config["resources_folder"]
|
||||
.as_str()
|
||||
.unwrap_or(TEST_RESOURCES_FOLDER);
|
||||
let tree_config = rln_config["tree_config"].to_string();
|
||||
|
||||
let witness_calculator = circom_from_folder(resources_folder)?;
|
||||
|
||||
let proving_key = zkey_from_folder(resources_folder)?;
|
||||
let verification_key = vk_from_folder(resources_folder)?;
|
||||
let proving_key = zkey_from_folder().to_owned();
|
||||
let verification_key = proving_key.0.vk.to_owned();
|
||||
let graph_data = graph_from_folder().to_owned();
|
||||
|
||||
let tree_config: <PoseidonTree as ZerokitMerkleTree>::Config = if tree_config.is_empty() {
|
||||
<PoseidonTree as ZerokitMerkleTree>::Config::default()
|
||||
@@ -101,12 +90,33 @@ impl RLN<'_> {
|
||||
)?;
|
||||
|
||||
Ok(RLN {
|
||||
witness_calculator,
|
||||
proving_key,
|
||||
verification_key,
|
||||
graph_data,
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
tree,
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
_marker: PhantomData,
|
||||
})
|
||||
}
|
||||
|
||||
/// Creates a new stateless RLN object by loading circuit resources from a folder.
|
||||
///
|
||||
/// Example:
|
||||
///
|
||||
/// ```
|
||||
/// // We create a new RLN instance
|
||||
/// let mut rln = RLN::new();
|
||||
/// ```
|
||||
#[cfg_attr(docsrs, doc(cfg(feature = "stateless")))]
|
||||
#[cfg(all(not(target_arch = "wasm32"), feature = "stateless"))]
|
||||
pub fn new() -> Result<RLN> {
|
||||
let proving_key = zkey_from_folder().to_owned();
|
||||
let verification_key = proving_key.0.vk.to_owned();
|
||||
let graph_data = graph_from_folder().to_owned();
|
||||
|
||||
Ok(RLN {
|
||||
proving_key,
|
||||
verification_key,
|
||||
graph_data,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -114,10 +124,9 @@ impl RLN<'_> {
|
||||
///
|
||||
/// Input parameters are
|
||||
/// - `tree_height`: the height of the internal Merkle tree
|
||||
/// - `circom_vec`: a byte vector containing the ZK circuit (`rln.wasm`) as binary file
|
||||
/// - `zkey_vec`: a byte vector containing to the proving key (`rln_final.zkey`) as binary file
|
||||
/// - `vk_vec`: a byte vector containing to the verification key (`verification_key.json`) as binary file
|
||||
/// - `tree_config`: a reader for a string containing a json with the merkle tree configuration
|
||||
/// - `zkey_vec`: a byte vector containing to the proving key (`rln_final.zkey`) or (`rln_final.arkzkey`) as binary file
|
||||
/// - `graph_data`: a byte vector containing the graph data (`graph.bin`) as binary file
|
||||
/// - `tree_config_input`: a reader for a string containing a json with the merkle tree configuration
|
||||
///
|
||||
/// Example:
|
||||
/// ```
|
||||
@@ -128,38 +137,34 @@ impl RLN<'_> {
|
||||
/// let resources_folder = "./resources/tree_height_20/";
|
||||
///
|
||||
/// let mut resources: Vec<Vec<u8>> = Vec::new();
|
||||
/// for filename in ["rln.wasm", "rln_final.zkey", "verification_key.json"] {
|
||||
/// for filename in ["rln_final.zkey", "graph.bin"] {
|
||||
/// let fullpath = format!("{resources_folder}{filename}");
|
||||
/// let mut file = File::open(&fullpath).expect("no file found");
|
||||
/// let metadata = std::fs::metadata(&fullpath).expect("unable to read metadata");
|
||||
/// let mut buffer = vec![0; metadata.len() as usize];
|
||||
/// file.read_exact(&mut buffer).expect("buffer overflow");
|
||||
/// resources.push(buffer);
|
||||
/// let tree_config = "{}".to_string();
|
||||
/// let tree_config_buffer = &Buffer::from(tree_config.as_bytes());
|
||||
/// }
|
||||
///
|
||||
/// let tree_config = "".to_string();
|
||||
/// let tree_config_buffer = &Buffer::from(tree_config.as_bytes());
|
||||
///
|
||||
/// let mut rln = RLN::new_with_params(
|
||||
/// tree_height,
|
||||
/// resources[0].clone(),
|
||||
/// resources[1].clone(),
|
||||
/// resources[2].clone(),
|
||||
/// tree_config_buffer,
|
||||
/// );
|
||||
/// ```
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(not(target_arch = "wasm32"), not(feature = "stateless")))]
|
||||
pub fn new_with_params<R: Read>(
|
||||
tree_height: usize,
|
||||
circom_vec: Vec<u8>,
|
||||
zkey_vec: Vec<u8>,
|
||||
vk_vec: Vec<u8>,
|
||||
graph_data: Vec<u8>,
|
||||
mut tree_config_input: R,
|
||||
) -> Result<RLN<'static>> {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
let witness_calculator = circom_from_raw(circom_vec)?;
|
||||
|
||||
) -> Result<RLN> {
|
||||
let proving_key = zkey_from_raw(&zkey_vec)?;
|
||||
let verification_key = vk_from_raw(&vk_vec, &zkey_vec)?;
|
||||
let verification_key = proving_key.0.vk.to_owned();
|
||||
|
||||
let mut tree_config_vec: Vec<u8> = Vec::new();
|
||||
tree_config_input.read_to_end(&mut tree_config_vec)?;
|
||||
@@ -179,33 +184,81 @@ impl RLN<'_> {
|
||||
)?;
|
||||
|
||||
Ok(RLN {
|
||||
witness_calculator,
|
||||
proving_key,
|
||||
verification_key,
|
||||
graph_data,
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
tree,
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
pub fn new_with_params(
|
||||
tree_height: usize,
|
||||
zkey_vec: Vec<u8>,
|
||||
vk_vec: Vec<u8>,
|
||||
) -> Result<RLN<'static>> {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
let witness_calculator = circom_from_raw(circom_vec)?;
|
||||
|
||||
/// Creates a new stateless RLN object by passing circuit resources as byte vectors.
|
||||
///
|
||||
/// Input parameters are
|
||||
/// - `zkey_vec`: a byte vector containing to the proving key (`rln_final.zkey`) or (`rln_final.arkzkey`) as binary file
|
||||
/// - `graph_data`: a byte vector containing the graph data (`graph.bin`) as binary file
|
||||
///
|
||||
/// Example:
|
||||
/// ```
|
||||
/// use std::fs::File;
|
||||
/// use std::io::Read;
|
||||
///
|
||||
/// let resources_folder = "./resources/tree_height_20/";
|
||||
///
|
||||
/// let mut resources: Vec<Vec<u8>> = Vec::new();
|
||||
/// for filename in ["rln_final.zkey", "graph.bin"] {
|
||||
/// let fullpath = format!("{resources_folder}{filename}");
|
||||
/// let mut file = File::open(&fullpath).expect("no file found");
|
||||
/// let metadata = std::fs::metadata(&fullpath).expect("unable to read metadata");
|
||||
/// let mut buffer = vec![0; metadata.len() as usize];
|
||||
/// file.read_exact(&mut buffer).expect("buffer overflow");
|
||||
/// resources.push(buffer);
|
||||
/// }
|
||||
///
|
||||
/// let mut rln = RLN::new_with_params(
|
||||
/// resources[0].clone(),
|
||||
/// resources[1].clone(),
|
||||
/// );
|
||||
/// ```
|
||||
#[cfg(all(not(target_arch = "wasm32"), feature = "stateless"))]
|
||||
pub fn new_with_params(zkey_vec: Vec<u8>, graph_data: Vec<u8>) -> Result<RLN> {
|
||||
let proving_key = zkey_from_raw(&zkey_vec)?;
|
||||
let verification_key = vk_from_raw(&vk_vec, &zkey_vec)?;
|
||||
|
||||
// We compute a default empty tree
|
||||
let tree = PoseidonTree::default(tree_height)?;
|
||||
let verification_key = proving_key.0.vk.to_owned();
|
||||
|
||||
Ok(RLN {
|
||||
proving_key,
|
||||
verification_key,
|
||||
graph_data,
|
||||
})
|
||||
}
|
||||
|
||||
/// Creates a new stateless RLN object by passing circuit resources as a byte vector.
|
||||
///
|
||||
/// Input parameters are
|
||||
/// - `zkey_vec`: a byte vector containing the proving key (`rln_final.zkey`) or (`rln_final.arkzkey`) as binary file
|
||||
///
|
||||
/// Example:
|
||||
/// ```
|
||||
/// use std::fs::File;
|
||||
/// use std::io::Read;
|
||||
///
|
||||
/// let zkey_path = "./resources/tree_height_20/rln_final.zkey";
|
||||
///
|
||||
/// let mut file = File::open(zkey_path).expect("Failed to open file");
|
||||
/// let metadata = std::fs::metadata(zkey_path).expect("Failed to read metadata");
|
||||
/// let mut zkey_vec = vec![0; metadata.len() as usize];
|
||||
/// file.read_exact(&mut zkey_vec).expect("Failed to read file");
|
||||
///
|
||||
/// let mut rln = RLN::new_with_params(zkey_vec)?;
|
||||
/// ```
|
||||
#[cfg(all(target_arch = "wasm32", feature = "stateless"))]
|
||||
pub fn new_with_params(zkey_vec: Vec<u8>) -> Result<RLN> {
|
||||
let proving_key = zkey_from_raw(&zkey_vec)?;
|
||||
let verification_key = proving_key.0.vk.to_owned();
|
||||
|
||||
Ok(RLN {
|
||||
proving_key,
|
||||
verification_key,
|
||||
tree,
|
||||
_marker: PhantomData,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -218,6 +271,7 @@ impl RLN<'_> {
|
||||
///
|
||||
/// Input values are:
|
||||
/// - `tree_height`: the height of the Merkle tree.
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn set_tree(&mut self, tree_height: usize) -> Result<()> {
|
||||
// We compute a default empty tree of desired height
|
||||
self.tree = PoseidonTree::default(tree_height)?;
|
||||
@@ -248,6 +302,7 @@ impl RLN<'_> {
|
||||
/// let mut buffer = Cursor::new(serialize_field_element(rate_commitment));
|
||||
/// rln.set_leaf(id_index, &mut buffer).unwrap();
|
||||
/// ```
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn set_leaf<R: Read>(&mut self, index: usize, mut input_data: R) -> Result<()> {
|
||||
// We read input
|
||||
let mut leaf_byte: Vec<u8> = Vec::new();
|
||||
@@ -265,6 +320,9 @@ impl RLN<'_> {
|
||||
/// Input values are:
|
||||
/// - `index`: the index of the leaf
|
||||
///
|
||||
/// Output values are:
|
||||
/// - `output_data`: a writer receiving the serialization of the metadata
|
||||
///
|
||||
/// Example:
|
||||
/// ```
|
||||
/// use crate::protocol::*;
|
||||
@@ -274,6 +332,7 @@ impl RLN<'_> {
|
||||
/// let mut buffer = Cursor::new(Vec::<u8>::new());
|
||||
/// rln.get_leaf(id_index, &mut buffer).unwrap();
|
||||
/// let rate_commitment = deserialize_field_element(&buffer.into_inner()).unwrap();
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn get_leaf<W: Write>(&self, index: usize, mut output_data: W) -> Result<()> {
|
||||
// We get the leaf at input index
|
||||
let leaf = self.tree.get(index)?;
|
||||
@@ -316,6 +375,7 @@ impl RLN<'_> {
|
||||
/// let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves));
|
||||
/// rln.set_leaves_from(index, &mut buffer).unwrap();
|
||||
/// ```
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn set_leaves_from<R: Read>(&mut self, index: usize, mut input_data: R) -> Result<()> {
|
||||
// We read input
|
||||
let mut leaves_byte: Vec<u8> = Vec::new();
|
||||
@@ -325,7 +385,7 @@ impl RLN<'_> {
|
||||
|
||||
// We set the leaves
|
||||
self.tree
|
||||
.override_range(index, leaves, [])
|
||||
.override_range(index, leaves.into_iter(), [].into_iter())
|
||||
.map_err(|_| Report::msg("Could not set leaves"))?;
|
||||
Ok(())
|
||||
}
|
||||
@@ -336,6 +396,7 @@ impl RLN<'_> {
|
||||
///
|
||||
/// Input values are:
|
||||
/// - `input_data`: a reader for the serialization of multiple leaf values (serialization done with [`rln::utils::vec_fr_to_bytes_le`](crate::utils::vec_fr_to_bytes_le))
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn init_tree_with_leaves<R: Read>(&mut self, input_data: R) -> Result<()> {
|
||||
// reset the tree
|
||||
// NOTE: this requires the tree to be initialized with the correct height initially
|
||||
@@ -384,8 +445,9 @@ impl RLN<'_> {
|
||||
/// // We atomically add leaves and remove indices from the tree
|
||||
/// let mut leaves_buffer = Cursor::new(vec_fr_to_bytes_le(&leaves));
|
||||
/// let mut indices_buffer = Cursor::new(vec_u8_to_bytes_le(&indices));
|
||||
/// rln.set_leaves_from(index, &mut leaves_buffer, indices_buffer).unwrap();
|
||||
/// rln.atomic_operation(index, &mut leaves_buffer, indices_buffer).unwrap();
|
||||
/// ```
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn atomic_operation<R: Read>(
|
||||
&mut self,
|
||||
index: usize,
|
||||
@@ -406,11 +468,12 @@ impl RLN<'_> {
|
||||
|
||||
// We set the leaves
|
||||
self.tree
|
||||
.override_range(index, leaves, indices)
|
||||
.override_range(index, leaves.into_iter(), indices.into_iter())
|
||||
.map_err(|e| Report::msg(format!("Could not perform the batch operation: {e}")))?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn leaves_set(&mut self) -> usize {
|
||||
self.tree.leaves_set()
|
||||
}
|
||||
@@ -458,6 +521,7 @@ impl RLN<'_> {
|
||||
/// let mut buffer = Cursor::new(fr_to_bytes_le(&rate_commitment));
|
||||
/// rln.set_next_leaf(&mut buffer).unwrap();
|
||||
/// ```
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn set_next_leaf<R: Read>(&mut self, mut input_data: R) -> Result<()> {
|
||||
// We read input
|
||||
let mut leaf_byte: Vec<u8> = Vec::new();
|
||||
@@ -470,7 +534,7 @@ impl RLN<'_> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Sets the value of the leaf at position index to the harcoded default value.
|
||||
/// Sets the value of the leaf at position index to the hardcoded default value.
|
||||
///
|
||||
/// This function does not change the internal Merkle tree `next_index` value.
|
||||
///
|
||||
@@ -483,6 +547,7 @@ impl RLN<'_> {
|
||||
/// let index = 10;
|
||||
/// rln.delete_leaf(index).unwrap();
|
||||
/// ```
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn delete_leaf(&mut self, index: usize) -> Result<()> {
|
||||
self.tree.delete(index)?;
|
||||
Ok(())
|
||||
@@ -500,6 +565,7 @@ impl RLN<'_> {
|
||||
/// let metadata = b"some metadata";
|
||||
/// rln.set_metadata(metadata).unwrap();
|
||||
/// ```
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn set_metadata(&mut self, metadata: &[u8]) -> Result<()> {
|
||||
self.tree.set_metadata(metadata)?;
|
||||
Ok(())
|
||||
@@ -519,6 +585,7 @@ impl RLN<'_> {
|
||||
/// rln.get_metadata(&mut buffer).unwrap();
|
||||
/// let metadata = buffer.into_inner();
|
||||
/// ```
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn get_metadata<W: Write>(&self, mut output_data: W) -> Result<()> {
|
||||
let metadata = self.tree.metadata()?;
|
||||
output_data.write_all(&metadata)?;
|
||||
@@ -538,6 +605,7 @@ impl RLN<'_> {
|
||||
/// rln.get_root(&mut buffer).unwrap();
|
||||
/// let (root, _) = bytes_le_to_fr(&buffer.into_inner());
|
||||
/// ```
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn get_root<W: Write>(&self, mut output_data: W) -> Result<()> {
|
||||
let root = self.tree.root();
|
||||
output_data.write_all(&fr_to_bytes_le(&root))?;
|
||||
@@ -545,6 +613,34 @@ impl RLN<'_> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Returns the root of subtree in the Merkle tree
|
||||
///
|
||||
/// Output values are:
|
||||
/// - `output_data`: a writer receiving the serialization of the node value (serialization done with [`rln::utils::fr_to_bytes_le`](crate::utils::fr_to_bytes_le))
|
||||
///
|
||||
/// Example
|
||||
/// ```
|
||||
/// use rln::utils::*;
|
||||
///
|
||||
/// let mut buffer = Cursor::new(Vec::<u8>::new());
|
||||
/// let level = 1;
|
||||
/// let index = 2;
|
||||
/// rln.get_subtree_root(level, index, &mut buffer).unwrap();
|
||||
/// let (subroot, _) = bytes_le_to_fr(&buffer.into_inner());
|
||||
/// ```
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn get_subtree_root<W: Write>(
|
||||
&self,
|
||||
level: usize,
|
||||
index: usize,
|
||||
mut output_data: W,
|
||||
) -> Result<()> {
|
||||
let subroot = self.tree.get_subtree_root(level, index)?;
|
||||
output_data.write_all(&fr_to_bytes_le(&subroot))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Returns the Merkle proof of the leaf at position index
|
||||
///
|
||||
/// Input values are:
|
||||
@@ -566,6 +662,7 @@ impl RLN<'_> {
|
||||
/// let (path_elements, read) = bytes_le_to_vec_fr(&buffer_inner);
|
||||
/// let (identity_path_index, _) = bytes_le_to_vec_u8(&buffer_inner[read..].to_vec());
|
||||
/// ```
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn get_proof<W: Write>(&self, index: usize, mut output_data: W) -> Result<()> {
|
||||
let merkle_proof = self.tree.proof(index).expect("proof should exist");
|
||||
let path_elements = merkle_proof.get_path_elements();
|
||||
@@ -577,13 +674,52 @@ impl RLN<'_> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Returns indices of leaves in the tree are set to zero (upto the final leaf that was set).
|
||||
///
|
||||
/// Output values are:
|
||||
/// - `output_data`: a writer receiving the serialization of the indices of leaves.
|
||||
///
|
||||
/// Example
|
||||
/// ```
|
||||
/// use rln::circuit::Fr;
|
||||
/// use rln::utils::*;
|
||||
///
|
||||
/// let start_index = 5;
|
||||
/// let no_of_leaves = 256;
|
||||
///
|
||||
/// // We generate a vector of random leaves
|
||||
/// let mut leaves: Vec<Fr> = Vec::new();
|
||||
/// let mut rng = thread_rng();
|
||||
/// for _ in 0..no_of_leaves {
|
||||
/// let (_, id_commitment) = keygen();
|
||||
/// let rate_commitment = poseidon_hash(&[id_commitment, 1.into()]);
|
||||
/// leaves.push(rate_commitment);
|
||||
/// }
|
||||
///
|
||||
/// // We add leaves in a batch into the tree
|
||||
/// let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves));
|
||||
/// rln.set_leaves_from(index, &mut buffer).unwrap();
|
||||
///
|
||||
/// // Get indices of first empty leaves upto start_index
|
||||
/// let mut buffer = Cursor::new(Vec::<u8>::new());
|
||||
/// rln.get_empty_leaves_indices(&mut buffer).unwrap();
|
||||
/// let idxs = bytes_le_to_vec_usize(&buffer.into_inner()).unwrap();
|
||||
/// assert_eq!(idxs, [0, 1, 2, 3, 4]);
|
||||
/// ```
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn get_empty_leaves_indices<W: Write>(&self, mut output_data: W) -> Result<()> {
|
||||
let idxs = self.tree.get_empty_leaves_indices();
|
||||
idxs.serialize_compressed(&mut output_data)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////
|
||||
// zkSNARK APIs
|
||||
////////////////////////////////////////////////////////
|
||||
/// Computes a zkSNARK RLN proof using a [`RLNWitnessInput`](crate::protocol::RLNWitnessInput).
|
||||
/// Computes a zkSNARK RLN proof using a [`RLNWitnessInput`].
|
||||
///
|
||||
/// Input values are:
|
||||
/// - `input_data`: a reader for the serialization of a [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object, containing the public and private inputs to the ZK circuits (serialization done using [`rln::protocol::serialize_witness`](crate::protocol::serialize_witness))
|
||||
/// - `input_data`: a reader for the serialization of a [`RLNWitnessInput`] object, containing the public and private inputs to the ZK circuits (serialization done using [`rln::protocol::serialize_witness`](crate::protocol::serialize_witness))
|
||||
///
|
||||
/// Output values are:
|
||||
/// - `output_data`: a writer receiving the serialization of the zkSNARK proof
|
||||
@@ -608,17 +744,11 @@ impl RLN<'_> {
|
||||
mut output_data: W,
|
||||
) -> Result<()> {
|
||||
// We read input RLN witness and we serialize_compressed it
|
||||
let mut serialized: Vec<u8> = Vec::new();
|
||||
input_data.read_to_end(&mut serialized)?;
|
||||
let (rln_witness, _) = deserialize_witness(&serialized)?;
|
||||
let mut serialized_witness: Vec<u8> = Vec::new();
|
||||
input_data.read_to_end(&mut serialized_witness)?;
|
||||
let (rln_witness, _) = deserialize_witness(&serialized_witness)?;
|
||||
|
||||
/*
|
||||
if self.witness_calculator.is_none() {
|
||||
self.witness_calculator = CIRCOM(&self.resources_folder);
|
||||
}
|
||||
*/
|
||||
|
||||
let proof = generate_proof(self.witness_calculator, &self.proving_key, &rln_witness)?;
|
||||
let proof = generate_proof(&self.proving_key, &rln_witness, &self.graph_data)?;
|
||||
|
||||
// Note: we export a serialization of ark-groth16::Proof not semaphore::Proof
|
||||
proof.serialize_compressed(&mut output_data)?;
|
||||
@@ -629,7 +759,8 @@ impl RLN<'_> {
|
||||
/// Verifies a zkSNARK RLN proof.
|
||||
///
|
||||
/// Input values are:
|
||||
/// - `input_data`: a reader for the serialization of the RLN zkSNARK proof concatenated with a serialization of the circuit output values, i.e. `[ proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> | nullifier<32> | rln_identifier<32> ]`, where <_> indicates the byte length.
|
||||
/// - `input_data`: a reader for the serialization of the RLN zkSNARK proof concatenated with a serialization of the circuit output values,
|
||||
/// i.e. `[ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32>]`, where <_> indicates the byte length.
|
||||
///
|
||||
/// The function returns true if the zkSNARK proof is valid with respect to the provided circuit output values, false otherwise.
|
||||
///
|
||||
@@ -664,7 +795,7 @@ impl RLN<'_> {
|
||||
pub fn verify<R: Read>(&self, mut input_data: R) -> Result<bool> {
|
||||
// Input data is serialized for Curve as:
|
||||
// serialized_proof (compressed, 4*32 bytes) || serialized_proof_values (6*32 bytes), i.e.
|
||||
// [ proof<128> | root<32> | external_nullifier<32> | share_x<32> | share_y<32> | nullifier<32> ]
|
||||
// [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]
|
||||
let mut input_byte: Vec<u8> = Vec::new();
|
||||
input_data.read_to_end(&mut input_byte)?;
|
||||
let proof = ArkProof::deserialize_compressed(&mut Cursor::new(&input_byte[..128]))?;
|
||||
@@ -676,18 +807,19 @@ impl RLN<'_> {
|
||||
Ok(verified)
|
||||
}
|
||||
|
||||
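    // Sketch of assembling the `verify` input from the layout documented above,
    // assuming `proof_bytes` holds the 128-byte compressed proof and the five
    // proof values are Fr elements (serialization helpers from rln::utils):
    //
    //     let mut input: Vec<u8> = Vec::with_capacity(128 + 5 * 32);
    //     input.extend_from_slice(&proof_bytes);                          // proof<128>
    //     input.extend_from_slice(&fr_to_bytes_le(&root));                // root<32>
    //     input.extend_from_slice(&fr_to_bytes_le(&external_nullifier));  // external_nullifier<32>
    //     input.extend_from_slice(&fr_to_bytes_le(&x));                   // x<32>
    //     input.extend_from_slice(&fr_to_bytes_le(&y));                   // y<32>
    //     input.extend_from_slice(&fr_to_bytes_le(&nullifier));           // nullifier<32>
    //     let verified = rln.verify(&mut Cursor::new(input))?;
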
    /// Computes a zkSNARK RLN proof from the identity secret, the Merkle tree index, the epoch and signal.
    /// Computes a zkSNARK RLN proof from the identity secret, the Merkle tree index, the user message limit, the message id, the external nullifier (which includes epoch and rln identifier) and signal.
    ///
    /// Input values are:
    /// - `input_data`: a reader for the serialization of `[ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]`
    /// - `input_data`: a reader for the serialization of `[ identity_secret<32> | id_index<8> | user_message_limit<32> | message_id<32> | external_nullifier<32> | signal_len<8> | signal<var> ]`
    ///
    /// Output values are:
    /// - `output_data`: a writer receiving the serialization of the zkSNARK proof and the circuit evaluations outputs, i.e. `[ proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> | nullifier<32> | rln_identifier<32> ]`
    /// - `output_data`: a writer receiving the serialization of the zkSNARK proof and the circuit evaluations outputs, i.e. `[ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]`
    ///
    /// Example
    /// ```
    /// use rln::protocol::*;
    /// use rln::utils::*;
    /// use rln::hashers::*;
    ///
    /// // Generate identity pair
    /// let (identity_secret_hash, id_commitment) = keygen();
@@ -698,30 +830,35 @@ impl RLN<'_> {
    /// let mut buffer = Cursor::new(fr_to_bytes_le(&rate_commitment));
    /// rln.set_leaf(identity_index, &mut buffer).unwrap();
    ///
    /// // We generate a random signal
    /// let mut rng = rand::thread_rng();
    /// let signal: [u8; 32] = rng.gen();
    /// // We generate a random epoch
    /// let epoch = hash_to_field(b"test-epoch");
    /// // We generate a random rln_identifier
    /// let rln_identifier = hash_to_field(b"test-rln-identifier");
    /// // We generate an external nullifier
    /// let external_nullifier = utils_poseidon_hash(&[epoch, rln_identifier]);
    /// // We choose a message_id satisfying 0 <= message_id < MESSAGE_LIMIT
    /// let message_id = Fr::from(1);
    ///
    /// // We prepare input for generate_rln_proof API
    /// // input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
    /// let mut serialized: Vec<u8> = Vec::new();
    /// serialized.append(&mut fr_to_bytes_le(&identity_secret_hash));
    /// serialized.append(&mut normalize_usize(identity_index));
    /// serialized.append(&mut fr_to_bytes_le(&epoch));
    /// serialized.append(&mut normalize_usize(signal_len).resize(8,0));
    /// serialized.append(&mut signal.to_vec());
    /// // input_data is [ identity_secret<32> | id_index<8> | user_message_limit<32> | message_id<32> | external_nullifier<32> | signal_len<8> | signal<var> ]
    /// let prove_input = prepare_prove_input(
    ///     identity_secret_hash,
    ///     identity_index,
    ///     user_message_limit,
    ///     message_id,
    ///     external_nullifier,
    ///     &signal,
    /// );
    ///
    /// let mut input_buffer = Cursor::new(serialized);
    /// let mut output_buffer = Cursor::new(Vec::<u8>::new());
    /// rln.generate_rln_proof(&mut input_buffer, &mut output_buffer)
    ///     .unwrap();
    ///
    /// // proof_data is [ proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> | nullifier<32> | rln_identifier<32> ]
    /// // proof_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]
    /// let mut proof_data = output_buffer.into_inner();
    /// ```
    #[cfg(not(target_arch = "wasm32"))]
    #[cfg(all(not(target_arch = "wasm32"), not(feature = "stateless")))]
    pub fn generate_rln_proof<R: Read, W: Write>(
        &mut self,
        mut input_data: R,
@@ -733,7 +870,7 @@ impl RLN<'_> {
        let (rln_witness, _) = proof_inputs_to_rln_witness(&mut self.tree, &witness_byte)?;
        let proof_values = proof_values_from_witness(&rln_witness)?;

        let proof = generate_proof(self.witness_calculator, &self.proving_key, &rln_witness)?;
        let proof = generate_proof(&self.proving_key, &rln_witness, &self.graph_data)?;

        // Note: we export a serialization of ark-groth16::Proof not semaphore::Proof
        // This proof is compressed, i.e. 128 bytes long
@@ -743,19 +880,40 @@ impl RLN<'_> {
        Ok(())
    }

    // TODO: this function seems to use redundant witness (as bigint and serialized) and should be refactored
    // Generate RLN Proof using a witness calculated from outside zerokit
    //
    // output_data is [ proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> | nullifier<32> | rln_identifier<32> ]
    // we skip it from documentation for now
    #[doc(hidden)]
    /// Generate RLN Proof using a witness calculated from outside zerokit
    ///
    /// output_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]
    #[cfg(not(target_arch = "wasm32"))]
    pub fn generate_rln_proof_with_witness<R: Read, W: Write>(
        &mut self,
        mut input_data: R,
        mut output_data: W,
    ) -> Result<()> {
        let mut serialized_witness: Vec<u8> = Vec::new();
        input_data.read_to_end(&mut serialized_witness)?;
        let (rln_witness, _) = deserialize_witness(&serialized_witness)?;
        let proof_values = proof_values_from_witness(&rln_witness)?;

        let proof = generate_proof(&self.proving_key, &rln_witness, &self.graph_data)?;

        // Note: we export a serialization of ark-groth16::Proof not semaphore::Proof
        // This proof is compressed, i.e. 128 bytes long
        proof.serialize_compressed(&mut output_data)?;
        output_data.write_all(&serialize_proof_values(&proof_values))?;
        Ok(())
    }

    /// Generate RLN Proof using a witness calculated from outside zerokit
    ///
    /// output_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]
    #[cfg(target_arch = "wasm32")]
    pub fn generate_rln_proof_with_witness<W: Write>(
        &mut self,
        calculated_witness: Vec<BigInt>,
        rln_witness_vec: Vec<u8>,
        serialized_witness: Vec<u8>,
        mut output_data: W,
    ) -> Result<()> {
        let (rln_witness, _) = deserialize_witness(&rln_witness_vec[..])?;
        let (rln_witness, _) = deserialize_witness(&serialized_witness[..])?;
        let proof_values = proof_values_from_witness(&rln_witness)?;

        let proof = generate_proof_with_witness(calculated_witness, &self.proving_key).unwrap();
@@ -770,11 +928,12 @@ impl RLN<'_> {
    /// Verifies a zkSNARK RLN proof against the provided proof values and the state of the internal Merkle tree.
    ///
    /// Input values are:
    /// - `input_data`: a reader for the serialization of the RLN zkSNARK proof concatenated with a serialization of the circuit output values and the signal information, i.e. `[ proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> | nullifier<32> | rln_identifier<32> | signal_len<8> | signal<var> ]`
    /// - `input_data`: a reader for the serialization of the RLN zkSNARK proof concatenated with a serialization of the circuit output values and the signal information,
    ///   i.e. `[ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> | signal_len<8> | signal<var> ]`, where <_> indicates the byte length.
    ///
    /// The function returns true if the zkSNARK proof is valid with respect to the provided circuit output values and signal. Returns false otherwise.
    ///
    /// Note that contrary to [`verify`](crate::public::RLN::verify), this function takes additionaly as input the signal and further verifies if
    /// Note that contrary to [`verify`](crate::public::RLN::verify), this function takes additionally as input the signal and further verifies if
    /// - the Merkle tree root corresponds to the root provided as input;
    /// - the input signal corresponds to the Shamir's x coordinate provided as input
    /// - the hardcoded application [RLN identifier](crate::public::RLN_IDENTIFIER) corresponds to the RLN identifier provided as input
@@ -784,16 +943,16 @@ impl RLN<'_> {
    /// // proof_data is computed as in the example code snippet provided for rln::public::RLN::generate_rln_proof
    ///
    /// // We prepare input for verify_rln_proof API
    /// // input_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
    /// // input_data is `[ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> | signal_len<8> | signal<var> ]`
    /// // that is [ proof_data || signal_len<8> | signal<var> ]
    /// proof_data.append(&mut normalize_usize(signal_len));
    /// proof_data.append(&mut signal.to_vec());
    /// let verify_input = prepare_verify_input(proof_data, &signal);
    ///
    /// let mut input_buffer = Cursor::new(proof_data);
    /// let mut input_buffer = Cursor::new(verify_input);
    /// let verified = rln.verify_rln_proof(&mut input_buffer).unwrap();
    ///
    /// assert!(verified);
    /// ```
    #[cfg(not(feature = "stateless"))]
    pub fn verify_rln_proof<R: Read>(&self, mut input_data: R) -> Result<bool> {
        let mut serialized: Vec<u8> = Vec::new();
        input_data.read_to_end(&mut serialized)?;
@@ -821,15 +980,15 @@ impl RLN<'_> {
    /// Verifies a zkSNARK RLN proof against the provided proof values and a set of allowed Merkle tree roots.
    ///
    /// Input values are:
    /// - `input_data`: a reader for the serialization of the RLN zkSNARK proof concatenated with a serialization of the circuit output values and the signal information, i.e. `[ proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> | nullifier<32> | rln_identifier<32> | signal_len<8> | signal<var> ]`
    /// - `roots_data`: a reader for the serialization of a vector of roots, i.e. `[ number_of_roots<8> | root_1<32> | ... | root_n<32> ]` (number_of_roots is a uint64 in little-endian, roots are serialized using `rln::utils::fr_to_bytes_le`)
    /// - `input_data`: a reader for the serialization of the RLN zkSNARK proof concatenated with a serialization of the circuit output values and the signal information, i.e. `[ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> | signal_len<8> | signal<var> ]`
    /// - `roots_data`: a reader for the serialization of a vector of roots, i.e. `[ number_of_roots<8> | root_1<32> | ... | root_n<32> ]` (number_of_roots is a uint64 in little-endian, roots are serialized using `rln::utils::fr_to_bytes_le`)
    ///
    /// The function returns true if the zkSNARK proof is valid with respect to the provided circuit output values, signal and roots. Returns false otherwise.
    ///
    /// Note that contrary to [`verify_rln_proof`](crate::public::RLN::verify_rln_proof), this function does not check if the internal Merkle tree root corresponds to the root provided as input, but rather checks if the root provided as input in `input_data` corresponds to one of the roots serialized in `roots_data`.
    ///
    /// If `roots_data` contains no root (is empty), root validation is skipped and the proof will be correctly verified only if the other proof values are valid (i.e., zk-proof, signal, x-coordinate, RLN identifier)
    ///
    /// Example
    /// ```
    /// // proof_data is computed as in the example code snippet provided for rln::public::RLN::generate_rln_proof
@@ -864,7 +1023,7 @@ impl RLN<'_> {
    /// roots_serialized.append(&mut fr_to_bytes_le(&root));
    /// roots_buffer = Cursor::new(roots_serialized.clone());
    /// let verified = rln
    ///     .verify_with_roots(&mut input_buffer.clone(), &mut roots_buffer)
    ///     .verify_with_roots(&mut input_buffer, &mut roots_buffer)
    ///     .unwrap();
    ///
    /// assert!(verified);
@@ -967,7 +1126,7 @@ impl RLN<'_> {
    /// Generated credentials are compatible with [Semaphore](https://semaphore.appliedzkp.org/docs/guides/identities)'s credentials.
    ///
    /// Output values are:
    /// - `output_data`: a writer receiving the serialization of the identity tapdoor, identity nullifier, identity secret and identity commitment (serialization done with `rln::utils::fr_to_bytes_le`)
    /// - `output_data`: a writer receiving the serialization of the identity trapdoor, identity nullifier, identity secret and identity commitment (serialization done with `rln::utils::fr_to_bytes_le`)
    ///
    /// Example
    /// ```
@@ -1042,7 +1201,7 @@ impl RLN<'_> {
    /// - `input_data`: a reader for the byte vector containing the seed
    ///
    /// Output values are:
    /// - `output_data`: a writer receiving the serialization of the identity tapdoor, identity nullifier, identity secret and identity commitment (serialization done with `rln::utils::fr_to_bytes_le`)
    /// - `output_data`: a writer receiving the serialization of the identity trapdoor, identity nullifier, identity secret and identity commitment (serialization done with `rln::utils::fr_to_bytes_le`)
    ///
    /// Example
    /// ```
@@ -1076,10 +1235,12 @@ impl RLN<'_> {
        Ok(())
    }

    /// Recovers the identity secret from two sets of proof values computed for the same secret in the same epoch.
    /// Recovers the identity secret from two sets of proof values computed for the same secret in the same epoch with the same rln identifier.
    ///
    /// Input values are:
    /// - `input_proof_data_1`: a reader for the serialization of a RLN zkSNARK proof concatenated with a serialization of the circuit output values and -optionally- the signal information, i.e. either `[ proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> | nullifier<32> | rln_identifier<32> ]` or `[ proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> | nullifier<32> | rln_identifier<32> | signal_len<8> | signal<var> ]` (to maintain compatibility with both output of [`generate_rln_proof`](crate::public::RLN::generate_rln_proof) and input of [`verify_rln_proof`](crate::public::RLN::verify_rln_proof))
    /// - `input_proof_data_1`: a reader for the serialization of a RLN zkSNARK proof concatenated with a serialization of the circuit output values and -optionally- the signal information,
    ///   i.e. either `[ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]`
    ///   or `[ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> | signal_len<8> | signal<var> ]` (to maintain compatibility with both output of [`generate_rln_proof`](crate::public::RLN::generate_rln_proof) and input of [`verify_rln_proof`](crate::public::RLN::verify_rln_proof))
    /// - `input_proof_data_2`: same as `input_proof_data_1`
    ///
    /// Output values are:
@@ -1087,7 +1248,7 @@ impl RLN<'_> {
    ///
    /// Example
    /// ```
    /// // identity_secret_hash, proof_data_1 and proof_data_2 are computed as in the example code snippet provided for rln::public::RLN::generate_rln_proof using same identity secret and epoch (but not necessarily same signal)
    /// // identity_secret_hash, proof_data_1 and proof_data_2 are computed as in the example code snippet provided for rln::public::RLN::generate_rln_proof using same identity secret, epoch and rln identifier (but not necessarily same signal)
    ///
    /// let mut input_proof_data_1 = Cursor::new(proof_data_1);
    /// let mut input_proof_data_2 = Cursor::new(proof_data_2);
@@ -1114,7 +1275,7 @@ impl RLN<'_> {
        mut input_proof_data_2: R,
        mut output_data: W,
    ) -> Result<()> {
        // We deserialize the two proofs, and we get the corresponding RLNProofValues objects
        let mut serialized: Vec<u8> = Vec::new();
        input_proof_data_1.read_to_end(&mut serialized)?;
        // We skip deserialization of the zk-proof at the beginning
@@ -1127,10 +1288,10 @@ impl RLN<'_> {
        let (proof_values_2, _) = deserialize_proof_values(&serialized[128..]);
        let external_nullifier_2 = proof_values_2.external_nullifier;

        // We continue only if the proof values are for the same epoch
        // The idea is that proof values that go as input to this function are verified first (with zk-proof verify), hence ensuring validity of epoch and other fields.
        // We continue only if the proof values are for the same external nullifier (which includes epoch and rln identifier)
        // The idea is that proof values that go as input to this function are verified first (with zk-proof verify), hence ensuring validity of external nullifier and other fields.
        // Only in case all fields are valid, an external_nullifier for the message will be stored (otherwise signal/proof will be simply discarded)
        // If the nullifier matches one already seen, we can recover the identity secret.
        if external_nullifier_1 == external_nullifier_2 {
            // We extract the two shares
            let share1 = (proof_values_1.x, proof_values_1.y);
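            // In RLN the y-share lies on a degree-1 polynomial
            // y = identity_secret + a1 * x, so two distinct shares (x1, y1) and
            // (x2, y2) under the same external nullifier determine the line.
            // A field-arithmetic sketch of the recovery (assuming shares
            // extracted as above):
            //
            //     let (x1, y1) = share1;
            //     let (x2, y2) = share2;
            //     let a1 = (y1 - y2) / (x1 - x2);      // slope of the line
            //     let recovered_secret = y1 - a1 * x1; // intercept = identity_secret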
@@ -1150,12 +1311,13 @@ impl RLN<'_> {
        Ok(())
    }

    /// Returns the serialization of a [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) populated from the identity secret, the Merkle tree index, the epoch and signal.
    /// Returns the serialization of a [`RLNWitnessInput`] populated from the identity secret, the Merkle tree index, the user message limit, the message id, the external nullifier (which includes epoch and rln identifier) and signal.
    ///
    /// Input values are:
    /// - `input_data`: a reader for the serialization of `[ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]`
    /// - `input_data`: a reader for the serialization of `[ identity_secret<32> | id_index<8> | user_message_limit<32> | message_id<32> | external_nullifier<32> | signal_len<8> | signal<var> ]`
    ///
    /// The function returns the corresponding [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object serialized using [`rln::protocol::serialize_witness`](crate::protocol::serialize_witness).
    /// The function returns the corresponding [`RLNWitnessInput`] object serialized using [`rln::protocol::serialize_witness`](crate::protocol::serialize_witness).
    #[cfg(not(feature = "stateless"))]
    pub fn get_serialized_rln_witness<R: Read>(&mut self, mut input_data: R) -> Result<Vec<u8>> {
        // We read the input RLN witness and we serialize it
        let mut witness_byte: Vec<u8> = Vec::new();
@@ -1165,32 +1327,53 @@ impl RLN<'_> {
        serialize_witness(&rln_witness)
    }

    /// Converts a byte serialization of a [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object to the corresponding JSON serialization.
    /// Converts a byte serialization of a [`RLNWitnessInput`] object to the corresponding JSON serialization.
    ///
    /// Input values are:
    /// - `serialized_witness`: the byte serialization of a [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object (serialization done with [`rln::protocol::serialize_witness`](crate::protocol::serialize_witness)).
    /// - `serialized_witness`: the byte serialization of a [`RLNWitnessInput`] object (serialization done with [`rln::protocol::serialize_witness`](crate::protocol::serialize_witness)).
    ///
    /// The function returns the corresponding JSON encoding of the input [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object.
    /// The function returns the corresponding JSON encoding of the input [`RLNWitnessInput`] object.
    pub fn get_rln_witness_json(&mut self, serialized_witness: &[u8]) -> Result<serde_json::Value> {
        let (rln_witness, _) = deserialize_witness(serialized_witness)?;
        get_json_inputs(&rln_witness)
        rln_witness_to_json(&rln_witness)
    }

    /// Converts a byte serialization of a [`RLNWitnessInput`] object to the corresponding JSON serialization.
    /// Before serialization the data will be translated into big int for further calculation in the witness calculator.
    ///
    /// Input values are:
    /// - `serialized_witness`: the byte serialization of a [`RLNWitnessInput`] object (serialization done with [`rln::protocol::serialize_witness`](crate::protocol::serialize_witness)).
    ///
    /// The function returns the corresponding JSON encoding of the input [`RLNWitnessInput`] object.
    pub fn get_rln_witness_bigint_json(
        &mut self,
        serialized_witness: &[u8],
    ) -> Result<serde_json::Value> {
        let (rln_witness, _) = deserialize_witness(serialized_witness)?;
        rln_witness_to_bigint_json(&rln_witness)
    }

    /// Closes the connection to the Merkle tree database.
    /// This function should be called before the RLN object is dropped.
    /// If not called, the connection will be closed when the RLN object is dropped.
    /// This improves robustness of the tree.
    #[cfg(not(feature = "stateless"))]
    pub fn flush(&mut self) -> Result<()> {
        self.tree.close_db_connection()
    }
}

#[cfg(not(target_arch = "wasm32"))]
impl Default for RLN<'_> {
impl Default for RLN {
    fn default() -> Self {
        let tree_height = TEST_TREE_HEIGHT;
        let buffer = Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
        Self::new(tree_height, buffer).unwrap()
        #[cfg(not(feature = "stateless"))]
        {
            let tree_height = TEST_TREE_HEIGHT;
            let buffer = Cursor::new(json!({}).to_string());
            Self::new(tree_height, buffer).unwrap()
        }
        #[cfg(feature = "stateless")]
        Self::new().unwrap()
    }
}

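// End-to-end sketch of the stateless flow introduced above, assuming a
// `serialized_witness` produced with rln::protocol::serialize_witness
// (hypothetical values):
//
//     let mut rln = RLN::new()?;
//     let mut proof_out = Cursor::new(Vec::<u8>::new());
//     rln.generate_rln_proof_with_witness(
//         &mut Cursor::new(serialized_witness),
//         &mut proof_out,
//     )?;
//     // proof_out now holds [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]
//     let verified = rln.verify(&mut Cursor::new(proof_out.into_inner()))?;
//     assert!(verified);
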
277
rln/src/utils.rs
@@ -1,22 +1,26 @@
// This crate provides cross-module useful utilities (mainly type conversions) not necessarily specific to RLN

use crate::circuit::Fr;
use ark_ff::PrimeField;
use color_eyre::{Report, Result};
use num_bigint::{BigInt, BigUint};
use num_traits::Num;
use std::iter::Extend;
use serde_json::json;
use std::io::Cursor;

use crate::circuit::Fr;

#[inline(always)]
pub fn to_bigint(el: &Fr) -> Result<BigInt> {
    let res: BigUint = (*el).try_into()?;
    Ok(res.into())
    Ok(BigUint::from(*el).into())
}

#[inline(always)]
pub fn fr_byte_size() -> usize {
    let mbs = <Fr as PrimeField>::MODULUS_BIT_SIZE;
    ((mbs + 64 - (mbs % 64)) / 8) as usize
}

#[inline(always)]
pub fn str_to_fr(input: &str, radix: u32) -> Result<Fr> {
    if !(radix == 10 || radix == 16) {
        return Err(Report::msg("wrong radix"));
@@ -28,13 +32,14 @@ pub fn str_to_fr(input: &str, radix: u32) -> Result<Fr> {
    input_clean = input_clean.trim().to_string();

    if radix == 10 {
        Ok(BigUint::from_str_radix(&input_clean, radix)?.try_into()?)
        Ok(BigUint::from_str_radix(&input_clean, radix)?.into())
    } else {
        input_clean = input_clean.replace("0x", "");
        Ok(BigUint::from_str_radix(&input_clean, radix)?.try_into()?)
        Ok(BigUint::from_str_radix(&input_clean, radix)?.into())
    }
}

#[inline(always)]
pub fn bytes_le_to_fr(input: &[u8]) -> (Fr, usize) {
    let el_size = fr_byte_size();
    (
@@ -43,77 +48,50 @@ pub fn bytes_le_to_fr(input: &[u8]) -> (Fr, usize) {
    )
}

pub fn bytes_be_to_fr(input: &[u8]) -> (Fr, usize) {
    let el_size = fr_byte_size();
    (
        Fr::from(BigUint::from_bytes_be(&input[0..el_size])),
        el_size,
    )
}

#[inline(always)]
pub fn fr_to_bytes_le(input: &Fr) -> Vec<u8> {
    let input_biguint: BigUint = (*input).into();
    let mut res = input_biguint.to_bytes_le();
    // BigUint conversion ignores most significant zero bytes. We restore them otherwise serialization will fail (length % 8 != 0)
    while res.len() != fr_byte_size() {
        res.push(0);
    }
    res
}

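// Round-trip sketch for the little-endian helpers above: the zero-padding to
// fr_byte_size() (32 bytes for the BN254 scalar field) makes the encoding
// fixed-width, so bytes_le_to_fr can consume exactly el_size bytes:
//
//     let el = Fr::from(42u64);
//     let bytes = fr_to_bytes_le(&el);
//     assert_eq!(bytes.len(), fr_byte_size());
//     let (decoded, read) = bytes_le_to_fr(&bytes);
//     assert_eq!(decoded, el);
//     assert_eq!(read, fr_byte_size());
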
pub fn fr_to_bytes_be(input: &Fr) -> Vec<u8> {
    let input_biguint: BigUint = (*input).into();
    let mut res = input_biguint.to_bytes_be();
    // BigUint conversion ignores most significant zero bytes. We restore them otherwise serialization might fail
    // Fr elements are stored using 64-bit limbs
    while res.len() != fr_byte_size() {
        res.insert(0, 0);
    }
    res.resize(fr_byte_size(), 0);
    res
}

#[inline(always)]
pub fn vec_fr_to_bytes_le(input: &[Fr]) -> Result<Vec<u8>> {
    let mut bytes: Vec<u8> = Vec::new();
    // We store the vector length
    bytes.extend(u64::try_from(input.len())?.to_le_bytes().to_vec());
    // Calculate capacity for Vec:
    // - 8 bytes for normalized vector length (usize)
    // - each Fr element requires fr_byte_size() bytes (typically 32 bytes)
    let mut bytes = Vec::with_capacity(8 + input.len() * fr_byte_size());

    // We store the vector length
    bytes.extend_from_slice(&normalize_usize(input.len()));

    // We store each element
    input.iter().for_each(|el| bytes.extend(fr_to_bytes_le(el)));

    Ok(bytes)
}

pub fn vec_fr_to_bytes_be(input: &[Fr]) -> Result<Vec<u8>> {
    let mut bytes: Vec<u8> = Vec::new();
    // We store the vector length
    bytes.extend(u64::try_from(input.len())?.to_be_bytes().to_vec());

    // We store each element
    input.iter().for_each(|el| bytes.extend(fr_to_bytes_be(el)));
    for el in input {
        bytes.extend_from_slice(&fr_to_bytes_le(el));
    }

    Ok(bytes)
}

#[inline(always)]
pub fn vec_u8_to_bytes_le(input: &[u8]) -> Result<Vec<u8>> {
    let mut bytes: Vec<u8> = Vec::new();
    // We store the vector length
    bytes.extend(u64::try_from(input.len())?.to_le_bytes().to_vec());
    // Calculate capacity for Vec:
    // - 8 bytes for normalized vector length (usize)
    // - variable length input data
    let mut bytes = Vec::with_capacity(8 + input.len());

    bytes.extend(input);

    Ok(bytes)
}

pub fn vec_u8_to_bytes_be(input: Vec<u8>) -> Result<Vec<u8>> {
    let mut bytes: Vec<u8> = Vec::new();
    // We store the vector length
    bytes.extend(u64::try_from(input.len())?.to_be_bytes().to_vec());

    bytes.extend(input);
    // We store the vector length
    bytes.extend_from_slice(&normalize_usize(input.len()));

    // We store the input
    bytes.extend_from_slice(input);

    Ok(bytes)
}

#[inline(always)]
pub fn bytes_le_to_vec_u8(input: &[u8]) -> Result<(Vec<u8>, usize)> {
    let mut read: usize = 0;

@@ -126,19 +104,7 @@ pub fn bytes_le_to_vec_u8(input: &[u8]) -> Result<(Vec<u8>, usize)> {
    Ok((res, read))
}

pub fn bytes_be_to_vec_u8(input: &[u8]) -> Result<(Vec<u8>, usize)> {
    let mut read: usize = 0;

    let len = usize::try_from(u64::from_be_bytes(input[0..8].try_into()?))?;
    read += 8;

    let res = input[8..8 + len].to_vec();

    read += res.len();

    Ok((res, read))
}

#[inline(always)]
pub fn bytes_le_to_vec_fr(input: &[u8]) -> Result<(Vec<Fr>, usize)> {
    let mut read: usize = 0;
    let mut res: Vec<Fr> = Vec::new();
@@ -156,159 +122,32 @@ pub fn bytes_le_to_vec_fr(input: &[u8]) -> Result<(Vec<Fr>, usize)> {
    Ok((res, read))
}

pub fn bytes_be_to_vec_fr(input: &[u8]) -> Result<(Vec<Fr>, usize)> {
    let mut read: usize = 0;
    let mut res: Vec<Fr> = Vec::new();

    let len = usize::try_from(u64::from_be_bytes(input[0..8].try_into()?))?;
    read += 8;

    let el_size = fr_byte_size();
    for i in 0..len {
        let (curr_el, _) = bytes_be_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)]);
        res.push(curr_el);
        read += el_size;
    }

    Ok((res, read))
}

pub fn normalize_usize(input: usize) -> Vec<u8> {
    let mut normalized_usize = input.to_le_bytes().to_vec();
    normalized_usize.resize(8, 0);
    normalized_usize
}

/* Old conversion utilities between different libraries data types

// Conversion Utilities between poseidon-rs Field and arkworks Fr (in order to call directly poseidon-rs' poseidon_hash)

use ff::{PrimeField as _, PrimeFieldRepr as _};
use poseidon_rs::Fr as PosFr;

pub fn fr_to_posfr(value: Fr) -> PosFr {
    let mut bytes = [0_u8; 32];
    let byte_vec = value.into_repr().to_bytes_be();
    bytes.copy_from_slice(&byte_vec[..]);
    let mut repr = <PosFr as ff::PrimeField>::Repr::default();
    repr.read_be(&bytes[..])
        .expect("read from correctly sized slice always succeeds");
    PosFr::from_repr(repr).expect("value is always in range")
}

pub fn posfr_to_fr(value: PosFr) -> Fr {
    let mut bytes = [0u8; 32];
    value
        .into_repr()
        .write_be(&mut bytes[..])
        .expect("write to correctly sized slice always succeeds");
    Fr::from_be_bytes_mod_order(&bytes)
}


// Conversion Utilities between semaphore-rs Field and arkworks Fr

use semaphore::Field;

pub fn to_fr(el: &Field) -> Fr {
    Fr::try_from(*el).unwrap()
}

pub fn to_field(el: &Fr) -> Field {
    (*el).try_into().unwrap()
}

pub fn vec_to_fr(v: &[Field]) -> Vec<Fr> {
    v.iter().map(|el| to_fr(el)).collect()
}

pub fn vec_to_field(v: &[Fr]) -> Vec<Field> {
    v.iter().map(|el| to_field(el)).collect()
}

pub fn vec_fr_to_field(input: &[Fr]) -> Vec<Field> {
    input.iter().map(|el| to_field(el)).collect()
}

pub fn vec_field_to_fr(input: &[Field]) -> Vec<Fr> {
    input.iter().map(|el| to_fr(el)).collect()
}

pub fn str_to_field(input: String, radix: i32) -> Field {
    assert!((radix == 10) || (radix == 16));

    // We remove any quote present and we trim
    let single_quote: char = '\"';
    let input_clean = input.replace(single_quote, "");
    let input_clean = input_clean.trim();

    if radix == 10 {
        Field::from_str(&format!(
            "{:01$x}",
            BigUint::from_str(input_clean).unwrap(),
            64
        ))
        .unwrap()
#[inline(always)]
pub fn bytes_le_to_vec_usize(input: &[u8]) -> Result<Vec<usize>> {
    let nof_elem = usize::try_from(u64::from_le_bytes(input[0..8].try_into()?))?;
    if nof_elem == 0 {
        Ok(vec![])
    } else {
        let input_clean = input_clean.replace("0x", "");
        Field::from_str(&format!("{:0>64}", &input_clean)).unwrap()
        let elements: Vec<usize> = input[8..]
            .chunks(8)
            .map(|ch| usize::from_le_bytes(ch[0..8].try_into().unwrap()))
            .collect();
        Ok(elements)
    }
}

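// Usage sketch for bytes_le_to_vec_usize, mirroring the wire format
// [ length<8> | element_1<8> | ... | element_n<8> ] (all little-endian):
//
//     let mut bytes = normalize_usize(2).to_vec();
//     bytes.extend_from_slice(&normalize_usize(7));
//     bytes.extend_from_slice(&normalize_usize(11));
//     assert_eq!(bytes_le_to_vec_usize(&bytes).unwrap(), vec![7, 11]);
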
pub fn bytes_le_to_field(input: &[u8]) -> (Field, usize) {
    let (fr_el, read) = bytes_le_to_fr(input);
    (to_field(&fr_el), read)
/// Normalizes a `usize` into an 8-byte array, ensuring consistency across architectures.
/// On 32-bit systems, the result is zero-padded to 8 bytes.
/// On 64-bit systems, it directly represents the `usize` value.
#[inline(always)]
pub fn normalize_usize(input: usize) -> [u8; 8] {
    let mut bytes = [0u8; 8];
    let input_bytes = input.to_le_bytes();
    bytes[..input_bytes.len()].copy_from_slice(&input_bytes);
    bytes
}

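// Usage sketch: the fixed 8-byte encoding keeps serialized index fields
// identical across 32-bit and 64-bit targets (round-trip shown for a 64-bit
// target, where usize::from_le_bytes takes [u8; 8]):
//
//     let bytes = normalize_usize(5);
//     assert_eq!(bytes, [5, 0, 0, 0, 0, 0, 0, 0]);
//     assert_eq!(usize::from_le_bytes(bytes), 5);
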
pub fn bytes_be_to_field(input: &[u8]) -> (Field, usize) {
|
||||
let (fr_el, read) = bytes_be_to_fr(input);
|
||||
(to_field(&fr_el), read)
|
||||
#[inline(always)] // using for test
|
||||
pub fn generate_input_buffer() -> Cursor<String> {
|
||||
Cursor::new(json!({}).to_string())
|
||||
}
|
||||

pub fn field_to_bytes_le(input: &Field) -> Vec<u8> {
    fr_to_bytes_le(&to_fr(input))
}

pub fn field_to_bytes_be(input: &Field) -> Vec<u8> {
    fr_to_bytes_be(&to_fr(input))
}

pub fn vec_field_to_bytes_le(input: &[Field]) -> Vec<u8> {
    vec_fr_to_bytes_le(&vec_field_to_fr(input))
}

pub fn vec_field_to_bytes_be(input: &[Field]) -> Vec<u8> {
    vec_fr_to_bytes_be(&vec_field_to_fr(input))
}

pub fn bytes_le_to_vec_field(input: &[u8]) -> (Vec<Field>, usize) {
    let (vec_fr, read) = bytes_le_to_vec_fr(input);
    (vec_fr_to_field(&vec_fr), read)
}

pub fn bytes_be_to_vec_field(input: &[u8]) -> (Vec<Field>, usize) {
    let (vec_fr, read) = bytes_be_to_vec_fr(input);
    (vec_fr_to_field(&vec_fr), read)
}

// Arithmetic over Field elements (wrapped over arkworks algebra crate)

pub fn add(a: &Field, b: &Field) -> Field {
    to_field(&(to_fr(a) + to_fr(b)))
}

pub fn mul(a: &Field, b: &Field) -> Field {
    to_field(&(to_fr(a) * to_fr(b)))
}

pub fn div(a: &Field, b: &Field) -> Field {
    to_field(&(to_fr(a) / to_fr(b)))
}

pub fn inv(a: &Field) -> Field {
    to_field(&(Fr::from(1) / to_fr(a)))
}
*/
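
For reference, the wrapped operations satisfy the usual field identities; a sketch assuming the helpers above are in scope:

```rust
#[test]
fn field_arithmetic_sketch() {
    // x * inv(x) == 1 and x / x == 1 for non-zero x.
    let x = to_field(&Fr::from(7u64));
    let one = to_field(&Fr::from(1u64));
    assert_eq!(mul(&x, &inv(&x)), one);
    assert_eq!(div(&x, &x), one);
}
```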

rln/tests/ffi.rs (1322 changes) — file diff suppressed because it is too large.
@@ -4,48 +4,26 @@

#[cfg(test)]
mod test {
    use rln::circuit::*;
    use rln::hashers::PoseidonHash;
    use rln::hashers::{poseidon_hash, PoseidonHash};
    use rln::{circuit::*, poseidon_tree::PoseidonTree};
    use utils::{FullMerkleTree, OptimalMerkleTree, ZerokitMerkleProof, ZerokitMerkleTree};

    #[test]
    /// A basic performance comparison between the two supported Merkle Tree implementations
    fn test_zerokit_merkle_implementations_performances() {
        use std::time::{Duration, Instant};

        let tree_height = 20;
    // This test checks correctness of `FullMerkleTree` and `OptimalMerkleTree` with the Poseidon hash
    fn test_zerokit_merkle_implementations() {
        let sample_size = 100;

        let leaves: Vec<Fr> = (0..sample_size).map(|s| Fr::from(s)).collect();

        let mut gen_time_full: u128 = 0;
        let mut upd_time_full: u128 = 0;
        let mut gen_time_opt: u128 = 0;
        let mut upd_time_opt: u128 = 0;

        for _ in 0..sample_size.try_into().unwrap() {
            let now = Instant::now();
            FullMerkleTree::<PoseidonHash>::default(tree_height).unwrap();
            gen_time_full += now.elapsed().as_nanos();

            let now = Instant::now();
            OptimalMerkleTree::<PoseidonHash>::default(tree_height).unwrap();
            gen_time_opt += now.elapsed().as_nanos();
        }

        let mut tree_full = FullMerkleTree::<PoseidonHash>::default(tree_height).unwrap();
        let mut tree_opt = OptimalMerkleTree::<PoseidonHash>::default(tree_height).unwrap();
        let mut tree_full = FullMerkleTree::<PoseidonHash>::default(TEST_TREE_HEIGHT).unwrap();
        let mut tree_opt = OptimalMerkleTree::<PoseidonHash>::default(TEST_TREE_HEIGHT).unwrap();

        for i in 0..sample_size.try_into().unwrap() {
            let now = Instant::now();
            tree_full.set(i, leaves[i]).unwrap();
            upd_time_full += now.elapsed().as_nanos();
            let proof = tree_full.proof(i).expect("index should be set");
            assert_eq!(proof.leaf_index(), i);

            let now = Instant::now();
            tree_opt.set(i, leaves[i]).unwrap();
            upd_time_opt += now.elapsed().as_nanos();
            assert_eq!(tree_opt.root(), tree_full.root());
            let proof = tree_opt.proof(i).expect("index should be set");
            assert_eq!(proof.leaf_index(), i);
        }

@@ -55,26 +33,110 @@ mod test {
        let tree_opt_root = tree_opt.root();

        assert_eq!(tree_full_root, tree_opt_root);
    }

        println!(" Average tree generation time:");
        println!(
            " - Full Merkle Tree: {:?}",
            Duration::from_nanos((gen_time_full / sample_size).try_into().unwrap())
        );
        println!(
            " - Optimal Merkle Tree: {:?}",
            Duration::from_nanos((gen_time_opt / sample_size).try_into().unwrap())
        );

    #[test]
    fn test_subtree_root() {
        const DEPTH: usize = 3;
        const LEAVES_LEN: usize = 8;

        let mut tree = PoseidonTree::default(DEPTH).unwrap();
        let leaves: Vec<Fr> = (0..LEAVES_LEN).map(|s| Fr::from(s as i32)).collect();
        let _ = tree.set_range(0, leaves.into_iter());

        for i in 0..LEAVES_LEN {
            // check leaves
            assert_eq!(
                tree.get(i).unwrap(),
                tree.get_subtree_root(DEPTH, i).unwrap()
            );
            // check root
            assert_eq!(tree.root(), tree.get_subtree_root(0, i).unwrap());
        }

        // check intermediate nodes
        for n in (1..=DEPTH).rev() {
            for i in (0..(1 << n)).step_by(2) {
                let idx_l = i * (1 << (DEPTH - n));
                let idx_r = (i + 1) * (1 << (DEPTH - n));
                let idx_sr = idx_l;

                let prev_l = tree.get_subtree_root(n, idx_l).unwrap();
                let prev_r = tree.get_subtree_root(n, idx_r).unwrap();
                let subroot = tree.get_subtree_root(n - 1, idx_sr).unwrap();

                assert_eq!(poseidon_hash(&[prev_l, prev_r]), subroot);
            }
        }
    }

    #[test]
    fn test_get_empty_leaves_indices() {
        let depth = 4;
        let nof_leaves: usize = 1 << (depth - 1);

        let mut tree = PoseidonTree::default(depth).unwrap();
        let leaves: Vec<Fr> = (0..nof_leaves).map(|s| Fr::from(s as i32)).collect();

        // check set_range
        let _ = tree.set_range(0, leaves.clone().into_iter());
        assert!(tree.get_empty_leaves_indices().is_empty());

        let mut vec_idxs = Vec::new();
        // check delete function
        for i in 0..nof_leaves {
            vec_idxs.push(i);
            let _ = tree.delete(i);
            assert_eq!(tree.get_empty_leaves_indices(), vec_idxs);
        }
        // check set function
        for i in (0..nof_leaves).rev() {
            vec_idxs.pop();
            let _ = tree.set(i, leaves[i]);
            assert_eq!(tree.get_empty_leaves_indices(), vec_idxs);
        }

        // check remove_indices_and_set_leaves inside override_range function
        assert!(tree.get_empty_leaves_indices().is_empty());
        let leaves_2: Vec<Fr> = (0..2).map(|s| Fr::from(s as i32)).collect();
        tree.override_range(0, leaves_2.clone().into_iter(), [0, 1, 2, 3].into_iter())
            .unwrap();
        assert_eq!(tree.get_empty_leaves_indices(), vec![2, 3]);

        // check remove_indices inside override_range function
        tree.override_range(0, [].into_iter(), [0, 1].into_iter())
            .unwrap();
        assert_eq!(tree.get_empty_leaves_indices(), vec![0, 1, 2, 3]);

        // check set_range inside override_range function
        tree.override_range(0, leaves_2.clone().into_iter(), [].into_iter())
            .unwrap();
        assert_eq!(tree.get_empty_leaves_indices(), vec![2, 3]);

        let leaves_4: Vec<Fr> = (0..4).map(|s| Fr::from(s as i32)).collect();
        // check if the indexes for write and delete are the same
        tree.override_range(0, leaves_4.clone().into_iter(), [0, 1, 2, 3].into_iter())
            .unwrap();
        assert!(tree.get_empty_leaves_indices().is_empty());

        // check if indexes for deletion are before indexes for overwriting
        tree.override_range(4, leaves_4.clone().into_iter(), [0, 1, 2, 3].into_iter())
            .unwrap();
        // The result looks like this because, in pmtree's set_range function, the
        // next_index value is increased not by the number of elements to insert
        // but by the size of the union of the indices being deleted and inserted.
        assert_eq!(
            tree.get_empty_leaves_indices(),
            vec![0, 1, 2, 3, 8, 9, 10, 11]
        );

        println!(" Average update_next execution time:");
        println!(
            " - Full Merkle Tree: {:?}",
            Duration::from_nanos((upd_time_full / sample_size).try_into().unwrap())
        );
        println!(
            " - Optimal Merkle Tree: {:?}",
            Duration::from_nanos((upd_time_opt / sample_size).try_into().unwrap())
        );

        // check if the indices for write and delete do not overlap completely
        tree.override_range(2, leaves_4.clone().into_iter(), [0, 1, 2, 3].into_iter())
            .unwrap();
        // The result looks like this because, in pmtree's set_range function, the
        // next_index value is increased not by the number of elements to insert
        // but by the size of the union of the indices being deleted and inserted;
        // plus we've already set leaves 6 and 7 in the previous step.
        assert_eq!(tree.get_empty_leaves_indices(), vec![0, 1, 8, 9, 10, 11]);
    }
}
@@ -1,10 +1,8 @@

#[cfg(test)]
mod test {
    use ark_ff::BigInt;
    use rln::circuit::{
        circom_from_folder, vk_from_folder, zkey_from_folder, Fr, TEST_RESOURCES_FOLDER,
        TEST_TREE_HEIGHT,
    };
    use rln::circuit::{graph_from_folder, zkey_from_folder};
    use rln::circuit::{Fr, TEST_TREE_HEIGHT};
    use rln::hashers::{hash_to_field, poseidon_hash};
    use rln::poseidon_tree::PoseidonTree;
    use rln::protocol::*;

@@ -13,65 +11,9 @@ mod test {

    type ConfigOf<T> = <T as ZerokitMerkleTree>::Config;

    // Input generated with https://github.com/oskarth/zk-kit/commit/b6a872f7160c7c14e10a0ea40acab99cbb23c9a8
    const WITNESS_JSON_20: &str = r#"
    {
        "externalNullifier": "21074405743803627666274838159589343934394162804826017440941339048886754734203",
        "identityPathIndex": [1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 0],
        "identitySecret": "2301650865650889795878889082892690584512243988708213561328369865554257051708",
        "messageId": "1",
        "pathElements": [
            "14082964758224722211945379872337797638951236517417253447686770846170014042825",
            "6628418579821163687428454604867534487917867918886059133241840211975892987309",
            "12745863228198753394445659605634840709296716381893463421165313830643281758511",
            "56118267389743063830320351452083247040583061493621478539311100137113963555",
            "3648731943306935051357703221473866306053186513730785325303257057776816073765",
            "10548621390442503192989374711060717107954536293658152583621924810330521179016",
            "11741160669079729961275351458682156164905457324981803454515784688429276743441",
            "17165464309215350864730477596846156251863702878546777829650812432906796008534",
            "18947162586829418653666557598416458949428989734998924978331450666032720066913",
            "8809427088917589399897132358419395928548406347152047718919154153577297139202",
            "6261460226929242970747566981077801929281729646713842579109271945192964422300",
            "13871468675790284383809887052382100311103716176061564908030808887079542722597",
            "10413964486611723004584705484327518190402370933255450052832412709168190985805",
            "3978387560092078849178760154060822400741873818692524912249877867958842934383",
            "14014915591348694328771517896715085647041518432952027841088176673715002508448",
            "17680675606519345547327984724173632294904524423937145835611954334756161077843",
            "17107175244885276119916848057745382329169223109661217238296871427531065458152",
            "18326186549441826262593357123467931475982067066825042001499291800252145875109",
            "7043961192177345916232559778383741091053414803377017307095275172896944935996",
            "2807630271073553218355393059254209097448243975722083008310815929736065268921"
        ],
        "userMessageLimit": "100",
        "x": "20645213238265527935869146898028115621427162613172918400241870500502509785943"
    }
    "#;

    #[test]
    // We test Merkle tree generation, proofs and verification
    fn test_merkle_proof() {
        let tree_height = TEST_TREE_HEIGHT;
        let leaf_index = 3;

        // generate identity
@@ -82,7 +24,7 @@ mod test {
        // generate merkle tree
        let default_leaf = Fr::from(0);
        let mut tree = PoseidonTree::new(
            tree_height,
            TEST_TREE_HEIGHT,
            default_leaf,
            ConfigOf::<PoseidonTree>::default(),
        )
@@ -92,141 +34,49 @@ mod test {
        // We check correct computation of the root
        let root = tree.root();

        if TEST_TREE_HEIGHT == 20 || TEST_TREE_HEIGHT == 32 {
            assert_eq!(
                root,
                BigInt([
                    4939322235247991215,
                    5110804094006647505,
                    4427606543677101242,
                    910933464535675827
                ])
                .into()
            );
        }
        assert_eq!(
            root,
            BigInt([
                4939322235247991215,
                5110804094006647505,
                4427606543677101242,
                910933464535675827
            ])
            .into()
        );

        let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
        let path_elements = merkle_proof.get_path_elements();
        let identity_path_index = merkle_proof.get_path_index();

        // We check correct computation of the path and indexes
        // These values refer to TEST_TREE_HEIGHT == 16
        let mut expected_path_elements = vec![
            str_to_fr("0x0000000000000000000000000000000000000000000000000000000000000000", 16).unwrap(),
            str_to_fr("0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864", 16).unwrap(),
            str_to_fr("0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1", 16).unwrap(),
            str_to_fr("0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238", 16).unwrap(),
            str_to_fr("0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a", 16).unwrap(),
            str_to_fr("0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55", 16).unwrap(),
            str_to_fr("0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78", 16).unwrap(),
            str_to_fr("0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d", 16).unwrap(),
            str_to_fr("0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61", 16).unwrap(),
            str_to_fr("0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747", 16).unwrap(),
            str_to_fr("0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2", 16).unwrap(),
            str_to_fr("0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636", 16).unwrap(),
            str_to_fr("0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a", 16).unwrap(),
            str_to_fr("0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0", 16).unwrap(),
            str_to_fr("0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c", 16).unwrap(),
        ];
        let expected_path_elements: Vec<Fr> = [
            "0x0000000000000000000000000000000000000000000000000000000000000000",
            "0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
            "0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
            "0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
            "0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
            "0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
            "0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
            "0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
            "0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
            "0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
            "0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
            "0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
            "0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
            "0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
            "0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
            "0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
            "0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
            "0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
            "0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
            "0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
        ]
        .map(|e| str_to_fr(e, 16).unwrap())
        .to_vec();

        let mut expected_identity_path_index: Vec<u8> =
            vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];

        // We add the remaining elements for the case TEST_TREE_HEIGHT = 20
        if TEST_TREE_HEIGHT == 20 {
            expected_path_elements.append(&mut vec![
                str_to_fr("0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92", 16).unwrap(),
                str_to_fr("0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323", 16).unwrap(),
                str_to_fr("0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992", 16).unwrap(),
                str_to_fr("0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f", 16).unwrap(),
            ]);
            expected_identity_path_index.append(&mut vec![0, 0, 0, 0]);
        }

        if TEST_TREE_HEIGHT == 20 {
            expected_path_elements.append(&mut vec![str_to_fr(
                "0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
                16,
            )
            .unwrap()]);
            expected_identity_path_index.append(&mut vec![0]);
        }
        let expected_identity_path_index: Vec<u8> =
            vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];

        assert_eq!(path_elements, expected_path_elements);
        assert_eq!(identity_path_index, expected_identity_path_index);
@@ -235,34 +85,8 @@ mod test {
        assert!(tree.verify(&rate_commitment, &merkle_proof).unwrap());
    }

    #[test]
    // We test a RLN proof generation and verification
    fn test_witness_from_json() {
        // We generate all relevant keys
        let proving_key = zkey_from_folder(TEST_RESOURCES_FOLDER).unwrap();
        let verification_key = vk_from_folder(TEST_RESOURCES_FOLDER).unwrap();
        let builder = circom_from_folder(TEST_RESOURCES_FOLDER).unwrap();

        // We compute witness from the json input example
        let witness_json = WITNESS_JSON_20;
        let rln_witness = rln_witness_from_json(witness_json).unwrap();

        // Let's generate a zkSNARK proof
        let proof = generate_proof(builder, &proving_key, &rln_witness).unwrap();
        let proof_values = proof_values_from_witness(&rln_witness).unwrap();

        // Let's verify the proof
        let verified = verify_proof(&verification_key, &proof, &proof_values);

        assert!(verified.unwrap());
    }

    #[test]
    // We test a RLN proof generation and verification
    fn test_end_to_end() {
        let tree_height = TEST_TREE_HEIGHT;
    fn get_test_witness() -> RLNWitnessInput {
        let leaf_index = 3;

        // Generate identity pair
        let (identity_secret_hash, id_commitment) = keygen();
        let user_message_limit = Fr::from(100);
@@ -271,7 +95,7 @@ mod test {
        //// generate merkle tree
        let default_leaf = Fr::from(0);
        let mut tree = PoseidonTree::new(
            tree_height,
            TEST_TREE_HEIGHT,
            default_leaf,
            ConfigOf::<PoseidonTree>::default(),
        )
@@ -288,7 +112,7 @@ mod test {
        let rln_identifier = hash_to_field(b"test-rln-identifier");
        let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);

        let rln_witness: RLNWitnessInput = rln_witness_from_values(
        rln_witness_from_values(
            identity_secret_hash,
            &merkle_proof,
            x,
@@ -296,17 +120,49 @@ mod test {
            user_message_limit,
            Fr::from(1),
        )
        .unwrap();
        .unwrap()
    }

    #[test]
    // We test a RLN proof generation and verification
    fn test_witness_from_json() {
        // We generate all relevant keys
        let proving_key = zkey_from_folder(TEST_RESOURCES_FOLDER).unwrap();
        let verification_key = vk_from_folder(TEST_RESOURCES_FOLDER).unwrap();
        let builder = circom_from_folder(TEST_RESOURCES_FOLDER).unwrap();
        let proving_key = zkey_from_folder();
        let verification_key = &proving_key.0.vk;
        let graph_data = graph_from_folder();
        // We compute witness from the json input
        let rln_witness = get_test_witness();
        let rln_witness_json = rln_witness_to_json(&rln_witness).unwrap();
        let rln_witness_deser = rln_witness_from_json(rln_witness_json).unwrap();
        assert_eq!(rln_witness_deser, rln_witness);

        // Let's generate a zkSNARK proof
        let proof = generate_proof(builder, &proving_key, &rln_witness).unwrap();
        let proof = generate_proof(&proving_key, &rln_witness_deser, &graph_data).unwrap();
        let proof_values = proof_values_from_witness(&rln_witness_deser).unwrap();

        let proof_values = proof_values_from_witness(&rln_witness).unwrap();
        // Let's verify the proof
        let verified = verify_proof(&verification_key, &proof, &proof_values);

        assert!(verified.unwrap());
    }

    #[test]
    // We test a RLN proof generation and verification
    fn test_end_to_end() {
        let rln_witness = get_test_witness();
        let rln_witness_json = rln_witness_to_json(&rln_witness).unwrap();
        let rln_witness_deser = rln_witness_from_json(rln_witness_json).unwrap();
        assert_eq!(rln_witness_deser, rln_witness);

        // We generate all relevant keys
        let proving_key = zkey_from_folder();
        let verification_key = &proving_key.0.vk;
        let graph_data = graph_from_folder();

        // Let's generate a zkSNARK proof
        let proof = generate_proof(&proving_key, &rln_witness_deser, &graph_data).unwrap();

        let proof_values = proof_values_from_witness(&rln_witness_deser).unwrap();

        // Let's verify the proof
        let success = verify_proof(&verification_key, &proof, &proof_values).unwrap();
@@ -316,11 +172,13 @@ mod test {

    #[test]
    fn test_witness_serialization() {
        // We test witness JSON serialization
        let rln_witness = get_test_witness();
        let rln_witness_json = rln_witness_to_json(&rln_witness).unwrap();
        let rln_witness_deser = rln_witness_from_json(rln_witness_json).unwrap();
        assert_eq!(rln_witness_deser, rln_witness);

        // We test witness serialization
        let witness_json: &str = WITNESS_JSON_20;

        let rln_witness = rln_witness_from_json(witness_json).unwrap();

        let ser = serialize_witness(&rln_witness).unwrap();
        let (deser, _) = deserialize_witness(&ser).unwrap();
        assert_eq!(rln_witness, deser);
@@ -1,61 +1,64 @@

#[cfg(test)]
mod test {
    use ark_ff::BigInt;
    #[cfg(not(feature = "stateless"))]
    use {
        ark_ff::BigInt,
        rln::{circuit::TEST_TREE_HEIGHT, protocol::compute_tree_root},
    };

    use ark_std::{rand::thread_rng, UniformRand};
    use rand::Rng;
    use rln::circuit::{Fr, TEST_RESOURCES_FOLDER, TEST_TREE_HEIGHT};
    use rln::circuit::Fr;
    use rln::hashers::{hash_to_field, poseidon_hash as utils_poseidon_hash, ROUND_PARAMS};
    use rln::protocol::{compute_tree_root, deserialize_identity_tuple};
    use rln::protocol::deserialize_identity_tuple;
    use rln::public::{hash as public_hash, poseidon_hash as public_poseidon_hash, RLN};
    use rln::utils::*;
    use serde_json::json;
    use std::io::Cursor;

    #[test]
    // This test is similar to the one in lib, but uses only the public API
    #[cfg(not(feature = "stateless"))]
    fn test_merkle_proof() {
        let tree_height = TEST_TREE_HEIGHT;
        let leaf_index = 3;
        let user_message_limit = 1;

        let input_buffer =
            Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
        let mut rln = RLN::new(tree_height, input_buffer).unwrap();
        let mut rln = RLN::new(TEST_TREE_HEIGHT, generate_input_buffer()).unwrap();

        // generate identity
        let identity_secret_hash = hash_to_field(b"test-merkle-proof");
        let id_commitment = utils_poseidon_hash(&vec![identity_secret_hash]);
        let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit.into()]);

        // check that the list of empty leaf indices is empty
        let mut buffer = Cursor::new(Vec::<u8>::new());
        rln.get_empty_leaves_indices(&mut buffer).unwrap();
        let idxs = bytes_le_to_vec_usize(&buffer.into_inner()).unwrap();
        assert!(idxs.is_empty());

        // We pass rate_commitment as Read buffer to RLN's set_leaf
        let mut buffer = Cursor::new(fr_to_bytes_le(&rate_commitment));
        rln.set_leaf(leaf_index, &mut buffer).unwrap();

        // check that leaves before leaf_index are set to zero
        let mut buffer = Cursor::new(Vec::<u8>::new());
        rln.get_empty_leaves_indices(&mut buffer).unwrap();
        let idxs = bytes_le_to_vec_usize(&buffer.into_inner()).unwrap();
        assert_eq!(idxs, [0, 1, 2]);

        // We check correct computation of the root
        let mut buffer = Cursor::new(Vec::<u8>::new());
        rln.get_root(&mut buffer).unwrap();
        let (root, _) = bytes_le_to_fr(&buffer.into_inner());

        if TEST_TREE_HEIGHT == 20 {
            assert_eq!(
                root,
                Fr::from(BigInt([
                    17110646155607829651,
                    5040045984242729823,
                    6965416728592533086,
                    2328960363755461975
                ]))
            );
        } else if TEST_TREE_HEIGHT == 32 {
            assert_eq!(
                root,
                str_to_fr(
                    "0x21947ffd0bce0c385f876e7c97d6a42eec5b1fe935aab2f01c1f8a8cbcc356d2",
                    16
                )
                .unwrap()
            );
        }
        assert_eq!(
            root,
            Fr::from(BigInt([
                17110646155607829651,
                5040045984242729823,
                6965416728592533086,
                2328960363755461975
            ]))
        );

        // We check correct computation of merkle proof
        let mut buffer = Cursor::new(Vec::<u8>::new());
@@ -66,126 +69,60 @@ mod test {
        let (identity_path_index, _) = bytes_le_to_vec_u8(&buffer_inner[read..].to_vec()).unwrap();

        // We check correct computation of the path and indexes
        let mut expected_path_elements = vec![
            str_to_fr("0x0000000000000000000000000000000000000000000000000000000000000000", 16).unwrap(),
            str_to_fr("0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864", 16).unwrap(),
            str_to_fr("0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1", 16).unwrap(),
            str_to_fr("0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238", 16).unwrap(),
            str_to_fr("0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a", 16).unwrap(),
            str_to_fr("0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55", 16).unwrap(),
            str_to_fr("0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78", 16).unwrap(),
            str_to_fr("0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d", 16).unwrap(),
            str_to_fr("0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61", 16).unwrap(),
            str_to_fr("0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747", 16).unwrap(),
            str_to_fr("0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2", 16).unwrap(),
            str_to_fr("0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636", 16).unwrap(),
            str_to_fr("0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a", 16).unwrap(),
            str_to_fr("0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0", 16).unwrap(),
            str_to_fr("0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c", 16).unwrap(),
        ];
        let expected_path_elements: Vec<Fr> = [
            "0x0000000000000000000000000000000000000000000000000000000000000000",
            "0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
            "0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
            "0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
            "0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
            "0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
            "0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
            "0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
            "0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
            "0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
            "0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
            "0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
            "0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
            "0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
            "0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
            "0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
            "0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
            "0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
            "0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
            "0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
        ]
        .map(|e| str_to_fr(e, 16).unwrap())
        .to_vec();

        let mut expected_identity_path_index: Vec<u8> =
            vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];

        // We add the remaining elements for the case TEST_TREE_HEIGHT = 20
        if TEST_TREE_HEIGHT == 20 || TEST_TREE_HEIGHT == 32 {
            expected_path_elements.append(&mut vec![
                str_to_fr("0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92", 16).unwrap(),
                str_to_fr("0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323", 16).unwrap(),
                str_to_fr("0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992", 16).unwrap(),
                str_to_fr("0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f", 16).unwrap(),
            ]);
            expected_identity_path_index.append(&mut vec![0, 0, 0, 0]);
        }

        if TEST_TREE_HEIGHT == 20 {
            expected_path_elements.append(&mut vec![str_to_fr(
                "0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
                16,
            )
            .unwrap()]);
            expected_identity_path_index.append(&mut vec![0]);
        }
        let expected_identity_path_index: Vec<u8> =
            vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];

        assert_eq!(path_elements, expected_path_elements);
        assert_eq!(identity_path_index, expected_identity_path_index);

        // check subtree root computation for leaf 0 at all corresponding nodes up to the root
        let l_idx = 0;
        for n in (1..=TEST_TREE_HEIGHT).rev() {
            let idx_l = l_idx * (1 << (TEST_TREE_HEIGHT - n));
            let idx_r = (l_idx + 1) * (1 << (TEST_TREE_HEIGHT - n));
            let idx_sr = idx_l;

            let mut buffer = Cursor::new(Vec::<u8>::new());
            rln.get_subtree_root(n, idx_l, &mut buffer).unwrap();
            let (prev_l, _) = bytes_le_to_fr(&buffer.into_inner());

            let mut buffer = Cursor::new(Vec::<u8>::new());
            rln.get_subtree_root(n, idx_r, &mut buffer).unwrap();
            let (prev_r, _) = bytes_le_to_fr(&buffer.into_inner());

            let mut buffer = Cursor::new(Vec::<u8>::new());
            rln.get_subtree_root(n - 1, idx_sr, &mut buffer).unwrap();
            let (subroot, _) = bytes_le_to_fr(&buffer.into_inner());

            let res = utils_poseidon_hash(&[prev_l, prev_r]);
            assert_eq!(res, subroot);
        }

        // We double check that the proof computed from the public API is correct
        let root_from_proof = compute_tree_root(
            &identity_secret_hash,
@@ -1,50 +0,0 @@

[package]
name = "semaphore-wrapper"
version = "0.3.0"
edition = "2021"
license = "MIT OR Apache-2.0"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[features]
default = []
dylib = [ "wasmer/dylib", "wasmer-engine-dylib", "wasmer-compiler-cranelift" ]

[dependencies]
ark-bn254 = { version = "0.3.0" }
ark-circom = { git = "https://github.com/gakonst/ark-circom", features = ["circom-2"], rev = "35ce5a9" }
ark-ec = { version = "0.3.0", default-features = false, features = ["parallel"] }
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", features = ["parallel"] }
ark-relations = { version = "0.3.0", default-features = false }
ark-std = { version = "0.3.0", default-features = false, features = ["parallel"] }
color-eyre = "0.6.2"
once_cell = "1.17.1"
rand = "0.8.5"
semaphore = { git = "https://github.com/worldcoin/semaphore-rs", rev = "ee658c2" }
ethers-core = { version = "2.0.10", default-features = false }
ruint = { version = "1.10.0", features = [ "serde", "num-bigint", "ark-ff" ] }
serde = "1.0"
thiserror = "1.0.39"
wasmer = { version = "2.3" }

[dev-dependencies]
rand_chacha = "0.3.1"
serde_json = "1.0.96"

[build-dependencies]
color-eyre = "0.6.2"
wasmer = { version = "2.3" }
wasmer-engine-dylib = { version = "2.3.0", optional = true }
wasmer-compiler-cranelift = { version = "3.3.0", optional = true }

[profile.release]
codegen-units = 1
lto = true
panic = "abort"
opt-level = 3

# Compilation profile for any non-workspace member.
# Dependencies are optimized, even in a dev build. This improves dev performance
# while having negligible impact on incremental build times.
[profile.dev.package."*"]
opt-level = 3
@@ -1,7 +0,0 @@

[tasks.build]
command = "cargo"
args = ["build", "--release"]

[tasks.test]
command = "cargo"
args = ["test", "--release"]
@@ -1,18 +0,0 @@

# Semaphore example package

This is basically a wrapper around/copy of
https://github.com/worldcoin/semaphore-rs to illustrate how e.g. the RLN
package can be structured.

The goal is also to provide a basic FFI around protocol.rs, which is currently
not in scope for that project.

See that project for more information.

## Build and Test

To build and test, run the following commands within the module folder

```bash
cargo make build
cargo make test
```
@@ -1,111 +0,0 @@

// Adapted from semaphore-rs/build.rs
use color_eyre::eyre::{eyre, Result};
use std::{
    path::{Component, Path, PathBuf},
    process::Command,
};

const ZKEY_FILE: &str = "./vendor/semaphore/build/snark/semaphore_final.zkey";
const WASM_FILE: &str = "./vendor/semaphore/build/snark/semaphore.wasm";

// See <https://internals.rust-lang.org/t/path-to-lexical-absolute/14940>
fn absolute(path: &str) -> Result<PathBuf> {
    let path = Path::new(path);
    let mut absolute = if path.is_absolute() {
        PathBuf::new()
    } else {
        std::env::current_dir()?
    };
    for component in path.components() {
        match component {
            Component::CurDir => {}
            Component::ParentDir => {
                absolute.pop();
            }
            component => absolute.push(component.as_os_str()),
        }
    }
    Ok(absolute)
}

fn build_circuit() -> Result<()> {
    println!("cargo:rerun-if-changed=./vendor/semaphore");
    let run = |cmd: &[&str]| -> Result<()> {
        // TODO: Use ExitCode::exit_ok() when stable.
        Command::new(cmd[0])
            .args(cmd[1..].iter())
            .current_dir("./vendor/semaphore")
            .status()?
            .success()
            .then_some(())
            .ok_or(eyre!("process returned failure"))?;
        Ok(())
    };

    // Compute absolute paths
    let zkey_file = absolute(ZKEY_FILE)?;
    let wasm_file = absolute(WASM_FILE)?;

    // Build circuits if they do not exist
    // TODO: This does not rebuild if the semaphore submodule is changed.
    // NOTE: This requires npm / nodejs to be installed.
    if !(zkey_file.exists() && wasm_file.exists()) {
        run(&["npm", "install"])?;
        run(&["npm", "exec", "ts-node", "./scripts/compile-circuits.ts"])?;
    }
    assert!(zkey_file.exists());
    assert!(wasm_file.exists());

    // Export generated paths
    println!("cargo:rustc-env=BUILD_RS_ZKEY_FILE={}", zkey_file.display());
    println!("cargo:rustc-env=BUILD_RS_WASM_FILE={}", wasm_file.display());

    Ok(())
}

#[cfg(feature = "dylib")]
fn build_dylib() -> Result<()> {
    use enumset::enum_set;
    use std::{env, str::FromStr};
    use wasmer::{Module, Store, Target, Triple};
    use wasmer_compiler_cranelift::Cranelift;
    use wasmer_engine_dylib::Dylib;

    let wasm_file = absolute(WASM_FILE)?;
    assert!(wasm_file.exists());

    let out_dir = env::var("OUT_DIR")?;
    let out_dir = Path::new(&out_dir).to_path_buf();
    let dylib_file = out_dir.join("semaphore.dylib");
    println!(
        "cargo:rustc-env=CIRCUIT_WASM_DYLIB={}",
        dylib_file.display()
    );

    if dylib_file.exists() {
        return Ok(());
    }

    // Create a WASM engine for the target that can compile
    let triple = Triple::from_str(&env::var("TARGET")?).map_err(|e| eyre!(e))?;
    let cpu_features = enum_set!();
    let target = Target::new(triple, cpu_features);
    let compiler_config = Cranelift::default();
    let engine = Dylib::new(compiler_config).target(target).engine();

    // Compile the WASM module
    let store = Store::new(&engine);
    let module = Module::from_file(&store, &wasm_file)?;
    module.serialize_to_file(&dylib_file)?;
    assert!(dylib_file.exists());
    println!("cargo:warning=Circuit dylib is in {}", dylib_file.display());

    Ok(())
}

fn main() -> Result<()> {
    build_circuit()?;
    #[cfg(feature = "dylib")]
    build_dylib()?;
    Ok(())
}
@@ -1,79 +0,0 @@

// Adapted from semaphore-rs/src/circuit.rs
use ark_bn254::{Bn254, Fr};
use ark_circom::{read_zkey, WitnessCalculator};
use ark_groth16::ProvingKey;
use ark_relations::r1cs::ConstraintMatrices;
use core::include_bytes;
use once_cell::sync::{Lazy, OnceCell};
use std::{io::Cursor, sync::Mutex};
use wasmer::{Module, Store};

#[cfg(feature = "dylib")]
use std::{env, path::Path};
#[cfg(feature = "dylib")]
use wasmer::Dylib;

const ZKEY_BYTES: &[u8] = include_bytes!(env!("BUILD_RS_ZKEY_FILE"));

#[cfg(not(feature = "dylib"))]
const WASM: &[u8] = include_bytes!(env!("BUILD_RS_WASM_FILE"));

static ZKEY: Lazy<(ProvingKey<Bn254>, ConstraintMatrices<Fr>)> = Lazy::new(|| {
    let mut reader = Cursor::new(ZKEY_BYTES);
    read_zkey(&mut reader).expect("zkey should be valid")
});

static WITNESS_CALCULATOR: OnceCell<Mutex<WitnessCalculator>> = OnceCell::new();

/// Initialize the library.
#[cfg(feature = "dylib")]
pub fn initialize(dylib_path: &Path) {
    WITNESS_CALCULATOR
        .set(from_dylib(dylib_path))
        .expect("Failed to initialize witness calculator");

    // Force init of ZKEY
    Lazy::force(&ZKEY);
}

#[cfg(feature = "dylib")]
fn from_dylib(path: &Path) -> Mutex<WitnessCalculator> {
    let store = Store::new(&Dylib::headless().engine());
    // The module must be exported using [`Module::serialize`].
    let module = unsafe {
        Module::deserialize_from_file(&store, path).expect("Failed to load wasm dylib module")
    };
    let result =
        WitnessCalculator::from_module(module).expect("Failed to create witness calculator");
    Mutex::new(result)
}

#[must_use]
pub fn zkey() -> &'static (ProvingKey<Bn254>, ConstraintMatrices<Fr>) {
    &ZKEY
}

#[cfg(feature = "dylib")]
#[must_use]
pub fn witness_calculator() -> &'static Mutex<WitnessCalculator> {
    WITNESS_CALCULATOR.get_or_init(|| {
        let path = env::var("CIRCUIT_WASM_DYLIB").expect(
            "Semaphore-rs is not initialized. The library needs to be initialized before use when \
             built with the `cdylib` feature. You can initialize by calling `initialize` or \
             setting the `CIRCUIT_WASM_DYLIB` environment variable.",
        );
        from_dylib(Path::new(&path))
    })
}

#[cfg(not(feature = "dylib"))]
#[must_use]
pub fn witness_calculator() -> &'static Mutex<WitnessCalculator> {
    WITNESS_CALCULATOR.get_or_init(|| {
        let store = Store::default();
        let module = Module::from_binary(&store, WASM).expect("wasm should be valid");
        let result =
            WitnessCalculator::from_module(module).expect("Failed to create witness calculator");
        Mutex::new(result)
    })
}
@@ -1,7 +0,0 @@

#![allow(clippy::multiple_crate_versions)]

pub mod circuit;
pub mod protocol;

#[cfg(feature = "dylib")]
pub use circuit::initialize;
@@ -1,215 +0,0 @@

// Adapted from semaphore-rs/src/protocol.rs
// For illustration purposes only as an example protocol

// Private module
use crate::circuit::{witness_calculator, zkey};

use ark_bn254::{Bn254, Parameters};
use ark_circom::CircomReduction;
use ark_ec::bn::Bn;
use ark_groth16::{
    create_proof_with_reduction_and_matrices, prepare_verifying_key, Proof as ArkProof,
};
use ark_relations::r1cs::SynthesisError;
use ark_std::UniformRand;
use color_eyre::{Report, Result};
use ethers_core::types::U256;
use rand::{thread_rng, Rng};
use semaphore::{
    identity::Identity,
    merkle_tree::{self, Branch},
    poseidon,
    poseidon_tree::PoseidonHash,
    Field,
};
use serde::{Deserialize, Serialize};
use std::time::Instant;
use thiserror::Error;

// Matches the private G1Tup type in ark-circom.
pub type G1 = (U256, U256);

// Matches the private G2Tup type in ark-circom.
pub type G2 = ([U256; 2], [U256; 2]);

/// Wrap a proof object so we have serde support
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct Proof(G1, G2, G1);

impl From<ArkProof<Bn<Parameters>>> for Proof {
    fn from(proof: ArkProof<Bn<Parameters>>) -> Self {
        let proof = ark_circom::ethereum::Proof::from(proof);
        let (a, b, c) = proof.as_tuple();
        Self(a, b, c)
    }
}

impl From<Proof> for ArkProof<Bn<Parameters>> {
    fn from(proof: Proof) -> Self {
        let eth_proof = ark_circom::ethereum::Proof {
            a: ark_circom::ethereum::G1 {
                x: proof.0 .0,
                y: proof.0 .1,
            },
            #[rustfmt::skip] // Rustfmt inserts some confusing spaces
            b: ark_circom::ethereum::G2 {
                // The order of coefficients is flipped.
                x: [proof.1.0[1], proof.1.0[0]],
                y: [proof.1.1[1], proof.1.1[0]],
            },
            c: ark_circom::ethereum::G1 {
                x: proof.2 .0,
                y: proof.2 .1,
            },
        };
        eth_proof.into()
    }
}

/// Helper to convert a merkle proof into a bigint vector
/// TODO: we should create a From trait for this
fn merkle_proof_to_vec(proof: &merkle_tree::Proof<PoseidonHash>) -> Vec<Field> {
    proof
        .0
        .iter()
        .map(|x| match x {
            Branch::Left(value) | Branch::Right(value) => *value,
        })
        .collect()
}

/// Generates the nullifier hash
#[must_use]
pub fn generate_nullifier_hash(identity: &Identity, external_nullifier: Field) -> Field {
    poseidon::hash2(external_nullifier, identity.nullifier)
}

#[derive(Error, Debug)]
pub enum ProofError {
    #[error("Error reading circuit key: {0}")]
    CircuitKeyError(#[from] std::io::Error),
    #[error("Error producing witness: {0}")]
    WitnessError(Report),
    #[error("Error producing proof: {0}")]
    SynthesisError(#[from] SynthesisError),
    #[error("Error converting public input: {0}")]
    ToFieldError(#[from] ruint::ToFieldError),
}

/// Generates a semaphore proof
///
/// # Errors
///
/// Returns a [`ProofError`] if proving fails.
pub fn generate_proof(
    identity: &Identity,
    merkle_proof: &merkle_tree::Proof<PoseidonHash>,
    external_nullifier_hash: Field,
    signal_hash: Field,
) -> Result<Proof, ProofError> {
    generate_proof_rng(
        identity,
        merkle_proof,
        external_nullifier_hash,
        signal_hash,
        &mut thread_rng(),
    )
}

/// Generates a semaphore proof from entropy
///
/// # Errors
///
/// Returns a [`ProofError`] if proving fails.
pub fn generate_proof_rng(
    identity: &Identity,
    merkle_proof: &merkle_tree::Proof<PoseidonHash>,
    external_nullifier_hash: Field,
    signal_hash: Field,
    rng: &mut impl Rng,
) -> Result<Proof, ProofError> {
    generate_proof_rs(
        identity,
        merkle_proof,
        external_nullifier_hash,
        signal_hash,
        ark_bn254::Fr::rand(rng),
        ark_bn254::Fr::rand(rng),
    )
}

fn generate_proof_rs(
    identity: &Identity,
    merkle_proof: &merkle_tree::Proof<PoseidonHash>,
    external_nullifier_hash: Field,
    signal_hash: Field,
    r: ark_bn254::Fr,
    s: ark_bn254::Fr,
) -> Result<Proof, ProofError> {
    let inputs = [
        ("identityNullifier", vec![identity.nullifier]),
        ("identityTrapdoor", vec![identity.trapdoor]),
        ("treePathIndices", merkle_proof.path_index()),
        ("treeSiblings", merkle_proof_to_vec(merkle_proof)),
        ("externalNullifier", vec![external_nullifier_hash]),
        ("signalHash", vec![signal_hash]),
    ];
    let inputs = inputs.into_iter().map(|(name, values)| {
        (
            name.to_string(),
            values.iter().copied().map(Into::into).collect::<Vec<_>>(),
        )
    });

    let now = Instant::now();

    let full_assignment = witness_calculator()
        .lock()
        .expect("witness_calculator mutex should not get poisoned")
        .calculate_witness_element::<Bn254, _>(inputs, false)
        .map_err(ProofError::WitnessError)?;

    println!("witness generation took: {:.2?}", now.elapsed());

    let now = Instant::now();
    let zkey = zkey();
    let ark_proof = create_proof_with_reduction_and_matrices::<_, CircomReduction>(
        &zkey.0,
        r,
        s,
        &zkey.1,
        zkey.1.num_instance_variables,
        zkey.1.num_constraints,
        full_assignment.as_slice(),
    )?;
    let proof = ark_proof.into();
    println!("proof generation took: {:.2?}", now.elapsed());

    Ok(proof)
}

/// Verifies a given semaphore proof
///
/// # Errors
///
/// Returns a [`ProofError`] if verifying fails. Verification failure does not
/// necessarily mean the proof is incorrect.
pub fn verify_proof(
    root: Field,
    nullifier_hash: Field,
    signal_hash: Field,
    external_nullifier_hash: Field,
    proof: &Proof,
) -> Result<bool, ProofError> {
    let zkey = zkey();
    let pvk = prepare_verifying_key(&zkey.0.vk);

    let public_inputs = [root, nullifier_hash, signal_hash, external_nullifier_hash]
        .iter()
        .map(ark_bn254::Fr::try_from)
        .collect::<Result<Vec<_>, _>>()?;

    let ark_proof = (*proof).into();
    let result = ark_groth16::verify_proof(&pvk, &ark_proof, &public_inputs[..])?;
    Ok(result)
}
@@ -1,115 +0,0 @@

#[cfg(test)]
mod tests {
    use ark_bn254::Parameters;
    use ark_ec::bn::Bn;
    use ark_groth16::Proof as ArkProof;
    use rand::{Rng, SeedableRng as _};
    use rand_chacha::ChaChaRng;
    use semaphore::{hash_to_field, identity::Identity, poseidon_tree::PoseidonTree, Field};
    use semaphore_wrapper::protocol::{
        generate_nullifier_hash, generate_proof, generate_proof_rng, verify_proof, Proof,
    };
    use serde_json::json;

    #[test]
    fn test_semaphore() {
        // generate identity
        let id = Identity::from_seed(b"secret");

        // generate merkle tree
        let leaf = Field::from(0);
        let mut tree = PoseidonTree::new(21, leaf);
        tree.set(0, id.commitment());

        let merkle_proof = tree.proof(0).expect("proof should exist");
        let root = tree.root().into();

        // change signal and external_nullifier here
        let signal_hash = hash_to_field(b"xxx");
        let external_nullifier_hash = hash_to_field(b"appId");

        let nullifier_hash = generate_nullifier_hash(&id, external_nullifier_hash);

        let proof =
            generate_proof(&id, &merkle_proof, external_nullifier_hash, signal_hash).unwrap();

        let success = verify_proof(
            root,
            nullifier_hash,
            signal_hash,
            external_nullifier_hash,
            &proof,
        )
        .unwrap();

        assert!(success);
    }

    fn arb_proof(seed: u64) -> Proof {
        // Deterministic randomness for testing
        let mut rng = ChaChaRng::seed_from_u64(seed);

        // generate identity
        let seed: [u8; 16] = rng.gen();
        let id = Identity::from_seed(&seed);

        // generate merkle tree
        let leaf = Field::from(0);
        let mut tree = PoseidonTree::new(21, leaf);
        tree.set(0, id.commitment());

        let merkle_proof = tree.proof(0).expect("proof should exist");

        let external_nullifier: [u8; 16] = rng.gen();
        let external_nullifier_hash = hash_to_field(&external_nullifier);

        let signal: [u8; 16] = rng.gen();
        let signal_hash = hash_to_field(&signal);

        generate_proof_rng(
            &id,
            &merkle_proof,
            external_nullifier_hash,
            signal_hash,
            &mut rng,
        )
        .unwrap()
    }

    #[test]
    fn test_proof_cast_roundtrip() {
        let proof = arb_proof(123);
        let ark_proof: ArkProof<Bn<Parameters>> = proof.into();
        let result: Proof = ark_proof.into();
        assert_eq!(proof, result);
    }

    #[test]
    fn test_proof_serialize() {
        let proof = arb_proof(456);
        let json = serde_json::to_value(&proof).unwrap();
        assert_eq!(
            json,
            json!([
                [
                    "0x249ae469686987ee9368da60dd177a8c42891c02f5760e955e590c79d55cfab2",
                    "0xf22e25870f49388459d388afb24dcf6ec11bb2d4def1e2ec26d6e42f373aad8"
                ],
                [
                    [
                        "0x17bd25dbd7436c30ea5b8a3a47aadf11ed646c4b25cc14a84ff8cbe0252ff1f8",
                        "0x1c140668c56688367416534d57b4a14e5a825efdd5e121a6a2099f6dc4cd277b"
                    ],
                    [
                        "0x26a8524759d969ea0682a092cf7a551697d81962d6c998f543f81e52d83e05e1",
                        "0x273eb3f796fd1807b9df9c6d769d983e3dabdc61677b75d48bb7691303b2c8dd"
                    ]
                ],
                [
                    "0x62715c53a0eb4c46dbb5f73f1fd7449b9c63d37c1ece65debc39b472065a90f",
                    "0x114f7becc66f1cd7a8b01c89db8233622372fc0b6fc037c4313bca41e2377fd9"
                ]
            ])
        );
    }
}
semaphore/vendor/semaphore (vendored, 1 change)
Submodule semaphore/vendor/semaphore deleted from 5186a940ff
@@ -1,6 +1,6 @@

[package]
name = "zerokit_utils"
version = "0.4.1"
version = "0.5.2"
edition = "2021"
license = "MIT OR Apache-2.0"
description = "Various utilities for Zerokit"
@@ -12,25 +12,35 @@ repository = "https://github.com/vacp2p/zerokit"
bench = false

[dependencies]
ark-ff = { version = "=0.4.1", default-features = false, features = ["asm"] }
num-bigint = { version = "=0.4.3", default-features = false, features = ["rand"] }
color-eyre = "=0.6.2"
pmtree = { package = "pmtree", version = "=2.0.0", optional = true }
sled = "=0.34.7"
serde = "=1.0.163"
ark-ff = { version = "0.5.0", default-features = false, features = [
    "parallel",
] }
num-bigint = { version = "0.4.6", default-features = false, features = [
    "rand",
] }
# color-eyre = "0.6.3"
thiserror = "2.0.12"
pmtree = { package = "vacp2p_pmtree", version = "2.0.2", optional = true }
sled = "0.34.7"
serde = "1.0"
lazy_static = "1.5.0"
hex = "0.4"

[dev-dependencies]
ark-bn254 = "=0.4.0"
num-traits = "=0.2.15"
hex-literal = "=0.3.4"
tiny-keccak = { version = "=2.0.2", features = ["keccak"] }
criterion = { version = "=0.4.0", features = ["html_reports"] }
ark-bn254 = { version = "0.5.0", features = ["std"] }
num-traits = "0.2.19"
hex-literal = "1.0.0"
tiny-keccak = { version = "2.0.2", features = ["keccak"] }
criterion = { version = "0.4.0", features = ["html_reports"] }

[features]
default = ["parallel"]
parallel = ["ark-ff/parallel"]
default = []
pmtree-ft = ["pmtree"]

[[bench]]
name = "merkle_tree_benchmark"
harness = false

[[bench]]
name = "poseidon_benchmark"
harness = false

@@ -8,4 +8,4 @@ args = ["test", "--release"]

[tasks.bench]
command = "cargo"
args = ["bench"]
args = ["bench"]
utils/README.md (112 changes)

@@ -1,15 +1,111 @@

# Utils crate
# Zerokit Utils Crate

## Building
[![Crates.io](https://img.shields.io/crates/v/zerokit_utils)](https://crates.io/crates/zerokit_utils)

1. `cargo build`
Cryptographic primitives for zero-knowledge applications, featuring efficient Merkle tree implementations and a Poseidon hash function.

## Testing
## Overview

1. `cargo test`
This crate provides core cryptographic components optimized for zero-knowledge proof systems:

## Benchmarking
1. Multiple Merkle tree implementations with different space/time tradeoffs
2. A Poseidon hash implementation

1. `cargo bench`
## Merkle Tree Implementations

To view the results of the benchmark, open the `target/criterion/report/index.html` file generated after the bench
The crate supports two interchangeable Merkle tree implementations; a usage sketch follows the list:

- **FullMerkleTree**
  - Stores each tree node in memory
- **OptimalMerkleTree**
  - Only stores the nodes needed to prove accumulation of set leaves
### Implementation notes
|
||||
|
||||
Glossary:
|
||||
|
||||
* depth: level of leaves if we count from levels from 0
|
||||
* number of levels: depth + 1
|
||||
* capacity (== number of leaves) -- 1 << depth
|
||||
* total number of nodes: 1 << (depth + 1)) - 1
|
||||
|
||||
So for instance:
|
||||
* depth: 3
|
||||
* number of levels: 4
|
||||
* capacity (number of leaves): 8
|
||||
* total number of nodes: 15
|
||||
|
||||
```mermaid
|
||||
flowchart TD
|
||||
A[Root] --> N1
|
||||
A[Root] --> N2
|
||||
N1 --> N3
|
||||
N1 --> N4
|
||||
N2 --> N5
|
||||
N2 --> N6
|
||||
N3 -->|Leaf| L1
|
||||
N3 -->|Leaf| L2
|
||||
N4 -->|Leaf| L3
|
||||
N4 -->|Leaf| L4
|
||||
N5 -->|Leaf| L5
|
||||
N5 -->|Leaf| L6
|
||||
N6 -->|Leaf| L7
|
||||
N6 -->|Leaf| L8
|
||||
```
|
||||
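To make these relations concrete, here is a minimal sketch (illustrative only, not part of the crate) that checks the glossary arithmetic in Rust:

```rust
// Illustrative only: verifies the depth/levels/capacity/node-count relations above.
fn main() {
    let depth: usize = 3;
    let levels = depth + 1; // number of levels
    let capacity = 1usize << depth; // number of leaves
    let total_nodes = (1usize << (depth + 1)) - 1; // all nodes in the full binary tree

    assert_eq!(levels, 4);
    assert_eq!(capacity, 8);
    assert_eq!(total_nodes, 15);
}
```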

## Poseidon Hash Implementation

This crate provides an implementation to compute the Poseidon hash round constants and MDS matrices:

- **Customizable parameters**: Supports different security levels and input sizes
- **Arkworks-friendly**: Adapted to work over arkworks field traits and custom data structures

### Security Note

The MDS matrices are generated iteratively using the Grain LFSR until certain criteria are met.
According to the paper, such matrices must respect specific conditions which are checked by 3 different algorithms in the reference implementation.

These validation algorithms are not currently implemented in this crate.
For the hardcoded parameters, the first random matrix generated satisfies these conditions.
If using different parameters, you should check against the reference implementation how many matrices are generated before outputting the correct one,
and pass this number to the `skip_matrices` parameter of the `find_poseidon_ark_and_mds` function.
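As a usage sketch, the crate's `Poseidon` type takes round parameters as `(t, RF, RP, skip_matrices)` tuples; the example below mirrors the parameter table used in the new `poseidon_benchmark.rs` from this diff, with `ark-bn254`'s `Fr` assumed as the field:

```rust
use ark_bn254::Fr;
use zerokit_utils::Poseidon;

// (t, RF, RP, skip_matrices): rate, full rounds, partial rounds, and the number of
// Grain-generated MDS matrices to skip before accepting one (0 for these parameters).
const ROUND_PARAMS: [(usize, usize, usize, usize); 2] = [(2, 8, 56, 0), (3, 8, 57, 0)];

fn main() {
    let hasher = Poseidon::<Fr>::from(&ROUND_PARAMS);
    // t = input length + 1, so a one-element input selects the t = 2 parameters.
    let digest = hasher
        .hash(&[Fr::from(42u64)])
        .expect("no parameters for this input length");
    println!("{digest}");
}
```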
## Installation

Add Zerokit Utils to your Rust project:

```toml
[dependencies]
zerokit-utils = "0.5.1"
```

## Performance Considerations

- **FullMerkleTree**: Use when memory is abundant and operation speed is critical
- **OptimalMerkleTree**: Use when memory efficiency is more important than raw speed
- **Poseidon**: Offers a good balance between security and performance for ZK applications

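To make the tradeoff concrete, the sketch below instantiates both trees behind the same API, reusing the toy `Keccak256`/`TestFr` hasher that this diff's own tests and benchmarks define (a test helper built on `tiny-keccak` and `hex`, not a production hasher):

```rust
use std::{fmt::Display, str::FromStr};
use tiny_keccak::{Hasher as _, Keccak};
use zerokit_utils::{
    FullMerkleConfig, FullMerkleTree, Hasher, OptimalMerkleConfig, OptimalMerkleTree,
    ZerokitMerkleProof, ZerokitMerkleTree,
};

#[derive(Clone, Copy, Eq, PartialEq)]
struct Keccak256;

#[derive(Clone, Copy, Eq, PartialEq, Debug, Default)]
struct TestFr([u8; 32]);

impl Hasher for Keccak256 {
    type Fr = TestFr;

    fn default_leaf() -> Self::Fr {
        TestFr([0; 32])
    }

    fn hash(inputs: &[Self::Fr]) -> Self::Fr {
        // Keccak-256 over the concatenated input bytes.
        let mut output = [0; 32];
        let mut hasher = Keccak::v256();
        for element in inputs {
            hasher.update(element.0.as_slice());
        }
        hasher.finalize(&mut output);
        TestFr(output)
    }
}

impl Display for TestFr {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", hex::encode(self.0.as_slice()))
    }
}

impl FromStr for TestFr {
    type Err = std::string::FromUtf8Error;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(TestFr(s.as_bytes().try_into().unwrap()))
    }
}

fn main() {
    // Same API, different storage strategy: pick one based on the memory/speed tradeoff.
    let mut full =
        FullMerkleTree::<Keccak256>::new(2, TestFr([0; 32]), FullMerkleConfig::default()).unwrap();
    let mut optimal =
        OptimalMerkleTree::<Keccak256>::new(2, TestFr([0; 32]), OptimalMerkleConfig::default())
            .unwrap();

    let leaf = TestFr([1; 32]);
    full.set(0, leaf).unwrap();
    optimal.set(0, leaf).unwrap();

    // Both implementations accumulate to the same root.
    assert_eq!(full.root(), optimal.root());

    // Merkle proofs verify against the leaf that was set.
    let proof = full.proof(0).unwrap();
    assert!(full.verify(&leaf, &proof).unwrap());
    assert_eq!(proof.compute_root_from(&leaf), full.root());
}
```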
## Building and Testing

```bash
# Build the crate
cargo make build

# Run tests
cargo make test

# Run benchmarks
cargo make bench
```

To view the results of the benchmarks, open the `target/criterion/report/index.html` file generated after the bench run.

## Acknowledgements

- The Merkle tree implementations are adapted from:
  - [kilic/rln](https://github.com/kilic/rln/blob/master/src/merkle.rs)
  - [worldcoin/semaphore-rs](https://github.com/worldcoin/semaphore-rs/blob/d462a4372f1fd9c27610f2acfe4841fab1d396aa/src/merkle_tree.rs)

- The Poseidon implementation references:
  - [Poseidon reference implementation](https://extgit.iaik.tugraz.at/krypto/hadeshash/-/blob/master/code/generate_parameters_grain.sage)

@@ -1,5 +1,7 @@
use criterion::{criterion_group, criterion_main, Criterion};
use hex_literal::hex;
use lazy_static::lazy_static;
use std::{fmt::Display, str::FromStr};
use tiny_keccak::{Hasher as _, Keccak};
use zerokit_utils::{
    FullMerkleConfig, FullMerkleTree, Hasher, OptimalMerkleConfig, OptimalMerkleTree,
@@ -9,38 +11,59 @@ use zerokit_utils::{
#[derive(Clone, Copy, Eq, PartialEq)]
struct Keccak256;

#[derive(Clone, Copy, Eq, PartialEq, Debug, Default)]
struct TestFr([u8; 32]);

impl Hasher for Keccak256 {
    type Fr = [u8; 32];
    type Fr = TestFr;

    fn default_leaf() -> Self::Fr {
        [0; 32]
        TestFr([0; 32])
    }

    fn hash(inputs: &[Self::Fr]) -> Self::Fr {
        let mut output = [0; 32];
        let mut hasher = Keccak::v256();
        for element in inputs {
            hasher.update(element);
            hasher.update(element.0.as_slice());
        }
        hasher.finalize(&mut output);
        output
        TestFr(output)
    }
}

pub fn optimal_merkle_tree_benchmark(c: &mut Criterion) {
    let mut tree =
        OptimalMerkleTree::<Keccak256>::new(2, [0; 32], OptimalMerkleConfig::default()).unwrap();
impl Display for TestFr {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", String::from_utf8_lossy(self.0.as_slice()))
    }
}

    let leaves = [
impl FromStr for TestFr {
    type Err = std::string::FromUtf8Error;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        Ok(TestFr(s.as_bytes().try_into().unwrap()))
    }
}

lazy_static! {
    static ref LEAVES: [TestFr; 4] = [
        hex!("0000000000000000000000000000000000000000000000000000000000000001"),
        hex!("0000000000000000000000000000000000000000000000000000000000000002"),
        hex!("0000000000000000000000000000000000000000000000000000000000000003"),
        hex!("0000000000000000000000000000000000000000000000000000000000000004"),
    ];
    ]
    .map(TestFr);
}

pub fn optimal_merkle_tree_benchmark(c: &mut Criterion) {
    let mut tree =
        OptimalMerkleTree::<Keccak256>::new(2, TestFr([0; 32]), OptimalMerkleConfig::default())
            .unwrap();

    c.bench_function("OptimalMerkleTree::set", |b| {
        b.iter(|| {
            tree.set(0, leaves[0]).unwrap();
            tree.set(0, LEAVES[0]).unwrap();
        })
    });

@@ -52,7 +75,8 @@ pub fn optimal_merkle_tree_benchmark(c: &mut Criterion) {

    c.bench_function("OptimalMerkleTree::override_range", |b| {
        b.iter(|| {
            tree.override_range(0, leaves, [0, 1, 2, 3]).unwrap();
            tree.override_range(0, LEAVES.into_iter(), [0, 1, 2, 3].into_iter())
                .unwrap();
        })
    });

@@ -67,22 +91,28 @@ pub fn optimal_merkle_tree_benchmark(c: &mut Criterion) {
            tree.get(0).unwrap();
        })
    });

    // check the intermediate node getter, which requires additional computation of the subtree root index
    c.bench_function("OptimalMerkleTree::get_subtree_root", |b| {
        b.iter(|| {
            tree.get_subtree_root(1, 0).unwrap();
        })
    });

    c.bench_function("OptimalMerkleTree::get_empty_leaves_indices", |b| {
        b.iter(|| {
            tree.get_empty_leaves_indices();
        })
    });
}

pub fn full_merkle_tree_benchmark(c: &mut Criterion) {
    let mut tree =
        FullMerkleTree::<Keccak256>::new(2, [0; 32], FullMerkleConfig::default()).unwrap();

    let leaves = [
        hex!("0000000000000000000000000000000000000000000000000000000000000001"),
        hex!("0000000000000000000000000000000000000000000000000000000000000002"),
        hex!("0000000000000000000000000000000000000000000000000000000000000003"),
        hex!("0000000000000000000000000000000000000000000000000000000000000004"),
    ];
        FullMerkleTree::<Keccak256>::new(2, TestFr([0; 32]), FullMerkleConfig::default()).unwrap();

    c.bench_function("FullMerkleTree::set", |b| {
        b.iter(|| {
            tree.set(0, leaves[0]).unwrap();
            tree.set(0, LEAVES[0]).unwrap();
        })
    });

@@ -94,7 +124,8 @@ pub fn full_merkle_tree_benchmark(c: &mut Criterion) {

    c.bench_function("FullMerkleTree::override_range", |b| {
        b.iter(|| {
            tree.override_range(0, leaves, [0, 1, 2, 3]).unwrap();
            tree.override_range(0, LEAVES.into_iter(), [0, 1, 2, 3].into_iter())
                .unwrap();
        })
    });

@@ -109,6 +140,19 @@ pub fn full_merkle_tree_benchmark(c: &mut Criterion) {
            tree.get(0).unwrap();
        })
    });

    // check the intermediate node getter, which requires additional computation of the subtree root index
    c.bench_function("FullMerkleTree::get_subtree_root", |b| {
        b.iter(|| {
            tree.get_subtree_root(1, 0).unwrap();
        })
    });

    c.bench_function("FullMerkleTree::get_empty_leaves_indices", |b| {
        b.iter(|| {
            tree.get_empty_leaves_indices();
        })
    });
}

criterion_group!(

65  utils/benches/poseidon_benchmark.rs  Normal file
@@ -0,0 +1,65 @@
use ark_bn254::Fr;
use criterion::{
    black_box, criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion, Throughput,
};
use zerokit_utils::Poseidon;

const ROUND_PARAMS: [(usize, usize, usize, usize); 8] = [
    (2, 8, 56, 0),
    (3, 8, 57, 0),
    (4, 8, 56, 0),
    (5, 8, 60, 0),
    (6, 8, 60, 0),
    (7, 8, 63, 0),
    (8, 8, 64, 0),
    (9, 8, 63, 0),
];

pub fn poseidon_benchmark(c: &mut Criterion) {
    let hasher = Poseidon::<Fr>::from(&ROUND_PARAMS);
    let mut group = c.benchmark_group("poseidon Fr");

    for size in [10u32, 100, 1000].iter() {
        group.throughput(Throughput::Elements(*size as u64));

        group.bench_with_input(BenchmarkId::new("Array hash", size), size, |b, &size| {
            b.iter_batched(
                // Setup: create values for each benchmark iteration
                || {
                    let mut values = Vec::with_capacity(size as usize);
                    for i in 0..size {
                        values.push([Fr::from(i)]);
                    }
                    values
                },
                // Actual benchmark
                |values| {
                    for v in values.iter() {
                        let _ = hasher.hash(black_box(&v[..]));
                    }
                },
                BatchSize::SmallInput,
            )
        });
    }

    // Benchmark single hash operation separately
    group.bench_function("Single hash", |b| {
        let input = [Fr::from(u64::MAX)];
        b.iter(|| {
            let _ = hasher.hash(black_box(&input[..]));
        })
    });

    group.finish();
}

criterion_group! {
    name = benches;
    config = Criterion::default()
        .warm_up_time(std::time::Duration::from_millis(500))
        .measurement_time(std::time::Duration::from_secs(4))
        .sample_size(20);
    targets = poseidon_benchmark
}
criterion_main!(benches);
@@ -1,20 +1,21 @@
use crate::merkle_tree::{FrOf, Hasher, ZerokitMerkleProof, ZerokitMerkleTree};
use color_eyre::{Report, Result};
// use color_eyre::{Report, Result};
use std::{
    cmp::max,
    fmt::Debug,
    iter::{once, repeat, successors},
    iter::{once, repeat_n, successors},
    str::FromStr,
};

use num_traits::Zero;
use crate::ZerokitMerkleTreeError;
////////////////////////////////////////////////////////////
/// Full Merkle Tree Implementation
///// Full Merkle Tree Implementation
////////////////////////////////////////////////////////////

/// Merkle tree with all leaf and intermediate hashes stored
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct FullMerkleTree<H: Hasher> {
    /// The depth of the tree, i.e. the number of levels from leaf to root
    /// The depth of the tree, i.e., the number of levels from leaf to root
    depth: usize,

    /// The nodes cached from the empty part of the tree (where leaves are set to default).
@@ -26,6 +27,10 @@ pub struct FullMerkleTree<H: Hasher> {
    /// The tree nodes
    nodes: Vec<H::Fr>,

    /// Tracks, up to next_index, which leaves are set.
    /// An entry is 0 if the leaf is empty and 1 otherwise.
    cached_leaves_indices: Vec<u8>,

    // The next available (i.e., never used) tree index. Equivalently, the number of leaves added to the tree
    // (deletions leave next_index unchanged)
    next_index: usize,
@@ -52,9 +57,9 @@ pub struct FullMerkleProof<H: Hasher>(pub Vec<FullMerkleBranch<H>>);
pub struct FullMerkleConfig(());

impl FromStr for FullMerkleConfig {
    type Err = Report;
    type Err = ();

    fn from_str(_s: &str) -> Result<Self> {
    fn from_str(_s: &str) -> Result<Self, ()> {
        Ok(FullMerkleConfig::default())
    }
}
@@ -68,13 +73,13 @@ where
    type Hasher = H;
    type Config = FullMerkleConfig;

    fn default(depth: usize) -> Result<Self> {
    fn default(depth: usize) -> Result<Self, ZerokitMerkleTreeError> {
        FullMerkleTree::<H>::new(depth, Self::Hasher::default_leaf(), Self::Config::default())
    }

    /// Creates a new `MerkleTree`
    /// depth - the height of the tree made only of hash nodes. 2^depth is the maximum number of leaves hash nodes
    fn new(depth: usize, initial_leaf: FrOf<Self::Hasher>, _config: Self::Config) -> Result<Self> {
    fn new(depth: usize, initial_leaf: FrOf<Self::Hasher>, _config: Self::Config) -> Result<Self, ZerokitMerkleTreeError> {
        // Compute cache node values, leaf to root
        let cached_nodes = successors(Some(initial_leaf), |prev| Some(H::hash(&[*prev, *prev])))
            .take(depth + 1)
@@ -85,7 +90,7 @@ where
            .iter()
            .rev()
            .enumerate()
            .flat_map(|(levels, hash)| repeat(hash).take(1 << levels))
            .flat_map(|(levels, hash)| repeat_n(hash, 1 << levels))
            .cloned()
            .collect::<Vec<_>>();
        debug_assert!(nodes.len() == (1 << (depth + 1)) - 1);
@@ -96,12 +101,13 @@ where
            depth,
            cached_nodes,
            nodes,
            cached_leaves_indices: vec![0; 1 << depth],
            next_index,
            metadata: Vec::new(),
        })
    }

    fn close_db_connection(&mut self) -> Result<()> {
    fn close_db_connection(&mut self) -> Result<(), ZerokitMerkleTreeError> {
        Ok(())
    }

@@ -110,54 +116,90 @@ where
        self.depth
    }

    // Returns the capacity of the tree, i.e. the maximum number of accumulatable leaves
    // Returns the capacity of the tree, i.e., the maximum number of accumulatable leaves
    fn capacity(&self) -> usize {
        1 << self.depth
    }

    // Returns the total number of leaves set
    fn leaves_set(&mut self) -> usize {
    fn leaves_set(&self) -> usize {
        self.next_index
    }

    #[must_use]
    // Returns the root of the tree
    fn root(&self) -> FrOf<Self::Hasher> {
        self.nodes[0]
    }

    // Sets a leaf at the specified tree index
    fn set(&mut self, leaf: usize, hash: FrOf<Self::Hasher>) -> Result<()> {
    fn set(&mut self, leaf: usize, hash: FrOf<Self::Hasher>) -> Result<(), ZerokitMerkleTreeError> {
        self.set_range(leaf, once(hash))?;
        self.next_index = max(self.next_index, leaf + 1);
        Ok(())
    }

    // Get a leaf from the specified tree index
    fn get(&self, leaf: usize) -> Result<FrOf<Self::Hasher>> {
    fn get(&self, leaf: usize) -> Result<FrOf<Self::Hasher>, ZerokitMerkleTreeError> {
        if leaf >= self.capacity() {
            return Err(Report::msg("leaf index out of bounds"));
            return Err(ZerokitMerkleTreeError::InvalidLeaf);
        }
        Ok(self.nodes[self.capacity() + leaf - 1])
    }

    fn get_subtree_root(&self, n: usize, index: usize) -> Result<H::Fr, ZerokitMerkleTreeError> {
        if n > self.depth() {
            return Err(ZerokitMerkleTreeError::LevelExceedsDepth);
        }
        if index >= self.capacity() {
            return Err(ZerokitMerkleTreeError::InvalidLeaf);
        }
        if n == 0 {
            Ok(self.root())
        } else if n == self.depth {
            self.get(index)
        } else {
            let mut idx = self.capacity() + index - 1;
            let mut nd = self.depth;
            loop {
                let parent = self.parent(idx).unwrap();
                nd -= 1;
                if nd == n {
                    return Ok(self.nodes[parent]);
                } else {
                    idx = parent;
                    continue;
                }
            }
        }
    }
    fn get_empty_leaves_indices(&self) -> Vec<usize> {
        self.cached_leaves_indices
            .iter()
            .take(self.next_index)
            .enumerate()
            .filter(|&(_, &v)| v == 0u8)
            .map(|(idx, _)| idx)
            .collect()
    }

    // Sets tree nodes, starting from start index
    // Function proper of FullMerkleTree implementation
    fn set_range<I: IntoIterator<Item = FrOf<Self::Hasher>>>(
        &mut self,
        start: usize,
        hashes: I,
    ) -> Result<()> {
    ) -> Result<(), ZerokitMerkleTreeError> {
        let index = self.capacity() + start - 1;
        let mut count = 0;
        // first count number of hashes, and check that they fit in the tree
        // first count the number of hashes and check that they fit in the tree
        // then insert into the tree
        let hashes = hashes.into_iter().collect::<Vec<_>>();
        if hashes.len() + start > self.capacity() {
            return Err(Report::msg("provided hashes do not fit in the tree"));
            return Err(ZerokitMerkleTreeError::TooManySet);
        }
        hashes.into_iter().for_each(|hash| {
            self.nodes[index + count] = hash;
            self.cached_leaves_indices[start + count] = 1;
            count += 1;
        });
        if count != 0 {
@@ -167,55 +209,54 @@ where
        Ok(())
    }

    fn override_range<I, J>(&mut self, start: usize, leaves: I, to_remove_indices: J) -> Result<()>
    fn override_range<I, J>(&mut self, start: usize, leaves: I, indices: J) -> Result<(), ZerokitMerkleTreeError>
    where
        I: IntoIterator<Item = FrOf<Self::Hasher>>,
        J: IntoIterator<Item = usize>,
    {
        let index = self.capacity() + start - 1;
        let mut count = 0;
        let leaves = leaves.into_iter().collect::<Vec<_>>();
        let to_remove_indices = to_remove_indices.into_iter().collect::<Vec<_>>();
        // first count number of hashes, and check that they fit in the tree
        // then insert into the tree
        if leaves.len() + start - to_remove_indices.len() > self.capacity() {
            return Err(Report::msg("provided hashes do not fit in the tree"));
        let indices = indices.into_iter().collect::<Vec<_>>();
        let min_index = *indices.first().unwrap();
        let leaves_vec = leaves.into_iter().collect::<Vec<_>>();

        let max_index = start + leaves_vec.len();

        let mut set_values = vec![Self::Hasher::default_leaf(); max_index - min_index];

        for i in min_index..start {
            if !indices.contains(&i) {
                let value = self.get(i)?;
                set_values[i - min_index] = value;
            }
        }

        // remove leaves
        for i in &to_remove_indices {
            self.delete(*i)?;
        for i in 0..leaves_vec.len() {
            set_values[start - min_index + i] = leaves_vec[i];
        }

        // insert new leaves
        for hash in leaves {
            self.nodes[index + count] = hash;
            count += 1;
        for i in indices {
            self.cached_leaves_indices[i] = 0;
        }

        if count != 0 {
            self.update_nodes(index, index + (count - 1))?;
            self.next_index = max(self.next_index, start + count - to_remove_indices.len());
        }
        Ok(())
        self.set_range(start, set_values.into_iter())
    }

    // Sets a leaf at the next available index
    fn update_next(&mut self, leaf: FrOf<Self::Hasher>) -> Result<()> {
    fn update_next(&mut self, leaf: FrOf<Self::Hasher>) -> Result<(), ZerokitMerkleTreeError> {
        self.set(self.next_index, leaf)?;
        Ok(())
    }

    // Deletes a leaf at a certain index by setting it to its default value (next_index is not updated)
    fn delete(&mut self, index: usize) -> Result<()> {
    fn delete(&mut self, index: usize) -> Result<(), ZerokitMerkleTreeError> {
        // We reset the leaf only if we previously set a leaf at that index
        if index < self.next_index {
            self.set(index, H::default_leaf())?;
            self.cached_leaves_indices[index] = 0;
        }
        Ok(())
    }

    // Computes a merkle proof the the leaf at the specified index
    // Computes a merkle proof for the leaf at the specified index
    fn proof(&self, leaf: usize) -> Result<FullMerkleProof<H>> {
        if leaf >= self.capacity() {
            return Err(Report::msg("index exceeds set size"));
@@ -299,14 +340,12 @@ impl<H: Hasher> ZerokitMerkleProof for FullMerkleProof<H> {
    type Index = u8;
    type Hasher = H;

    #[must_use]
    // Returns the length of a Merkle proof
    fn length(&self) -> usize {
        self.0.len()
    }

    /// Computes the leaf index corresponding to a Merkle proof
    #[must_use]
    fn leaf_index(&self) -> usize {
        self.0.iter().rev().fold(0, |index, branch| match branch {
            FullMerkleBranch::Left(_) => index << 1,
@@ -314,7 +353,6 @@ impl<H: Hasher> ZerokitMerkleProof for FullMerkleProof<H> {
        })
    }

    #[must_use]
    /// Returns the path elements forming a Merkle proof
    fn get_path_elements(&self) -> Vec<FrOf<Self::Hasher>> {
        self.0
@@ -326,7 +364,6 @@ impl<H: Hasher> ZerokitMerkleProof for FullMerkleProof<H> {
    }

    /// Returns the path indexes forming a Merkle proof
    #[must_use]
    fn get_path_index(&self) -> Vec<Self::Index> {
        self.0
            .iter()
@@ -338,7 +375,6 @@ impl<H: Hasher> ZerokitMerkleProof for FullMerkleProof<H> {
    }

    /// Computes the Merkle root corresponding by iteratively hashing a Merkle proof with a given input leaf
    #[must_use]
    fn compute_root_from(&self, hash: &FrOf<Self::Hasher>) -> FrOf<Self::Hasher> {
        self.0.iter().fold(*hash, |hash, branch| match branch {
            FullMerkleBranch::Left(sibling) => H::hash(&[hash, *sibling]),

@@ -14,14 +14,14 @@
//! * Implement serialization for tree and Merkle proof

use std::str::FromStr;

use color_eyre::Result;
use num_traits::Zero;
// use color_eyre::Result;

/// In the Hasher trait we define the node type, the default leaf
/// and the hash function used to initialize a Merkle Tree implementation
pub trait Hasher {
    /// Type of the leaf and tree node
    type Fr: Clone + Copy + Eq;
    type Fr: Clone + Copy + Eq + Default + std::fmt::Debug + std::fmt::Display + FromStr;

    /// Returns the default tree leaf
    fn default_leaf() -> Self::Fr;
@@ -32,6 +32,30 @@ pub trait Hasher {

pub type FrOf<H> = <H as Hasher>::Fr;

#[derive(thiserror::Error, Debug)]
pub enum ZerokitMerkleTreeError {
    #[error("Invalid index")]
    InvalidIndex,
    // InvalidProof,
    #[error("Leaf index out of bounds")]
    InvalidLeaf,

    #[error("Level exceeds tree depth")]
    LevelExceedsDepth,
    #[error("Subtree index out of bounds")]
    InvalidSubTreeIndex,

    #[error("set_range got too many leaves")]
    TooManySet

    // InvalidConfig,
    // InvalidMetadata,
    // InvalidDepth,
    // InvalidCapacity,
    // InvalidLeavesSet,
    // InvalidRoot,
}

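Since failures are now a `thiserror` enum rather than `color_eyre` reports, callers can match on the variant; a minimal sketch, assuming `FrOf`, `Hasher`, `ZerokitMerkleTree`, and `ZerokitMerkleTreeError` are re-exported at the crate root as the imports elsewhere in this diff suggest:

```rust
use zerokit_utils::{FrOf, Hasher, ZerokitMerkleTree, ZerokitMerkleTreeError};

// Hypothetical helper: fall back to the default leaf on an out-of-bounds index
// instead of propagating the error.
fn leaf_or_default<T: ZerokitMerkleTree>(
    tree: &T,
    index: usize,
) -> Result<FrOf<T::Hasher>, ZerokitMerkleTreeError> {
    match tree.get(index) {
        Ok(leaf) => Ok(leaf),
        Err(ZerokitMerkleTreeError::InvalidLeaf) => Ok(T::Hasher::default_leaf()),
        Err(e) => Err(e),
    }
}
```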
/// In the ZerokitMerkleTree trait we define the methods that are required to be implemented by a Merkle tree
/// Including, OptimalMerkleTree, FullMerkleTree
pub trait ZerokitMerkleTree {
@@ -39,33 +63,35 @@ pub trait ZerokitMerkleTree {
    type Hasher: Hasher;
    type Config: Default + FromStr;

    fn default(depth: usize) -> Result<Self>
    fn default(depth: usize) -> Result<Self, ZerokitMerkleTreeError>
    where
        Self: Sized;
    fn new(depth: usize, default_leaf: FrOf<Self::Hasher>, config: Self::Config) -> Result<Self>
    fn new(depth: usize, default_leaf: FrOf<Self::Hasher>, config: Self::Config) -> Result<Self, ZerokitMerkleTreeError>
    where
        Self: Sized;
    fn depth(&self) -> usize;
    fn capacity(&self) -> usize;
    fn leaves_set(&mut self) -> usize;
    fn leaves_set(&self) -> usize;
    fn root(&self) -> FrOf<Self::Hasher>;
    fn compute_root(&mut self) -> Result<FrOf<Self::Hasher>>;
    fn set(&mut self, index: usize, leaf: FrOf<Self::Hasher>) -> Result<()>;
    fn set_range<I>(&mut self, start: usize, leaves: I) -> Result<()>
    fn compute_root(&mut self) -> Result<FrOf<Self::Hasher>, ZerokitMerkleTreeError>;
    fn get_subtree_root(&self, n: usize, index: usize) -> Result<FrOf<Self::Hasher>, ZerokitMerkleTreeError>;
    fn set(&mut self, index: usize, leaf: FrOf<Self::Hasher>) -> Result<(), ZerokitMerkleTreeError>;
    fn set_range<I>(&mut self, start: usize, leaves: I) -> Result<(), ZerokitMerkleTreeError>
    where
        I: IntoIterator<Item = FrOf<Self::Hasher>>;
    fn get(&self, index: usize) -> Result<FrOf<Self::Hasher>>;
    fn override_range<I, J>(&mut self, start: usize, leaves: I, to_remove_indices: J) -> Result<()>
        I: ExactSizeIterator<Item = FrOf<Self::Hasher>>;
    fn get(&self, index: usize) -> Result<FrOf<Self::Hasher>, ZerokitMerkleTreeError>;
    fn get_empty_leaves_indices(&self) -> Vec<usize>;
    fn override_range<I, J>(&mut self, start: usize, leaves: I, to_remove_indices: J) -> Result<(), ZerokitMerkleTreeError>
    where
        I: IntoIterator<Item = FrOf<Self::Hasher>>,
        J: IntoIterator<Item = usize>;
    fn update_next(&mut self, leaf: FrOf<Self::Hasher>) -> Result<()>;
    fn delete(&mut self, index: usize) -> Result<()>;
    fn proof(&self, index: usize) -> Result<Self::Proof>;
    fn verify(&self, leaf: &FrOf<Self::Hasher>, witness: &Self::Proof) -> Result<bool>;
    fn set_metadata(&mut self, metadata: &[u8]) -> Result<()>;
    fn metadata(&self) -> Result<Vec<u8>>;
    fn close_db_connection(&mut self) -> Result<()>;
        I: ExactSizeIterator<Item = FrOf<Self::Hasher>>,
        J: ExactSizeIterator<Item = usize>;
    fn update_next(&mut self, leaf: FrOf<Self::Hasher>) -> Result<(), ZerokitMerkleTreeError>;
    fn delete(&mut self, index: usize) -> Result<(), ZerokitMerkleTreeError>;
    fn proof(&self, index: usize) -> Result<Self::Proof, ZerokitMerkleTreeError>;
    fn verify(&self, leaf: &FrOf<Self::Hasher>, witness: &Self::Proof) -> Result<bool, ZerokitMerkleTreeError>;
    fn set_metadata(&mut self, metadata: &[u8]) -> Result<(), ZerokitMerkleTreeError>;
    fn metadata(&self) -> Result<Vec<u8>, ZerokitMerkleTreeError>;
    fn close_db_connection(&mut self) -> Result<(), ZerokitMerkleTreeError>;
}

pub trait ZerokitMerkleProof {

@@ -1,12 +1,12 @@
use crate::merkle_tree::{Hasher, ZerokitMerkleProof, ZerokitMerkleTree};
use crate::FrOf;
use color_eyre::{Report, Result};
use std::cmp::min;
use std::collections::HashMap;
use std::str::FromStr;
use std::{cmp::max, fmt::Debug};

////////////////////////////////////////////////////////////
/// Optimal Merkle Tree Implementation
///// Optimal Merkle Tree Implementation
////////////////////////////////////////////////////////////

/// The Merkle tree structure
@@ -27,6 +27,10 @@ where
    /// The tree nodes
    nodes: HashMap<(usize, usize), H::Fr>,

    /// Tracks, up to next_index, which leaves are set.
    /// An entry is 0 if the leaf is empty and 1 otherwise.
    cached_leaves_indices: Vec<u8>,

    // The next available (i.e., never used) tree index. Equivalently, the number of leaves added to the tree
    // (deletions leave next_index unchanged)
    next_index: usize,
@@ -51,7 +55,9 @@ impl FromStr for OptimalMerkleConfig {
    }
}

/// Implementations
////////////////////////////////////////////////////////////
///// Implementations
////////////////////////////////////////////////////////////

impl<H: Hasher> ZerokitMerkleTree for OptimalMerkleTree<H>
where
@@ -75,9 +81,10 @@ where
        }
        cached_nodes.reverse();
        Ok(OptimalMerkleTree {
            cached_nodes: cached_nodes.clone(),
            cached_nodes,
            depth,
            nodes: HashMap::new(),
            nodes: HashMap::with_capacity(1 << depth),
            cached_leaves_indices: vec![0; 1 << depth],
            next_index: 0,
            metadata: Vec::new(),
        })
@@ -98,24 +105,40 @@ where
    }

    // Returns the total number of leaves set
    fn leaves_set(&mut self) -> usize {
    fn leaves_set(&self) -> usize {
        self.next_index
    }

    #[must_use]
    // Returns the root of the tree
    fn root(&self) -> H::Fr {
        self.get_node(0, 0)
    }

    fn get_subtree_root(&self, n: usize, index: usize) -> Result<H::Fr> {
        if n > self.depth() {
            return Err(Report::msg("level exceeds depth size"));
        }
        if index >= self.capacity() {
            return Err(Report::msg("index exceeds set size"));
        }
        if n == 0 {
            Ok(self.root())
        } else if n == self.depth {
            self.get(index)
        } else {
            Ok(self.get_node(n, index >> (self.depth - n)))
        }
    }

    // Sets a leaf at the specified tree index
    fn set(&mut self, index: usize, leaf: H::Fr) -> Result<()> {
        if index >= self.capacity() {
            return Err(Report::msg("index exceeds set size"));
        }
        self.nodes.insert((self.depth, index), leaf);
        self.recalculate_from(index)?;
        self.update_hashes(index, 1)?;
        self.next_index = max(self.next_index, index + 1);
        self.cached_leaves_indices[index] = 1;
        Ok(())
    }

@@ -127,49 +150,66 @@ where
        Ok(self.get_node(self.depth, index))
    }

    fn get_empty_leaves_indices(&self) -> Vec<usize> {
        self.cached_leaves_indices
            .iter()
            .take(self.next_index)
            .enumerate()
            .filter(|&(_, &v)| v == 0u8)
            .map(|(idx, _)| idx)
            .collect()
    }

    // Sets multiple leaves from the specified tree index
    fn set_range<I: IntoIterator<Item = H::Fr>>(&mut self, start: usize, leaves: I) -> Result<()> {
        let leaves = leaves.into_iter().collect::<Vec<_>>();
    fn set_range<I: ExactSizeIterator<Item = H::Fr>>(
        &mut self,
        start: usize,
        leaves: I,
    ) -> Result<()> {
        // check if the range is valid
        if start + leaves.len() > self.capacity() {
        let leaves_len = leaves.len();
        if start + leaves_len > self.capacity() {
            return Err(Report::msg("provided range exceeds set size"));
        }
        for (i, leaf) in leaves.iter().enumerate() {
            self.nodes.insert((self.depth, start + i), *leaf);
            self.recalculate_from(start + i)?;
        for (i, leaf) in leaves.enumerate() {
            self.nodes.insert((self.depth, start + i), leaf);
            self.cached_leaves_indices[start + i] = 1;
        }
        self.next_index = max(self.next_index, start + leaves.len());
        self.update_hashes(start, leaves_len)?;
        self.next_index = max(self.next_index, start + leaves_len);
        Ok(())
    }

    fn override_range<I, J>(&mut self, start: usize, leaves: I, to_remove_indices: J) -> Result<()>
    fn override_range<I, J>(&mut self, start: usize, leaves: I, indices: J) -> Result<()>
    where
        I: IntoIterator<Item = FrOf<Self::Hasher>>,
        J: IntoIterator<Item = usize>,
        I: ExactSizeIterator<Item = FrOf<Self::Hasher>>,
        J: ExactSizeIterator<Item = usize>,
    {
        let leaves = leaves.into_iter().collect::<Vec<_>>();
        let to_remove_indices = to_remove_indices.into_iter().collect::<Vec<_>>();
        // check if the range is valid
        if leaves.len() + start - to_remove_indices.len() > self.capacity() {
            return Err(Report::msg("provided range exceeds set size"));
        let indices = indices.into_iter().collect::<Vec<_>>();
        let min_index = *indices.first().unwrap();
        let leaves_vec = leaves.into_iter().collect::<Vec<_>>();

        let max_index = start + leaves_vec.len();

        let mut set_values = vec![Self::Hasher::default_leaf(); max_index - min_index];

        for i in min_index..start {
            if !indices.contains(&i) {
                let value = self.get_leaf(i);
                set_values[i - min_index] = value;
            }
        }

        // remove leaves
        for i in &to_remove_indices {
            self.delete(*i)?;
        for i in 0..leaves_vec.len() {
            set_values[start - min_index + i] = leaves_vec[i];
        }

        // add leaves
        for (i, leaf) in leaves.iter().enumerate() {
            self.nodes.insert((self.depth, start + i), *leaf);
            self.recalculate_from(start + i)?;
        for i in indices {
            self.cached_leaves_indices[i] = 0;
        }

        self.next_index = max(
            self.next_index,
            start + leaves.len() - to_remove_indices.len(),
        );
        Ok(())
        self.set_range(start, set_values.into_iter())
            .map_err(|e| Report::msg(e.to_string()))
    }

    // Sets a leaf at the next available index
@@ -183,11 +223,12 @@ where
        // We reset the leaf only if we previously set a leaf at that index
        if index < self.next_index {
            self.set(index, H::default_leaf())?;
            self.cached_leaves_indices[index] = 0;
        }
        Ok(())
    }

    // Computes a merkle proof the the leaf at the specified index
    // Computes a merkle proof for the leaf at the specified index
    fn proof(&self, index: usize) -> Result<Self::Proof> {
        if index >= self.capacity() {
            return Err(Report::msg("index exceeds set size"));
@@ -266,6 +307,7 @@ where
            i >>= 1;
            depth -= 1;
            self.nodes.insert((depth, i), h);
            self.cached_leaves_indices[index] = 1;
            if depth == 0 {
                break;
            }
@@ -278,6 +320,60 @@ where
        }
        Ok(())
    }

    /// Update hashes after some leaves have been set or updated
    /// index - first leaf index (which has been set or updated)
    /// length - number of elements set or updated
    fn update_hashes(&mut self, index: usize, length: usize) -> Result<()> {
        // parent depth & index (used to store in the tree)
        let mut parent_depth = self.depth - 1; // tree depth (or leaves depth) - 1
        let mut parent_index = index >> 1;
        let mut parent_index_bak = parent_index;
        // maximum index at this depth
        let parent_max_index_0 = (1 << parent_depth) / 2;
        // Based on given length (number of elements we will update)
        // we could restrict the parent_max_index
        let current_index_max = if (index + length) % 2 == 0 {
            index + length + 2
        } else {
            index + length + 1
        };
        let mut parent_max_index = min(current_index_max >> 1, parent_max_index_0);

        // current depth & index (used to compute the hash)
        // current depth initially == tree depth (or leaves depth)
        let mut current_depth = self.depth;
        let mut current_index = if index % 2 == 0 { index } else { index - 1 };
        let mut current_index_bak = current_index;

        loop {
            // Hash 2 values at (current depth, current_index) & (current_depth, current_index + 1)
            let n_hash = self.hash_couple(current_depth, current_index);
            // Insert this hash at (parent_depth, parent_index)
            self.nodes.insert((parent_depth, parent_index), n_hash);

            if parent_depth == 0 {
                // We just set the root hash of the tree - nothing to do anymore
                break;
            }
            // Incr parent index
            parent_index += 1;
            // Incr current index (+2 because we've just hashed current index & current_index + 1)
            current_index += 2;
            if parent_index >= parent_max_index {
                // reset (aka decr depth & reset indexes)
                parent_depth -= 1;
                parent_index = parent_index_bak >> 1;
                parent_index_bak = parent_index;
                parent_max_index >>= 1;
                current_depth -= 1;
                current_index = current_index_bak >> 1;
                current_index_bak = current_index;
            }
        }

        Ok(())
    }
}

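For orientation, the node addressing that `update_hashes` walks above can be summarized in a small sketch (illustrative only): nodes are keyed by `(level, index)`, and hashing the pair at `(level, i)` and `(level, i + 1)` for even `i` produces the parent at `(level - 1, i >> 1)`:

```rust
// Illustrative only: the (level, index) parent arithmetic used by update_hashes.
fn parent(level: usize, index: usize) -> (usize, usize) {
    (level - 1, index >> 1)
}

fn main() {
    // With depth = 3, leaves live at level 3. Updating leaves 2 and 3 recomputes
    // their parent (2, 1), then (1, 0), and finally the root at (0, 0).
    assert_eq!(parent(3, 2), (2, 1));
    assert_eq!(parent(2, 1), (1, 0));
    assert_eq!(parent(1, 0), (0, 0));
}
```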
impl<H: Hasher> ZerokitMerkleProof for OptimalMerkleProof<H>
@@ -287,14 +383,12 @@ where
    type Index = u8;
    type Hasher = H;

    #[must_use]
    // Returns the length of a Merkle proof
    fn length(&self) -> usize {
        self.0.len()
    }

    /// Computes the leaf index corresponding to a Merkle proof
    #[must_use]
    fn leaf_index(&self) -> usize {
        // In current implementation the path indexes in a proof correspond to the binary representation of the leaf index
        let mut binary_repr = self.get_path_index();
@@ -304,19 +398,16 @@ where
            .fold(0, |acc, digit| (acc << 1) + usize::from(digit))
    }

    #[must_use]
    /// Returns the path elements forming a Merkle proof
    fn get_path_elements(&self) -> Vec<H::Fr> {
        self.0.iter().map(|x| x.0).collect()
    }

    /// Returns the path indexes forming a Merkle proof
    #[must_use]
    fn get_path_index(&self) -> Vec<u8> {
        self.0.iter().map(|x| x.1).collect()
    }

    #[must_use]
    /// Computes the Merkle root corresponding by iteratively hashing a Merkle proof with a given input leaf
    fn compute_root_from(&self, leaf: &H::Fr) -> H::Fr {
        let mut acc: H::Fr = *leaf;

@@ -9,8 +9,6 @@

// The following implementation was adapted from https://github.com/arkworks-rs/sponge/blob/7d9b3a474c9ddb62890014aeaefcb142ac2b3776/src/poseidon/grain_lfsr.rs

#![allow(dead_code)]

use ark_ff::PrimeField;
use num_bigint::BigUint;

@@ -20,7 +18,6 @@ pub struct PoseidonGrainLFSR {
    pub head: usize,
}

#[allow(unused_variables)]
impl PoseidonGrainLFSR {
    pub fn new(
        is_field: u64,

@@ -7,7 +7,7 @@ use crate::poseidon_constants::find_poseidon_ark_and_mds;
use ark_ff::PrimeField;

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct RoundParamenters<F: PrimeField> {
pub struct RoundParameters<F: PrimeField> {
    pub t: usize,
    pub n_rounds_f: usize,
    pub n_rounds_p: usize,
@@ -17,16 +17,16 @@ pub struct RoundParamenters<F: PrimeField> {
}

pub struct Poseidon<F: PrimeField> {
    round_params: Vec<RoundParamenters<F>>,
    round_params: Vec<RoundParameters<F>>,
}
impl<F: PrimeField> Poseidon<F> {
    // Loads round parameters and generates round constants
    // poseidon_params is a vector containing tuples (t, RF, RP, skip_matrices)
    // where: t is the rate (input lenght + 1), RF is the number of full rounds, RP is the number of partial rounds
    // where: t is the rate (input length + 1), RF is the number of full rounds, RP is the number of partial rounds
    // and skip_matrices is a (temporary) parameter used to generate secure MDS matrices (see comments in the description of find_poseidon_ark_and_mds)
    // TODO: implement automatic generation of round parameters
    pub fn from(poseidon_params: &[(usize, usize, usize, usize)]) -> Self {
        let mut read_params = Vec::<RoundParamenters<F>>::new();
        let mut read_params = Vec::<RoundParameters<F>>::with_capacity(poseidon_params.len());

        for &(t, n_rounds_f, n_rounds_p, skip_matrices) in poseidon_params {
            let (ark, mds) = find_poseidon_ark_and_mds::<F>(
@@ -38,7 +38,7 @@ impl<F: PrimeField> Poseidon<F> {
                n_rounds_p as u64,
                skip_matrices,
            );
            let rp = RoundParamenters {
            let rp = RoundParameters {
                t,
                n_rounds_p,
                n_rounds_f,
@@ -54,24 +54,24 @@ impl<F: PrimeField> Poseidon<F> {
        }
    }

    pub fn get_parameters(&self) -> Vec<RoundParamenters<F>> {
        self.round_params.clone()
    pub fn get_parameters(&self) -> &Vec<RoundParameters<F>> {
        &self.round_params
    }

    pub fn ark(&self, state: &mut [F], c: &[F], it: usize) {
        for i in 0..state.len() {
            state[i] += c[it + i];
        }
        state.iter_mut().enumerate().for_each(|(i, elem)| {
            *elem += c[it + i];
        });
    }

    pub fn sbox(&self, n_rounds_f: usize, n_rounds_p: usize, state: &mut [F], i: usize) {
        if (i < n_rounds_f / 2) || (i >= n_rounds_f / 2 + n_rounds_p) {
            for current_state in &mut state.iter_mut() {
            state.iter_mut().for_each(|current_state| {
                let aux = *current_state;
                *current_state *= *current_state;
                *current_state *= *current_state;
                *current_state *= aux;
            }
            })
        } else {
            let aux = state[0];
            state[0] *= state[0];
@@ -80,21 +80,20 @@ impl<F: PrimeField> Poseidon<F> {
        }
    }

    pub fn mix(&self, state: &[F], m: &[Vec<F>]) -> Vec<F> {
        let mut new_state: Vec<F> = Vec::new();
    pub fn mix_2(&self, state: &[F], m: &[Vec<F>], state_2: &mut [F]) {
        for i in 0..state.len() {
            new_state.push(F::zero());
            for (j, state_item) in state.iter().enumerate() {
                let mut mij = m[i][j];
                mij *= state_item;
                new_state[i] += mij;
            // Cache the row reference
            let row = &m[i];
            let mut acc = F::ZERO;
            for j in 0..state.len() {
                acc += row[j] * state[j];
            }
            state_2[i] = acc;
        }
        new_state.clone()
    }

    pub fn hash(&self, inp: Vec<F>) -> Result<F, String> {
        // Note that the rate t becomes input lenght + 1, hence for lenght N we pick parameters with T = N + 1
    pub fn hash(&self, inp: &[F]) -> Result<F, String> {
        // Note that the rate t becomes input length + 1; hence for length N we pick parameters with T = N + 1
        let t = inp.len() + 1;

        // We seek the index (Poseidon's round_params is an ordered vector) for the parameters corresponding to t
@@ -106,8 +105,9 @@ impl<F: PrimeField> Poseidon<F> {

        let param_index = param_index.unwrap();

        let mut state = vec![F::zero(); t];
        state[1..].clone_from_slice(&inp);
        let mut state = vec![F::ZERO; t];
        let mut state_2 = state.clone();
        state[1..].clone_from_slice(inp);

        for i in 0..(self.round_params[param_index].n_rounds_f
            + self.round_params[param_index].n_rounds_p)
@@ -123,7 +123,8 @@ impl<F: PrimeField> Poseidon<F> {
                &mut state,
                i,
            );
            state = self.mix(&state, &self.round_params[param_index].m);
            self.mix_2(&state, &self.round_params[param_index].m, &mut state_2);
            std::mem::swap(&mut state, &mut state_2);
        }

        Ok(state[0])

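The `hash` signature change from `Vec<F>` to `&[F]` means callers now pass a borrowed slice and no longer give up ownership; a minimal sketch, assuming `ark-bn254`'s `Fr` and the `(3, 8, 57, 0)` parameter tuple from the benchmark table above:

```rust
use ark_bn254::Fr;
use zerokit_utils::Poseidon;

fn main() {
    // Parameters for t = 3, i.e. two-element inputs.
    let hasher = Poseidon::<Fr>::from(&[(3, 8, 57, 0)]);

    // The input is borrowed as a slice; no Vec allocation at the call site.
    let inputs = [Fr::from(1u64), Fr::from(2u64)];
    let digest = hasher
        .hash(&inputs)
        .expect("no parameters for this input length");
    println!("{digest}");
}
```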
@@ -1,7 +1,8 @@
// Tests adapted from https://github.com/worldcoin/semaphore-rs/blob/d462a4372f1fd9c27610f2acfe4841fab1d396aa/src/merkle_tree.rs
#[cfg(test)]
mod test {
pub mod test {
    use hex_literal::hex;
    use std::{fmt::Display, str::FromStr};
    use tiny_keccak::{Hasher as _, Keccak};
    use zerokit_utils::{
        FullMerkleConfig, FullMerkleTree, Hasher, OptimalMerkleConfig, OptimalMerkleTree,
@@ -10,74 +11,257 @@ mod test {
    #[derive(Clone, Copy, Eq, PartialEq)]
    struct Keccak256;

    #[derive(Clone, Copy, Eq, PartialEq, Debug, Default)]
    struct TestFr([u8; 32]);

    impl Hasher for Keccak256 {
        type Fr = [u8; 32];
        type Fr = TestFr;

        fn default_leaf() -> Self::Fr {
            [0; 32]
            TestFr([0; 32])
        }

        fn hash(inputs: &[Self::Fr]) -> Self::Fr {
            let mut output = [0; 32];
            let mut hasher = Keccak::v256();
            for element in inputs {
                hasher.update(element);
                hasher.update(element.0.as_slice());
            }
            hasher.finalize(&mut output);
            output
            TestFr(output)
        }
    }

    impl Display for TestFr {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            write!(f, "{}", hex::encode(self.0.as_slice()))
        }
    }

    impl FromStr for TestFr {
        type Err = std::string::FromUtf8Error;

        fn from_str(s: &str) -> Result<Self, Self::Err> {
            Ok(TestFr(s.as_bytes().try_into().unwrap()))
        }
    }

    impl From<u32> for TestFr {
        fn from(value: u32) -> Self {
            let mut bytes: Vec<u8> = vec![0; 28];
            bytes.extend_from_slice(&value.to_be_bytes());
            TestFr(bytes.as_slice().try_into().unwrap())
        }
    }

    const DEFAULT_DEPTH: usize = 2;

    fn default_full_merkle_tree(depth: usize) -> FullMerkleTree<Keccak256> {
        FullMerkleTree::<Keccak256>::new(depth, TestFr([0; 32]), FullMerkleConfig::default())
            .unwrap()
    }

    fn default_optimal_merkle_tree(depth: usize) -> OptimalMerkleTree<Keccak256> {
        OptimalMerkleTree::<Keccak256>::new(depth, TestFr([0; 32]), OptimalMerkleConfig::default())
            .unwrap()
    }

    #[test]
    fn test_root() {
        let leaves = [
            hex!("0000000000000000000000000000000000000000000000000000000000000001"),
            hex!("0000000000000000000000000000000000000000000000000000000000000002"),
            hex!("0000000000000000000000000000000000000000000000000000000000000003"),
            hex!("0000000000000000000000000000000000000000000000000000000000000004"),
        ];

        let default_tree_root =
            hex!("b4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30");
        let default_tree_root = TestFr(hex!(
            "b4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30"
        ));

        let roots = [
            hex!("c1ba1812ff680ce84c1d5b4f1087eeb08147a4d510f3496b2849df3a73f5af95"),
            hex!("893760ec5b5bee236f29e85aef64f17139c3c1b7ff24ce64eb6315fca0f2485b"),
            hex!("222ff5e0b5877792c2bc1670e2ccd0c2c97cd7bb1672a57d598db05092d3d72c"),
            hex!("a9bb8c3f1f12e9aa903a50c47f314b57610a3ab32f2d463293f58836def38d36"),
        ];
        ]
        .map(TestFr);

        let mut tree =
            FullMerkleTree::<Keccak256>::new(2, [0; 32], FullMerkleConfig::default()).unwrap();
        let nof_leaves = 4;
        let leaves: Vec<TestFr> = (1..=nof_leaves as u32).map(TestFr::from).collect();

        let mut tree = default_full_merkle_tree(DEFAULT_DEPTH);
        assert_eq!(tree.root(), default_tree_root);
        for i in 0..leaves.len() {
        for i in 0..nof_leaves {
            tree.set(i, leaves[i]).unwrap();
            assert_eq!(tree.root(), roots[i]);
        }

        let mut tree =
            OptimalMerkleTree::<Keccak256>::new(2, [0; 32], OptimalMerkleConfig::default())
                .unwrap();
        let mut tree = default_optimal_merkle_tree(DEFAULT_DEPTH);
        assert_eq!(tree.root(), default_tree_root);
        for i in 0..leaves.len() {
        for i in 0..nof_leaves {
            tree.set(i, leaves[i]).unwrap();
            assert_eq!(tree.root(), roots[i]);
        }
    }

    #[test]
    fn test_get_empty_leaves_indices() {
        let depth = 4;
        let nof_leaves: usize = 1 << (depth - 1);
        let leaves: Vec<TestFr> = (0..nof_leaves as u32).map(TestFr::from).collect();
        let leaves_2: Vec<TestFr> = (0u32..2).map(TestFr::from).collect();
        let leaves_4: Vec<TestFr> = (0u32..4).map(TestFr::from).collect();

        let mut tree_full = default_full_merkle_tree(depth);
        let _ = tree_full.set_range(0, leaves.clone().into_iter());
        assert!(tree_full.get_empty_leaves_indices().is_empty());

        let mut vec_idxs = Vec::new();
        for i in 0..nof_leaves {
            vec_idxs.push(i);
            let _ = tree_full.delete(i);
            assert_eq!(tree_full.get_empty_leaves_indices(), vec_idxs);
        }

        for i in (0..nof_leaves).rev() {
            vec_idxs.pop();
            let _ = tree_full.set(i, leaves[i]);
            assert_eq!(tree_full.get_empty_leaves_indices(), vec_idxs);
        }

        // Check situation when the number of items to insert is less than the number of items to delete
        tree_full
            .override_range(0, leaves_2.clone().into_iter(), [0, 1, 2, 3].into_iter())
            .unwrap();

        // check if the indexes for write and delete are the same
        tree_full
            .override_range(0, leaves_4.clone().into_iter(), [0, 1, 2, 3].into_iter())
            .unwrap();
        assert_eq!(tree_full.get_empty_leaves_indices(), vec![]);

        // check if indexes for deletion are before indexes for overwriting
        tree_full
            .override_range(4, leaves_4.clone().into_iter(), [0, 1, 2, 3].into_iter())
            .unwrap();
        assert_eq!(tree_full.get_empty_leaves_indices(), vec![0, 1, 2, 3]);

        // check if the indices for write and delete do not overlap completely
        tree_full
            .override_range(2, leaves_4.clone().into_iter(), [0, 1, 2, 3].into_iter())
            .unwrap();
        assert_eq!(tree_full.get_empty_leaves_indices(), vec![0, 1]);

        //// Optimal Merkle Tree Test

        let mut tree_opt = default_optimal_merkle_tree(depth);
        let _ = tree_opt.set_range(0, leaves.clone().into_iter());
        assert!(tree_opt.get_empty_leaves_indices().is_empty());

        let mut vec_idxs = Vec::new();
        for i in 0..nof_leaves {
            vec_idxs.push(i);
            let _ = tree_opt.delete(i);
            assert_eq!(tree_opt.get_empty_leaves_indices(), vec_idxs);
        }
        for i in (0..nof_leaves).rev() {
            vec_idxs.pop();
            let _ = tree_opt.set(i, leaves[i]);
            assert_eq!(tree_opt.get_empty_leaves_indices(), vec_idxs);
        }

        // Check situation when the number of items to insert is less than the number of items to delete
        tree_opt
            .override_range(0, leaves_2.clone().into_iter(), [0, 1, 2, 3].into_iter())
            .unwrap();

        // check if the indexes for write and delete are the same
        tree_opt
            .override_range(0, leaves_4.clone().into_iter(), [0, 1, 2, 3].into_iter())
            .unwrap();
        assert_eq!(tree_opt.get_empty_leaves_indices(), vec![]);

        // check if indexes for deletion are before indexes for overwriting
        tree_opt
            .override_range(4, leaves_4.clone().into_iter(), [0, 1, 2, 3].into_iter())
            .unwrap();
        assert_eq!(tree_opt.get_empty_leaves_indices(), vec![0, 1, 2, 3]);

        // check if the indices for write and delete do not overlap completely
        tree_opt
            .override_range(2, leaves_4.clone().into_iter(), [0, 1, 2, 3].into_iter())
            .unwrap();
        assert_eq!(tree_opt.get_empty_leaves_indices(), vec![0, 1]);
    }

    #[test]
    fn test_subtree_root() {
        let depth = 3;
        let nof_leaves: usize = 4;
        let leaves: Vec<TestFr> = (0..nof_leaves as u32).map(TestFr::from).collect();

        let mut tree_full = default_optimal_merkle_tree(depth);
        let _ = tree_full.set_range(0, leaves.iter().cloned());

        for i in 0..nof_leaves {
            // check leaves
            assert_eq!(
                tree_full.get(i).unwrap(),
                tree_full.get_subtree_root(depth, i).unwrap()
            );

            // check root
            assert_eq!(tree_full.root(), tree_full.get_subtree_root(0, i).unwrap());
        }

        // check intermediate nodes
        for n in (1..=depth).rev() {
            for i in (0..(1 << n)).step_by(2) {
                let idx_l = i * (1 << (depth - n));
                let idx_r = (i + 1) * (1 << (depth - n));
                let idx_sr = idx_l;

                let prev_l = tree_full.get_subtree_root(n, idx_l).unwrap();
                let prev_r = tree_full.get_subtree_root(n, idx_r).unwrap();
                let subroot = tree_full.get_subtree_root(n - 1, idx_sr).unwrap();

                // check intermediate nodes
                assert_eq!(Keccak256::hash(&[prev_l, prev_r]), subroot);
            }
        }

        let mut tree_opt = default_full_merkle_tree(depth);
        let _ = tree_opt.set_range(0, leaves.iter().cloned());

        for i in 0..nof_leaves {
            // check leaves
            assert_eq!(
                tree_opt.get(i).unwrap(),
                tree_opt.get_subtree_root(depth, i).unwrap()
            );
            // check root
            assert_eq!(tree_opt.root(), tree_opt.get_subtree_root(0, i).unwrap());
        }

        // check intermediate nodes
        for n in (1..=depth).rev() {
            for i in (0..(1 << n)).step_by(2) {
                let idx_l = i * (1 << (depth - n));
                let idx_r = (i + 1) * (1 << (depth - n));
                let idx_sr = idx_l;

                let prev_l = tree_opt.get_subtree_root(n, idx_l).unwrap();
                let prev_r = tree_opt.get_subtree_root(n, idx_r).unwrap();
                let subroot = tree_opt.get_subtree_root(n - 1, idx_sr).unwrap();

                // check intermediate nodes
                assert_eq!(Keccak256::hash(&[prev_l, prev_r]), subroot);
            }
        }
    }

    #[test]
    fn test_proof() {
        let leaves = [
            hex!("0000000000000000000000000000000000000000000000000000000000000001"),
            hex!("0000000000000000000000000000000000000000000000000000000000000002"),
            hex!("0000000000000000000000000000000000000000000000000000000000000003"),
            hex!("0000000000000000000000000000000000000000000000000000000000000004"),
        ];
        let nof_leaves = 4;
        let leaves: Vec<TestFr> = (0..nof_leaves as u32).map(TestFr::from).collect();

        // We test the FullMerkleTree implementation
        let mut tree =
            FullMerkleTree::<Keccak256>::new(2, [0; 32], FullMerkleConfig::default()).unwrap();
        for i in 0..leaves.len() {
        let mut tree = default_full_merkle_tree(DEFAULT_DEPTH);
        for i in 0..nof_leaves {
            // We set the leaves
            tree.set(i, leaves[i]).unwrap();

@@ -94,16 +278,12 @@ mod test {
            assert_eq!(proof.compute_root_from(&leaves[i]), tree.root());

            // We check that the proof is not valid for another leaf
            assert!(!tree
                .verify(&leaves[(i + 1) % leaves.len()], &proof)
                .unwrap());
            assert!(!tree.verify(&leaves[(i + 1) % nof_leaves], &proof).unwrap());
        }

        // We test the OptimalMerkleTree implementation
        let mut tree =
            OptimalMerkleTree::<Keccak256>::new(2, [0; 32], OptimalMerkleConfig::default())
                .unwrap();
        for i in 0..leaves.len() {
        let mut tree = default_optimal_merkle_tree(DEFAULT_DEPTH);
        for i in 0..nof_leaves {
            // We set the leaves
            tree.set(i, leaves[i]).unwrap();

@@ -120,32 +300,25 @@ mod test {
            assert_eq!(proof.compute_root_from(&leaves[i]), tree.root());

            // We check that the proof is not valid for another leaf
            assert!(!tree
                .verify(&leaves[(i + 1) % leaves.len()], &proof)
                .unwrap());
            assert!(!tree.verify(&leaves[(i + 1) % nof_leaves], &proof).unwrap());
        }
    }

    #[test]
    fn test_override_range() {
        let initial_leaves = [
            hex!("0000000000000000000000000000000000000000000000000000000000000001"),
            hex!("0000000000000000000000000000000000000000000000000000000000000002"),
            hex!("0000000000000000000000000000000000000000000000000000000000000003"),
            hex!("0000000000000000000000000000000000000000000000000000000000000004"),
        ];
        let nof_leaves = 4;
        let leaves: Vec<TestFr> = (0..nof_leaves as u32).map(TestFr::from).collect();

        let mut tree =
            OptimalMerkleTree::<Keccak256>::new(2, [0; 32], OptimalMerkleConfig::default())
                .unwrap();
        let mut tree = default_optimal_merkle_tree(DEFAULT_DEPTH);

        // We set the leaves
        tree.set_range(0, initial_leaves.iter().cloned()).unwrap();
        tree.set_range(0, leaves.iter().cloned()).unwrap();

        let new_leaves = [
            hex!("0000000000000000000000000000000000000000000000000000000000000005"),
            hex!("0000000000000000000000000000000000000000000000000000000000000006"),
        ];
        ]
        .map(TestFr);

        let to_delete_indices: [usize; 2] = [0, 1];

@@ -158,8 +331,8 @@ mod test {
            .unwrap();

        // ensure that the leaves are set correctly
        for i in 0..new_leaves.len() {
            assert_eq!(tree.get_leaf(i), new_leaves[i]);
        for (i, &new_leaf) in new_leaves.iter().enumerate() {
            assert_eq!(tree.get_leaf(i), new_leaf);
        }
    }
}

Some files were not shown because too many files have changed in this diff