feat: unify RLN types, refactor public APIs, add full (de)serialization, align FFI/WASM/APIs, simplify errors, update docs/examples, and clean up zerokit (#355)

# Changes

- Unified the `RLN` struct and core protocol types across public, FFI,
and WASM so everything works consistently.
- Fully refactored `protocol.rs` and `public.rs` to clean up the API
surface and make the flow easier to work with.
- Added (de)serialization for `RLN_Proof` and `RLN_ProofValues`, and
matched all C, Nim, WASM, and Node.js examples.
- Aligned FFI and WASM behavior, added missing APIs, and standardized
how witnesses are created and passed around.
- Reworked the error types, added clearer verification messages, and
simplified the overall error structure.
- Updated variable names, README, Rust docs, and examples across the
repo, and updated the outdated RLN RFC link.
- Refactored `rln-cli` to use the new public API, removed
serialize-based cli example, and dropped the `eyre` crate.
- Bumped dependencies, fixed CI, fixed `+atomic` flags for latest
nightly Rust and added `Clippy.toml` for better fmt.
- Added a `prelude.rs` file for easier use, and cleaned up public access
for types and type imports across zerokit modules.
- Separated keygen, proof handling, slashing logic, and witness handling
into the protocol folder.
This commit is contained in:
Vinh Trịnh
2025-12-09 19:03:04 +07:00
committed by GitHub
parent 5c73af1130
commit 77a8d28965
82 changed files with 4714 additions and 6591 deletions

View File

@@ -122,7 +122,6 @@ jobs:
uses: dtolnay/rust-toolchain@nightly
with:
components: rust-src
toolchain: nightly-2025-09-24
targets: wasm32-unknown-unknown
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
@@ -163,15 +162,19 @@ jobs:
if: success() || failure()
run: cargo fmt -- --check
working-directory: ${{ matrix.crate }}
- name: Check clippy (wasm)
- name: Check clippy wasm target
if: (success() || failure()) && (matrix.crate == 'rln-wasm')
run: |
cargo clippy --target wasm32-unknown-unknown --tests --release -- -D warnings
working-directory: ${{ matrix.crate }}
- name: Check clippy (native)
- name: Check clippy default feature
if: (success() || failure()) && (matrix.crate != 'rln-wasm')
run: |
cargo clippy --all-targets --tests --release -- -D warnings
- name: Check clippy stateless feature
if: (success() || failure()) && (matrix.crate == 'rln')
run: |
cargo clippy --all-targets --tests --release --features=stateless --no-default-features -- -D warnings
working-directory: ${{ matrix.crate }}
benchmark-utils:

View File

@@ -109,7 +109,6 @@ jobs:
uses: dtolnay/rust-toolchain@nightly
with:
components: rust-src
toolchain: nightly-2025-09-24
targets: wasm32-unknown-unknown
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
@@ -117,8 +116,8 @@ jobs:
- name: Build rln-wasm package
run: |
if [[ ${{ matrix.feature }} == *parallel* ]]; then
env RUSTFLAGS="-C target-feature=+atomics,+bulk-memory,+mutable-globals" \
rustup run nightly-2025-09-24 wasm-pack build --release --target web --scope waku \
env CARGO_TARGET_WASM32_UNKNOWN_UNKNOWN_RUSTFLAGS="-C target-feature=+atomics,+bulk-memory,+mutable-globals -C link-arg=--shared-memory -C link-arg=--max-memory=1073741824 -C link-arg=--import-memory -C link-arg=--export=__wasm_init_tls -C link-arg=--export=__tls_size -C link-arg=--export=__tls_align -C link-arg=--export=__tls_base" \
rustup run nightly wasm-pack build --release --target web --scope waku \
--features ${{ matrix.feature }} -Z build-std=panic_abort,std
else
wasm-pack build --release --target web --scope waku

View File

@@ -1,29 +0,0 @@
# CHANGE LOG
## 2023-02-28 v0.2
This release contains:
- Improved code quality
- Allows consumers of zerokit RLN to set leaves to the Merkle Tree from an arbitrary index. Useful for batching updates to the Merkle Tree.
- Improved performance for proof generation and verification
- rln_wasm which allows for the consumption of RLN through a WebAssembly interface
- Refactored to generate Semaphore-compatible credentials
- Dual License under Apache 2.0 and MIT
- RLN compiles as a static library, which can be consumed through a C FFI
## 2022-09-19 v0.1
Initial beta release.
This release contains:
- RLN Module with API to manage, compute and verify [RLN](https://rfc.vac.dev/spec/32/) zkSNARK proofs and RLN primitives.
- This can be consumed either as a Rust API or as a C FFI. The latter means it can be easily consumed through other environments, such as [Go](https://github.com/status-im/go-zerokit-rln/blob/master/rln/librln.h) or [Nim](https://github.com/status-im/nwaku/blob/4745c7872c69b5fd5c6ddab36df9c5c3d55f57c3/waku/v2/protocol/waku_rln_relay/waku_rln_relay_types.nim).
It also contains the following examples and experiments:
- Basic [example wrapper](https://github.com/vacp2p/zerokit/tree/master/multiplier) around a simple Circom circuit to show Circom integration through ark-circom and FFI.
- Experimental [Semaphore wrapper](https://github.com/vacp2p/zerokit/tree/master/semaphore).
Feedback welcome! You can either [open an issue](https://github.com/vacp2p/zerokit/issues) or come talk to us in our [Vac Discord](https://discord.gg/PQFdubGt6d) #zerokit channel.

View File

@@ -13,7 +13,7 @@ Following these guidelines definitely helps us in accepting your contributions.
2. Create a feature branch: `git checkout -b fix/your-bug-fix` or `git checkout -b feat/your-feature-name`
3. Make your changes following our guidelines
4. Ensure relevant tests pass (see [testing guidelines](#building-and-testing))
5. Commit your changes (signed commits are highly encouraged - see [commit guidelines](#commits))
5. Commit your changes (signed commits are highly encouraged - see [commit guidelines](#commits))
6. Push and create a Pull Request
## Development Setup
@@ -178,7 +178,7 @@ chore(ci): update rust toolchain version
## Roadmap Alignment
Please refer to our [project roadmap](https://roadmap.vac.dev/) for current development priorities.
Consider how your changes align with these strategic goals, when contributing.
Consider how your changes align with these strategic goals when contributing.
## Getting Help

167
Cargo.lock generated
View File

@@ -23,6 +23,15 @@ dependencies = [
"memchr",
]
[[package]]
name = "alloca"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5a7d05ea6aea7e9e64d25b9156ba2fee3fdd659e34e41063cd2fc7cd020d7f4"
dependencies = [
"cc",
]
[[package]]
name = "allocator-api2"
version = "0.2.21"
@@ -90,7 +99,7 @@ checksum = "e7e89fe77d1f0f4fe5b96dfc940923d88d17b6a773808124f21e764dfb063c6a"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -143,7 +152,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62945a2f7e6de02a31fe400aa489f0e0f5b2502e69f95f853adb82a96c7a6b60"
dependencies = [
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -156,7 +165,7 @@ dependencies = [
"num-traits",
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -242,7 +251,7 @@ checksum = "213888f660fddcca0d257e88e54ac05bca01885f258ccdf695bafd77031bb69d"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -324,9 +333,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
[[package]]
name = "bytes"
version = "1.10.1"
version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a"
checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3"
[[package]]
name = "cast"
@@ -334,6 +343,16 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5"
[[package]]
name = "cc"
version = "1.2.48"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c481bdbf0ed3b892f6f806287d72acd515b352a4ec27a208489b8c1bc839633a"
dependencies = [
"find-msvc-tools",
"shlex",
]
[[package]]
name = "cfg-if"
version = "1.0.4"
@@ -369,18 +388,18 @@ dependencies = [
[[package]]
name = "clap"
version = "4.5.51"
version = "4.5.53"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c26d721170e0295f191a69bd9a1f93efcdb0aff38684b61ab5750468972e5f5"
checksum = "c9e340e012a1bf4935f5282ed1436d1489548e8f72308207ea5df0e23d2d03f8"
dependencies = [
"clap_builder",
]
[[package]]
name = "clap_builder"
version = "4.5.51"
version = "4.5.53"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75835f0c7bf681bfd05abe44e965760fea999a5286c6eb2d59883634fd02011a"
checksum = "d76b5d13eaa18c901fd2f7fca939fefe3a0727a953561fefdf3b2922b8569d00"
dependencies = [
"anstyle",
"clap_lex",
@@ -412,10 +431,11 @@ dependencies = [
[[package]]
name = "criterion"
version = "0.7.0"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1c047a62b0cc3e145fa84415a3191f628e980b194c2755aa12300a4e6cbd928"
checksum = "a0dfe5e9e71bdcf4e4954f7d14da74d1cdb92a3a07686452d1509652684b1aab"
dependencies = [
"alloca",
"anes",
"cast",
"ciborium",
@@ -424,6 +444,7 @@ dependencies = [
"itertools 0.13.0",
"num-traits",
"oorandom",
"page_size",
"plotters",
"rayon",
"regex",
@@ -435,9 +456,9 @@ dependencies = [
[[package]]
name = "criterion-plot"
version = "0.6.0"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b1bcc0dc7dfae599d84ad0b1a55f80cde8af3725da8313b528da95ef783e338"
checksum = "5de36c2bee19fba779808f92bf5d9b0fa5a40095c277aba10c458a12b35d21d6"
dependencies = [
"cast",
"itertools 0.13.0",
@@ -476,9 +497,9 @@ checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5"
[[package]]
name = "crypto-common"
version = "0.1.6"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a"
dependencies = [
"generic-array",
"typenum",
@@ -524,7 +545,7 @@ dependencies = [
"enum-ordinalize",
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -550,7 +571,7 @@ checksum = "8ca9601fb2d62598ee17836250842873a413586e5d7ed88b356e38ddbb0ec631"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -610,6 +631,12 @@ version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
[[package]]
name = "find-msvc-tools"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844"
[[package]]
name = "fnv"
version = "1.0.7"
@@ -637,9 +664,9 @@ dependencies = [
[[package]]
name = "generic-array"
version = "0.14.9"
version = "0.14.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4bb6743198531e02858aeaea5398fcc883e71851fcbcb5a2f773e2fb6cb1edf2"
checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
dependencies = [
"typenum",
"version_check",
@@ -690,9 +717,9 @@ dependencies = [
[[package]]
name = "hashbrown"
version = "0.16.0"
version = "0.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d"
checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100"
[[package]]
name = "hex"
@@ -702,18 +729,18 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
[[package]]
name = "hex-literal"
version = "0.4.1"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6fe2267d4ed49bc07b63801559be28c718ea06c4738b7a03c94df7386d2cde46"
checksum = "e712f64ec3850b98572bffac52e2c6f282b29fe6c5fa6d42334b30be438d95c1"
[[package]]
name = "indexmap"
version = "2.12.0"
version = "2.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6717a8d2a5a929a1a2eb43a12812498ed141a0bcfb7e8f7844fbdbe4303bba9f"
checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2"
dependencies = [
"equivalent",
"hashbrown 0.16.0",
"hashbrown 0.16.1",
]
[[package]]
@@ -760,9 +787,9 @@ checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
[[package]]
name = "js-sys"
version = "0.3.82"
version = "0.3.83"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b011eec8cc36da2aab2d5cff675ec18454fad408585853910a202391cf9f8e65"
checksum = "464a3709c7f55f1f721e5389aa6ea4e3bc6aba669353300af094b29ffbdde1d8"
dependencies = [
"once_cell",
"wasm-bindgen",
@@ -891,6 +918,16 @@ version = "11.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e"
[[package]]
name = "page_size"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30d5b2194ed13191c1999ae0704b7839fb18384fa22e49b57eeaa97d79ce40da"
dependencies = [
"libc",
"winapi",
]
[[package]]
name = "parking_lot"
version = "0.11.2"
@@ -1027,14 +1064,14 @@ dependencies = [
"itertools 0.14.0",
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
name = "quote"
version = "1.0.41"
version = "1.0.42"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce25767e7b499d1b604768e7cde645d14cc8584231ea6b295e9c9eb22c02e1d1"
checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f"
dependencies = [
"proc-macro2",
]
@@ -1337,7 +1374,7 @@ checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -1370,6 +1407,12 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f179d4e11094a893b82fff208f74d448a7512f99f5a0acbd5c679b705f83ed9"
[[package]]
name = "shlex"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
[[package]]
name = "sled"
version = "0.34.7"
@@ -1446,9 +1489,9 @@ dependencies = [
[[package]]
name = "syn"
version = "2.0.108"
version = "2.0.111"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da58917d35242480a05c2897064da0a80589a2a0476c9a3f2fdc83b53502e917"
checksum = "390cc9a294ab71bdb1aa2e99d13be9c753cd2d7bd6560c77118597410c4d2e87"
dependencies = [
"proc-macro2",
"quote",
@@ -1485,7 +1528,7 @@ checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -1539,9 +1582,9 @@ dependencies = [
[[package]]
name = "tracing"
version = "0.1.41"
version = "0.1.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
checksum = "2d15d90a0b5c19378952d479dc858407149d7bb45a14de0142f6c534b16fc647"
dependencies = [
"pin-project-lite",
"tracing-attributes",
@@ -1550,20 +1593,20 @@ dependencies = [
[[package]]
name = "tracing-attributes"
version = "0.1.30"
version = "0.1.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903"
checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
name = "tracing-core"
version = "0.1.34"
version = "0.1.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678"
checksum = "7a04e24fab5c89c6a36eb8558c9656f30d81de51dfa4d3b45f26b21d61fa0a6c"
dependencies = [
"once_cell",
"valuable",
@@ -1656,9 +1699,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen"
version = "0.2.105"
version = "0.2.106"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da95793dfc411fbbd93f5be7715b0578ec61fe87cb1a42b12eb625caa5c5ea60"
checksum = "0d759f433fa64a2d763d1340820e46e111a7a5ab75f993d1852d70b03dbb80fd"
dependencies = [
"cfg-if",
"once_cell",
@@ -1669,9 +1712,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro"
version = "0.2.105"
version = "0.2.106"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04264334509e04a7bf8690f2384ef5265f05143a4bff3889ab7a3269adab59c2"
checksum = "48cb0d2638f8baedbc542ed444afc0644a29166f1595371af4fecf8ce1e7eeb3"
dependencies = [
"quote",
"wasm-bindgen-macro-support",
@@ -1679,31 +1722,31 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro-support"
version = "0.2.105"
version = "0.2.106"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "420bc339d9f322e562942d52e115d57e950d12d88983a14c79b86859ee6c7ebc"
checksum = "cefb59d5cd5f92d9dcf80e4683949f15ca4b511f4ac0a6e14d4e1ac60c6ecd40"
dependencies = [
"bumpalo",
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-shared"
version = "0.2.105"
version = "0.2.106"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "76f218a38c84bcb33c25ec7059b07847d465ce0e0a76b995e134a45adcb6af76"
checksum = "cbc538057e648b67f72a982e708d485b2efa771e1ac05fec311f9f63e5800db4"
dependencies = [
"unicode-ident",
]
[[package]]
name = "web-sys"
version = "0.3.82"
version = "0.3.83"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3a1f95c0d03a47f4ae1f7a64643a6bb97465d9b740f0fa8f90ea33915c99a9a1"
checksum = "9b32828d774c412041098d182a8b38b16ea816958e07cf40eec2bc080ae137ac"
dependencies = [
"js-sys",
"wasm-bindgen",
@@ -1757,9 +1800,9 @@ dependencies = [
[[package]]
name = "winnow"
version = "0.7.13"
version = "0.7.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf"
checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829"
dependencies = [
"memchr",
]
@@ -1792,22 +1835,22 @@ dependencies = [
[[package]]
name = "zerocopy"
version = "0.8.27"
version = "0.8.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0894878a5fa3edfd6da3f88c4805f4c8558e2b996227a3d864f47fe11e38282c"
checksum = "fd74ec98b9250adb3ca554bdde269adf631549f51d8a8f8f0a10b50f1cb298c3"
dependencies = [
"zerocopy-derive",
]
[[package]]
name = "zerocopy-derive"
version = "0.8.27"
version = "0.8.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "88d2b8d9c68ad2b9e4340d7832716a4d21a22a1154777ad56ea55c51a9cf3831"
checksum = "d8a8d209fdf45cf5138cbb5a506f6b52522a25afccc534d1475dad8e31105c6a"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -1827,7 +1870,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]

View File

@@ -4,7 +4,7 @@ exclude = ["rln-cli", "rln-wasm"]
resolver = "2"
# Compilation profile for any non-workspace member.
# Dependencies are optimized, even in a dev build. This improves dev performance
# while having neglible impact on incremental build times.
# Dependencies are optimized, even in a dev build.
# This improves dev performance while having negligible impact on incremental build times.
[profile.dev.package."*"]
opt-level = 3

View File

@@ -13,15 +13,16 @@ Zerokit provides zero-knowledge cryptographic primitives with a focus on perform
The current focus is on Rate-Limiting Nullifier [RLN](https://github.com/Rate-Limiting-Nullifier) implementation.
Current implementation is based on the following
[specification](https://github.com/vacp2p/rfc-index/blob/main/vac/raw/rln-v2.md)
[specification](https://rfc.vac.dev/vac/raw/rln-v2)
and focused on RLNv2 which allows to set a rate limit for the number of messages that can be sent by a user.
## Features
- **RLN Implementation**: Efficient Rate-Limiting Nullifier using zkSNARKs
- **RLN Implementation**: Efficient Rate-Limiting Nullifier using zkSNARK
- **Circom Compatibility**: Uses Circom-based circuits for RLN
- **Cross-Platform**: Support for multiple architectures (see compatibility note below)
- **Cross-Platform**: Support for multiple architectures with cross-compilation
- **FFI-Friendly**: Easy to integrate with other languages
- **WASM Support**: Can be compiled to WebAssembly for web applications
## Architecture

344
rln-cli/Cargo.lock generated
View File

@@ -2,21 +2,6 @@
# It is not intended for manual editing.
version = 4
[[package]]
name = "addr2line"
version = "0.25.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b5d307320b3181d6d7954e663bd7c774a838b8220fe0593c86d9fb09f498b4b"
dependencies = [
"gimli",
]
[[package]]
name = "adler2"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa"
[[package]]
name = "ahash"
version = "0.8.12"
@@ -67,22 +52,22 @@ dependencies = [
[[package]]
name = "anstyle-query"
version = "1.1.4"
version = "1.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e231f6134f61b71076a3eab506c379d4f36122f2af15a9ff04415ea4c3339e2"
checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc"
dependencies = [
"windows-sys 0.60.2",
"windows-sys",
]
[[package]]
name = "anstyle-wincon"
version = "3.0.10"
version = "3.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3e0633414522a32ffaac8ac6cc8f748e090c5717661fddeea04219e2344f5f2a"
checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d"
dependencies = [
"anstyle",
"once_cell_polyfill",
"windows-sys 0.60.2",
"windows-sys",
]
[[package]]
@@ -134,7 +119,7 @@ checksum = "e7e89fe77d1f0f4fe5b96dfc940923d88d17b6a773808124f21e764dfb063c6a"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -187,7 +172,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62945a2f7e6de02a31fe400aa489f0e0f5b2502e69f95f853adb82a96c7a6b60"
dependencies = [
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -200,7 +185,7 @@ dependencies = [
"num-traits",
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -261,7 +246,7 @@ dependencies = [
"ark-ff",
"ark-std",
"tracing",
"tracing-subscriber 0.2.25",
"tracing-subscriber",
]
[[package]]
@@ -286,7 +271,7 @@ checksum = "213888f660fddcca0d257e88e54ac05bca01885f258ccdf695bafd77031bb69d"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -324,21 +309,6 @@ version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
[[package]]
name = "backtrace"
version = "0.3.76"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb531853791a215d7c62a30daf0dde835f381ab5de4589cfe7c649d2cbe92bd6"
dependencies = [
"addr2line",
"cfg-if",
"libc",
"miniz_oxide",
"object",
"rustc-demangle",
"windows-link",
]
[[package]]
name = "bitflags"
version = "1.3.2"
@@ -377,9 +347,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
[[package]]
name = "bytes"
version = "1.10.1"
version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a"
checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3"
[[package]]
name = "cfg-if"
@@ -389,9 +359,9 @@ checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801"
[[package]]
name = "clap"
version = "4.5.51"
version = "4.5.53"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c26d721170e0295f191a69bd9a1f93efcdb0aff38684b61ab5750468972e5f5"
checksum = "c9e340e012a1bf4935f5282ed1436d1489548e8f72308207ea5df0e23d2d03f8"
dependencies = [
"clap_builder",
"clap_derive",
@@ -399,9 +369,9 @@ dependencies = [
[[package]]
name = "clap_builder"
version = "4.5.51"
version = "4.5.53"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75835f0c7bf681bfd05abe44e965760fea999a5286c6eb2d59883634fd02011a"
checksum = "d76b5d13eaa18c901fd2f7fca939fefe3a0727a953561fefdf3b2922b8569d00"
dependencies = [
"anstream",
"anstyle",
@@ -418,7 +388,7 @@ dependencies = [
"heck",
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -427,33 +397,6 @@ version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1d728cc89cf3aee9ff92b05e62b19ee65a02b5702cff7d5a377e32c6ae29d8d"
[[package]]
name = "color-eyre"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5920befb47832a6d61ee3a3a846565cfa39b331331e68a3b1d1116630f2f26d"
dependencies = [
"backtrace",
"color-spantrace",
"eyre",
"indenter",
"once_cell",
"owo-colors",
"tracing-error",
]
[[package]]
name = "color-spantrace"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8b88ea9df13354b55bc7234ebcce36e6ef896aca2e42a15de9e10edce01b427"
dependencies = [
"once_cell",
"owo-colors",
"tracing-core",
"tracing-error",
]
[[package]]
name = "colorchoice"
version = "1.0.4"
@@ -511,9 +454,9 @@ checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5"
[[package]]
name = "crypto-common"
version = "0.1.6"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a"
dependencies = [
"generic-array",
"typenum",
@@ -550,7 +493,7 @@ dependencies = [
"enum-ordinalize",
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -576,7 +519,7 @@ checksum = "8ca9601fb2d62598ee17836250842873a413586e5d7ed88b356e38ddbb0ec631"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -592,7 +535,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb"
dependencies = [
"libc",
"windows-sys 0.61.2",
"windows-sys",
]
[[package]]
@@ -630,16 +573,6 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "320bea982e85d42441eb25c49b41218e7eaa2657e8f90bc4eca7437376751e23"
[[package]]
name = "eyre"
version = "0.6.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7cd915d99f24784cdc19fd37ef22b97e3ff0ae756c7e492e9fbfe897d61e2aec"
dependencies = [
"indenter",
"once_cell",
]
[[package]]
name = "fastrand"
version = "2.3.0"
@@ -673,9 +606,9 @@ dependencies = [
[[package]]
name = "generic-array"
version = "0.14.9"
version = "0.14.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4bb6743198531e02858aeaea5398fcc883e71851fcbcb5a2f773e2fb6cb1edf2"
checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
dependencies = [
"typenum",
"version_check",
@@ -704,12 +637,6 @@ dependencies = [
"wasip2",
]
[[package]]
name = "gimli"
version = "0.32.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e629b9b98ef3dd8afe6ca2bd0f89306cec16d43d907889945bc5d6687f2f13c7"
[[package]]
name = "hashbrown"
version = "0.15.5"
@@ -721,9 +648,9 @@ dependencies = [
[[package]]
name = "hashbrown"
version = "0.16.0"
version = "0.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d"
checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100"
[[package]]
name = "heck"
@@ -731,26 +658,14 @@ version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
[[package]]
name = "hex"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
[[package]]
name = "indenter"
version = "0.3.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "964de6e86d545b246d84badc0fef527924ace5134f30641c203ef52ba83f58d5"
[[package]]
name = "indexmap"
version = "2.12.0"
version = "2.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6717a8d2a5a929a1a2eb43a12812498ed141a0bcfb7e8f7844fbdbe4303bba9f"
checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2"
dependencies = [
"equivalent",
"hashbrown 0.16.0",
"hashbrown 0.16.1",
]
[[package]]
@@ -801,12 +716,6 @@ dependencies = [
"cpufeatures",
]
[[package]]
name = "lazy_static"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
[[package]]
name = "libc"
version = "0.2.177"
@@ -874,15 +783,6 @@ dependencies = [
"zeroize",
]
[[package]]
name = "miniz_oxide"
version = "0.8.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316"
dependencies = [
"adler2",
]
[[package]]
name = "num-bigint"
version = "0.4.6"
@@ -912,15 +812,6 @@ dependencies = [
"libm",
]
[[package]]
name = "object"
version = "0.37.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff76201f031d8863c38aa7f905eca4f53abbfa15f609db4277d44cd8938f33fe"
dependencies = [
"memchr",
]
[[package]]
name = "once_cell"
version = "1.21.3"
@@ -933,12 +824,6 @@ version = "1.70.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe"
[[package]]
name = "owo-colors"
version = "4.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c6901729fa79e91a0913333229e9ca5dc725089d1c363b2f4b4760709dc4a52"
[[package]]
name = "parking_lot"
version = "0.11.2"
@@ -1047,14 +932,14 @@ dependencies = [
"itertools 0.14.0",
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
name = "quote"
version = "1.0.41"
version = "1.0.42"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce25767e7b499d1b604768e7cde645d14cc8584231ea6b295e9c9eb22c02e1d1"
checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f"
dependencies = [
"proc-macro2",
]
@@ -1195,7 +1080,6 @@ name = "rln-cli"
version = "0.5.0"
dependencies = [
"clap",
"color-eyre",
"rln",
"serde",
"serde_json",
@@ -1224,12 +1108,6 @@ version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48fd7bd8a6377e15ad9d42a8ec25371b94ddc67abe7c8b9127bec79bebaaae18"
[[package]]
name = "rustc-demangle"
version = "0.1.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56f7d92ca342cea22a06f2121d944b4fd82af56988c270852495420f961d4ace"
[[package]]
name = "rustc_version"
version = "0.4.1"
@@ -1249,7 +1127,7 @@ dependencies = [
"errno",
"libc",
"linux-raw-sys",
"windows-sys 0.61.2",
"windows-sys",
]
[[package]]
@@ -1334,7 +1212,7 @@ checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -1367,15 +1245,6 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f179d4e11094a893b82fff208f74d448a7512f99f5a0acbd5c679b705f83ed9"
[[package]]
name = "sharded-slab"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6"
dependencies = [
"lazy_static",
]
[[package]]
name = "sled"
version = "0.34.7"
@@ -1458,9 +1327,9 @@ dependencies = [
[[package]]
name = "syn"
version = "2.0.108"
version = "2.0.111"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da58917d35242480a05c2897064da0a80589a2a0476c9a3f2fdc83b53502e917"
checksum = "390cc9a294ab71bdb1aa2e99d13be9c753cd2d7bd6560c77118597410c4d2e87"
dependencies = [
"proc-macro2",
"quote",
@@ -1477,7 +1346,7 @@ dependencies = [
"getrandom 0.3.4",
"once_cell",
"rustix",
"windows-sys 0.61.2",
"windows-sys",
]
[[package]]
@@ -1497,16 +1366,7 @@ checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.108",
]
[[package]]
name = "thread_local"
version = "1.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185"
dependencies = [
"cfg-if",
"syn 2.0.111",
]
[[package]]
@@ -1550,9 +1410,9 @@ dependencies = [
[[package]]
name = "tracing"
version = "0.1.41"
version = "0.1.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
checksum = "2d15d90a0b5c19378952d479dc858407149d7bb45a14de0142f6c534b16fc647"
dependencies = [
"pin-project-lite",
"tracing-attributes",
@@ -1561,35 +1421,25 @@ dependencies = [
[[package]]
name = "tracing-attributes"
version = "0.1.30"
version = "0.1.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903"
checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
name = "tracing-core"
version = "0.1.34"
version = "0.1.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678"
checksum = "7a04e24fab5c89c6a36eb8558c9656f30d81de51dfa4d3b45f26b21d61fa0a6c"
dependencies = [
"once_cell",
"valuable",
]
[[package]]
name = "tracing-error"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b1581020d7a273442f5b45074a6a57d5757ad0a47dac0e9f0bd57b81936f3db"
dependencies = [
"tracing",
"tracing-subscriber 0.3.20",
]
[[package]]
name = "tracing-subscriber"
version = "0.2.25"
@@ -1599,17 +1449,6 @@ dependencies = [
"tracing-core",
]
[[package]]
name = "tracing-subscriber"
version = "0.3.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2054a14f5307d601f88daf0553e1cbf472acc4f2c51afab632431cdcd72124d5"
dependencies = [
"sharded-slab",
"thread_local",
"tracing-core",
]
[[package]]
name = "typenum"
version = "1.19.0"
@@ -1710,15 +1549,6 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
[[package]]
name = "windows-sys"
version = "0.60.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb"
dependencies = [
"windows-targets",
]
[[package]]
name = "windows-sys"
version = "0.61.2"
@@ -1728,76 +1558,11 @@ dependencies = [
"windows-link",
]
[[package]]
name = "windows-targets"
version = "0.53.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3"
dependencies = [
"windows-link",
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_gnullvm",
"windows_i686_msvc",
"windows_x86_64_gnu",
"windows_x86_64_gnullvm",
"windows_x86_64_msvc",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.53.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53"
[[package]]
name = "windows_aarch64_msvc"
version = "0.53.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006"
[[package]]
name = "windows_i686_gnu"
version = "0.53.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3"
[[package]]
name = "windows_i686_gnullvm"
version = "0.53.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c"
[[package]]
name = "windows_i686_msvc"
version = "0.53.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2"
[[package]]
name = "windows_x86_64_gnu"
version = "0.53.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.53.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1"
[[package]]
name = "windows_x86_64_msvc"
version = "0.53.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650"
[[package]]
name = "winnow"
version = "0.7.13"
version = "0.7.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf"
checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829"
dependencies = [
"memchr",
]
@@ -1830,22 +1595,22 @@ dependencies = [
[[package]]
name = "zerocopy"
version = "0.8.27"
version = "0.8.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0894878a5fa3edfd6da3f88c4805f4c8558e2b996227a3d864f47fe11e38282c"
checksum = "fd74ec98b9250adb3ca554bdde269adf631549f51d8a8f8f0a10b50f1cb298c3"
dependencies = [
"zerocopy-derive",
]
[[package]]
name = "zerocopy-derive"
version = "0.8.27"
version = "0.8.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "88d2b8d9c68ad2b9e4340d7832716a4d21a22a1154777ad56ea55c51a9cf3831"
checksum = "d8a8d209fdf45cf5138cbb5a506f6b52522a25afccc534d1475dad8e31105c6a"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -1865,7 +1630,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -1873,7 +1638,6 @@ name = "zerokit_utils"
version = "0.7.0"
dependencies = [
"ark-ff",
"hex",
"num-bigint",
"rayon",
"serde_json",

View File

@@ -15,10 +15,9 @@ required-features = ["stateless"]
[dependencies]
rln = { path = "../rln", version = "0.9.0", default-features = false }
zerokit_utils = { path = "../utils", version = "0.7.0", default-features = false }
clap = { version = "4.5.41", features = ["cargo", "derive", "env"] }
color-eyre = "0.6.5"
serde_json = "1.0.141"
serde = { version = "1.0", features = ["derive"] }
clap = { version = "4.5.53", features = ["cargo", "derive", "env"] }
serde_json = "1.0.145"
serde = { version = "1.0.228", features = ["derive"] }
[features]
default = ["rln/pmtree-ft", "rln/parallel"]

View File

@@ -1,35 +1,10 @@
# Zerokit RLN-CLI
The Zerokit RLN-CLI provides a command-line interface for interacting with the public API of the [Zerokit RLN Module](../rln/README.md).
It also contains:
+ [Relay Example](#relay-example) to demonstrate the use of the RLN module for spam prevention.
+ [Stateless Example](#stateless-example) to demonstrate the use of the RLN module for stateless features.
## Configuration
The CLI can be configured using a JSON configuration file (see the [example](../rln/resources/tree_depth_20/config.json)).
You can specify the configuration file path using the `RLN_CONFIG_PATH` environment variable:
```bash
export RLN_CONFIG_PATH=../rln/resources/tree_depth_20/config.json
```
Alternatively, you can provide the configuration file path as an argument for each command:
```bash
RLN_CONFIG_PATH=../rln/resources/tree_depth_20/config.json cargo run -- <SUBCOMMAND> [OPTIONS]
```
If the configuration file is empty, default settings will be used, but the tree data folder will be temporary and not saved to the preconfigured path.
We recommend using the default config, as all commands (except `new` and `create-with-params`) require an initialized RLN instance.
The Zerokit RLN-CLI provides command-line interface examples showing how to use the public API of the [Zerokit RLN Module](../rln/README.md).
## Relay Example
The following [Example](src/examples/relay.rs) demonstrates how RLN enables spam prevention in anonymous environments for multple users.
The following [Relay Example](src/examples/relay.rs) demonstrates how RLN enables spam prevention in anonymous environments for multiple users.
You can run the example using the following command:
@@ -43,7 +18,7 @@ The customize **TREE_DEPTH** constant differs from the default value of `20` sho
## Stateless Example
The following [Example](src/examples/stateless.rs) demonstrates how RLN can be used for stateless features by creating the Merkle tree outside of RLN instance.
The following [Stateless Example](src/examples/stateless.rs) demonstrates how RLN can be used for stateless features by creating the Merkle tree outside of RLN instance.
This example functions similarly to the [Relay Example](#relay-example) but uses a stateless RLN and a separate Merkle tree.
@@ -52,97 +27,3 @@ You can run the example using the following command:
```bash
cargo run --example stateless --no-default-features --features stateless
```
## CLI Commands
### Instance Management
To initialize a new RLN instance:
```bash
cargo run new --tree-depth <DEPTH>
```
To initialize an RLN instance with custom parameters:
```bash
cargo run new-with-params --resources-path <PATH> --tree-depth <DEPTH>
```
To update the Merkle tree depth:
```bash
cargo run set-tree --tree-depth <DEPTH>
```
### Leaf Operations
To set a single leaf:
```bash
cargo run set-leaf --index <INDEX> --input <INPUT_PATH>
```
To set multiple leaves:
```bash
cargo run set-multiple-leaves --index <START_INDEX> --input <INPUT_PATH>
```
To reset multiple leaves:
```bash
cargo run reset-multiple-leaves --input <INPUT_PATH>
```
To set the next available leaf:
```bash
cargo run set-next-leaf --input <INPUT_PATH>
```
To delete a specific leaf:
```bash
cargo run delete-leaf --index <INDEX>
```
### Proof Operations
To generate a proof:
```bash
cargo run prove --input <INPUT_PATH>
```
To generate an RLN proof:
```bash
cargo run generate-proof --input <INPUT_PATH>
```
To verify a proof:
```bash
cargo run verify --input <PROOF_PATH>
```
To verify a proof with multiple Merkle roots:
```bash
cargo run verify-with-roots --input <INPUT_PATH> --roots <ROOTS_PATH>
```
### Tree Information
To retrieve the current Merkle root:
```bash
cargo run get-root
```
To obtain a Merkle proof for a specific index:
```bash
cargo run get-proof --index <INDEX>
```

View File

@@ -1,69 +0,0 @@
use std::path::PathBuf;
use clap::Subcommand;
use rln::circuit::TEST_TREE_DEPTH;
/// CLI subcommands for driving the RLN public API.
///
/// Per the CLI README, all commands except `New` and `NewWithParams`
/// require an already-initialized RLN instance.
#[derive(Subcommand)]
pub(crate) enum Commands {
    /// Initialize a new RLN instance.
    New {
        // Merkle tree depth; defaults to the crate-wide test depth.
        #[arg(short, long, default_value_t = TEST_TREE_DEPTH)]
        tree_depth: usize,
    },
    /// Initialize an RLN instance from custom circuit resources on disk.
    NewWithParams {
        #[arg(short, long, default_value_t = TEST_TREE_DEPTH)]
        tree_depth: usize,
        // Directory holding the circuit resource files
        // (presumably `rln_final.arkzkey` and `graph.bin` — see main.rs).
        #[arg(short, long, default_value = "../rln/resources/tree_depth_20")]
        resources_path: PathBuf,
    },
    /// Update the Merkle tree depth.
    SetTree {
        #[arg(short, long, default_value_t = TEST_TREE_DEPTH)]
        tree_depth: usize,
    },
    /// Set a single leaf at `index` from the file at `input`.
    SetLeaf {
        #[arg(short, long)]
        index: usize,
        #[arg(short, long)]
        input: PathBuf,
    },
    /// Set multiple leaves starting at `index` from the file at `input`.
    SetMultipleLeaves {
        #[arg(short, long)]
        index: usize,
        #[arg(short, long)]
        input: PathBuf,
    },
    /// Reinitialize the tree with the leaves read from `input`.
    ResetMultipleLeaves {
        #[arg(short, long)]
        input: PathBuf,
    },
    /// Set the next available leaf from the file at `input`.
    SetNextLeaf {
        #[arg(short, long)]
        input: PathBuf,
    },
    /// Delete the leaf at `index`.
    DeleteLeaf {
        #[arg(short, long)]
        index: usize,
    },
    /// Print the current Merkle root.
    GetRoot,
    /// Print the Merkle proof for the leaf at `index`.
    GetProof {
        #[arg(short, long)]
        index: usize,
    },
    /// Generate a proof from the serialized input at `input`.
    Prove {
        #[arg(short, long)]
        input: PathBuf,
    },
    /// Verify the serialized proof at `input`.
    Verify {
        #[arg(short, long)]
        input: PathBuf,
    },
    /// Generate an RLN proof from the serialized input at `input`.
    GenerateProof {
        #[arg(short, long)]
        input: PathBuf,
    },
    /// Verify a proof against multiple acceptable Merkle roots.
    VerifyWithRoots {
        #[arg(short, long)]
        input: PathBuf,
        #[arg(short, long)]
        roots: PathBuf,
    },
}

View File

@@ -1,31 +0,0 @@
use std::{fs::File, io::Read, path::PathBuf};
use color_eyre::Result;
use serde::{Deserialize, Serialize};
use serde_json::Value;
pub const RLN_CONFIG_PATH: &str = "RLN_CONFIG_PATH";
/// CLI configuration loaded from the JSON file pointed to by `RLN_CONFIG_PATH`.
#[derive(Serialize, Deserialize)]
pub(crate) struct Config {
    // Raw JSON tree configuration forwarded to the RLN constructor;
    // `None` means no config file was provided and defaults apply.
    pub tree_config: Option<String>,
}
impl Config {
    /// Loads the CLI configuration.
    ///
    /// Reads the `RLN_CONFIG_PATH` environment variable; if set, the file at
    /// that path is opened, read, and parsed as JSON, and the parsed value is
    /// re-serialized into `tree_config`. If the variable is unset, a `Config`
    /// with `tree_config: None` is returned so default settings are used.
    ///
    /// # Errors
    /// Returns an error if the file cannot be opened or read, or if its
    /// contents are not valid JSON.
    pub(crate) fn load_config() -> Result<Config> {
        match std::env::var(RLN_CONFIG_PATH) {
            Ok(env) => {
                let path = PathBuf::from(env);
                let mut file = File::open(path)?;
                let mut contents = String::new();
                file.read_to_string(&mut contents)?;
                // Parse to validate the JSON before storing it back as a string.
                let tree_config: Value = serde_json::from_str(&contents)?;
                println!("Initializing RLN with custom config");
                Ok(Config {
                    tree_config: Some(tree_config.to_string()),
                })
            }
            // An unset env var is not an error: fall back to defaults.
            Err(_) => Ok(Config { tree_config: None }),
        }
    }
}

View File

@@ -1,24 +1,23 @@
use std::{
collections::HashMap,
fs::File,
io::{stdin, stdout, Cursor, Read, Write},
io::{stdin, stdout, Read, Write},
path::{Path, PathBuf},
};
use clap::{Parser, Subcommand};
use color_eyre::{eyre::eyre, Report, Result};
use rln::{
circuit::Fr,
hashers::{hash_to_field_le, poseidon_hash},
protocol::{keygen, prepare_prove_input, prepare_verify_input},
public::RLN,
utils::{fr_to_bytes_le, generate_input_buffer, IdSecret},
use rln::prelude::{
hash_to_field_le, keygen, poseidon_hash, recover_id_secret, Fr, IdSecret, PmtreeConfigBuilder,
RLNProofValues, RLNWitnessInput, RLN,
};
use zerokit_utils::Mode;
const MESSAGE_LIMIT: u32 = 1;
const TREE_DEPTH: usize = 20;
type Result<T> = std::result::Result<T, Box<dyn std::error::Error>>;
#[derive(Parser)]
#[command(author, version, about, long_about = None)]
struct Cli {
@@ -44,15 +43,15 @@ enum Commands {
#[derive(Debug, Clone)]
struct Identity {
identity_secret_hash: IdSecret,
identity_secret: IdSecret,
id_commitment: Fr,
}
impl Identity {
fn new() -> Self {
let (identity_secret_hash, id_commitment) = keygen();
let (identity_secret, id_commitment) = keygen();
Identity {
identity_secret_hash,
identity_secret,
id_commitment,
}
}
@@ -60,7 +59,7 @@ impl Identity {
struct RLNSystem {
rln: RLN,
used_nullifiers: HashMap<[u8; 32], Vec<u8>>,
used_nullifiers: HashMap<Fr, RLNProofValues>,
local_identities: HashMap<usize, Identity>,
}
@@ -77,11 +76,19 @@ impl RLNSystem {
file.read_exact(&mut output_buffer)?;
resources.push(output_buffer);
}
let tree_config = PmtreeConfigBuilder::new()
.path("./database")
.temporary(false)
.cache_capacity(1073741824)
.flush_every_ms(500)
.mode(Mode::HighThroughput)
.use_compression(false)
.build()?;
let rln = RLN::new_with_params(
TREE_DEPTH,
resources[0].clone(),
resources[1].clone(),
generate_input_buffer(),
tree_config,
)?;
println!("RLN instance initialized successfully");
Ok(RLNSystem {
@@ -100,8 +107,8 @@ impl RLNSystem {
println!("Registered users:");
for (index, identity) in &self.local_identities {
println!("User Index: {index}");
println!("+ Identity Secret Hash: {}", *identity.identity_secret_hash);
println!("+ Identity Commitment: {}", identity.id_commitment);
println!("+ Identity secret: {}", *identity.identity_secret);
println!("+ Identity commitment: {}", identity.id_commitment);
println!();
}
}
@@ -111,12 +118,11 @@ impl RLNSystem {
let identity = Identity::new();
let rate_commitment = poseidon_hash(&[identity.id_commitment, Fr::from(MESSAGE_LIMIT)]);
let mut buffer = Cursor::new(fr_to_bytes_le(&rate_commitment));
match self.rln.set_next_leaf(&mut buffer) {
match self.rln.set_next_leaf(rate_commitment) {
Ok(_) => {
println!("Registered User Index: {index}");
println!("+ Identity secret hash: {}", *identity.identity_secret_hash);
println!("+ Identity commitment: {},", identity.id_commitment);
println!("+ Identity secret: {}", *identity.identity_secret);
println!("+ Identity commitment: {}", identity.id_commitment);
self.local_identities.insert(index, identity);
}
Err(_) => {
@@ -127,104 +133,84 @@ impl RLNSystem {
Ok(index)
}
fn generate_proof(
fn generate_and_verify_proof(
&mut self,
user_index: usize,
message_id: u32,
signal: &str,
external_nullifier: Fr,
) -> Result<Vec<u8>> {
) -> Result<RLNProofValues> {
let identity = match self.local_identities.get(&user_index) {
Some(identity) => identity,
None => return Err(eyre!("user index {user_index} not found")),
None => return Err(format!("user index {user_index} not found").into()),
};
let serialized = prepare_prove_input(
identity.identity_secret_hash.clone(),
user_index,
let (path_elements, identity_path_index) = self.rln.get_merkle_proof(user_index)?;
let x = hash_to_field_le(signal.as_bytes());
let witness = RLNWitnessInput::new(
identity.identity_secret.clone(),
Fr::from(MESSAGE_LIMIT),
Fr::from(message_id),
path_elements,
identity_path_index,
x,
external_nullifier,
signal.as_bytes(),
);
let mut input_buffer = Cursor::new(serialized);
let mut output_buffer = Cursor::new(Vec::new());
self.rln
.generate_rln_proof(&mut input_buffer, &mut output_buffer)?;
)?;
let (proof, proof_values) = self.rln.generate_rln_proof(&witness)?;
println!("Proof generated successfully:");
println!("+ User Index: {user_index}");
println!("+ Message ID: {message_id}");
println!("+ Signal: {signal}");
Ok(output_buffer.into_inner())
let verified = self.rln.verify_rln_proof(&proof, &proof_values, &x)?;
if verified {
println!("Proof verified successfully");
}
Ok(proof_values)
}
fn verify_proof(&mut self, proof_data: Vec<u8>, signal: &str) -> Result<()> {
let proof_with_signal = prepare_verify_input(proof_data.clone(), signal.as_bytes());
let mut input_buffer = Cursor::new(proof_with_signal);
match self.rln.verify_rln_proof(&mut input_buffer) {
Ok(true) => {
let nullifier = &proof_data[256..288];
let nullifier_key: [u8; 32] = nullifier.try_into()?;
if let Some(previous_proof) = self.used_nullifiers.get(&nullifier_key) {
self.handle_duplicate_message_id(previous_proof.clone(), proof_data)?;
return Ok(());
}
self.used_nullifiers.insert(nullifier_key, proof_data);
println!("Message verified and accepted");
}
Ok(false) => {
println!("Verification failed: message_id must be unique within the epoch and satisfy 0 <= message_id < MESSAGE_LIMIT: {MESSAGE_LIMIT}");
}
Err(err) => return Err(Report::new(err)),
fn check_nullifier(&mut self, proof_values: RLNProofValues) -> Result<()> {
if let Some(&previous_proof_values) = self.used_nullifiers.get(&proof_values.nullifier) {
self.handle_duplicate_message_id(previous_proof_values, proof_values)?;
return Ok(());
}
self.used_nullifiers
.insert(proof_values.nullifier, proof_values);
println!("Message verified and accepted");
Ok(())
}
fn handle_duplicate_message_id(
&mut self,
previous_proof: Vec<u8>,
current_proof: Vec<u8>,
previous_proof_values: RLNProofValues,
current_proof_values: RLNProofValues,
) -> Result<()> {
let x = &current_proof[192..224];
let y = &current_proof[224..256];
let prev_x = &previous_proof[192..224];
let prev_y = &previous_proof[224..256];
if x == prev_x && y == prev_y {
return Err(eyre!("this exact message and signal has already been sent"));
if previous_proof_values.x == current_proof_values.x
&& previous_proof_values.y == current_proof_values.y
{
return Err("this exact message and signal has already been sent".into());
}
let mut proof1 = Cursor::new(previous_proof);
let mut proof2 = Cursor::new(current_proof);
let mut output = Cursor::new(Vec::new());
match self
.rln
.recover_id_secret(&mut proof1, &mut proof2, &mut output)
{
Ok(_) => {
let output_data = output.into_inner();
let (leaked_identity_secret_hash, _) = IdSecret::from_bytes_le(&output_data);
match recover_id_secret(&previous_proof_values, &current_proof_values) {
Ok(leaked_identity_secret) => {
if let Some((user_index, identity)) = self
.local_identities
.iter()
.find(|(_, identity)| {
identity.identity_secret_hash == leaked_identity_secret_hash
})
.find(|(_, identity)| identity.identity_secret == leaked_identity_secret)
.map(|(index, identity)| (*index, identity))
{
let real_identity_secret_hash = identity.identity_secret_hash.clone();
if leaked_identity_secret_hash != real_identity_secret_hash {
Err(eyre!("identity secret hash mismatch: leaked_identity_secret_hash != real_identity_secret_hash"))
let real_identity_secret = identity.identity_secret.clone();
if leaked_identity_secret != real_identity_secret {
Err("Identity secret mismatch: leaked_identity_secret != real_identity_secret".into())
} else {
println!(
"DUPLICATE message ID detected! Reveal identity secret hash: {}",
*leaked_identity_secret_hash
"DUPLICATE message ID detected! Reveal identity secret: {}",
*leaked_identity_secret
);
self.local_identities.remove(&user_index);
self.rln.delete_leaf(user_index)?;
@@ -232,10 +218,10 @@ impl RLNSystem {
Ok(())
}
} else {
Err(eyre!("user identity secret hash ******** not found"))
Err("user identity secret ******** not found".into())
}
}
Err(err) => Err(eyre!("Failed to recover identity secret: {err}")),
Err(err) => Err(format!("Failed to recover identity secret: {err}").into()),
}
}
}
@@ -273,15 +259,15 @@ fn main() -> Result<()> {
message_id,
signal,
} => {
match rln_system.generate_proof(
match rln_system.generate_and_verify_proof(
user_index,
message_id,
&signal,
external_nullifier,
) {
Ok(proof) => {
if let Err(err) = rln_system.verify_proof(proof, &signal) {
println!("Verification error: {err}");
Ok(proof_values) => {
if let Err(err) = rln_system.check_nullifier(proof_values) {
println!("Check nullifier error: {err}");
};
}
Err(err) => {

View File

@@ -2,22 +2,19 @@
use std::{
collections::HashMap,
io::{stdin, stdout, Cursor, Write},
io::{stdin, stdout, Write},
};
use clap::{Parser, Subcommand};
use color_eyre::{eyre::eyre, Result};
use rln::{
circuit::{Fr, TEST_TREE_DEPTH},
hashers::{hash_to_field_le, poseidon_hash, PoseidonHash},
protocol::{keygen, prepare_verify_input, serialize_witness},
public::RLN,
utils::{fr_to_bytes_le, IdSecret},
use rln::prelude::{
hash_to_field_le, keygen, poseidon_hash, recover_id_secret, Fr, IdSecret, PoseidonHash,
RLNProofValues, RLNWitnessInput, DEFAULT_TREE_DEPTH, RLN,
};
use zerokit_utils::{OptimalMerkleTree, ZerokitMerkleProof, ZerokitMerkleTree};
const MESSAGE_LIMIT: u32 = 1;
type Result<T> = std::result::Result<T, Box<dyn std::error::Error>>;
type ConfigOf<T> = <T as ZerokitMerkleTree>::Config;
#[derive(Parser)]
@@ -45,15 +42,15 @@ enum Commands {
#[derive(Debug, Clone)]
struct Identity {
identity_secret_hash: IdSecret,
identity_secret: IdSecret,
id_commitment: Fr,
}
impl Identity {
fn new() -> Self {
let (identity_secret_hash, id_commitment) = keygen();
let (identity_secret, id_commitment) = keygen();
Identity {
identity_secret_hash,
identity_secret,
id_commitment,
}
}
@@ -62,7 +59,7 @@ impl Identity {
struct RLNSystem {
rln: RLN,
tree: OptimalMerkleTree<PoseidonHash>,
used_nullifiers: HashMap<[u8; 32], Vec<u8>>,
used_nullifiers: HashMap<Fr, RLNProofValues>,
local_identities: HashMap<usize, Identity>,
}
@@ -71,7 +68,7 @@ impl RLNSystem {
let rln = RLN::new()?;
let default_leaf = Fr::from(0);
let tree: OptimalMerkleTree<PoseidonHash> = OptimalMerkleTree::new(
TEST_TREE_DEPTH,
DEFAULT_TREE_DEPTH,
default_leaf,
ConfigOf::<OptimalMerkleTree<PoseidonHash>>::default(),
)
@@ -94,8 +91,8 @@ impl RLNSystem {
println!("Registered users:");
for (index, identity) in &self.local_identities {
println!("User Index: {index}");
println!("+ Identity Secret Hash: {}", *identity.identity_secret_hash);
println!("+ Identity Commitment: {}", identity.id_commitment);
println!("+ Identity secret: {}", *identity.identity_secret);
println!("+ Identity commitment: {}", identity.id_commitment);
println!();
}
}
@@ -108,135 +105,112 @@ impl RLNSystem {
self.tree.update_next(rate_commitment)?;
println!("Registered User Index: {index}");
println!("+ Identity secret hash: {}", *identity.identity_secret_hash);
println!("+ Identity secret: {}", *identity.identity_secret);
println!("+ Identity commitment: {}", identity.id_commitment);
self.local_identities.insert(index, identity);
Ok(index)
}
fn generate_proof(
fn generate_and_verify_proof(
&mut self,
user_index: usize,
message_id: u32,
signal: &str,
external_nullifier: Fr,
) -> Result<Vec<u8>> {
) -> Result<RLNProofValues> {
let identity = match self.local_identities.get(&user_index) {
Some(identity) => identity,
None => return Err(eyre!("user index {user_index} not found")),
None => return Err(format!("user index {user_index} not found").into()),
};
let merkle_proof = self.tree.proof(user_index)?;
let x = hash_to_field_le(signal.as_bytes());
let rln_witness = RLNWitnessInput::new(
identity.identity_secret_hash.clone(),
let witness = RLNWitnessInput::new(
identity.identity_secret.clone(),
Fr::from(MESSAGE_LIMIT),
Fr::from(message_id),
merkle_proof.get_path_elements(),
merkle_proof.get_path_index(),
x,
external_nullifier,
)
.unwrap();
)?;
let serialized = serialize_witness(&rln_witness)?;
let mut input_buffer = Cursor::new(serialized);
let mut output_buffer = Cursor::new(Vec::new());
self.rln
.generate_rln_proof_with_witness(&mut input_buffer, &mut output_buffer)?;
let (proof, proof_values) = self.rln.generate_rln_proof(&witness)?;
println!("Proof generated successfully:");
println!("+ User Index: {user_index}");
println!("+ Message ID: {message_id}");
println!("+ Signal: {signal}");
Ok(output_buffer.into_inner())
let tree_root = self.tree.root();
let verified = self
.rln
.verify_with_roots(&proof, &proof_values, &x, &[tree_root])?;
if verified {
println!("Proof verified successfully");
}
Ok(proof_values)
}
fn verify_proof(&mut self, proof_data: Vec<u8>, signal: &str) -> Result<()> {
let proof_with_signal = prepare_verify_input(proof_data.clone(), signal.as_bytes());
let mut input_buffer = Cursor::new(proof_with_signal);
fn check_nullifier(&mut self, proof_values: RLNProofValues) -> Result<()> {
let tree_root = self.tree.root();
let root = self.tree.root();
let roots_serialized = fr_to_bytes_le(&root);
let mut roots_buffer = Cursor::new(roots_serialized);
match self
.rln
.verify_with_roots(&mut input_buffer, &mut roots_buffer)
{
Ok(true) => {
let nullifier = &proof_data[256..288];
let nullifier_key: [u8; 32] = nullifier.try_into()?;
if let Some(previous_proof) = self.used_nullifiers.get(&nullifier_key) {
self.handle_duplicate_message_id(previous_proof.clone(), proof_data)?;
return Ok(());
}
self.used_nullifiers.insert(nullifier_key, proof_data);
println!("Message verified and accepted");
}
Ok(false) => {
println!("Verification failed: message_id must be unique within the epoch and satisfy 0 <= message_id < MESSAGE_LIMIT: {MESSAGE_LIMIT}");
}
Err(err) => return Err(err.into()),
if proof_values.root != tree_root {
println!("Check nullifier failed: invalid root");
return Ok(());
}
if let Some(&previous_proof_values) = self.used_nullifiers.get(&proof_values.nullifier) {
self.handle_duplicate_message_id(previous_proof_values, proof_values)?;
return Ok(());
}
self.used_nullifiers
.insert(proof_values.nullifier, proof_values);
println!("Message verified and accepted");
Ok(())
}
fn handle_duplicate_message_id(
&mut self,
previous_proof: Vec<u8>,
current_proof: Vec<u8>,
previous_proof_values: RLNProofValues,
current_proof_values: RLNProofValues,
) -> Result<()> {
let x = &current_proof[192..224];
let y = &current_proof[224..256];
let prev_x = &previous_proof[192..224];
let prev_y = &previous_proof[224..256];
if x == prev_x && y == prev_y {
return Err(eyre!("this exact message and signal has already been sent"));
if previous_proof_values.x == current_proof_values.x
&& previous_proof_values.y == current_proof_values.y
{
return Err("this exact message and signal has already been sent".into());
}
let mut proof1 = Cursor::new(previous_proof);
let mut proof2 = Cursor::new(current_proof);
let mut output = Cursor::new(Vec::new());
match self
.rln
.recover_id_secret(&mut proof1, &mut proof2, &mut output)
{
Ok(_) => {
let output_data = output.into_inner();
let (leaked_identity_secret_hash, _) = IdSecret::from_bytes_le(&output_data);
match recover_id_secret(&previous_proof_values, &current_proof_values) {
Ok(leaked_identity_secret) => {
if let Some((user_index, identity)) = self
.local_identities
.iter()
.find(|(_, identity)| {
identity.identity_secret_hash == leaked_identity_secret_hash
})
.find(|(_, identity)| identity.identity_secret == leaked_identity_secret)
.map(|(index, identity)| (*index, identity))
{
let real_identity_secret_hash = identity.identity_secret_hash.clone();
if leaked_identity_secret_hash != real_identity_secret_hash {
Err(eyre!("identity secret hash mismatch: leaked_identity_secret_hash != real_identity_secret_hash"))
let real_identity_secret = identity.identity_secret.clone();
if leaked_identity_secret != real_identity_secret {
Err("Identity secret mismatch: leaked_identity_secret != real_identity_secret".into())
} else {
println!(
"DUPLICATE message ID detected! Reveal identity secret hash: ********"
"DUPLICATE message ID detected! Reveal identity secret: {}",
*leaked_identity_secret
);
self.local_identities.remove(&user_index);
println!("User index {user_index} has been SLASHED");
Ok(())
}
} else {
Err(eyre!("user identity secret hash ******** not found"))
Err("user identity secret ******** not found".into())
}
}
Err(err) => Err(eyre!("Failed to recover identity secret: {err}")),
Err(err) => Err(format!("Failed to recover identity secret: {err}").into()),
}
}
}
@@ -275,15 +249,15 @@ fn main() -> Result<()> {
message_id,
signal,
} => {
match rln_system.generate_proof(
match rln_system.generate_and_verify_proof(
user_index,
message_id,
&signal,
external_nullifier,
) {
Ok(proof) => {
if let Err(err) = rln_system.verify_proof(proof, &signal) {
println!("Verification error: {err}");
Ok(proof_values) => {
if let Err(err) = rln_system.check_nullifier(proof_values) {
println!("Check nullifier error: {err}");
};
}
Err(err) => {

View File

@@ -1,195 +0,0 @@
use std::{
fs::File,
io::{Cursor, Read},
path::Path,
};
use clap::Parser;
use color_eyre::{eyre::Report, Result};
use commands::Commands;
use config::Config;
use rln::{
public::RLN,
utils::{bytes_le_to_fr, bytes_le_to_vec_fr},
};
use serde_json::json;
use state::State;
mod commands;
mod config;
mod state;
#[derive(Parser)]
#[command(author, version, about, long_about = None)]
struct Cli {
#[command(subcommand)]
command: Option<Commands>,
}
fn main() -> Result<()> {
let cli = Cli::parse();
let mut state = match &cli.command {
Some(Commands::New { .. }) | Some(Commands::NewWithParams { .. }) => State::default(),
_ => State::load_state()?,
};
match cli.command {
Some(Commands::New { tree_depth }) => {
let config = Config::load_config()?;
state.rln = if let Some(tree_config) = config.tree_config {
println!("Initializing RLN with custom config");
Some(RLN::new(tree_depth, Cursor::new(tree_config.as_bytes()))?)
} else {
println!("Initializing RLN with default config");
Some(RLN::new(tree_depth, Cursor::new(json!({}).to_string()))?)
};
Ok(())
}
Some(Commands::NewWithParams {
tree_depth,
resources_path,
}) => {
let mut resources: Vec<Vec<u8>> = Vec::new();
let filenames = ["rln_final.arkzkey", "graph.bin"];
for filename in filenames {
let fullpath = resources_path.join(Path::new(filename));
let mut file = File::open(&fullpath)?;
let metadata = std::fs::metadata(&fullpath)?;
let mut buffer = vec![0; metadata.len() as usize];
file.read_exact(&mut buffer)?;
resources.push(buffer);
}
let config = Config::load_config()?;
if let Some(tree_config) = config.tree_config {
println!("Initializing RLN with custom config");
state.rln = Some(RLN::new_with_params(
tree_depth,
resources[0].clone(),
resources[1].clone(),
Cursor::new(tree_config.to_string().as_bytes()),
)?)
} else {
println!("Initializing RLN with default config");
state.rln = Some(RLN::new_with_params(
tree_depth,
resources[0].clone(),
resources[1].clone(),
Cursor::new(json!({}).to_string()),
)?)
};
Ok(())
}
Some(Commands::SetTree { tree_depth }) => {
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.set_tree(tree_depth)?;
Ok(())
}
Some(Commands::SetLeaf { index, input }) => {
let input_data = File::open(input)?;
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.set_leaf(index, input_data)?;
Ok(())
}
Some(Commands::SetMultipleLeaves { index, input }) => {
let input_data = File::open(input)?;
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.set_leaves_from(index, input_data)?;
Ok(())
}
Some(Commands::ResetMultipleLeaves { input }) => {
let input_data = File::open(input)?;
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.init_tree_with_leaves(input_data)?;
Ok(())
}
Some(Commands::SetNextLeaf { input }) => {
let input_data = File::open(input)?;
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.set_next_leaf(input_data)?;
Ok(())
}
Some(Commands::DeleteLeaf { index }) => {
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.delete_leaf(index)?;
Ok(())
}
Some(Commands::Prove { input }) => {
let input_data = File::open(input)?;
let mut output_buffer = Cursor::new(Vec::<u8>::new());
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.prove(input_data, &mut output_buffer)?;
let proof = output_buffer.into_inner();
println!("proof: {proof:?}");
Ok(())
}
Some(Commands::Verify { input }) => {
let input_data = File::open(input)?;
let verified = state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.verify(input_data)?;
println!("verified: {verified:?}");
Ok(())
}
Some(Commands::GenerateProof { input }) => {
let input_data = File::open(input)?;
let mut output_buffer = Cursor::new(Vec::<u8>::new());
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.generate_rln_proof(input_data, &mut output_buffer)?;
let proof = output_buffer.into_inner();
println!("proof: {proof:?}");
Ok(())
}
Some(Commands::VerifyWithRoots { input, roots }) => {
let input_data = File::open(input)?;
let roots_data = File::open(roots)?;
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.verify_with_roots(input_data, roots_data)?;
Ok(())
}
Some(Commands::GetRoot) => {
let mut output_buffer = Cursor::new(Vec::<u8>::new());
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.get_root(&mut output_buffer)
.unwrap();
let (root, _) = bytes_le_to_fr(&output_buffer.into_inner());
println!("root: {root}");
Ok(())
}
Some(Commands::GetProof { index }) => {
let mut output_buffer = Cursor::new(Vec::<u8>::new());
state
.rln
.ok_or(Report::msg("no RLN instance initialized"))?
.get_proof(index, &mut output_buffer)?;
let output_buffer_inner = output_buffer.into_inner();
let (path_elements, _) = bytes_le_to_vec_fr(&output_buffer_inner)?;
for (index, element) in path_elements.iter().enumerate() {
println!("path element {index}: {element}");
}
Ok(())
}
None => Ok(()),
}
}

View File

@@ -1,31 +0,0 @@
use std::io::Cursor;
use color_eyre::Result;
use rln::{circuit::TEST_TREE_DEPTH, public::RLN};
use serde_json::Value;
use crate::config::Config;
/// Persisted CLI state: the optional, previously initialized RLN instance.
#[derive(Default)]
pub(crate) struct State {
    // `None` until `new`/`new-with-params` has constructed an instance.
    pub rln: Option<RLN>,
}
impl State {
    /// Rebuilds the CLI state from the on-disk configuration.
    ///
    /// When a tree config is present, an RLN instance is reconstructed from
    /// it, with `tree_depth` read from the config JSON (falling back to
    /// `TEST_TREE_DEPTH` when absent or non-numeric); otherwise the returned
    /// state carries no instance.
    pub(crate) fn load_state() -> Result<State> {
        let config = Config::load_config()?;
        let rln = match config.tree_config {
            Some(tree_config) => {
                let parsed: Value = serde_json::from_str(&tree_config)?;
                let depth = parsed["tree_depth"]
                    .as_u64()
                    .unwrap_or(TEST_TREE_DEPTH as u64) as usize;
                Some(RLN::new(depth, Cursor::new(tree_config.as_bytes()))?)
            }
            None => None,
        };
        Ok(State { rln })
    }
}

173
rln-wasm/Cargo.lock generated
View File

@@ -69,7 +69,7 @@ checksum = "e7e89fe77d1f0f4fe5b96dfc940923d88d17b6a773808124f21e764dfb063c6a"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -122,7 +122,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62945a2f7e6de02a31fe400aa489f0e0f5b2502e69f95f853adb82a96c7a6b60"
dependencies = [
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -135,7 +135,7 @@ dependencies = [
"num-traits",
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -221,7 +221,7 @@ checksum = "213888f660fddcca0d257e88e54ac05bca01885f258ccdf695bafd77031bb69d"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -253,6 +253,17 @@ version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
[[package]]
name = "async-trait"
version = "0.1.89"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.111",
]
[[package]]
name = "autocfg"
version = "1.5.0"
@@ -303,15 +314,21 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
[[package]]
name = "bytes"
version = "1.10.1"
version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a"
checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3"
[[package]]
name = "cast"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5"
[[package]]
name = "cc"
version = "1.2.44"
version = "1.2.48"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37521ac7aabe3d13122dc382493e20c9416f299d2ccd5b3a5340a2570cdeb0f3"
checksum = "c481bdbf0ed3b892f6f806287d72acd515b352a4ec27a208489b8c1bc839633a"
dependencies = [
"find-msvc-tools",
"shlex",
@@ -393,9 +410,9 @@ checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5"
[[package]]
name = "crypto-common"
version = "0.1.6"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a"
dependencies = [
"generic-array",
"typenum",
@@ -432,7 +449,7 @@ dependencies = [
"enum-ordinalize",
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -458,7 +475,7 @@ checksum = "8ca9601fb2d62598ee17836250842873a413586e5d7ed88b356e38ddbb0ec631"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -520,9 +537,9 @@ checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
[[package]]
name = "find-msvc-tools"
version = "0.1.4"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "52051878f80a721bb68ebfbc930e07b65ba72f2da88968ea5c06fd6ca3d3a127"
checksum = "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844"
[[package]]
name = "fnv"
@@ -551,9 +568,9 @@ dependencies = [
[[package]]
name = "generic-array"
version = "0.14.9"
version = "0.14.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4bb6743198531e02858aeaea5398fcc883e71851fcbcb5a2f773e2fb6cb1edf2"
checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
dependencies = [
"typenum",
"version_check",
@@ -595,24 +612,18 @@ dependencies = [
[[package]]
name = "hashbrown"
version = "0.16.0"
version = "0.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d"
[[package]]
name = "hex"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100"
[[package]]
name = "indexmap"
version = "2.12.0"
version = "2.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6717a8d2a5a929a1a2eb43a12812498ed141a0bcfb7e8f7844fbdbe4303bba9f"
checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2"
dependencies = [
"equivalent",
"hashbrown 0.16.0",
"hashbrown 0.16.1",
]
[[package]]
@@ -650,9 +661,9 @@ checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
[[package]]
name = "js-sys"
version = "0.3.82"
version = "0.3.83"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b011eec8cc36da2aab2d5cff675ec18454fad408585853910a202391cf9f8e65"
checksum = "464a3709c7f55f1f721e5389aa6ea4e3bc6aba669353300af094b29ffbdde1d8"
dependencies = [
"once_cell",
"wasm-bindgen",
@@ -744,6 +755,15 @@ dependencies = [
"walkdir",
]
[[package]]
name = "nu-ansi-term"
version = "0.50.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5"
dependencies = [
"windows-sys",
]
[[package]]
name = "num-bigint"
version = "0.4.6"
@@ -779,6 +799,12 @@ version = "1.21.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
[[package]]
name = "oorandom"
version = "11.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e"
[[package]]
name = "parking_lot"
version = "0.11.2"
@@ -887,14 +913,14 @@ dependencies = [
"itertools 0.14.0",
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
name = "quote"
version = "1.0.41"
version = "1.0.42"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce25767e7b499d1b604768e7cde645d14cc8584231ea6b295e9c9eb22c02e1d1"
checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f"
dependencies = [
"proc-macro2",
]
@@ -1202,7 +1228,7 @@ checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -1317,9 +1343,9 @@ dependencies = [
[[package]]
name = "syn"
version = "2.0.108"
version = "2.0.111"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da58917d35242480a05c2897064da0a80589a2a0476c9a3f2fdc83b53502e917"
checksum = "390cc9a294ab71bdb1aa2e99d13be9c753cd2d7bd6560c77118597410c4d2e87"
dependencies = [
"proc-macro2",
"quote",
@@ -1356,7 +1382,7 @@ checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -1400,9 +1426,9 @@ dependencies = [
[[package]]
name = "tracing"
version = "0.1.41"
version = "0.1.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
checksum = "2d15d90a0b5c19378952d479dc858407149d7bb45a14de0142f6c534b16fc647"
dependencies = [
"pin-project-lite",
"tracing-attributes",
@@ -1411,20 +1437,20 @@ dependencies = [
[[package]]
name = "tracing-attributes"
version = "0.1.30"
version = "0.1.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903"
checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
name = "tracing-core"
version = "0.1.34"
version = "0.1.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678"
checksum = "7a04e24fab5c89c6a36eb8558c9656f30d81de51dfa4d3b45f26b21d61fa0a6c"
dependencies = [
"once_cell",
"valuable",
@@ -1511,9 +1537,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen"
version = "0.2.105"
version = "0.2.106"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da95793dfc411fbbd93f5be7715b0578ec61fe87cb1a42b12eb625caa5c5ea60"
checksum = "0d759f433fa64a2d763d1340820e46e111a7a5ab75f993d1852d70b03dbb80fd"
dependencies = [
"cfg-if",
"once_cell",
@@ -1524,9 +1550,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-futures"
version = "0.4.55"
version = "0.4.56"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "551f88106c6d5e7ccc7cd9a16f312dd3b5d36ea8b4954304657d5dfba115d4a0"
checksum = "836d9622d604feee9e5de25ac10e3ea5f2d65b41eac0d9ce72eb5deae707ce7c"
dependencies = [
"cfg-if",
"js-sys",
@@ -1537,9 +1563,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro"
version = "0.2.105"
version = "0.2.106"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04264334509e04a7bf8690f2384ef5265f05143a4bff3889ab7a3269adab59c2"
checksum = "48cb0d2638f8baedbc542ed444afc0644a29166f1595371af4fecf8ce1e7eeb3"
dependencies = [
"quote",
"wasm-bindgen-macro-support",
@@ -1547,14 +1573,14 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro-support"
version = "0.2.105"
version = "0.2.106"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "420bc339d9f322e562942d52e115d57e950d12d88983a14c79b86859ee6c7ebc"
checksum = "cefb59d5cd5f92d9dcf80e4683949f15ca4b511f4ac0a6e14d4e1ac60c6ecd40"
dependencies = [
"bumpalo",
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
"wasm-bindgen-shared",
]
@@ -1572,21 +1598,29 @@ dependencies = [
[[package]]
name = "wasm-bindgen-shared"
version = "0.2.105"
version = "0.2.106"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "76f218a38c84bcb33c25ec7059b07847d465ce0e0a76b995e134a45adcb6af76"
checksum = "cbc538057e648b67f72a982e708d485b2efa771e1ac05fec311f9f63e5800db4"
dependencies = [
"unicode-ident",
]
[[package]]
name = "wasm-bindgen-test"
version = "0.3.55"
version = "0.3.56"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfc379bfb624eb59050b509c13e77b4eb53150c350db69628141abce842f2373"
checksum = "25e90e66d265d3a1efc0e72a54809ab90b9c0c515915c67cdf658689d2c22c6c"
dependencies = [
"async-trait",
"cast",
"js-sys",
"libm",
"minicov",
"nu-ansi-term",
"num-traits",
"oorandom",
"serde",
"serde_json",
"wasm-bindgen",
"wasm-bindgen-futures",
"wasm-bindgen-test-macro",
@@ -1594,13 +1628,13 @@ dependencies = [
[[package]]
name = "wasm-bindgen-test-macro"
version = "0.3.55"
version = "0.3.56"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "085b2df989e1e6f9620c1311df6c996e83fe16f57792b272ce1e024ac16a90f1"
checksum = "7150335716dce6028bead2b848e72f47b45e7b9422f64cccdc23bedca89affc1"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -1616,9 +1650,9 @@ dependencies = [
[[package]]
name = "web-sys"
version = "0.3.82"
version = "0.3.83"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3a1f95c0d03a47f4ae1f7a64643a6bb97465d9b740f0fa8f90ea33915c99a9a1"
checksum = "9b32828d774c412041098d182a8b38b16ea816958e07cf40eec2bc080ae137ac"
dependencies = [
"js-sys",
"wasm-bindgen",
@@ -1672,9 +1706,9 @@ dependencies = [
[[package]]
name = "winnow"
version = "0.7.13"
version = "0.7.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf"
checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829"
dependencies = [
"memchr",
]
@@ -1707,22 +1741,22 @@ dependencies = [
[[package]]
name = "zerocopy"
version = "0.8.27"
version = "0.8.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0894878a5fa3edfd6da3f88c4805f4c8558e2b996227a3d864f47fe11e38282c"
checksum = "fd74ec98b9250adb3ca554bdde269adf631549f51d8a8f8f0a10b50f1cb298c3"
dependencies = [
"zerocopy-derive",
]
[[package]]
name = "zerocopy-derive"
version = "0.8.27"
version = "0.8.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "88d2b8d9c68ad2b9e4340d7832716a4d21a22a1154777ad56ea55c51a9cf3831"
checksum = "d8a8d209fdf45cf5138cbb5a506f6b52522a25afccc534d1475dad8e31105c6a"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -1742,7 +1776,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.108",
"syn 2.0.111",
]
[[package]]
@@ -1750,7 +1784,6 @@ name = "zerokit_utils"
version = "0.7.0"
dependencies = [
"ark-ff",
"hex",
"num-bigint",
"rayon",
"serde_json",

View File

@@ -13,10 +13,10 @@ rln = { path = "../rln", version = "0.9.0", default-features = false, features =
] }
zerokit_utils = { path = "../utils", version = "0.7.0", default-features = false }
num-bigint = { version = "0.4.6", default-features = false }
js-sys = "0.3.77"
wasm-bindgen = "0.2.100"
js-sys = "0.3.83"
wasm-bindgen = "0.2.106"
serde-wasm-bindgen = "0.6.5"
serde = "1.0"
serde = "1.0.228"
wasm-bindgen-rayon = { version = "1.3.0", features = [
"no-bundler",
], optional = true }
@@ -34,13 +34,13 @@ console_error_panic_hook = { version = "0.1.7", optional = true }
getrandom = { version = "0.2.16", features = ["js"] }
[dev-dependencies]
serde_json = "1.0.141"
wasm-bindgen-test = "0.3.50"
wasm-bindgen-futures = "0.4.50"
serde_json = "1.0.145"
wasm-bindgen-test = "0.3.56"
wasm-bindgen-futures = "0.4.56"
ark-std = { version = "0.5.0", default-features = false }
[dev-dependencies.web-sys]
version = "0.3.77"
version = "0.3.83"
features = ["Window", "Navigator"]
[features]

View File

@@ -17,10 +17,10 @@ args = ["build", "--release", "--target", "web", "--scope", "waku"]
[tasks.pack_build_parallel]
command = "env"
args = [
"RUSTFLAGS=-C target-feature=+atomics,+bulk-memory,+mutable-globals",
"CARGO_TARGET_WASM32_UNKNOWN_UNKNOWN_RUSTFLAGS=-C target-feature=+atomics,+bulk-memory,+mutable-globals -C link-arg=--shared-memory -C link-arg=--max-memory=1073741824 -C link-arg=--import-memory -C link-arg=--export=__wasm_init_tls -C link-arg=--export=__tls_size -C link-arg=--export=__tls_align -C link-arg=--export=__tls_base",
"rustup",
"run",
"nightly-2025-09-24",
"nightly",
"wasm-pack",
"build",
"--release",
@@ -85,10 +85,10 @@ dependencies = ["build"]
[tasks.test_parallel]
command = "env"
args = [
"RUSTFLAGS=-C target-feature=+atomics,+bulk-memory,+mutable-globals",
"CARGO_TARGET_WASM32_UNKNOWN_UNKNOWN_RUSTFLAGS=-C target-feature=+atomics,+bulk-memory,+mutable-globals -C link-arg=--shared-memory -C link-arg=--max-memory=1073741824 -C link-arg=--import-memory -C link-arg=--export=__wasm_init_tls -C link-arg=--export=__tls_size -C link-arg=--export=__tls_align -C link-arg=--export=__tls_base",
"rustup",
"run",
"nightly-2025-09-24",
"nightly",
"wasm-pack",
"test",
"--release",

View File

@@ -5,7 +5,7 @@
[![License: Apache 2.0](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0)
The Zerokit RLN WASM Module provides WebAssembly bindings for working with
Rate-Limiting Nullifier [RLN](https://rfc.vac.dev/spec/32/) zkSNARK proofs and primitives.
Rate-Limiting Nullifier [RLN](https://rfc.vac.dev/vac/raw/rln-v2) zkSNARK proofs and primitives.
This module is used by [waku-org/js-rln](https://github.com/waku-org/js-rln/) to enable
RLN functionality in JavaScript/TypeScript applications.
@@ -116,7 +116,7 @@ enabling multi-threaded execution in the browser.
#### Install `nightly` Rust
```bash
rustup install nightly-2025-09-24
rustup install nightly
```
### Build Commands

View File

@@ -5,6 +5,12 @@ import { dirname, join } from "path";
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
function debugUint8Array(uint8Array) {
return Array.from(uint8Array, (byte) =>
byte.toString(16).padStart(2, "0")
).join(", ");
}
async function calculateWitness(circomPath, inputs, witnessCalculatorFile) {
const wasmFile = readFileSync(circomPath);
const wasmFileBuffer = wasmFile.slice(
@@ -67,7 +73,11 @@ async function main() {
console.log("\nWasmFr serialization: WasmFr <-> bytes");
const serRateCommitment = rateCommitment.toBytesLE();
console.log(" - serialized rate_commitment =", serRateCommitment);
console.log(
" - serialized rate_commitment = [" +
debugUint8Array(serRateCommitment) +
"]"
);
const deserRateCommitment = rlnWasm.WasmFr.fromBytesLE(serRateCommitment);
console.log(
@@ -96,14 +106,18 @@ async function main() {
console.log("\nVecWasmFr serialization: VecWasmFr <-> bytes");
const serPathElements = pathElements.toBytesLE();
console.log(" - serialized path_elements = ", serPathElements);
console.log(
" - serialized path_elements = [" + debugUint8Array(serPathElements) + "]"
);
const deserPathElements = rlnWasm.VecWasmFr.fromBytesLE(serPathElements);
console.log(" - deserialized path_elements = ", deserPathElements.debug());
console.log("\nUint8Array serialization: Uint8Array <-> bytes");
const serPathIndex = rlnWasm.Uint8ArrayUtils.toBytesLE(identityPathIndex);
console.log(" - serialized path_index =", serPathIndex);
console.log(
" - serialized path_index = [" + debugUint8Array(serPathIndex) + "]"
);
const deserPathIndex = rlnWasm.Uint8ArrayUtils.fromBytesLE(serPathIndex);
console.log(" - deserialized path_index =", deserPathIndex);
@@ -156,7 +170,7 @@ async function main() {
const messageId = rlnWasm.WasmFr.fromUint(0);
console.log(" - message_id = " + messageId.debug());
console.log("\nGenerating RLN Proof");
console.log("\nCreating RLN Witness");
const witness = new rlnWasm.WasmRLNWitnessInput(
identitySecret,
userMessageLimit,
@@ -166,22 +180,59 @@ async function main() {
x,
externalNullifier
);
console.log("RLN Witness created successfully");
console.log("\nCalculating witness");
const witnessJson = witness.toBigIntJson();
const calculatedWitness = await calculateWitness(
circomPath,
witnessJson,
witnessCalculatorFile
);
const proof = rlnInstance.generateProofWithWitness(
console.log("Witness calculated successfully");
console.log("\nGenerating RLN Proof");
const rln_proof = rlnInstance.generateRLNProofWithWitness(
calculatedWitness,
witness
);
console.log("Proof generated successfully");
console.log("\nGetting proof values");
const proofValues = rln_proof.getValues();
console.log(" - y = " + proofValues.y.debug());
console.log(" - nullifier = " + proofValues.nullifier.debug());
console.log(" - root = " + proofValues.root.debug());
console.log(" - x = " + proofValues.x.debug());
console.log(
" - external_nullifier = " + proofValues.externalNullifier.debug()
);
console.log("\nRLNProof serialization: RLNProof <-> bytes");
const serProof = rln_proof.toBytesLE();
console.log(" - serialized proof = [" + debugUint8Array(serProof) + " ]");
const deserProof = rlnWasm.WasmRLNProof.fromBytesLE(serProof);
console.log(" - proof deserialized successfully");
console.log("\nRLNProofValues serialization: RLNProofValues <-> bytes");
const serProofValues = proofValues.toBytesLE();
console.log(
" - serialized proof_values = [" + debugUint8Array(serProofValues) + " ]"
);
const deserProofValues2 =
rlnWasm.WasmRLNProofValues.fromBytesLE(serProofValues);
console.log(" - proof_values deserialized successfully");
console.log(
" - deserialized external_nullifier = " +
deserProofValues2.externalNullifier.debug()
);
console.log("\nVerifying Proof");
const roots = new rlnWasm.VecWasmFr();
roots.push(computedRoot);
const isValid = rlnInstance.verifyWithRoots(proof, roots, x);
const isValid = rlnInstance.verifyWithRoots(rln_proof, roots, x);
if (isValid) {
console.log("Proof verified successfully");
} else {
@@ -205,7 +256,7 @@ async function main() {
const messageId2 = rlnWasm.WasmFr.fromUint(0);
console.log(" - message_id2 = " + messageId2.debug());
console.log("\nGenerating second RLN Proof");
console.log("\nCreating second RLN Witness");
const witness2 = new rlnWasm.WasmRLNWitnessInput(
identitySecret,
userMessageLimit,
@@ -215,25 +266,36 @@ async function main() {
x2,
externalNullifier
);
console.log("Second RLN Witness created successfully");
console.log("\nCalculating second witness");
const witnessJson2 = witness2.toBigIntJson();
const calculatedWitness2 = await calculateWitness(
circomPath,
witnessJson2,
witnessCalculatorFile
);
const proof2 = rlnInstance.generateProofWithWitness(
console.log("Second witness calculated successfully");
console.log("\nGenerating second RLN Proof");
const rln_proof2 = rlnInstance.generateRLNProofWithWitness(
calculatedWitness2,
witness2
);
console.log("Second proof generated successfully");
console.log("\nVerifying second proof");
const isValid2 = rlnInstance.verifyWithRoots(proof2, roots, x2);
const isValid2 = rlnInstance.verifyWithRoots(rln_proof2, roots, x2);
if (isValid2) {
console.log("Second proof verified successfully");
console.log("\nRecovering identity secret");
const recoveredSecret = rlnWasm.WasmRLNProof.recoverIdSecret(proof, proof2);
const proofValues1 = rln_proof.getValues();
const proofValues2 = rln_proof2.getValues();
const recoveredSecret = rlnWasm.WasmRLNProofValues.recoverIdSecret(
proofValues1,
proofValues2
);
console.log(" - recovered_secret = " + recoveredSecret.debug());
console.log(" - original_secret = " + identitySecret.debug());
console.log("Slashing successful: Identity is recovered!");

View File

@@ -3,13 +3,11 @@
mod wasm_rln;
mod wasm_utils;
#[cfg(not(feature = "utils"))]
pub use wasm_rln::{WasmRLN, WasmRLNProof, WasmRLNWitnessInput};
pub use wasm_utils::{ExtendedIdentity, Hasher, Identity, VecWasmFr, WasmFr};
#[cfg(all(feature = "parallel", not(feature = "utils")))]
pub use wasm_bindgen_rayon::init_thread_pool;
#[cfg(not(feature = "utils"))]
pub use wasm_rln::{WasmRLN, WasmRLNProof, WasmRLNProofValues, WasmRLNWitnessInput};
pub use wasm_utils::{ExtendedIdentity, Hasher, Identity, VecWasmFr, WasmFr};
#[cfg(feature = "panic_hook")]
#[wasm_bindgen(js_name = initPanicHook)]

View File

@@ -1,42 +1,31 @@
#![cfg(target_arch = "wasm32")]
#![cfg(not(feature = "utils"))]
use crate::wasm_utils::{VecWasmFr, WasmFr};
use js_sys::{BigInt as JsBigInt, Object, Uint8Array};
use num_bigint::BigInt;
use rln::{
circuit::{zkey_from_raw, Fr, Proof},
protocol::{
compute_id_secret, generate_proof_with_witness, proof_values_from_witness,
rln_witness_to_bigint_json, verify_proof, RLNProofValues, RLNWitnessInput, RLN,
},
utils::IdSecret,
};
use rln::prelude::*;
use serde::Serialize;
use wasm_bindgen::prelude::*;
use crate::wasm_utils::{VecWasmFr, WasmFr};
#[wasm_bindgen]
pub struct WasmRLN(RLN);
#[wasm_bindgen]
impl WasmRLN {
#[wasm_bindgen(constructor)]
pub fn new(zkey_buffer: &Uint8Array) -> Result<WasmRLN, String> {
let zkey = zkey_from_raw(&zkey_buffer.to_vec()).map_err(|err| err.to_string())?;
let rln = RLN { zkey };
pub fn new(zkey_data: &Uint8Array) -> Result<WasmRLN, String> {
let rln = RLN::new_with_params(zkey_data.to_vec()).map_err(|err| err.to_string())?;
Ok(WasmRLN(rln))
}
#[wasm_bindgen(js_name = generateProofWithWitness)]
pub fn generate_proof_with_witness(
#[wasm_bindgen(js_name = generateRLNProofWithWitness)]
pub fn generate_rln_proof_with_witness(
&self,
calculated_witness: Vec<JsBigInt>,
rln_witness: &WasmRLNWitnessInput,
witness: &WasmRLNWitnessInput,
) -> Result<WasmRLNProof, String> {
let proof_values =
proof_values_from_witness(&rln_witness.0).map_err(|err| err.to_string())?;
let calculated_witness_bigint: Vec<BigInt> = calculated_witness
.iter()
.map(|js_bigint| {
@@ -45,95 +34,137 @@ impl WasmRLN {
})
.collect();
let proof = generate_proof_with_witness(calculated_witness_bigint, &self.0.zkey)
let (proof, proof_values) = self
.0
.generate_rln_proof_with_witness(calculated_witness_bigint, &witness.0)
.map_err(|err| err.to_string())?;
Ok(WasmRLNProof {
let rln_proof = RLNProof {
proof_values,
proof,
})
};
Ok(WasmRLNProof(rln_proof))
}
#[wasm_bindgen(js_name = verifyWithRoots)]
pub fn verify_with_roots(
&self,
proof: &WasmRLNProof,
rln_proof: &WasmRLNProof,
roots: &VecWasmFr,
x: &WasmFr,
) -> Result<bool, String> {
let proof_verified = verify_proof(&self.0.zkey.0.vk, &proof.proof, &proof.proof_values)
.map_err(|err| err.to_string())?;
let roots_fr: Vec<Fr> = (0..roots.length())
.filter_map(|i| roots.get(i))
.map(|root| *root)
.collect();
if !proof_verified {
return Ok(false);
}
let roots_verified = if roots.length() == 0 {
true
} else {
(0..roots.length())
.filter_map(|i| roots.get(i))
.any(|root| *root == proof.proof_values.root)
};
let signal_verified = **x == proof.proof_values.x;
Ok(proof_verified && roots_verified && signal_verified)
self.0
.verify_with_roots(&rln_proof.0.proof, &rln_proof.0.proof_values, x, &roots_fr)
.map_err(|err| err.to_string())
}
}
#[wasm_bindgen]
pub struct WasmRLNProof {
proof: Proof,
proof_values: RLNProofValues,
}
pub struct WasmRLNProof(RLNProof);
#[wasm_bindgen]
impl WasmRLNProof {
#[wasm_bindgen(js_name = getValues)]
pub fn get_values(&self) -> WasmRLNProofValues {
WasmRLNProofValues(self.0.proof_values)
}
#[wasm_bindgen(js_name = toBytesLE)]
pub fn to_bytes_le(&self) -> Uint8Array {
Uint8Array::from(&rln_proof_to_bytes_le(&self.0)[..])
}
#[wasm_bindgen(js_name = toBytesBE)]
pub fn to_bytes_be(&self) -> Uint8Array {
Uint8Array::from(&rln_proof_to_bytes_be(&self.0)[..])
}
#[wasm_bindgen(js_name = fromBytesLE)]
pub fn from_bytes_le(bytes: &Uint8Array) -> Result<WasmRLNProof, String> {
let bytes_vec = bytes.to_vec();
let (proof, _) = bytes_le_to_rln_proof(&bytes_vec).map_err(|e| e.to_string())?;
Ok(WasmRLNProof(proof))
}
#[wasm_bindgen(js_name = fromBytesBE)]
pub fn from_bytes_be(bytes: &Uint8Array) -> Result<WasmRLNProof, String> {
let bytes_vec = bytes.to_vec();
let (proof, _) = bytes_be_to_rln_proof(&bytes_vec).map_err(|e| e.to_string())?;
Ok(WasmRLNProof(proof))
}
}
#[wasm_bindgen]
pub struct WasmRLNProofValues(RLNProofValues);
#[wasm_bindgen]
impl WasmRLNProofValues {
#[wasm_bindgen(getter)]
pub fn y(&self) -> WasmFr {
WasmFr::from(self.proof_values.y)
WasmFr::from(self.0.y)
}
#[wasm_bindgen(getter)]
pub fn nullifier(&self) -> WasmFr {
WasmFr::from(self.proof_values.nullifier)
WasmFr::from(self.0.nullifier)
}
#[wasm_bindgen(getter)]
pub fn root(&self) -> WasmFr {
WasmFr::from(self.proof_values.root)
WasmFr::from(self.0.root)
}
#[wasm_bindgen(getter)]
pub fn x(&self) -> WasmFr {
WasmFr::from(self.proof_values.x)
WasmFr::from(self.0.x)
}
#[wasm_bindgen(getter, js_name = externalNullifier)]
pub fn external_nullifier(&self) -> WasmFr {
WasmFr::from(self.proof_values.external_nullifier)
WasmFr::from(self.0.external_nullifier)
}
#[wasm_bindgen(js_name = toBytesLE)]
pub fn to_bytes_le(&self) -> Uint8Array {
Uint8Array::from(&rln_proof_values_to_bytes_le(&self.0)[..])
}
#[wasm_bindgen(js_name = toBytesBE)]
pub fn to_bytes_be(&self) -> Uint8Array {
Uint8Array::from(&rln_proof_values_to_bytes_be(&self.0)[..])
}
#[wasm_bindgen(js_name = fromBytesLE)]
pub fn from_bytes_le(bytes: &Uint8Array) -> Result<WasmRLNProofValues, String> {
let bytes_vec = bytes.to_vec();
let (proof_values, _) =
bytes_le_to_rln_proof_values(&bytes_vec).map_err(|e| e.to_string())?;
Ok(WasmRLNProofValues(proof_values))
}
#[wasm_bindgen(js_name = fromBytesBE)]
pub fn from_bytes_be(bytes: &Uint8Array) -> Result<WasmRLNProofValues, String> {
let bytes_vec = bytes.to_vec();
let (proof_values, _) =
bytes_be_to_rln_proof_values(&bytes_vec).map_err(|e| e.to_string())?;
Ok(WasmRLNProofValues(proof_values))
}
#[wasm_bindgen(js_name = recoverIdSecret)]
pub fn recover_id_secret(
proof_1: &WasmRLNProof,
proof_2: &WasmRLNProof,
proof_values_1: &WasmRLNProofValues,
proof_values_2: &WasmRLNProofValues,
) -> Result<WasmFr, String> {
let external_nullifier_1 = proof_1.proof_values.external_nullifier;
let external_nullifier_2 = proof_2.proof_values.external_nullifier;
let recovered_identity_secret = recover_id_secret(&proof_values_1.0, &proof_values_2.0)
.map_err(|err| err.to_string())?;
if external_nullifier_1 != external_nullifier_2 {
return Err("External nullifiers do not match".to_string());
}
let share1 = (proof_1.proof_values.x, proof_1.proof_values.y);
let share2 = (proof_2.proof_values.x, proof_2.proof_values.y);
let recovered_identity_secret_hash =
compute_id_secret(share1, share2).map_err(|err| err.to_string())?;
Ok(WasmFr::from(*recovered_identity_secret_hash))
Ok(WasmFr::from(*recovered_identity_secret))
}
}
@@ -156,7 +187,7 @@ impl WasmRLNWitnessInput {
let path_elements: Vec<Fr> = path_elements.inner();
let identity_path_index: Vec<u8> = identity_path_index.to_vec();
let rln_witness = RLNWitnessInput::new(
let witness = RLNWitnessInput::new(
IdSecret::from(&mut identity_secret_fr),
user_message_limit.inner(),
message_id.inner(),
@@ -167,15 +198,15 @@ impl WasmRLNWitnessInput {
)
.map_err(|err| err.to_string())?;
Ok(WasmRLNWitnessInput(rln_witness))
Ok(WasmRLNWitnessInput(witness))
}
#[wasm_bindgen(js_name = toBigIntJson)]
pub fn to_bigint_json(&self) -> Result<Object, String> {
let inputs = rln_witness_to_bigint_json(&self.0).map_err(|err| err.to_string())?;
let bigint_json = rln_witness_to_bigint_json(&self.0).map_err(|err| err.to_string())?;
let serializer = serde_wasm_bindgen::Serializer::json_compatible();
let js_value = inputs
let js_value = bigint_json
.serialize(&serializer)
.map_err(|err| err.to_string())?;
@@ -183,4 +214,30 @@ impl WasmRLNWitnessInput {
.dyn_into::<Object>()
.map_err(|err| format!("{:#?}", err))
}
#[wasm_bindgen(js_name = toBytesLE)]
pub fn to_bytes_le(&self) -> Result<Uint8Array, String> {
let bytes = rln_witness_to_bytes_le(&self.0).map_err(|err| err.to_string())?;
Ok(Uint8Array::from(&bytes[..]))
}
#[wasm_bindgen(js_name = toBytesBE)]
pub fn to_bytes_be(&self) -> Result<Uint8Array, String> {
let bytes = rln_witness_to_bytes_be(&self.0).map_err(|err| err.to_string())?;
Ok(Uint8Array::from(&bytes[..]))
}
#[wasm_bindgen(js_name = fromBytesLE)]
pub fn from_bytes_le(bytes: &Uint8Array) -> Result<WasmRLNWitnessInput, String> {
let bytes_vec = bytes.to_vec();
let (witness, _) = bytes_le_to_rln_witness(&bytes_vec).map_err(|err| err.to_string())?;
Ok(WasmRLNWitnessInput(witness))
}
#[wasm_bindgen(js_name = fromBytesBE)]
pub fn from_bytes_be(bytes: &Uint8Array) -> Result<WasmRLNWitnessInput, String> {
let bytes_vec = bytes.to_vec();
let (witness, _) = bytes_be_to_rln_witness(&bytes_vec).map_err(|err| err.to_string())?;
Ok(WasmRLNWitnessInput(witness))
}
}

View File

@@ -1,17 +1,9 @@
#![cfg(target_arch = "wasm32")]
use js_sys::Uint8Array;
use rln::{
circuit::Fr,
hashers::{hash_to_field_be, hash_to_field_le, poseidon_hash},
protocol::{extended_keygen, extended_seeded_keygen, keygen, seeded_keygen},
utils::{
bytes_be_to_fr, bytes_be_to_vec_fr, bytes_be_to_vec_u8, bytes_le_to_fr, bytes_le_to_vec_fr,
bytes_le_to_vec_u8, fr_to_bytes_be, fr_to_bytes_le, vec_fr_to_bytes_be, vec_fr_to_bytes_le,
vec_u8_to_bytes_be, vec_u8_to_bytes_le,
},
};
use std::ops::Deref;
use js_sys::Uint8Array;
use rln::prelude::*;
use wasm_bindgen::prelude::*;
// WasmFr
@@ -51,17 +43,17 @@ impl WasmFr {
}
#[wasm_bindgen(js_name = fromBytesLE)]
pub fn from_bytes_le(bytes: &Uint8Array) -> Self {
pub fn from_bytes_le(bytes: &Uint8Array) -> Result<Self, String> {
let bytes_vec = bytes.to_vec();
let (fr, _) = bytes_le_to_fr(&bytes_vec);
Self(fr)
let (fr, _) = bytes_le_to_fr(&bytes_vec).map_err(|e| e.to_string())?;
Ok(Self(fr))
}
#[wasm_bindgen(js_name = fromBytesBE)]
pub fn from_bytes_be(bytes: &Uint8Array) -> Self {
pub fn from_bytes_be(bytes: &Uint8Array) -> Result<Self, String> {
let bytes_vec = bytes.to_vec();
let (fr, _) = bytes_be_to_fr(&bytes_vec);
Self(fr)
let (fr, _) = bytes_be_to_fr(&bytes_vec).map_err(|e| e.to_string())?;
Ok(Self(fr))
}
#[wasm_bindgen(js_name = toBytesLE)]
@@ -219,7 +211,7 @@ impl Hasher {
#[wasm_bindgen]
pub struct Identity {
identity_secret_hash: Fr,
identity_secret: Fr,
id_commitment: Fr,
}
@@ -227,9 +219,9 @@ pub struct Identity {
impl Identity {
#[wasm_bindgen(js_name = generate)]
pub fn generate() -> Identity {
let (identity_secret_hash, id_commitment) = keygen();
let (identity_secret, id_commitment) = keygen();
Identity {
identity_secret_hash: *identity_secret_hash,
identity_secret: *identity_secret,
id_commitment,
}
}
@@ -237,16 +229,16 @@ impl Identity {
#[wasm_bindgen(js_name = generateSeeded)]
pub fn generate_seeded(seed: &Uint8Array) -> Identity {
let seed_vec = seed.to_vec();
let (identity_secret_hash, id_commitment) = seeded_keygen(&seed_vec);
let (identity_secret, id_commitment) = seeded_keygen(&seed_vec);
Identity {
identity_secret_hash,
identity_secret,
id_commitment,
}
}
#[wasm_bindgen(js_name = getSecretHash)]
pub fn get_secret_hash(&self) -> WasmFr {
WasmFr(self.identity_secret_hash)
WasmFr(self.identity_secret)
}
#[wasm_bindgen(js_name = getCommitment)]
@@ -256,7 +248,7 @@ impl Identity {
#[wasm_bindgen(js_name = toArray)]
pub fn to_array(&self) -> VecWasmFr {
VecWasmFr(vec![self.identity_secret_hash, self.id_commitment])
VecWasmFr(vec![self.identity_secret, self.id_commitment])
}
}
@@ -264,7 +256,7 @@ impl Identity {
pub struct ExtendedIdentity {
identity_trapdoor: Fr,
identity_nullifier: Fr,
identity_secret_hash: Fr,
identity_secret: Fr,
id_commitment: Fr,
}
@@ -272,12 +264,12 @@ pub struct ExtendedIdentity {
impl ExtendedIdentity {
#[wasm_bindgen(js_name = generate)]
pub fn generate() -> ExtendedIdentity {
let (identity_trapdoor, identity_nullifier, identity_secret_hash, id_commitment) =
let (identity_trapdoor, identity_nullifier, identity_secret, id_commitment) =
extended_keygen();
ExtendedIdentity {
identity_trapdoor,
identity_nullifier,
identity_secret_hash,
identity_secret,
id_commitment,
}
}
@@ -285,12 +277,12 @@ impl ExtendedIdentity {
#[wasm_bindgen(js_name = generateSeeded)]
pub fn generate_seeded(seed: &Uint8Array) -> ExtendedIdentity {
let seed_vec = seed.to_vec();
let (identity_trapdoor, identity_nullifier, identity_secret_hash, id_commitment) =
let (identity_trapdoor, identity_nullifier, identity_secret, id_commitment) =
extended_seeded_keygen(&seed_vec);
ExtendedIdentity {
identity_trapdoor,
identity_nullifier,
identity_secret_hash,
identity_secret,
id_commitment,
}
}
@@ -307,7 +299,7 @@ impl ExtendedIdentity {
#[wasm_bindgen(js_name = getSecretHash)]
pub fn get_secret_hash(&self) -> WasmFr {
WasmFr(self.identity_secret_hash)
WasmFr(self.identity_secret)
}
#[wasm_bindgen(js_name = getCommitment)]
@@ -320,7 +312,7 @@ impl ExtendedIdentity {
VecWasmFr(vec![
self.identity_trapdoor,
self.identity_nullifier,
self.identity_secret_hash,
self.identity_secret,
self.id_commitment,
])
}

View File

@@ -4,8 +4,7 @@
#[cfg(test)]
mod test {
use js_sys::{BigInt as JsBigInt, Date, Object, Uint8Array};
use rln::circuit::TEST_TREE_DEPTH;
use rln::hashers::PoseidonHash;
use rln::prelude::*;
use rln_wasm::{
Hasher, Identity, VecWasmFr, WasmFr, WasmRLN, WasmRLNProof, WasmRLNWitnessInput,
};
@@ -14,7 +13,6 @@ mod test {
use zerokit_utils::{
OptimalMerkleProof, OptimalMerkleTree, ZerokitMerkleProof, ZerokitMerkleTree,
};
#[cfg(feature = "parallel")]
use {rln_wasm::init_thread_pool, wasm_bindgen_futures::JsFuture, web_sys::window};
@@ -114,7 +112,7 @@ mod test {
// Create RLN instance for other benchmarks
let rln_instance = WasmRLN::new(&zkey).expect("Failed to create RLN instance");
let mut tree: OptimalMerkleTree<PoseidonHash> =
OptimalMerkleTree::default(TEST_TREE_DEPTH).expect("Failed to create tree");
OptimalMerkleTree::default(DEFAULT_TREE_DEPTH).expect("Failed to create tree");
// Benchmark generate identity
let start_identity_gen = Date::now();
@@ -125,7 +123,7 @@ mod test {
// Generate identity for other benchmarks
let identity_pair = Identity::generate();
let identity_secret_hash = identity_pair.get_secret_hash();
let identity_secret = identity_pair.get_secret_hash();
let id_commitment = identity_pair.get_commitment();
let epoch = Hasher::hash_to_field_le(&Uint8Array::from(b"test-epoch" as &[u8]));
@@ -155,8 +153,8 @@ mod test {
}
let path_index = Uint8Array::from(&merkle_proof.get_path_index()[..]);
let rln_witness_input = WasmRLNWitnessInput::new(
&identity_secret_hash,
let witness = WasmRLNWitnessInput::new(
&identity_secret,
&user_message_limit,
&message_id,
&path_elements,
@@ -166,26 +164,25 @@ mod test {
)
.expect("Failed to create WasmRLNWitnessInput");
let rln_witness_input_bigint_json = rln_witness_input
let bigint_json = witness
.to_bigint_json()
.expect("Failed to convert witness to BigInt JSON");
// Benchmark witness calculation
let start_calculate_witness = Date::now();
for _ in 0..iterations {
let _ = calculateWitness(CIRCOM_BYTES, rln_witness_input_bigint_json.clone())
let _ = calculateWitness(CIRCOM_BYTES, bigint_json.clone())
.await
.expect("Failed to calculate witness");
}
let calculate_witness_result = Date::now() - start_calculate_witness;
// Calculate witness for other benchmarks
let calculated_witness_str =
calculateWitness(CIRCOM_BYTES, rln_witness_input_bigint_json.clone())
.await
.expect("Failed to calculate witness")
.as_string()
.expect("Failed to convert calculated witness to string");
let calculated_witness_str = calculateWitness(CIRCOM_BYTES, bigint_json.clone())
.await
.expect("Failed to calculate witness")
.as_string()
.expect("Failed to convert calculated witness to string");
let calculated_witness_vec_str: Vec<String> =
serde_json::from_str(&calculated_witness_str).expect("Failed to parse JSON");
let calculated_witness: Vec<JsBigInt> = calculated_witness_vec_str
@@ -194,17 +191,18 @@ mod test {
.collect();
// Benchmark proof generation with witness
let start_generate_proof_with_witness = Date::now();
let start_generate_rln_proof_with_witness = Date::now();
for _ in 0..iterations {
let _ = rln_instance
.generate_proof_with_witness(calculated_witness.clone(), &rln_witness_input)
.generate_rln_proof_with_witness(calculated_witness.clone(), &witness)
.expect("Failed to generate proof");
}
let generate_proof_with_witness_result = Date::now() - start_generate_proof_with_witness;
let generate_rln_proof_with_witness_result =
Date::now() - start_generate_rln_proof_with_witness;
// Generate proof with witness for other benchmarks
let proof: WasmRLNProof = rln_instance
.generate_proof_with_witness(calculated_witness, &rln_witness_input)
.generate_rln_proof_with_witness(calculated_witness, &witness)
.expect("Failed to generate proof");
let root = WasmFr::from(tree.root());
@@ -250,7 +248,7 @@ mod test {
));
results.push_str(&format!(
"Proof generation with witness: {}\n",
format_duration(generate_proof_with_witness_result)
format_duration(generate_rln_proof_with_witness_result)
));
results.push_str(&format!(
"Proof verification with roots: {}\n",

View File

@@ -4,8 +4,7 @@
#[cfg(test)]
mod test {
use js_sys::{BigInt as JsBigInt, Date, Object, Uint8Array};
use rln::circuit::TEST_TREE_DEPTH;
use rln::hashers::PoseidonHash;
use rln::prelude::*;
use rln_wasm::{
Hasher, Identity, VecWasmFr, WasmFr, WasmRLN, WasmRLNProof, WasmRLNWitnessInput,
};
@@ -94,7 +93,7 @@ mod test {
// Create RLN instance for other benchmarks
let rln_instance = WasmRLN::new(&zkey).expect("Failed to create RLN instance");
let mut tree: OptimalMerkleTree<PoseidonHash> =
OptimalMerkleTree::default(TEST_TREE_DEPTH).expect("Failed to create tree");
OptimalMerkleTree::default(DEFAULT_TREE_DEPTH).expect("Failed to create tree");
// Benchmark generate identity
let start_identity_gen = Date::now();
@@ -105,7 +104,7 @@ mod test {
// Generate identity for other benchmarks
let identity_pair = Identity::generate();
let identity_secret_hash = identity_pair.get_secret_hash();
let identity_secret = identity_pair.get_secret_hash();
let id_commitment = identity_pair.get_commitment();
let epoch = Hasher::hash_to_field_le(&Uint8Array::from(b"test-epoch" as &[u8]));
@@ -135,8 +134,8 @@ mod test {
}
let path_index = Uint8Array::from(&merkle_proof.get_path_index()[..]);
let rln_witness_input = WasmRLNWitnessInput::new(
&identity_secret_hash,
let witness = WasmRLNWitnessInput::new(
&identity_secret,
&user_message_limit,
&message_id,
&path_elements,
@@ -146,26 +145,25 @@ mod test {
)
.expect("Failed to create WasmRLNWitnessInput");
let rln_witness_input_bigint_json = rln_witness_input
let bigint_json = witness
.to_bigint_json()
.expect("Failed to convert witness to BigInt JSON");
// Benchmark witness calculation
let start_calculate_witness = Date::now();
for _ in 0..iterations {
let _ = calculateWitness(CIRCOM_PATH, rln_witness_input_bigint_json.clone())
let _ = calculateWitness(CIRCOM_PATH, bigint_json.clone())
.await
.expect("Failed to calculate witness");
}
let calculate_witness_result = Date::now() - start_calculate_witness;
// Calculate witness for other benchmarks
let calculated_witness_str =
calculateWitness(CIRCOM_PATH, rln_witness_input_bigint_json.clone())
.await
.expect("Failed to calculate witness")
.as_string()
.expect("Failed to convert calculated witness to string");
let calculated_witness_str = calculateWitness(CIRCOM_PATH, bigint_json.clone())
.await
.expect("Failed to calculate witness")
.as_string()
.expect("Failed to convert calculated witness to string");
let calculated_witness_vec_str: Vec<String> =
serde_json::from_str(&calculated_witness_str).expect("Failed to parse JSON");
let calculated_witness: Vec<JsBigInt> = calculated_witness_vec_str
@@ -174,17 +172,18 @@ mod test {
.collect();
// Benchmark proof generation with witness
let start_generate_proof_with_witness = Date::now();
let start_generate_rln_proof_with_witness = Date::now();
for _ in 0..iterations {
let _ = rln_instance
.generate_proof_with_witness(calculated_witness.clone(), &rln_witness_input)
.generate_rln_proof_with_witness(calculated_witness.clone(), &witness)
.expect("Failed to generate proof");
}
let generate_proof_with_witness_result = Date::now() - start_generate_proof_with_witness;
let generate_rln_proof_with_witness_result =
Date::now() - start_generate_rln_proof_with_witness;
// Generate proof with witness for other benchmarks
let proof: WasmRLNProof = rln_instance
.generate_proof_with_witness(calculated_witness, &rln_witness_input)
.generate_rln_proof_with_witness(calculated_witness, &witness)
.expect("Failed to generate proof");
let root = WasmFr::from(tree.root());
@@ -230,7 +229,7 @@ mod test {
));
results.push_str(&format!(
"Proof generation with witness: {}\n",
format_duration(generate_proof_with_witness_result)
format_duration(generate_rln_proof_with_witness_result)
));
results.push_str(&format!(
"Proof verification with roots: {}\n",

View File

@@ -2,28 +2,27 @@
#[cfg(test)]
mod test {
use std::assert_eq;
use ark_std::rand::thread_rng;
use js_sys::Uint8Array;
use rand::Rng;
use rln::circuit::Fr;
use rln::hashers::poseidon_hash;
use rln::utils::{fr_to_bytes_be, fr_to_bytes_le, str_to_fr, IdSecret};
use rln::prelude::*;
use rln_wasm::{ExtendedIdentity, Hasher, Identity, VecWasmFr, WasmFr};
use std::assert_eq;
use wasm_bindgen_test::wasm_bindgen_test;
#[wasm_bindgen_test]
fn test_keygen_wasm() {
let identity = Identity::generate();
let identity_secret_hash = *identity.get_secret_hash();
let identity_secret = *identity.get_secret_hash();
let id_commitment = *identity.get_commitment();
assert_ne!(identity_secret_hash, Fr::from(0u8));
assert_ne!(identity_secret, Fr::from(0u8));
assert_ne!(id_commitment, Fr::from(0u8));
let arr = identity.to_array();
assert_eq!(arr.length(), 2);
assert_eq!(*arr.get(0).unwrap(), identity_secret_hash);
assert_eq!(*arr.get(0).unwrap(), identity_secret);
assert_eq!(*arr.get(1).unwrap(), id_commitment);
}
@@ -33,19 +32,19 @@ mod test {
let identity_trapdoor = *identity.get_trapdoor();
let identity_nullifier = *identity.get_nullifier();
let identity_secret_hash = *identity.get_secret_hash();
let identity_secret = *identity.get_secret_hash();
let id_commitment = *identity.get_commitment();
assert_ne!(identity_trapdoor, Fr::from(0u8));
assert_ne!(identity_nullifier, Fr::from(0u8));
assert_ne!(identity_secret_hash, Fr::from(0u8));
assert_ne!(identity_secret, Fr::from(0u8));
assert_ne!(id_commitment, Fr::from(0u8));
let arr = identity.to_array();
assert_eq!(arr.length(), 4);
assert_eq!(*arr.get(0).unwrap(), identity_trapdoor);
assert_eq!(*arr.get(1).unwrap(), identity_nullifier);
assert_eq!(*arr.get(2).unwrap(), identity_secret_hash);
assert_eq!(*arr.get(2).unwrap(), identity_secret);
assert_eq!(*arr.get(3).unwrap(), id_commitment);
}
@@ -55,10 +54,10 @@ mod test {
let seed = Uint8Array::from(&seed_bytes[..]);
let identity = Identity::generate_seeded(&seed);
let identity_secret_hash = *identity.get_secret_hash();
let identity_secret = *identity.get_secret_hash();
let id_commitment = *identity.get_commitment();
let expected_identity_secret_hash_seed_bytes = str_to_fr(
let expected_identity_secret_seed_bytes = str_to_fr(
"0x766ce6c7e7a01bdf5b3f257616f603918c30946fa23480f2859c597817e6716",
16,
)
@@ -69,10 +68,7 @@ mod test {
)
.unwrap();
assert_eq!(
identity_secret_hash,
expected_identity_secret_hash_seed_bytes
);
assert_eq!(identity_secret, expected_identity_secret_seed_bytes);
assert_eq!(id_commitment, expected_id_commitment_seed_bytes);
}
@@ -85,7 +81,7 @@ mod test {
let identity_trapdoor = *identity.get_trapdoor();
let identity_nullifier = *identity.get_nullifier();
let identity_secret_hash = *identity.get_secret_hash();
let identity_secret = *identity.get_secret_hash();
let id_commitment = *identity.get_commitment();
let expected_identity_trapdoor_seed_bytes = str_to_fr(
@@ -98,7 +94,7 @@ mod test {
16,
)
.unwrap();
let expected_identity_secret_hash_seed_bytes = str_to_fr(
let expected_identity_secret_seed_bytes = str_to_fr(
"0x2aca62aaa7abaf3686fff2caf00f55ab9462dc12db5b5d4bcf3994e671f8e521",
16,
)
@@ -111,10 +107,7 @@ mod test {
assert_eq!(identity_trapdoor, expected_identity_trapdoor_seed_bytes);
assert_eq!(identity_nullifier, expected_identity_nullifier_seed_bytes);
assert_eq!(
identity_secret_hash,
expected_identity_secret_hash_seed_bytes
);
assert_eq!(identity_secret, expected_identity_secret_seed_bytes);
assert_eq!(id_commitment, expected_id_commitment_seed_bytes);
}

View File

@@ -12,7 +12,7 @@ repository = "https://github.com/vacp2p/zerokit"
crate-type = ["rlib", "staticlib", "cdylib"]
bench = false
# This flag disable cargo doctests, i.e. testing example code-snippets in documentation
# This flag disables cargo doctests, i.e. testing example code-snippets in documentation
doctest = false
[dependencies]
@@ -26,13 +26,13 @@ ark-poly = { version = "0.5.0", default-features = false }
ark-groth16 = { version = "0.5.0", default-features = false }
ark-serialize = { version = "0.5.0", default-features = false }
# error handling
thiserror = "2.0.12"
# Error Handling
thiserror = "2.0.17"
# utilities
rayon = { version = "1.10.0", optional = true }
# Utilities
rayon = { version = "1.11.0", optional = true }
byteorder = "1.5.0"
cfg-if = "1.0"
cfg-if = "1.0.4"
num-bigint = { version = "0.4.6", default-features = false, features = ["std"] }
num-traits = "0.2.19"
once_cell = "1.21.3"
@@ -45,21 +45,22 @@ ruint = { version = "1.17.0", default-features = false, features = [
] }
tiny-keccak = { version = "2.0.2", features = ["keccak"] }
zeroize = "1.8.2"
tempfile = "3.21.0"
tempfile = "3.23.0"
utils = { package = "zerokit_utils", version = "0.7.0", path = "../utils", default-features = false }
# FFI
safer-ffi.version = "0.1"
# serialization
# Serialization
prost = "0.14.1"
serde_json = "1.0.141"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0.145"
serde = { version = "1.0.228", features = ["derive"] }
document-features = { version = "0.2.11", optional = true }
# Documentation
document-features = { version = "0.2.12", optional = true }
[dev-dependencies]
criterion = { version = "0.7.0", features = ["html_reports"] }
criterion = { version = "0.8.0", features = ["html_reports"] }
[features]
default = ["parallel", "pmtree-ft"]

View File

@@ -5,7 +5,7 @@
[![License: Apache 2.0](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0)
The Zerokit RLN Module provides a Rust implementation for working with
Rate-Limiting Nullifier [RLN](https://rfc.vac.dev/spec/32/) zkSNARK proofs and primitives.
Rate-Limiting Nullifier [RLN](https://rfc.vac.dev/vac/raw/rln-v2) zkSNARK proofs and primitives.
This module allows you to:
- Generate and verify RLN proofs
@@ -24,7 +24,7 @@ We start by adding zerokit RLN to our `Cargo.toml`
```toml
[dependencies]
rln = { git = "https://github.com/vacp2p/zerokit" }
rln = "0.9.0"
```
## Basic Usage Example
@@ -36,93 +36,70 @@ The RLN object constructor requires the following files:
Additionally, `rln.wasm` is used for testing in the rln-wasm module.
In the following we will use [cursors](https://doc.rust-lang.org/std/io/struct.Cursor.html)
as readers/writers for interfacing with RLN public APIs.
```rust
use std::io::Cursor;
use rln::{
circuit::Fr,
hashers::{hash_to_field, poseidon_hash},
protocol::{keygen, prepare_prove_input, prepare_verify_input},
public::RLN,
utils::fr_to_bytes_le,
};
use serde_json::json;
use rln::prelude::{keygen, poseidon_hash, hash_to_field_le, RLN, RLNWitnessInput, Fr, IdSecret};
fn main() {
// 1. Initialize RLN with parameters:
// - the tree depth;
// - the tree config, if it is not defined, the default value will be set
let tree_depth = 20;
let input = Cursor::new(json!({}).to_string());
let mut rln = RLN::new(tree_depth, input).unwrap();
let mut rln = RLN::new(tree_depth, "").unwrap();
// 2. Generate an identity keypair
let (identity_secret_hash, id_commitment) = keygen();
let (identity_secret, id_commitment) = keygen();
// 3. Add a rate commitment to the Merkle tree
let id_index = 10;
let leaf_index = 10;
let user_message_limit = Fr::from(10);
let rate_commitment = poseidon_hash(&[id_commitment, user_message_limit]);
let mut buffer = Cursor::new(fr_to_bytes_le(&rate_commitment));
rln.set_leaf(id_index, &mut buffer).unwrap();
rln.set_leaf(leaf_index, rate_commitment).unwrap();
// 4. Set up external nullifier (epoch + app identifier)
// 4. Get the Merkle proof for the added commitment
let (path_elements, identity_path_index) = rln.get_merkle_proof(leaf_index).unwrap();
// 5. Set up external nullifier (epoch + app identifier)
// We generate epoch from a date seed and we ensure is
// mapped to a field element by hashing-to-field its content
let epoch = hash_to_field(b"Today at noon, this year");
// We generate rln_identifier from a date seed and we ensure is
// mapped to a field element by hashing-to-field its content
let rln_identifier = hash_to_field(b"test-rln-identifier");
let epoch = hash_to_field_le(b"Today at noon, this year");
// We generate rln_identifier from an application identifier and
// we ensure is mapped to a field element by hashing-to-field its content
let rln_identifier = hash_to_field_le(b"test-rln-identifier");
// We generate a external nullifier
let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);
// We choose a message_id satisfy 0 <= message_id < user_message_limit
let message_id = Fr::from(1);
// 5. Generate and verify a proof for a message
// 6. Define the message signal
let signal = b"RLN is awesome";
// 6. Prepare input for generate_rln_proof API
// input_data is [ identity_secret<32> | id_index<8> | external_nullifier<32>
// | user_message_limit<32> | message_id<32> | signal_len<8> | signal<var> ]
let prove_input = prepare_prove_input(
identity_secret_hash,
id_index,
// 7. Compute x from the signal
let x = hash_to_field_le(signal);
// 8. Create witness input for RLN proof generation
let witness = RLNWitnessInput::new(
identity_secret,
user_message_limit,
message_id,
path_elements,
identity_path_index,
x,
external_nullifier,
signal,
);
)
.unwrap();
// 7. Generate a RLN proof
// We generate a RLN proof for proof_input
let mut input_buffer = Cursor::new(prove_input);
let mut output_buffer = Cursor::new(Vec::<u8>::new());
rln.generate_rln_proof(&mut input_buffer, &mut output_buffer)
.unwrap();
// 9. Generate a RLN proof
// We generate proof and proof values from the witness
let (proof, proof_values) = rln.generate_rln_proof(&witness).unwrap();
// We get the public outputs returned by the circuit evaluation
// The byte vector `proof_data` is serialized as
// `[ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]`.
let proof_data = output_buffer.into_inner();
// 8. Verify a RLN proof
// Input buffer is serialized as `[proof_data | signal_len | signal ]`,
// where `proof_data` is (computed as) the output obtained by `generate_rln_proof`.
let verify_data = prepare_verify_input(proof_data, signal);
// We verify the zk-proof against the provided proof values
let mut input_buffer = Cursor::new(verify_data);
let verified = rln.verify_rln_proof(&mut input_buffer).unwrap();
// We ensure the proof is valid
// 10. Verify the RLN proof
// We verify the proof using the proof and proof values and the hashed signal x
let verified = rln.verify_rln_proof(&proof, &proof_values, &x).unwrap();
assert!(verified);
}
```
### Comments for the code above for point 4
### Comments for the code above for point 5
The `external nullifier` includes two parameters.
@@ -171,8 +148,8 @@ cargo make test_stateless
## Advanced: Custom Circuit Compilation
The `rln` (<https://github.com/rate-limiting-nullifier/circom-rln>) repository,
which contains the RLN circuit implementation is using for pre-compiled RLN circuit for zerokit RLN.
The `circom-rln` (<https://github.com/rate-limiting-nullifier/circom-rln>) repository,
which contains the RLN circuit implementation used for pre-compiled RLN circuit for zerokit RLN.
If you want to compile your own RLN circuit, you can follow the instructions below.
### 1. Compile ZK Circuits for getting the zkey file
@@ -205,15 +182,15 @@ Where:
> for instructions on how to run an appropriate Powers of Tau ceremony and Phase 2 in order to compile the desired circuit. \
> Additionally, while `M` sets an upper bound on the number of messages per epoch (`2^M`),
> you can configure lower message limit for your use case, as long as it satisfies `user_message_limit ≤ 2^M`. \
> Currently, the `rln` module comes with a [pre-compiled](https://github.com/vacp2p/zerokit/tree/master/rln/resources)
> Currently, the `rln` module comes with a [pre-compiled](https://github.com/vacp2p/zerokit/tree/master/rln/resources/tree_depth_20)
> RLN circuit with a Merkle tree of depth `20` and a bit size of `16`,
> allowing up to `2^20` registered members and a `2^16` message limit per epoch.
#### Install circom compiler
You can follow the instructions below or refer to the
[installing Circom](https://docs.circom.io/getting-started/installation/#installing-circom) guide for more details,
but make sure to use the specific version `v2.1.0`.
[installing Circom](https://docs.circom.io/getting-started/installation/#installing-circom) guide for more details.
Make sure to use the specific version `v2.1.0`.
```sh
# Clone the circom repository
@@ -268,7 +245,7 @@ cargo build
cargo run --package circom_witnesscalc --bin build-circuit ../circom-rln/circuits/rln.circom <path_to_graph.bin>
```
The `rln` module comes with [pre-compiled](https://github.com/vacp2p/zerokit/tree/master/rln/resources)
The `rln` module comes with [pre-compiled](https://github.com/vacp2p/zerokit/tree/master/rln/resources/tree_depth_20)
execution graph files for the RLN circuit.
### 3. Generate Arkzkey Representation for zkey file
@@ -291,7 +268,7 @@ cargo run --bin arkzkey-util <path_to_rln_final.zkey>
This will generate the `rln_final.arkzkey` file, which is used by the `rln` module.
Currently, the `rln` module comes with
[pre-compiled](https://github.com/vacp2p/zerokit/tree/master/rln/resources) arkzkey keys for the RLN circuit.
[pre-compiled](https://github.com/vacp2p/zerokit/tree/master/rln/resources/tree_depth_20) arkzkey keys for the RLN circuit.
> [!NOTE]
> You can use this [convert_zkey.sh](./convert_zkey.sh) script
@@ -325,18 +302,6 @@ Working examples demonstrating proof generation, proof verification and slashing
- All **heap-allocated** objects returned from Rust FFI **must** be freed using their corresponding FFI `_free` functions.
## Get involved
Zerokit RLN public and FFI APIs allow interaction with many more features than what briefly showcased above.
We invite you to check our API documentation by running
```rust
cargo doc --no-deps
```
and look at unit tests to have an hint on how to interface and use them.
## Detailed Protocol Flow
1. **Identity Creation**: Generate a secret key and commitment
@@ -347,9 +312,20 @@ and look at unit tests to have an hint on how to interface and use them.
- Ensures rate-limiting constraints are satisfied
- Generates a nullifier to prevent double-usage
5. **Proof Verification**: Verify the proof without revealing the prover's identity
6. **Slashing Mechanism**: Detect and penalize double-usage attempts
## Getting Involved
Zerokit RLN public and FFI APIs allow interaction with many more features than what briefly showcased above.
We invite you to check our API documentation by running
```bash
cargo doc --no-deps
```
and look at unit tests to have an hint on how to interface and use them.
- Check the [unit tests](https://github.com/vacp2p/zerokit/tree/master/rln/tests) for more usage examples
- [RFC specification](https://rfc.vac.dev/spec/32/) for the Rate-Limiting Nullifier protocol
- [RFC specification](https://rfc.vac.dev/vac/raw/rln-v2) for the Rate-Limiting Nullifier protocol
- [GitHub repository](https://github.com/vacp2p/zerokit) for the latest updates

View File

@@ -1,5 +1,5 @@
use criterion::{criterion_group, criterion_main, Criterion};
use rln::{circuit::Fr, pm_tree_adapter::PmTree};
use rln::prelude::*;
use utils::ZerokitMerkleTree;
pub fn pmtree_benchmark(c: &mut Criterion) {
@@ -13,7 +13,7 @@ pub fn pmtree_benchmark(c: &mut Criterion) {
})
});
c.bench_function("Pmtree:delete", |b| {
c.bench_function("Pmtree::delete", |b| {
b.iter(|| {
tree.delete(0).unwrap();
})

View File

@@ -1,8 +1,5 @@
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion};
use rln::{
circuit::{Fr, TEST_TREE_DEPTH},
hashers::PoseidonHash,
};
use rln::prelude::*;
use utils::{FullMerkleTree, OptimalMerkleTree, ZerokitMerkleTree};
pub fn get_leaves(n: u32) -> Vec<Fr> {
@@ -12,7 +9,7 @@ pub fn get_leaves(n: u32) -> Vec<Fr> {
pub fn optimal_merkle_tree_poseidon_benchmark(c: &mut Criterion) {
c.bench_function("OptimalMerkleTree::<Poseidon>::full_depth_gen", |b| {
b.iter(|| {
OptimalMerkleTree::<PoseidonHash>::default(TEST_TREE_DEPTH).unwrap();
OptimalMerkleTree::<PoseidonHash>::default(DEFAULT_TREE_DEPTH).unwrap();
})
});
@@ -20,7 +17,7 @@ pub fn optimal_merkle_tree_poseidon_benchmark(c: &mut Criterion) {
for &n in [1u32, 10, 100].iter() {
let leaves = get_leaves(n);
let mut tree = OptimalMerkleTree::<PoseidonHash>::default(TEST_TREE_DEPTH).unwrap();
let mut tree = OptimalMerkleTree::<PoseidonHash>::default(DEFAULT_TREE_DEPTH).unwrap();
group.bench_function(
BenchmarkId::new("OptimalMerkleTree::<Poseidon>::set", n),
|b| {
@@ -43,7 +40,7 @@ pub fn optimal_merkle_tree_poseidon_benchmark(c: &mut Criterion) {
pub fn full_merkle_tree_poseidon_benchmark(c: &mut Criterion) {
c.bench_function("FullMerkleTree::<Poseidon>::full_depth_gen", |b| {
b.iter(|| {
FullMerkleTree::<PoseidonHash>::default(TEST_TREE_DEPTH).unwrap();
FullMerkleTree::<PoseidonHash>::default(DEFAULT_TREE_DEPTH).unwrap();
})
});
@@ -51,7 +48,7 @@ pub fn full_merkle_tree_poseidon_benchmark(c: &mut Criterion) {
for &n in [1u32, 10, 100].iter() {
let leaves = get_leaves(n);
let mut tree = FullMerkleTree::<PoseidonHash>::default(TEST_TREE_DEPTH).unwrap();
let mut tree = FullMerkleTree::<PoseidonHash>::default(DEFAULT_TREE_DEPTH).unwrap();
group.bench_function(
BenchmarkId::new("FullMerkleTree::<Poseidon>::set", n),
|b| {

View File

@@ -1,7 +1,7 @@
#!/bin/bash
# Convert zkey to arkzkey using /tmp directory
# Usage: ./convert.sh <path_to_zkey_file>
# Usage: ./convert_zkey.sh <path_to_zkey_file>
set -e
@@ -50,4 +50,7 @@ ARKZKEY_FILE="${ZKEY_ABSOLUTE_PATH%.zkey}.arkzkey"
if [ ! -f "$ARKZKEY_FILE" ]; then
echo "Could not find generated .arkzkey file at $ARKZKEY_FILE"
exit 1
fi
fi
echo "Conversion successful!"
echo "Output file: $ARKZKEY_FILE"

View File

@@ -18,7 +18,7 @@ int main(int argc, char const *const argv[])
if (!ffi_rln_new_result.ok)
{
fprintf(stderr, "Initial RLN instance creation error: %s\n", ffi_rln_new_result.err.ptr);
c_string_free(ffi_rln_new_result.err);
ffi_c_string_free(ffi_rln_new_result.err);
return EXIT_FAILURE;
}
@@ -27,78 +27,85 @@ int main(int argc, char const *const argv[])
printf("\nGenerating identity keys\n");
Vec_CFr_t keys = ffi_key_gen();
const CFr_t *identity_secret = vec_cfr_get(&keys, 0);
const CFr_t *id_commitment = vec_cfr_get(&keys, 1);
const CFr_t *identity_secret = ffi_vec_cfr_get(&keys, 0);
const CFr_t *id_commitment = ffi_vec_cfr_get(&keys, 1);
printf("Identity generated\n");
Vec_uint8_t debug = cfr_debug(identity_secret);
Vec_uint8_t debug = ffi_cfr_debug(identity_secret);
printf(" - identity_secret = %s\n", debug.ptr);
c_string_free(debug);
ffi_c_string_free(debug);
debug = cfr_debug(id_commitment);
debug = ffi_cfr_debug(id_commitment);
printf(" - id_commitment = %s\n", debug.ptr);
c_string_free(debug);
ffi_c_string_free(debug);
printf("\nCreating message limit\n");
CFr_t *user_message_limit = uint_to_cfr(1);
CFr_t *user_message_limit = ffi_uint_to_cfr(1);
debug = cfr_debug(user_message_limit);
debug = ffi_cfr_debug(user_message_limit);
printf(" - user_message_limit = %s\n", debug.ptr);
c_string_free(debug);
ffi_c_string_free(debug);
printf("\nComputing rate commitment\n");
CFr_t *rate_commitment = ffi_poseidon_hash_pair(id_commitment, user_message_limit);
debug = cfr_debug(rate_commitment);
debug = ffi_cfr_debug(rate_commitment);
printf(" - rate_commitment = %s\n", debug.ptr);
c_string_free(debug);
ffi_c_string_free(debug);
printf("\nCFr serialization: CFr <-> bytes\n");
Vec_uint8_t ser_rate_commitment = cfr_to_bytes_le(rate_commitment);
Vec_uint8_t ser_rate_commitment = ffi_cfr_to_bytes_le(rate_commitment);
debug = vec_u8_debug(&ser_rate_commitment);
debug = ffi_vec_u8_debug(&ser_rate_commitment);
printf(" - serialized rate_commitment = %s\n", debug.ptr);
c_string_free(debug);
ffi_c_string_free(debug);
CFr_t *deser_rate_commitment = bytes_le_to_cfr(&ser_rate_commitment);
CResult_CFr_ptr_Vec_uint8_t deser_rate_commitment_result = ffi_bytes_le_to_cfr(&ser_rate_commitment);
if (!deser_rate_commitment_result.ok)
{
fprintf(stderr, "Rate commitment deserialization error: %s\n", deser_rate_commitment_result.err.ptr);
ffi_c_string_free(deser_rate_commitment_result.err);
return EXIT_FAILURE;
}
CFr_t *deser_rate_commitment = deser_rate_commitment_result.ok;
debug = cfr_debug(deser_rate_commitment);
debug = ffi_cfr_debug(deser_rate_commitment);
printf(" - deserialized rate_commitment = %s\n", debug.ptr);
c_string_free(debug);
ffi_c_string_free(debug);
vec_u8_free(ser_rate_commitment);
cfr_free(deser_rate_commitment);
ffi_vec_u8_free(ser_rate_commitment);
ffi_cfr_free(deser_rate_commitment);
printf("\nVec<CFr> serialization: Vec<CFr> <-> bytes\n");
Vec_uint8_t ser_keys = vec_cfr_to_bytes_le(&keys);
Vec_uint8_t ser_keys = ffi_vec_cfr_to_bytes_le(&keys);
debug = vec_u8_debug(&ser_keys);
debug = ffi_vec_u8_debug(&ser_keys);
printf(" - serialized keys = %s\n", debug.ptr);
c_string_free(debug);
ffi_c_string_free(debug);
CResult_Vec_CFr_Vec_uint8_t deser_keys_result = bytes_le_to_vec_cfr(&ser_keys);
CResult_Vec_CFr_Vec_uint8_t deser_keys_result = ffi_bytes_le_to_vec_cfr(&ser_keys);
if (deser_keys_result.err.ptr)
{
fprintf(stderr, "Keys deserialization error: %s\n", deser_keys_result.err.ptr);
c_string_free(deser_keys_result.err);
ffi_c_string_free(deser_keys_result.err);
return EXIT_FAILURE;
}
debug = vec_cfr_debug(&deser_keys_result.ok);
debug = ffi_vec_cfr_debug(&deser_keys_result.ok);
printf(" - deserialized identity_secret = %s\n", debug.ptr);
c_string_free(debug);
ffi_c_string_free(debug);
Vec_CFr_t deser_keys = deser_keys_result.ok;
vec_cfr_free(deser_keys);
ffi_vec_cfr_free(deser_keys);
vec_u8_free(ser_keys);
ffi_vec_u8_free(ser_keys);
#ifdef STATELESS
#define TREE_DEPTH 20
#define CFR_SIZE 32
printf("\nBuilding Merkle path for stateless mode\n");
CFr_t *default_leaf = cfr_zero();
CFr_t *default_leaf = ffi_cfr_zero();
CFr_t *default_hashes[TREE_DEPTH - 1];
default_hashes[0] = ffi_poseidon_hash_pair(default_leaf, default_leaf);
@@ -107,36 +114,36 @@ int main(int argc, char const *const argv[])
default_hashes[i] = ffi_poseidon_hash_pair(default_hashes[i - 1], default_hashes[i - 1]);
}
Vec_CFr_t path_elements = vec_cfr_new(TREE_DEPTH);
vec_cfr_push(&path_elements, default_leaf);
Vec_CFr_t path_elements = ffi_vec_cfr_new(TREE_DEPTH);
ffi_vec_cfr_push(&path_elements, default_leaf);
for (size_t i = 0; i < TREE_DEPTH - 1; i++)
{
vec_cfr_push(&path_elements, default_hashes[i]);
ffi_vec_cfr_push(&path_elements, default_hashes[i]);
}
printf("\nVec<CFr> serialization: Vec<CFr> <-> bytes\n");
Vec_uint8_t ser_path_elements = vec_cfr_to_bytes_le(&path_elements);
Vec_uint8_t ser_path_elements = ffi_vec_cfr_to_bytes_le(&path_elements);
debug = vec_u8_debug(&ser_path_elements);
debug = ffi_vec_u8_debug(&ser_path_elements);
printf(" - serialized path_elements = %s\n", debug.ptr);
c_string_free(debug);
ffi_c_string_free(debug);
CResult_Vec_CFr_Vec_uint8_t deser_path_elements_result = bytes_le_to_vec_cfr(&ser_path_elements);
CResult_Vec_CFr_Vec_uint8_t deser_path_elements_result = ffi_bytes_le_to_vec_cfr(&ser_path_elements);
if (deser_path_elements_result.err.ptr)
{
fprintf(stderr, "Path elements deserialization error: %s\n", deser_path_elements_result.err.ptr);
c_string_free(deser_path_elements_result.err);
ffi_c_string_free(deser_path_elements_result.err);
return EXIT_FAILURE;
}
debug = vec_cfr_debug(&deser_path_elements_result.ok);
debug = ffi_vec_cfr_debug(&deser_path_elements_result.ok);
printf(" - deserialized path_elements = %s\n", debug.ptr);
c_string_free(debug);
ffi_c_string_free(debug);
Vec_CFr_t deser_path_elements = deser_path_elements_result.ok;
vec_cfr_free(deser_path_elements);
ffi_vec_cfr_free(deser_path_elements);
vec_u8_free(ser_path_elements);
ffi_vec_u8_free(ser_path_elements);
uint8_t path_index_arr[TREE_DEPTH] = {0};
Vec_uint8_t identity_path_index = {
@@ -145,28 +152,28 @@ int main(int argc, char const *const argv[])
.cap = TREE_DEPTH};
printf("\nVec<uint8> serialization: Vec<uint8> <-> bytes\n");
Vec_uint8_t ser_path_index = vec_u8_to_bytes_le(&identity_path_index);
Vec_uint8_t ser_path_index = ffi_vec_u8_to_bytes_le(&identity_path_index);
debug = vec_u8_debug(&ser_path_index);
debug = ffi_vec_u8_debug(&ser_path_index);
printf(" - serialized path_index = %s\n", debug.ptr);
c_string_free(debug);
ffi_c_string_free(debug);
CResult_Vec_uint8_Vec_uint8_t deser_path_index_result = bytes_le_to_vec_u8(&ser_path_index);
CResult_Vec_uint8_Vec_uint8_t deser_path_index_result = ffi_bytes_le_to_vec_u8(&ser_path_index);
if (deser_path_index_result.err.ptr)
{
fprintf(stderr, "Path index deserialization error: %s\n", deser_path_index_result.err.ptr);
c_string_free(deser_path_index_result.err);
ffi_c_string_free(deser_path_index_result.err);
return EXIT_FAILURE;
}
debug = vec_u8_debug(&deser_path_index_result.ok);
debug = ffi_vec_u8_debug(&deser_path_index_result.ok);
printf(" - deserialized path_index = %s\n", debug.ptr);
c_string_free(debug);
ffi_c_string_free(debug);
Vec_uint8_t deser_path_index = deser_path_index_result.ok;
vec_u8_free(deser_path_index);
ffi_vec_u8_free(deser_path_index);
vec_u8_free(ser_path_index);
ffi_vec_u8_free(ser_path_index);
printf("\nComputing Merkle root for stateless mode\n");
printf(" - computing root for index 0 with rate_commitment\n");
@@ -174,20 +181,20 @@ int main(int argc, char const *const argv[])
for (size_t i = 1; i < TREE_DEPTH; i++)
{
CFr_t *next_root = ffi_poseidon_hash_pair(computed_root, default_hashes[i - 1]);
cfr_free(computed_root);
ffi_cfr_free(computed_root);
computed_root = next_root;
}
debug = cfr_debug(computed_root);
debug = ffi_cfr_debug(computed_root);
printf(" - computed_root = %s\n", debug.ptr);
c_string_free(debug);
ffi_c_string_free(debug);
#else
printf("\nAdding rate_commitment to tree\n");
CBoolResult_t set_err = ffi_set_next_leaf(&rln, &rate_commitment);
CBoolResult_t set_err = ffi_set_next_leaf(&rln, rate_commitment);
if (!set_err.ok)
{
fprintf(stderr, "Set next leaf error: %s\n", set_err.err.ptr);
c_string_free(set_err.err);
ffi_c_string_free(set_err.err);
return EXIT_FAILURE;
}
@@ -195,11 +202,11 @@ int main(int argc, char const *const argv[])
printf(" - added to tree at index %zu\n", leaf_index);
printf("\nGetting Merkle proof\n");
CResult_FFI_MerkleProof_ptr_Vec_uint8_t proof_result = ffi_get_proof(&rln, leaf_index);
CResult_FFI_MerkleProof_ptr_Vec_uint8_t proof_result = ffi_get_merkle_proof(&rln, leaf_index);
if (!proof_result.ok)
{
fprintf(stderr, "Get proof error: %s\n", proof_result.err.ptr);
c_string_free(proof_result.err);
ffi_c_string_free(proof_result.err);
return EXIT_FAILURE;
}
FFI_MerkleProof_t *merkle_proof = proof_result.ok;
@@ -211,46 +218,45 @@ int main(int argc, char const *const argv[])
Vec_uint8_t signal_vec = {signal, 32, 32};
CFr_t *x = ffi_hash_to_field_le(&signal_vec);
debug = cfr_debug(x);
debug = ffi_cfr_debug(x);
printf(" - x = %s\n", debug.ptr);
c_string_free(debug);
ffi_c_string_free(debug);
printf("\nHashing epoch\n");
const char *epoch_str = "test-epoch";
Vec_uint8_t epoch_vec = {(uint8_t *)epoch_str, strlen(epoch_str), strlen(epoch_str)};
CFr_t *epoch = ffi_hash_to_field_le(&epoch_vec);
debug = cfr_debug(epoch);
debug = ffi_cfr_debug(epoch);
printf(" - epoch = %s\n", debug.ptr);
c_string_free(debug);
ffi_c_string_free(debug);
printf("\nHashing RLN identifier\n");
const char *rln_id_str = "test-rln-identifier";
Vec_uint8_t rln_id_vec = {(uint8_t *)rln_id_str, strlen(rln_id_str), strlen(rln_id_str)};
CFr_t *rln_identifier = ffi_hash_to_field_le(&rln_id_vec);
debug = cfr_debug(rln_identifier);
debug = ffi_cfr_debug(rln_identifier);
printf(" - rln_identifier = %s\n", debug.ptr);
c_string_free(debug);
ffi_c_string_free(debug);
printf("\nComputing Poseidon hash for external nullifier\n");
CFr_t *external_nullifier = ffi_poseidon_hash_pair(epoch, rln_identifier);
debug = cfr_debug(external_nullifier);
debug = ffi_cfr_debug(external_nullifier);
printf(" - external_nullifier = %s\n", debug.ptr);
c_string_free(debug);
ffi_c_string_free(debug);
printf("\nCreating message_id\n");
CFr_t *message_id = uint_to_cfr(0);
CFr_t *message_id = ffi_uint_to_cfr(0);
debug = cfr_debug(message_id);
debug = ffi_cfr_debug(message_id);
printf(" - message_id = %s\n", debug.ptr);
c_string_free(debug);
ffi_c_string_free(debug);
printf("\nGenerating RLN Proof\n");
printf("\nCreating RLN Witness\n");
#ifdef STATELESS
CResult_FFI_RLNProof_ptr_Vec_uint8_t proof_gen_result = ffi_generate_rln_proof_stateless(
&rln,
CResult_FFI_RLNWitnessInput_ptr_Vec_uint8_t witness_result = ffi_rln_witness_input_new(
identity_secret,
user_message_limit,
message_id,
@@ -258,30 +264,132 @@ int main(int argc, char const *const argv[])
&identity_path_index,
x,
external_nullifier);
if (!witness_result.ok)
{
fprintf(stderr, "RLN Witness creation error: %s\n", witness_result.err.ptr);
ffi_c_string_free(witness_result.err);
return EXIT_FAILURE;
}
FFI_RLNWitnessInput_t *witness = witness_result.ok;
printf("RLN Witness created successfully\n");
#else
CResult_FFI_RLNProof_ptr_Vec_uint8_t proof_gen_result = ffi_generate_rln_proof(
&rln,
CResult_FFI_RLNWitnessInput_ptr_Vec_uint8_t witness_result = ffi_rln_witness_input_new(
identity_secret,
user_message_limit,
message_id,
&merkle_proof->path_elements,
&merkle_proof->path_index,
x,
external_nullifier,
leaf_index);
external_nullifier);
if (!witness_result.ok)
{
fprintf(stderr, "RLN Witness creation error: %s\n", witness_result.err.ptr);
ffi_c_string_free(witness_result.err);
return EXIT_FAILURE;
}
FFI_RLNWitnessInput_t *witness = witness_result.ok;
printf("RLN Witness created successfully\n");
#endif
printf("\nGenerating RLN Proof\n");
CResult_FFI_RLNProof_ptr_Vec_uint8_t proof_gen_result = ffi_generate_rln_proof(
&rln,
&witness);
if (!proof_gen_result.ok)
{
fprintf(stderr, "Proof generation error: %s\n", proof_gen_result.err.ptr);
c_string_free(proof_gen_result.err);
ffi_c_string_free(proof_gen_result.err);
return EXIT_FAILURE;
}
FFI_RLNProof_t *rln_proof = proof_gen_result.ok;
printf("Proof generated successfully\n");
printf("\nGetting proof values\n");
FFI_RLNProofValues_t *proof_values = ffi_rln_proof_get_values(&rln_proof);
CFr_t *y = ffi_rln_proof_values_get_y(&proof_values);
debug = ffi_cfr_debug(y);
printf(" - y = %s\n", debug.ptr);
ffi_c_string_free(debug);
ffi_cfr_free(y);
CFr_t *nullifier = ffi_rln_proof_values_get_nullifier(&proof_values);
debug = ffi_cfr_debug(nullifier);
printf(" - nullifier = %s\n", debug.ptr);
ffi_c_string_free(debug);
ffi_cfr_free(nullifier);
CFr_t *root = ffi_rln_proof_values_get_root(&proof_values);
debug = ffi_cfr_debug(root);
printf(" - root = %s\n", debug.ptr);
ffi_c_string_free(debug);
ffi_cfr_free(root);
CFr_t *x_val = ffi_rln_proof_values_get_x(&proof_values);
debug = ffi_cfr_debug(x_val);
printf(" - x = %s\n", debug.ptr);
ffi_c_string_free(debug);
ffi_cfr_free(x_val);
CFr_t *ext_nullifier = ffi_rln_proof_values_get_external_nullifier(&proof_values);
debug = ffi_cfr_debug(ext_nullifier);
printf(" - external_nullifier = %s\n", debug.ptr);
ffi_c_string_free(debug);
ffi_cfr_free(ext_nullifier);
printf("\nRLNProof serialization: RLNProof <-> bytes\n");
Vec_uint8_t ser_proof = ffi_rln_proof_to_bytes_le(&rln_proof);
debug = ffi_vec_u8_debug(&ser_proof);
printf(" - serialized proof = %s\n", debug.ptr);
ffi_c_string_free(debug);
CResult_FFI_RLNProof_ptr_Vec_uint8_t deser_proof_result = ffi_bytes_le_to_rln_proof(&ser_proof);
if (!deser_proof_result.ok)
{
fprintf(stderr, "Proof deserialization error: %s\n", deser_proof_result.err.ptr);
ffi_c_string_free(deser_proof_result.err);
return EXIT_FAILURE;
}
FFI_RLNProof_t *deser_proof = deser_proof_result.ok;
printf(" - proof deserialized successfully\n");
printf("\nRLNProofValues serialization: RLNProofValues <-> bytes\n");
Vec_uint8_t ser_proof_values = ffi_rln_proof_values_to_bytes_le(&proof_values);
debug = ffi_vec_u8_debug(&ser_proof_values);
printf(" - serialized proof_values = %s\n", debug.ptr);
ffi_c_string_free(debug);
CResult_FFI_RLNProofValues_ptr_Vec_uint8_t deser_proof_values_result = ffi_bytes_le_to_rln_proof_values(&ser_proof_values);
if (!deser_proof_values_result.ok)
{
fprintf(stderr, "Proof values deserialization error: %s\n", deser_proof_values_result.err.ptr);
ffi_c_string_free(deser_proof_values_result.err);
return EXIT_FAILURE;
}
FFI_RLNProofValues_t *deser_proof_values = deser_proof_values_result.ok;
printf(" - proof_values deserialized successfully\n");
CFr_t *deser_external_nullifier = ffi_rln_proof_values_get_external_nullifier(&deser_proof_values);
debug = ffi_cfr_debug(deser_external_nullifier);
printf(" - deserialized external_nullifier = %s\n", debug.ptr);
ffi_c_string_free(debug);
ffi_cfr_free(deser_external_nullifier);
ffi_rln_proof_values_free(deser_proof_values);
ffi_vec_u8_free(ser_proof_values);
ffi_rln_proof_free(deser_proof);
ffi_vec_u8_free(ser_proof);
printf("\nVerifying Proof\n");
#ifdef STATELESS
Vec_CFr_t roots = vec_cfr_from_cfr(computed_root);
Vec_CFr_t roots = ffi_vec_cfr_from_cfr(computed_root);
CBoolResult_t verify_err = ffi_verify_with_roots(&rln, &rln_proof, &roots, x);
#else
CBoolResult_t verify_err = ffi_verify_rln_proof(&rln, &rln_proof, x);
@@ -290,12 +398,14 @@ int main(int argc, char const *const argv[])
if (!verify_err.ok)
{
fprintf(stderr, "Proof verification error: %s\n", verify_err.err.ptr);
c_string_free(verify_err.err);
ffi_c_string_free(verify_err.err);
return EXIT_FAILURE;
}
printf("Proof verified successfully\n");
ffi_rln_proof_free(rln_proof);
printf("\nSimulating double-signaling attack (same epoch, different message)\n");
printf("\nHashing second signal\n");
@@ -303,21 +413,20 @@ int main(int argc, char const *const argv[])
Vec_uint8_t signal2_vec = {signal2, 32, 32};
CFr_t *x2 = ffi_hash_to_field_le(&signal2_vec);
debug = cfr_debug(x2);
debug = ffi_cfr_debug(x2);
printf(" - x2 = %s\n", debug.ptr);
c_string_free(debug);
ffi_c_string_free(debug);
printf("\nCreating second message with the same id\n");
CFr_t *message_id2 = uint_to_cfr(0);
CFr_t *message_id2 = ffi_uint_to_cfr(0);
debug = cfr_debug(message_id2);
debug = ffi_cfr_debug(message_id2);
printf(" - message_id2 = %s\n", debug.ptr);
c_string_free(debug);
ffi_c_string_free(debug);
printf("\nGenerating second RLN Proof\n");
printf("\nCreating second RLN Witness\n");
#ifdef STATELESS
CResult_FFI_RLNProof_ptr_Vec_uint8_t proof_gen_result2 = ffi_generate_rln_proof_stateless(
&rln,
CResult_FFI_RLNWitnessInput_ptr_Vec_uint8_t witness_result2 = ffi_rln_witness_input_new(
identity_secret,
user_message_limit,
message_id2,
@@ -325,27 +434,51 @@ int main(int argc, char const *const argv[])
&identity_path_index,
x2,
external_nullifier);
if (!witness_result2.ok)
{
fprintf(stderr, "Second RLN Witness creation error: %s\n", witness_result2.err.ptr);
ffi_c_string_free(witness_result2.err);
return EXIT_FAILURE;
}
FFI_RLNWitnessInput_t *witness2 = witness_result2.ok;
printf("Second RLN Witness created successfully\n");
#else
CResult_FFI_RLNProof_ptr_Vec_uint8_t proof_gen_result2 = ffi_generate_rln_proof(
&rln,
CResult_FFI_RLNWitnessInput_ptr_Vec_uint8_t witness_result2 = ffi_rln_witness_input_new(
identity_secret,
user_message_limit,
message_id2,
&merkle_proof->path_elements,
&merkle_proof->path_index,
x2,
external_nullifier,
leaf_index);
external_nullifier);
if (!witness_result2.ok)
{
fprintf(stderr, "Second RLN Witness creation error: %s\n", witness_result2.err.ptr);
ffi_c_string_free(witness_result2.err);
return EXIT_FAILURE;
}
FFI_RLNWitnessInput_t *witness2 = witness_result2.ok;
printf("Second RLN Witness created successfully\n");
#endif
printf("\nGenerating second RLN Proof\n");
CResult_FFI_RLNProof_ptr_Vec_uint8_t proof_gen_result2 = ffi_generate_rln_proof(
&rln,
&witness2);
if (!proof_gen_result2.ok)
{
fprintf(stderr, "Second proof generation error: %s\n", proof_gen_result2.err.ptr);
c_string_free(proof_gen_result2.err);
ffi_c_string_free(proof_gen_result2.err);
return EXIT_FAILURE;
}
FFI_RLNProof_t *rln_proof2 = proof_gen_result2.ok;
printf("Second proof generated successfully\n");
FFI_RLNProofValues_t *proof_values2 = ffi_rln_proof_get_values(&rln_proof2);
printf("\nVerifying second proof\n");
#ifdef STATELESS
CBoolResult_t verify_err2 = ffi_verify_with_roots(&rln, &rln_proof2, &roots, x2);
@@ -356,61 +489,67 @@ int main(int argc, char const *const argv[])
if (!verify_err2.ok)
{
fprintf(stderr, "Proof verification error: %s\n", verify_err2.err.ptr);
c_string_free(verify_err2.err);
ffi_c_string_free(verify_err2.err);
return EXIT_FAILURE;
}
printf("Second proof verified successfully\n");
ffi_rln_proof_free(rln_proof2);
printf("\nRecovering identity secret\n");
CResult_CFr_ptr_Vec_uint8_t recover_result = ffi_recover_id_secret(&rln_proof, &rln_proof2);
CResult_CFr_ptr_Vec_uint8_t recover_result = ffi_recover_id_secret(&proof_values, &proof_values2);
if (!recover_result.ok)
{
fprintf(stderr, "Identity recovery error: %s\n", recover_result.err.ptr);
c_string_free(recover_result.err);
ffi_c_string_free(recover_result.err);
return EXIT_FAILURE;
}
CFr_t *recovered_secret = recover_result.ok;
debug = cfr_debug(recovered_secret);
debug = ffi_cfr_debug(recovered_secret);
printf(" - recovered_secret = %s\n", debug.ptr);
c_string_free(debug);
ffi_c_string_free(debug);
debug = cfr_debug(identity_secret);
debug = ffi_cfr_debug(identity_secret);
printf(" - original_secret = %s\n", debug.ptr);
c_string_free(debug);
ffi_c_string_free(debug);
printf("Slashing successful: Identity is recovered!\n");
cfr_free(recovered_secret);
ffi_cfr_free(recovered_secret);
ffi_rln_proof_free(rln_proof2);
cfr_free(x2);
cfr_free(message_id2);
ffi_rln_proof_free(rln_proof);
ffi_rln_proof_values_free(proof_values2);
ffi_rln_proof_values_free(proof_values);
ffi_cfr_free(x2);
ffi_cfr_free(message_id2);
#ifdef STATELESS
vec_cfr_free(roots);
vec_cfr_free(path_elements);
ffi_rln_witness_input_free(witness2);
ffi_rln_witness_input_free(witness);
ffi_vec_cfr_free(roots);
ffi_vec_cfr_free(path_elements);
for (size_t i = 0; i < TREE_DEPTH - 1; i++)
{
cfr_free(default_hashes[i]);
ffi_cfr_free(default_hashes[i]);
}
cfr_free(default_leaf);
cfr_free(computed_root);
ffi_cfr_free(default_leaf);
ffi_cfr_free(computed_root);
#else
ffi_rln_witness_input_free(witness2);
ffi_rln_witness_input_free(witness);
ffi_merkle_proof_free(merkle_proof);
#endif
cfr_free(rate_commitment);
cfr_free(x);
cfr_free(epoch);
cfr_free(rln_identifier);
cfr_free(external_nullifier);
cfr_free(user_message_limit);
cfr_free(message_id);
vec_cfr_free(keys);
ffi_cfr_free(rate_commitment);
ffi_cfr_free(x);
ffi_cfr_free(epoch);
ffi_cfr_free(rln_identifier);
ffi_cfr_free(external_nullifier);
ffi_cfr_free(user_message_limit);
ffi_cfr_free(message_id);
ffi_vec_cfr_free(keys);
ffi_rln_free(rln);
return EXIT_SUCCESS;

View File

@@ -101,6 +101,6 @@ Verify: OK
2) Generates identity keys and computes `rateCommitment = Poseidon(id_commitment, user_message_limit)`.
3) Inserts the leaf with `ffi_set_next_leaf` and fetches a real Merkle path for index 0 via `ffi_get_proof`.
3) Inserts the leaf with `ffi_set_next_leaf` and fetches a real Merkle path for index 0 via `ffi_get_merkle_proof`.
4) Builds the witness from the exported proof, generates the proof, and verifies with `ffi_verify_rln_proof` using the current tree root.

View File

@@ -20,6 +20,7 @@ type
CFr* = object
FFI_RLN* = object
FFI_RLNProof* = object
FFI_RLNWitnessInput* = object
Vec_CFr* = object
dataPtr*: ptr CFr
@@ -47,10 +48,20 @@ type
ok*: ptr FFI_RLNProof
err*: Vec_uint8
CResultWitnessInputPtrVecU8* = object
ok*: ptr FFI_RLNWitnessInput
err*: Vec_uint8
FFI_RLNProofValues* = object
CResultCFrPtrVecU8* = object
ok*: ptr CFr
err*: Vec_uint8
CResultRLNProofValuesPtrVecU8* = object
ok*: ptr FFI_RLNProofValues
err*: Vec_uint8
CResultMerkleProofPtrVecU8* = object
ok*: ptr FFI_MerkleProof
err*: Vec_uint8
@@ -63,62 +74,69 @@ type
ok*: Vec_uint8
err*: Vec_uint8
CResultBigIntJsonVecU8* = object
ok*: Vec_uint8
err*: Vec_uint8
CBoolResult* = object
ok*: bool
err*: Vec_uint8
# CFr functions
proc cfr_zero*(): ptr CFr {.importc: "cfr_zero", cdecl, dynlib: RLN_LIB.}
proc cfr_free*(x: ptr CFr) {.importc: "cfr_free", cdecl, dynlib: RLN_LIB.}
proc uint_to_cfr*(value: uint32): ptr CFr {.importc: "uint_to_cfr", cdecl,
proc ffi_cfr_zero*(): ptr CFr {.importc: "ffi_cfr_zero", cdecl,
dynlib: RLN_LIB.}
proc cfr_debug*(cfr: ptr CFr): Vec_uint8 {.importc: "cfr_debug", cdecl,
proc ffi_cfr_one*(): ptr CFr {.importc: "ffi_cfr_one", cdecl, dynlib: RLN_LIB.}
proc ffi_cfr_free*(x: ptr CFr) {.importc: "ffi_cfr_free", cdecl,
dynlib: RLN_LIB.}
proc cfr_to_bytes_le*(cfr: ptr CFr): Vec_uint8 {.importc: "cfr_to_bytes_le",
proc ffi_uint_to_cfr*(value: uint32): ptr CFr {.importc: "ffi_uint_to_cfr",
cdecl, dynlib: RLN_LIB.}
proc cfr_to_bytes_be*(cfr: ptr CFr): Vec_uint8 {.importc: "cfr_to_bytes_be",
proc ffi_cfr_debug*(cfr: ptr CFr): Vec_uint8 {.importc: "ffi_cfr_debug", cdecl,
dynlib: RLN_LIB.}
proc ffi_cfr_to_bytes_le*(cfr: ptr CFr): Vec_uint8 {.importc: "ffi_cfr_to_bytes_le",
cdecl, dynlib: RLN_LIB.}
proc bytes_le_to_cfr*(bytes: ptr Vec_uint8): ptr CFr {.importc: "bytes_le_to_cfr",
proc ffi_cfr_to_bytes_be*(cfr: ptr CFr): Vec_uint8 {.importc: "ffi_cfr_to_bytes_be",
cdecl, dynlib: RLN_LIB.}
proc bytes_be_to_cfr*(bytes: ptr Vec_uint8): ptr CFr {.importc: "bytes_be_to_cfr",
proc ffi_bytes_le_to_cfr*(bytes: ptr Vec_uint8): CResultCFrPtrVecU8 {.importc: "ffi_bytes_le_to_cfr",
cdecl, dynlib: RLN_LIB.}
proc ffi_bytes_be_to_cfr*(bytes: ptr Vec_uint8): CResultCFrPtrVecU8 {.importc: "ffi_bytes_be_to_cfr",
cdecl, dynlib: RLN_LIB.}
# Vec<CFr> functions
proc vec_cfr_new*(capacity: CSize): Vec_CFr {.importc: "vec_cfr_new", cdecl,
dynlib: RLN_LIB.}
proc vec_cfr_from_cfr*(cfr: ptr CFr): Vec_CFr {.importc: "vec_cfr_from_cfr",
proc ffi_vec_cfr_new*(capacity: CSize): Vec_CFr {.importc: "ffi_vec_cfr_new",
cdecl, dynlib: RLN_LIB.}
proc vec_cfr_push*(v: ptr Vec_CFr, cfr: ptr CFr) {.importc: "vec_cfr_push",
proc ffi_vec_cfr_from_cfr*(cfr: ptr CFr): Vec_CFr {.importc: "ffi_vec_cfr_from_cfr",
cdecl, dynlib: RLN_LIB.}
proc vec_cfr_len*(v: ptr Vec_CFr): CSize {.importc: "vec_cfr_len", cdecl,
dynlib: RLN_LIB.}
proc vec_cfr_get*(v: ptr Vec_CFr, i: CSize): ptr CFr {.importc: "vec_cfr_get",
proc ffi_vec_cfr_push*(v: ptr Vec_CFr, cfr: ptr CFr) {.importc: "ffi_vec_cfr_push",
cdecl, dynlib: RLN_LIB.}
proc vec_cfr_to_bytes_le*(v: ptr Vec_CFr): Vec_uint8 {.importc: "vec_cfr_to_bytes_le",
proc ffi_vec_cfr_len*(v: ptr Vec_CFr): CSize {.importc: "ffi_vec_cfr_len",
cdecl, dynlib: RLN_LIB.}
proc vec_cfr_to_bytes_be*(v: ptr Vec_CFr): Vec_uint8 {.importc: "vec_cfr_to_bytes_be",
proc ffi_vec_cfr_get*(v: ptr Vec_CFr, i: CSize): ptr CFr {.importc: "ffi_vec_cfr_get",
cdecl, dynlib: RLN_LIB.}
proc bytes_le_to_vec_cfr*(bytes: ptr Vec_uint8): CResultVecCFrVecU8 {.importc: "bytes_le_to_vec_cfr",
proc ffi_vec_cfr_to_bytes_le*(v: ptr Vec_CFr): Vec_uint8 {.importc: "ffi_vec_cfr_to_bytes_le",
cdecl, dynlib: RLN_LIB.}
proc bytes_be_to_vec_cfr*(bytes: ptr Vec_uint8): CResultVecCFrVecU8 {.importc: "bytes_be_to_vec_cfr",
proc ffi_vec_cfr_to_bytes_be*(v: ptr Vec_CFr): Vec_uint8 {.importc: "ffi_vec_cfr_to_bytes_be",
cdecl, dynlib: RLN_LIB.}
proc vec_cfr_debug*(v: ptr Vec_CFr): Vec_uint8 {.importc: "vec_cfr_debug",
proc ffi_bytes_le_to_vec_cfr*(bytes: ptr Vec_uint8): CResultVecCFrVecU8 {.importc: "ffi_bytes_le_to_vec_cfr",
cdecl, dynlib: RLN_LIB.}
proc vec_cfr_free*(v: Vec_CFr) {.importc: "vec_cfr_free", cdecl,
proc ffi_bytes_be_to_vec_cfr*(bytes: ptr Vec_uint8): CResultVecCFrVecU8 {.importc: "ffi_bytes_be_to_vec_cfr",
cdecl, dynlib: RLN_LIB.}
proc ffi_vec_cfr_debug*(v: ptr Vec_CFr): Vec_uint8 {.importc: "ffi_vec_cfr_debug",
cdecl, dynlib: RLN_LIB.}
proc ffi_vec_cfr_free*(v: Vec_CFr) {.importc: "ffi_vec_cfr_free", cdecl,
dynlib: RLN_LIB.}
# Vec<u8> functions
proc vec_u8_to_bytes_le*(v: ptr Vec_uint8): Vec_uint8 {.importc: "vec_u8_to_bytes_le",
proc ffi_vec_u8_to_bytes_le*(v: ptr Vec_uint8): Vec_uint8 {.importc: "ffi_vec_u8_to_bytes_le",
cdecl, dynlib: RLN_LIB.}
proc vec_u8_to_bytes_be*(v: ptr Vec_uint8): Vec_uint8 {.importc: "vec_u8_to_bytes_be",
proc ffi_vec_u8_to_bytes_be*(v: ptr Vec_uint8): Vec_uint8 {.importc: "ffi_vec_u8_to_bytes_be",
cdecl, dynlib: RLN_LIB.}
proc bytes_le_to_vec_u8*(bytes: ptr Vec_uint8): CResultVecU8VecU8 {.importc: "bytes_le_to_vec_u8",
proc ffi_bytes_le_to_vec_u8*(bytes: ptr Vec_uint8): CResultVecU8VecU8 {.importc: "ffi_bytes_le_to_vec_u8",
cdecl, dynlib: RLN_LIB.}
proc bytes_be_to_vec_u8*(bytes: ptr Vec_uint8): CResultVecU8VecU8 {.importc: "bytes_be_to_vec_u8",
proc ffi_bytes_be_to_vec_u8*(bytes: ptr Vec_uint8): CResultVecU8VecU8 {.importc: "ffi_bytes_be_to_vec_u8",
cdecl, dynlib: RLN_LIB.}
proc vec_u8_debug*(v: ptr Vec_uint8): Vec_uint8 {.importc: "vec_u8_debug",
proc ffi_vec_u8_debug*(v: ptr Vec_uint8): Vec_uint8 {.importc: "ffi_vec_u8_debug",
cdecl, dynlib: RLN_LIB.}
proc vec_u8_free*(v: Vec_uint8) {.importc: "vec_u8_free", cdecl,
proc ffi_vec_u8_free*(v: Vec_uint8) {.importc: "ffi_vec_u8_free", cdecl,
dynlib: RLN_LIB.}
# Hashing functions
@@ -133,52 +151,69 @@ proc ffi_poseidon_hash_pair*(a: ptr CFr,
# Keygen function
proc ffi_key_gen*(): Vec_CFr {.importc: "ffi_key_gen", cdecl,
dynlib: RLN_LIB.}
proc ffi_seeded_key_gen*(seed: ptr Vec_uint8): Vec_CFr {.importc: "ffi_seeded_key_gen",
cdecl, dynlib: RLN_LIB.}
proc ffi_extended_key_gen*(): Vec_CFr {.importc: "ffi_extended_key_gen",
cdecl, dynlib: RLN_LIB.}
proc ffi_seeded_extended_key_gen*(seed: ptr Vec_uint8): Vec_CFr {.importc: "ffi_seeded_extended_key_gen",
cdecl, dynlib: RLN_LIB.}
# RLN instance functions
when defined(ffiStateless):
proc ffi_rln_new*(): CResultRLNPtrVecU8 {.importc: "ffi_rln_new", cdecl,
dynlib: RLN_LIB.}
proc ffi_rln_new_with_params*(zkey_data: ptr Vec_uint8,
graph_data: ptr Vec_uint8): CResultRLNPtrVecU8 {.importc: "ffi_rln_new_with_params",
cdecl, dynlib: RLN_LIB.}
else:
proc ffi_rln_new*(treeDepth: CSize, config: cstring): CResultRLNPtrVecU8 {.importc: "ffi_rln_new",
cdecl, dynlib: RLN_LIB.}
proc ffi_rln_new_with_params*(treeDepth: CSize, zkey_data: ptr Vec_uint8,
graph_data: ptr Vec_uint8, config: cstring): CResultRLNPtrVecU8 {.importc: "ffi_rln_new_with_params",
cdecl, dynlib: RLN_LIB.}
proc ffi_rln_free*(rln: ptr FFI_RLN) {.importc: "ffi_rln_free", cdecl,
dynlib: RLN_LIB.}
# Proof generation/verification functions
when defined(ffiStateless):
proc ffi_generate_rln_proof_stateless*(
rln: ptr ptr FFI_RLN,
identity_secret: ptr CFr,
user_message_limit: ptr CFr,
message_id: ptr CFr,
path_elements: ptr Vec_CFr,
identity_path_index: ptr Vec_uint8,
x: ptr CFr,
external_nullifier: ptr CFr
): CResultProofPtrVecU8 {.importc: "ffi_generate_rln_proof_stateless", cdecl,
dynlib: RLN_LIB.}
else:
proc ffi_generate_rln_proof*(
rln: ptr ptr FFI_RLN,
identity_secret: ptr CFr,
user_message_limit: ptr CFr,
message_id: ptr CFr,
x: ptr CFr,
external_nullifier: ptr CFr,
leaf_index: CSize
): CResultProofPtrVecU8 {.importc: "ffi_generate_rln_proof", cdecl,
dynlib: RLN_LIB.}
# Witness input functions
proc ffi_rln_witness_input_new*(
identity_secret: ptr CFr,
user_message_limit: ptr CFr,
message_id: ptr CFr,
path_elements: ptr Vec_CFr,
identity_path_index: ptr Vec_uint8,
x: ptr CFr,
external_nullifier: ptr CFr
): CResultWitnessInputPtrVecU8 {.importc: "ffi_rln_witness_input_new", cdecl,
dynlib: RLN_LIB.}
proc ffi_rln_witness_to_bytes_le*(witness: ptr ptr FFI_RLNWitnessInput): Vec_uint8 {.importc: "ffi_rln_witness_to_bytes_le",
cdecl, dynlib: RLN_LIB.}
proc ffi_rln_witness_to_bytes_be*(witness: ptr ptr FFI_RLNWitnessInput): Vec_uint8 {.importc: "ffi_rln_witness_to_bytes_be",
cdecl, dynlib: RLN_LIB.}
proc ffi_bytes_le_to_rln_witness*(bytes: ptr Vec_uint8): CResultWitnessInputPtrVecU8 {.importc: "ffi_bytes_le_to_rln_witness",
cdecl, dynlib: RLN_LIB.}
proc ffi_bytes_be_to_rln_witness*(bytes: ptr Vec_uint8): CResultWitnessInputPtrVecU8 {.importc: "ffi_bytes_be_to_rln_witness",
cdecl, dynlib: RLN_LIB.}
proc ffi_rln_witness_to_bigint_json*(witness: ptr ptr FFI_RLNWitnessInput): CResultBigIntJsonVecU8 {.importc: "ffi_rln_witness_to_bigint_json",
cdecl, dynlib: RLN_LIB.}
proc ffi_rln_witness_input_free*(witness: ptr FFI_RLNWitnessInput) {.importc: "ffi_rln_witness_input_free",
cdecl, dynlib: RLN_LIB.}
when defined(ffiStateless):
proc ffi_verify_with_roots*(
rln: ptr ptr FFI_RLN,
proof: ptr ptr FFI_RLNProof,
roots: ptr Vec_CFr,
x: ptr CFr
): CBoolResult {.importc: "ffi_verify_with_roots", cdecl,
dynlib: RLN_LIB.}
else:
# Proof generation/verification functions
proc ffi_generate_rln_proof*(
rln: ptr ptr FFI_RLN,
witness: ptr ptr FFI_RLNWitnessInput
): CResultProofPtrVecU8 {.importc: "ffi_generate_rln_proof", cdecl,
dynlib: RLN_LIB.}
proc ffi_generate_rln_proof_with_witness*(
rln: ptr ptr FFI_RLN,
calculated_witness: ptr Vec_uint8,
witness: ptr ptr FFI_RLNWitnessInput
): CResultProofPtrVecU8 {.importc: "ffi_generate_rln_proof_with_witness",
cdecl, dynlib: RLN_LIB.}
when not defined(ffiStateless):
proc ffi_verify_rln_proof*(
rln: ptr ptr FFI_RLN,
proof: ptr ptr FFI_RLNProof,
@@ -186,25 +221,101 @@ else:
): CBoolResult {.importc: "ffi_verify_rln_proof", cdecl,
dynlib: RLN_LIB.}
proc ffi_verify_with_roots*(
rln: ptr ptr FFI_RLN,
proof: ptr ptr FFI_RLNProof,
roots: ptr Vec_CFr,
x: ptr CFr
): CBoolResult {.importc: "ffi_verify_with_roots", cdecl,
dynlib: RLN_LIB.}
proc ffi_rln_proof_free*(p: ptr FFI_RLNProof) {.importc: "ffi_rln_proof_free",
cdecl, dynlib: RLN_LIB.}
# Merkle tree operations (non-stateless mode)
when not defined(ffiStateless):
proc ffi_set_tree*(rln: ptr ptr FFI_RLN,
tree_depth: CSize): CBoolResult {.importc: "ffi_set_tree",
cdecl, dynlib: RLN_LIB.}
proc ffi_delete_leaf*(rln: ptr ptr FFI_RLN,
index: CSize): CBoolResult {.importc: "ffi_delete_leaf",
cdecl, dynlib: RLN_LIB.}
proc ffi_set_leaf*(rln: ptr ptr FFI_RLN, index: CSize,
leaf: ptr CFr): CBoolResult {.importc: "ffi_set_leaf",
cdecl, dynlib: RLN_LIB.}
proc ffi_get_leaf*(rln: ptr ptr FFI_RLN,
index: CSize): CResultCFrPtrVecU8 {.importc: "ffi_get_leaf",
cdecl, dynlib: RLN_LIB.}
proc ffi_set_next_leaf*(rln: ptr ptr FFI_RLN,
leaf: ptr ptr CFr): CBoolResult {.importc: "ffi_set_next_leaf",
leaf: ptr CFr): CBoolResult {.importc: "ffi_set_next_leaf",
cdecl, dynlib: RLN_LIB.}
proc ffi_set_leaves_from*(rln: ptr ptr FFI_RLN, index: CSize,
leaves: ptr Vec_CFr): CBoolResult {.importc: "ffi_set_leaves_from",
cdecl, dynlib: RLN_LIB.}
proc ffi_init_tree_with_leaves*(rln: ptr ptr FFI_RLN,
leaves: ptr Vec_CFr): CBoolResult {.importc: "ffi_init_tree_with_leaves",
cdecl, dynlib: RLN_LIB.}
proc ffi_atomic_operation*(rln: ptr ptr FFI_RLN, index: CSize,
leaves: ptr Vec_CFr,
indices: ptr Vec_uint8): CBoolResult {.importc: "ffi_atomic_operation",
cdecl, dynlib: RLN_LIB.}
proc ffi_seq_atomic_operation*(rln: ptr ptr FFI_RLN, leaves: ptr Vec_CFr,
indices: ptr Vec_uint8): CBoolResult {.importc: "ffi_seq_atomic_operation",
cdecl, dynlib: RLN_LIB.}
proc ffi_get_root*(rln: ptr ptr FFI_RLN): ptr CFr {.importc: "ffi_get_root",
cdecl, dynlib: RLN_LIB.}
proc ffi_leaves_set*(rln: ptr ptr FFI_RLN): CSize {.importc: "ffi_leaves_set",
cdecl, dynlib: RLN_LIB.}
proc ffi_get_proof*(rln: ptr ptr FFI_RLN,
index: CSize): CResultMerkleProofPtrVecU8 {.importc: "ffi_get_proof",
proc ffi_get_merkle_proof*(rln: ptr ptr FFI_RLN,
index: CSize): CResultMerkleProofPtrVecU8 {.importc: "ffi_get_merkle_proof",
cdecl, dynlib: RLN_LIB.}
proc ffi_set_metadata*(rln: ptr ptr FFI_RLN,
metadata: ptr Vec_uint8): CBoolResult {.importc: "ffi_set_metadata",
cdecl, dynlib: RLN_LIB.}
proc ffi_get_metadata*(rln: ptr ptr FFI_RLN): CResultVecU8VecU8 {.importc: "ffi_get_metadata",
cdecl, dynlib: RLN_LIB.}
proc ffi_flush*(rln: ptr ptr FFI_RLN): CBoolResult {.importc: "ffi_flush",
cdecl, dynlib: RLN_LIB.}
proc ffi_merkle_proof_free*(p: ptr FFI_MerkleProof) {.importc: "ffi_merkle_proof_free",
cdecl, dynlib: RLN_LIB.}
# Secret recovery
proc ffi_recover_id_secret*(proof1: ptr ptr FFI_RLNProof,
proof2: ptr ptr FFI_RLNProof): CResultCFrPtrVecU8 {.importc: "ffi_recover_id_secret",
# Identity secret recovery
proc ffi_recover_id_secret*(proof_values_1: ptr ptr FFI_RLNProofValues,
proof_values_2: ptr ptr FFI_RLNProofValues): CResultCFrPtrVecU8 {.importc: "ffi_recover_id_secret",
cdecl, dynlib: RLN_LIB.}
# RLNProof serialization
proc ffi_rln_proof_to_bytes_le*(proof: ptr ptr FFI_RLNProof): Vec_uint8 {.importc: "ffi_rln_proof_to_bytes_le",
cdecl, dynlib: RLN_LIB.}
proc ffi_rln_proof_to_bytes_be*(proof: ptr ptr FFI_RLNProof): Vec_uint8 {.importc: "ffi_rln_proof_to_bytes_be",
cdecl, dynlib: RLN_LIB.}
proc ffi_bytes_le_to_rln_proof*(bytes: ptr Vec_uint8): CResultProofPtrVecU8 {.importc: "ffi_bytes_le_to_rln_proof",
cdecl, dynlib: RLN_LIB.}
proc ffi_bytes_be_to_rln_proof*(bytes: ptr Vec_uint8): CResultProofPtrVecU8 {.importc: "ffi_bytes_be_to_rln_proof",
cdecl, dynlib: RLN_LIB.}
# RLNProofValues functions
proc ffi_rln_proof_get_values*(proof: ptr ptr FFI_RLNProof): ptr FFI_RLNProofValues {.importc: "ffi_rln_proof_get_values",
cdecl, dynlib: RLN_LIB.}
proc ffi_rln_proof_values_get_y*(pv: ptr ptr FFI_RLNProofValues): ptr CFr {.importc: "ffi_rln_proof_values_get_y",
cdecl, dynlib: RLN_LIB.}
proc ffi_rln_proof_values_get_nullifier*(pv: ptr ptr FFI_RLNProofValues): ptr CFr {.importc: "ffi_rln_proof_values_get_nullifier",
cdecl, dynlib: RLN_LIB.}
proc ffi_rln_proof_values_get_root*(pv: ptr ptr FFI_RLNProofValues): ptr CFr {.importc: "ffi_rln_proof_values_get_root",
cdecl, dynlib: RLN_LIB.}
proc ffi_rln_proof_values_get_x*(pv: ptr ptr FFI_RLNProofValues): ptr CFr {.importc: "ffi_rln_proof_values_get_x",
cdecl, dynlib: RLN_LIB.}
proc ffi_rln_proof_values_get_external_nullifier*(pv: ptr ptr FFI_RLNProofValues): ptr CFr {.importc: "ffi_rln_proof_values_get_external_nullifier",
cdecl, dynlib: RLN_LIB.}
proc ffi_rln_proof_values_to_bytes_le*(pv: ptr ptr FFI_RLNProofValues): Vec_uint8 {.importc: "ffi_rln_proof_values_to_bytes_le",
cdecl, dynlib: RLN_LIB.}
proc ffi_rln_proof_values_to_bytes_be*(pv: ptr ptr FFI_RLNProofValues): Vec_uint8 {.importc: "ffi_rln_proof_values_to_bytes_be",
cdecl, dynlib: RLN_LIB.}
proc ffi_bytes_le_to_rln_proof_values*(bytes: ptr Vec_uint8): CResultRLNProofValuesPtrVecU8 {.importc: "ffi_bytes_le_to_rln_proof_values",
cdecl, dynlib: RLN_LIB.}
proc ffi_bytes_be_to_rln_proof_values*(bytes: ptr Vec_uint8): CResultRLNProofValuesPtrVecU8 {.importc: "ffi_bytes_be_to_rln_proof_values",
cdecl, dynlib: RLN_LIB.}
proc ffi_rln_proof_values_free*(pv: ptr FFI_RLNProofValues) {.importc: "ffi_rln_proof_values_free",
cdecl, dynlib: RLN_LIB.}
# Helpers functions
@@ -218,7 +329,7 @@ proc asString*(v: Vec_uint8): string =
result = newString(v.len.int)
copyMem(addr result[0], v.dataPtr, v.len.int)
proc c_string_free*(s: Vec_uint8) {.importc: "c_string_free", cdecl,
proc ffi_c_string_free*(s: Vec_uint8) {.importc: "ffi_c_string_free", cdecl,
dynlib: RLN_LIB.}
when isMainModule:
@@ -233,7 +344,7 @@ when isMainModule:
if rlnRes.ok.isNil:
stderr.writeLine "Initial RLN instance creation error: ", asString(rlnRes.err)
c_string_free(rlnRes.err)
ffi_c_string_free(rlnRes.err)
quit 1
var rln = rlnRes.ok
@@ -241,77 +352,83 @@ when isMainModule:
echo "\nGenerating identity keys"
var keys = ffi_key_gen()
let identitySecret = vec_cfr_get(addr keys, CSize(0))
let idCommitment = vec_cfr_get(addr keys, CSize(1))
let identitySecret = ffi_vec_cfr_get(addr keys, CSize(0))
let idCommitment = ffi_vec_cfr_get(addr keys, CSize(1))
echo "Identity generated"
block:
let debug = cfr_debug(identitySecret)
let debug = ffi_cfr_debug(identitySecret)
echo " - identity_secret = ", asString(debug)
c_string_free(debug)
ffi_c_string_free(debug)
block:
let debug = cfr_debug(idCommitment)
let debug = ffi_cfr_debug(idCommitment)
echo " - id_commitment = ", asString(debug)
c_string_free(debug)
ffi_c_string_free(debug)
echo "\nCreating message limit"
let userMessageLimit = uint_to_cfr(1'u32)
let userMessageLimit = ffi_uint_to_cfr(1'u32)
block:
let debug = cfr_debug(userMessageLimit)
let debug = ffi_cfr_debug(userMessageLimit)
echo " - user_message_limit = ", asString(debug)
c_string_free(debug)
ffi_c_string_free(debug)
echo "\nComputing rate commitment"
let rateCommitment = ffi_poseidon_hash_pair(idCommitment, userMessageLimit)
block:
let debug = cfr_debug(rateCommitment)
let debug = ffi_cfr_debug(rateCommitment)
echo " - rate_commitment = ", asString(debug)
c_string_free(debug)
ffi_c_string_free(debug)
echo "\nCFr serialization: CFr <-> bytes"
var serRateCommitment = cfr_to_bytes_be(rateCommitment)
var serRateCommitment = ffi_cfr_to_bytes_be(rateCommitment)
block:
let debug = vec_u8_debug(addr serRateCommitment)
let debug = ffi_vec_u8_debug(addr serRateCommitment)
echo " - serialized rate_commitment = ", asString(debug)
c_string_free(debug)
ffi_c_string_free(debug)
let deserRateCommitment = bytes_be_to_cfr(addr serRateCommitment)
let deserRateCommitmentResult = ffi_bytes_be_to_cfr(addr serRateCommitment)
if deserRateCommitmentResult.ok.isNil:
stderr.writeLine "Rate commitment deserialization error: ", asString(
deserRateCommitmentResult.err)
ffi_c_string_free(deserRateCommitmentResult.err)
quit 1
let deserRateCommitment = deserRateCommitmentResult.ok
block:
let debug = cfr_debug(deserRateCommitment)
let debug = ffi_cfr_debug(deserRateCommitment)
echo " - deserialized rate_commitment = ", asString(debug)
c_string_free(debug)
ffi_c_string_free(debug)
vec_u8_free(serRateCommitment)
cfr_free(deserRateCommitment)
ffi_vec_u8_free(serRateCommitment)
ffi_cfr_free(deserRateCommitment)
echo "\nVec<CFr> serialization: Vec<CFr> <-> bytes"
var serKeys = vec_cfr_to_bytes_be(addr keys)
var serKeys = ffi_vec_cfr_to_bytes_be(addr keys)
block:
let debug = vec_u8_debug(addr serKeys)
let debug = ffi_vec_u8_debug(addr serKeys)
echo " - serialized keys = ", asString(debug)
c_string_free(debug)
ffi_c_string_free(debug)
let deserKeysResult = bytes_be_to_vec_cfr(addr serKeys)
let deserKeysResult = ffi_bytes_be_to_vec_cfr(addr serKeys)
if deserKeysResult.err.dataPtr != nil:
stderr.writeLine "Keys deserialization error: ", asString(
deserKeysResult.err)
c_string_free(deserKeysResult.err)
ffi_c_string_free(deserKeysResult.err)
quit 1
block:
var okKeys = deserKeysResult.ok
let debug = vec_cfr_debug(addr okKeys)
let debug = ffi_vec_cfr_debug(addr okKeys)
echo " - deserialized identity_secret = ", asString(debug)
c_string_free(debug)
ffi_c_string_free(debug)
vec_cfr_free(deserKeysResult.ok)
vec_u8_free(serKeys)
ffi_vec_cfr_free(deserKeysResult.ok)
ffi_vec_u8_free(serKeys)
when defined(ffiStateless):
const treeDepth = 20
@@ -319,98 +436,98 @@ when isMainModule:
echo "\nBuilding Merkle path for stateless mode"
let defaultLeaf = cfr_zero()
let defaultLeaf = ffi_cfr_zero()
var defaultHashes: array[treeDepth-1, ptr CFr]
defaultHashes[0] = ffi_poseidon_hash_pair(defaultLeaf, defaultLeaf)
for i in 1..treeDepth-2:
defaultHashes[i] = ffi_poseidon_hash_pair(defaultHashes[i-1],
defaultHashes[i-1])
var pathElements = vec_cfr_new(CSize(treeDepth))
vec_cfr_push(addr pathElements, defaultLeaf)
var pathElements = ffi_vec_cfr_new(CSize(treeDepth))
ffi_vec_cfr_push(addr pathElements, defaultLeaf)
for i in 0..treeDepth-2:
vec_cfr_push(addr pathElements, defaultHashes[i])
ffi_vec_cfr_push(addr pathElements, defaultHashes[i])
echo "\nVec<CFr> serialization: Vec<CFr> <-> bytes"
var serPathElements = vec_cfr_to_bytes_be(addr pathElements)
var serPathElements = ffi_vec_cfr_to_bytes_be(addr pathElements)
block:
let debug = vec_u8_debug(addr serPathElements)
let debug = ffi_vec_u8_debug(addr serPathElements)
echo " - serialized path_elements = ", asString(debug)
c_string_free(debug)
ffi_c_string_free(debug)
let deserPathElements = bytes_be_to_vec_cfr(addr serPathElements)
let deserPathElements = ffi_bytes_be_to_vec_cfr(addr serPathElements)
if deserPathElements.err.dataPtr != nil:
stderr.writeLine "Path elements deserialization error: ", asString(
deserPathElements.err)
c_string_free(deserPathElements.err)
ffi_c_string_free(deserPathElements.err)
quit 1
block:
var okPathElems = deserPathElements.ok
let debug = vec_cfr_debug(addr okPathElems)
let debug = ffi_vec_cfr_debug(addr okPathElems)
echo " - deserialized path_elements = ", asString(debug)
c_string_free(debug)
ffi_c_string_free(debug)
vec_cfr_free(deserPathElements.ok)
vec_u8_free(serPathElements)
ffi_vec_cfr_free(deserPathElements.ok)
ffi_vec_u8_free(serPathElements)
var pathIndexSeq = newSeq[uint8](treeDepth)
var identityPathIndex = asVecU8(pathIndexSeq)
echo "\nVec<uint8> serialization: Vec<uint8> <-> bytes"
var serPathIndex = vec_u8_to_bytes_be(addr identityPathIndex)
var serPathIndex = ffi_vec_u8_to_bytes_be(addr identityPathIndex)
block:
let debug = vec_u8_debug(addr serPathIndex)
let debug = ffi_vec_u8_debug(addr serPathIndex)
echo " - serialized path_index = ", asString(debug)
c_string_free(debug)
ffi_c_string_free(debug)
let deserPathIndex = bytes_be_to_vec_u8(addr serPathIndex)
let deserPathIndex = ffi_bytes_be_to_vec_u8(addr serPathIndex)
if deserPathIndex.err.dataPtr != nil:
stderr.writeLine "Path index deserialization error: ", asString(
deserPathIndex.err)
c_string_free(deserPathIndex.err)
ffi_c_string_free(deserPathIndex.err)
quit 1
block:
var okPathIdx = deserPathIndex.ok
let debug = vec_u8_debug(addr okPathIdx)
let debug = ffi_vec_u8_debug(addr okPathIdx)
echo " - deserialized path_index = ", asString(debug)
c_string_free(debug)
ffi_c_string_free(debug)
vec_u8_free(deserPathIndex.ok)
vec_u8_free(serPathIndex)
ffi_vec_u8_free(deserPathIndex.ok)
ffi_vec_u8_free(serPathIndex)
echo "\nComputing Merkle root for stateless mode"
echo " - computing root for index 0 with rate_commitment"
var computedRoot = ffi_poseidon_hash_pair(rateCommitment, defaultLeaf)
for i in 1..treeDepth-1:
let next = ffi_poseidon_hash_pair(computedRoot, defaultHashes[i-1])
cfr_free(computedRoot)
ffi_cfr_free(computedRoot)
computedRoot = next
block:
let debug = cfr_debug(computedRoot)
let debug = ffi_cfr_debug(computedRoot)
echo " - computed_root = ", asString(debug)
c_string_free(debug)
ffi_c_string_free(debug)
else:
echo "\nAdding rate_commitment to tree"
var rcPtr = rateCommitment
let setErr = ffi_set_next_leaf(addr rln, addr rcPtr)
let setErr = ffi_set_next_leaf(addr rln, rcPtr)
if not setErr.ok:
stderr.writeLine "Set next leaf error: ", asString(setErr.err)
c_string_free(setErr.err)
ffi_c_string_free(setErr.err)
quit 1
let leafIndex = ffi_leaves_set(addr rln) - 1
echo " - added to tree at index ", leafIndex
echo "\nGetting Merkle proof"
let proofResult = ffi_get_proof(addr rln, leafIndex)
let proofResult = ffi_get_merkle_proof(addr rln, leafIndex)
if proofResult.ok.isNil:
stderr.writeLine "Get proof error: ", asString(proofResult.err)
c_string_free(proofResult.err)
ffi_c_string_free(proofResult.err)
quit 1
let merkleProof = proofResult.ok
echo " - proof obtained (depth: ", merkleProof.path_elements.len, ")"
@@ -423,9 +540,9 @@ when isMainModule:
let x = ffi_hash_to_field_be(addr signalVec)
block:
let debug = cfr_debug(x)
let debug = ffi_cfr_debug(x)
echo " - x = ", asString(debug)
c_string_free(debug)
ffi_c_string_free(debug)
echo "\nHashing epoch"
let epochStr = "test-epoch"
@@ -435,9 +552,9 @@ when isMainModule:
let epoch = ffi_hash_to_field_be(addr epochVec)
block:
let debug = cfr_debug(epoch)
let debug = ffi_cfr_debug(epoch)
echo " - epoch = ", asString(debug)
c_string_free(debug)
ffi_c_string_free(debug)
echo "\nHashing RLN identifier"
let rlnIdStr = "test-rln-identifier"
@@ -447,58 +564,182 @@ when isMainModule:
let rlnIdentifier = ffi_hash_to_field_be(addr rlnIdVec)
block:
let debug = cfr_debug(rlnIdentifier)
let debug = ffi_cfr_debug(rlnIdentifier)
echo " - rln_identifier = ", asString(debug)
c_string_free(debug)
ffi_c_string_free(debug)
echo "\nComputing Poseidon hash for external nullifier"
let externalNullifier = ffi_poseidon_hash_pair(epoch, rlnIdentifier)
block:
let debug = cfr_debug(externalNullifier)
let debug = ffi_cfr_debug(externalNullifier)
echo " - external_nullifier = ", asString(debug)
c_string_free(debug)
ffi_c_string_free(debug)
echo "\nCreating message_id"
let messageId = uint_to_cfr(0'u32)
let messageId = ffi_uint_to_cfr(0'u32)
block:
let debug = cfr_debug(messageId)
let debug = ffi_cfr_debug(messageId)
echo " - message_id = ", asString(debug)
c_string_free(debug)
ffi_c_string_free(debug)
echo "\nGenerating RLN Proof"
var proofRes: CResultProofPtrVecU8
echo "\nCreating RLN Witness"
when defined(ffiStateless):
proofRes = ffi_generate_rln_proof_stateless(addr rln, identitySecret,
var witnessRes = ffi_rln_witness_input_new(identitySecret,
userMessageLimit, messageId, addr pathElements, addr identityPathIndex,
x, externalNullifier)
if witnessRes.ok.isNil:
stderr.writeLine "RLN Witness creation error: ", asString(witnessRes.err)
ffi_c_string_free(witnessRes.err)
quit 1
var witness = witnessRes.ok
echo "RLN Witness created successfully"
else:
proofRes = ffi_generate_rln_proof(addr rln, identitySecret,
userMessageLimit, messageId, x, externalNullifier, leafIndex)
var witnessRes = ffi_rln_witness_input_new(identitySecret,
userMessageLimit, messageId, addr merkleProof.path_elements,
addr merkleProof.path_index, x, externalNullifier)
if witnessRes.ok.isNil:
stderr.writeLine "RLN Witness creation error: ", asString(witnessRes.err)
ffi_c_string_free(witnessRes.err)
quit 1
var witness = witnessRes.ok
echo "RLN Witness created successfully"
echo "\nRLNWitnessInput serialization: RLNWitnessInput <-> bytes"
var serWitness = ffi_rln_witness_to_bytes_be(addr witness)
block:
let debug = ffi_vec_u8_debug(addr serWitness)
echo " - serialized witness = ", asString(debug)
ffi_c_string_free(debug)
let deserWitnessResult = ffi_bytes_be_to_rln_witness(addr serWitness)
if deserWitnessResult.ok.isNil:
stderr.writeLine "Witness deserialization error: ", asString(
deserWitnessResult.err)
ffi_c_string_free(deserWitnessResult.err)
quit 1
echo " - witness deserialized successfully"
ffi_rln_witness_input_free(deserWitnessResult.ok)
ffi_vec_u8_free(serWitness)
echo "\nGenerating RLN Proof"
var proofRes = ffi_generate_rln_proof(addr rln, addr witness)
if proofRes.ok.isNil:
stderr.writeLine "Proof generation error: ", asString(proofRes.err)
c_string_free(proofRes.err)
ffi_c_string_free(proofRes.err)
quit 1
var proof = proofRes.ok
echo "Proof generated successfully"
echo "\nGetting proof values"
var proofValues = ffi_rln_proof_get_values(addr proof)
block:
let y = ffi_rln_proof_values_get_y(addr proofValues)
let debug = ffi_cfr_debug(y)
echo " - y = ", asString(debug)
ffi_c_string_free(debug)
ffi_cfr_free(y)
block:
let nullifier = ffi_rln_proof_values_get_nullifier(addr proofValues)
let debug = ffi_cfr_debug(nullifier)
echo " - nullifier = ", asString(debug)
ffi_c_string_free(debug)
ffi_cfr_free(nullifier)
block:
let root = ffi_rln_proof_values_get_root(addr proofValues)
let debug = ffi_cfr_debug(root)
echo " - root = ", asString(debug)
ffi_c_string_free(debug)
ffi_cfr_free(root)
block:
let xVal = ffi_rln_proof_values_get_x(addr proofValues)
let debug = ffi_cfr_debug(xVal)
echo " - x = ", asString(debug)
ffi_c_string_free(debug)
ffi_cfr_free(xVal)
block:
let extNullifier = ffi_rln_proof_values_get_external_nullifier(
addr proofValues)
let debug = ffi_cfr_debug(extNullifier)
echo " - external_nullifier = ", asString(debug)
ffi_c_string_free(debug)
ffi_cfr_free(extNullifier)
echo "\nRLNProof serialization: RLNProof <-> bytes"
var serProof = ffi_rln_proof_to_bytes_be(addr proof)
block:
let debug = ffi_vec_u8_debug(addr serProof)
echo " - serialized proof = ", asString(debug)
ffi_c_string_free(debug)
let deserProofResult = ffi_bytes_be_to_rln_proof(addr serProof)
if deserProofResult.ok.isNil:
stderr.writeLine "Proof deserialization error: ", asString(
deserProofResult.err)
ffi_c_string_free(deserProofResult.err)
quit 1
var deserProof = deserProofResult.ok
echo " - proof deserialized successfully"
echo "\nRLNProofValues serialization: RLNProofValues <-> bytes"
var serProofValues = ffi_rln_proof_values_to_bytes_be(addr proofValues)
block:
let debug = ffi_vec_u8_debug(addr serProofValues)
echo " - serialized proof_values = ", asString(debug)
ffi_c_string_free(debug)
let deserProofValuesResult = ffi_bytes_be_to_rln_proof_values(
addr serProofValues)
if deserProofValuesResult.ok.isNil:
stderr.writeLine "Proof values deserialization error: ", asString(
deserProofValuesResult.err)
ffi_c_string_free(deserProofValuesResult.err)
quit 1
var deserProofValues = deserProofValuesResult.ok
echo " - proof_values deserialized successfully"
block:
let deserExternalNullifier = ffi_rln_proof_values_get_external_nullifier(
addr deserProofValues)
let debug = ffi_cfr_debug(deserExternalNullifier)
echo " - deserialized external_nullifier = ", asString(debug)
ffi_c_string_free(debug)
ffi_cfr_free(deserExternalNullifier)
ffi_rln_proof_values_free(deserProofValues)
ffi_vec_u8_free(serProofValues)
ffi_rln_proof_free(deserProof)
ffi_vec_u8_free(serProof)
echo "\nVerifying Proof"
when defined(ffiStateless):
var roots = vec_cfr_from_cfr(computedRoot)
var roots = ffi_vec_cfr_from_cfr(computedRoot)
let verifyErr = ffi_verify_with_roots(addr rln, addr proof, addr roots, x)
else:
let verifyErr = ffi_verify_rln_proof(addr rln, addr proof, x)
if not verifyErr.ok:
stderr.writeLine "Proof verification error: ", asString(verifyErr.err)
c_string_free(verifyErr.err)
ffi_c_string_free(verifyErr.err)
quit 1
echo "Proof verified successfully"
ffi_rln_proof_free(proof)
echo "\nSimulating double-signaling attack (same epoch, different message)"
echo "\nHashing second signal"
@@ -509,36 +750,55 @@ when isMainModule:
let x2 = ffi_hash_to_field_be(addr signal2Vec)
block:
let debug = cfr_debug(x2)
let debug = ffi_cfr_debug(x2)
echo " - x2 = ", asString(debug)
c_string_free(debug)
ffi_c_string_free(debug)
echo "\nCreating second message with the same id"
let messageId2 = uint_to_cfr(0'u32)
let messageId2 = ffi_uint_to_cfr(0'u32)
block:
let debug = cfr_debug(messageId2)
let debug = ffi_cfr_debug(messageId2)
echo " - message_id2 = ", asString(debug)
c_string_free(debug)
ffi_c_string_free(debug)
echo "\nGenerating second RLN Proof"
var proofRes2: CResultProofPtrVecU8
echo "\nCreating second RLN Witness"
when defined(ffiStateless):
proofRes2 = ffi_generate_rln_proof_stateless(addr rln, identitySecret,
var witnessRes2 = ffi_rln_witness_input_new(identitySecret,
userMessageLimit, messageId2, addr pathElements, addr identityPathIndex,
x2, externalNullifier)
if witnessRes2.ok.isNil:
stderr.writeLine "Second RLN Witness creation error: ", asString(
witnessRes2.err)
ffi_c_string_free(witnessRes2.err)
quit 1
var witness2 = witnessRes2.ok
echo "Second RLN Witness created successfully"
else:
proofRes2 = ffi_generate_rln_proof(addr rln, identitySecret,
userMessageLimit, messageId2, x2, externalNullifier, leafIndex)
var witnessRes2 = ffi_rln_witness_input_new(identitySecret,
userMessageLimit, messageId2, addr merkleProof.path_elements,
addr merkleProof.path_index, x2, externalNullifier)
if witnessRes2.ok.isNil:
stderr.writeLine "Second RLN Witness creation error: ", asString(
witnessRes2.err)
ffi_c_string_free(witnessRes2.err)
quit 1
var witness2 = witnessRes2.ok
echo "Second RLN Witness created successfully"
echo "\nGenerating second RLN Proof"
var proofRes2 = ffi_generate_rln_proof(addr rln, addr witness2)
if proofRes2.ok.isNil:
stderr.writeLine "Second proof generation error: ", asString(proofRes2.err)
c_string_free(proofRes2.err)
ffi_c_string_free(proofRes2.err)
quit 1
var proof2 = proofRes2.ok
echo "Second proof generated successfully"
var proofValues2 = ffi_rln_proof_get_values(addr proof2)
echo "\nVerifying second proof"
when defined(ffiStateless):
let verifyErr2 = ffi_verify_with_roots(addr rln, addr proof2, addr roots, x2)
@@ -548,54 +808,59 @@ when isMainModule:
if not verifyErr2.ok:
stderr.writeLine "Second proof verification error: ", asString(
verifyErr2.err)
c_string_free(verifyErr2.err)
ffi_c_string_free(verifyErr2.err)
quit 1
echo "Second proof verified successfully"
echo "\nRecovering identity secret"
let recoverRes = ffi_recover_id_secret(addr proof, addr proof2)
let recoverRes = ffi_recover_id_secret(addr proofValues, addr proofValues2)
if recoverRes.ok.isNil:
stderr.writeLine "Identity recovery error: ", asString(recoverRes.err)
c_string_free(recoverRes.err)
ffi_c_string_free(recoverRes.err)
quit 1
let recoveredSecret = recoverRes.ok
block:
let debug = cfr_debug(recoveredSecret)
let debug = ffi_cfr_debug(recoveredSecret)
echo " - recovered_secret = ", asString(debug)
c_string_free(debug)
ffi_c_string_free(debug)
block:
let debug = cfr_debug(identitySecret)
let debug = ffi_cfr_debug(identitySecret)
echo " - original_secret = ", asString(debug)
c_string_free(debug)
ffi_c_string_free(debug)
echo "Slashing successful: Identity is recovered!"
cfr_free(recoveredSecret)
ffi_cfr_free(recoveredSecret)
ffi_rln_proof_values_free(proofValues2)
ffi_rln_proof_values_free(proofValues)
ffi_rln_proof_free(proof2)
cfr_free(x2)
cfr_free(messageId2)
ffi_rln_proof_free(proof)
ffi_cfr_free(x2)
ffi_cfr_free(messageId2)
when defined(ffiStateless):
vec_cfr_free(roots)
vec_cfr_free(pathElements)
ffi_rln_witness_input_free(witness2)
ffi_rln_witness_input_free(witness)
ffi_vec_cfr_free(roots)
ffi_vec_cfr_free(pathElements)
for i in 0..treeDepth-2:
cfr_free(defaultHashes[i])
cfr_free(defaultLeaf)
cfr_free(computedRoot)
ffi_cfr_free(defaultHashes[i])
ffi_cfr_free(defaultLeaf)
ffi_cfr_free(computedRoot)
else:
ffi_rln_witness_input_free(witness2)
ffi_rln_witness_input_free(witness)
ffi_merkle_proof_free(merkleProof)
cfr_free(rateCommitment)
cfr_free(x)
cfr_free(epoch)
cfr_free(rlnIdentifier)
cfr_free(externalNullifier)
cfr_free(userMessageLimit)
cfr_free(messageId)
vec_cfr_free(keys)
ffi_cfr_free(rateCommitment)
ffi_cfr_free(x)
ffi_cfr_free(epoch)
ffi_cfr_free(rlnIdentifier)
ffi_cfr_free(externalNullifier)
ffi_cfr_free(userMessageLimit)
ffi_cfr_free(messageId)
ffi_vec_cfr_free(keys)
ffi_rln_free(rln)

View File

@@ -1,5 +1,5 @@
{
"path": "database",
"path": "./database",
"temporary": false,
"cache_capacity": 1073741824,
"flush_every_ms": 500,

View File

@@ -1,5 +1,5 @@
use rln::ffi;
fn main() -> ::std::io::Result<()> {
fn main() -> std::io::Result<()> {
ffi::generate_headers()
}

View File

@@ -1,6 +1,6 @@
#[derive(Debug, thiserror::Error)]
pub enum ZKeyReadError {
#[error("No proving key found!")]
#[error("Empty zkey bytes provided")]
EmptyBytes,
#[error("{0}")]
SerializationError(#[from] ark_serialize::SerializationError),

View File

@@ -1,21 +1,23 @@
// This file is based on the code by iden3. Its preimage can be found here:
// This crate is based on the code by iden3. Its preimage can be found here:
// https://github.com/iden3/circom-witnesscalc/blob/5cb365b6e4d9052ecc69d4567fcf5bc061c20e94/src/lib.rs
pub mod graph;
pub mod proto;
pub mod storage;
use ruint::aliases::U256;
use std::collections::HashMap;
use graph::Node;
use ruint::aliases::U256;
use storage::deserialize_witnesscalc_graph;
use zeroize::zeroize_flat_type;
use crate::circuit::iden3calc::graph::fr_to_u256;
use crate::circuit::Fr;
use crate::utils::FrOrSecret;
use graph::Node;
use crate::{
circuit::{iden3calc::graph::fr_to_u256, Fr},
utils::FrOrSecret,
};
pub type InputSignalsInfo = HashMap<String, (usize, usize)>;
pub(crate) type InputSignalsInfo = HashMap<String, (usize, usize)>;
pub(crate) fn calc_witness<I: IntoIterator<Item = (String, Vec<FrOrSecret>)>>(
inputs: I,

View File

@@ -1,11 +1,6 @@
// This file is based on the code by iden3. Its preimage can be found here:
// This crate is based on the code by iden3. Its preimage can be found here:
// https://github.com/iden3/circom-witnesscalc/blob/5cb365b6e4d9052ecc69d4567fcf5bc061c20e94/src/graph.rs
use ark_ff::{BigInt, BigInteger, One, PrimeField, Zero};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress, Validate};
use rand::Rng;
use ruint::{aliases::U256, uint};
use serde::{Deserialize, Serialize};
use std::{
cmp::Ordering,
collections::HashMap,
@@ -13,8 +8,13 @@ use std::{
ops::{Deref, Shl, Shr},
};
use crate::circuit::iden3calc::proto;
use crate::circuit::Fr;
use ark_ff::{BigInt, BigInteger, One, PrimeField, Zero};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress, Validate};
use rand::Rng;
use ruint::{aliases::U256, uint};
use serde::{Deserialize, Serialize};
use crate::circuit::{iden3calc::proto, Fr};
pub const M: U256 =
uint!(21888242871839275222246405745257275088548364400416034343698204186575808495617_U256);
@@ -817,10 +817,11 @@ fn u_lt(a: &U256, b: &U256) -> U256 {
#[cfg(test)]
mod test {
use super::*;
use std::{ops::Div, str::FromStr};
use ruint::uint;
use std::ops::Div;
use std::str::FromStr;
use super::*;
#[test]
fn test_ok() {

View File

@@ -1,32 +1,32 @@
// This file has been generated by prost-build during compilation of the code by iden3
// This crate has been generated by prost-build during compilation of the code by iden3
// and modified manually. The *.proto file used to generate this on can be found here:
// https://github.com/iden3/circom-witnesscalc/blob/5cb365b6e4d9052ecc69d4567fcf5bc061c20e94/protos/messages.proto
use std::collections::HashMap;
#[derive(Clone, PartialEq, ::prost::Message)]
#[derive(Clone, PartialEq, prost::Message)]
pub struct BigUInt {
#[prost(bytes = "vec", tag = "1")]
pub value_le: Vec<u8>,
}
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
#[derive(Clone, Copy, PartialEq, prost::Message)]
pub struct InputNode {
#[prost(uint32, tag = "1")]
pub idx: u32,
}
#[derive(Clone, PartialEq, ::prost::Message)]
#[derive(Clone, PartialEq, prost::Message)]
pub struct ConstantNode {
#[prost(message, optional, tag = "1")]
pub value: Option<BigUInt>,
}
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
#[derive(Clone, Copy, PartialEq, prost::Message)]
pub struct UnoOpNode {
#[prost(enumeration = "UnoOp", tag = "1")]
pub op: i32,
#[prost(uint32, tag = "2")]
pub a_idx: u32,
}
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
#[derive(Clone, Copy, PartialEq, prost::Message)]
pub struct DuoOpNode {
#[prost(enumeration = "DuoOp", tag = "1")]
pub op: i32,
@@ -35,7 +35,7 @@ pub struct DuoOpNode {
#[prost(uint32, tag = "3")]
pub b_idx: u32,
}
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
#[derive(Clone, Copy, PartialEq, prost::Message)]
pub struct TresOpNode {
#[prost(enumeration = "TresOp", tag = "1")]
pub op: i32,
@@ -46,14 +46,14 @@ pub struct TresOpNode {
#[prost(uint32, tag = "4")]
pub c_idx: u32,
}
#[derive(Clone, PartialEq, ::prost::Message)]
#[derive(Clone, PartialEq, prost::Message)]
pub struct Node {
#[prost(oneof = "node::Node", tags = "1, 2, 3, 4, 5")]
pub node: Option<node::Node>,
}
/// Nested message and enum types in `Node`.
pub mod node {
#[derive(Clone, PartialEq, ::prost::Oneof)]
#[derive(Clone, PartialEq, prost::Oneof)]
pub enum Node {
#[prost(message, tag = "1")]
Input(super::InputNode),
@@ -67,21 +67,21 @@ pub mod node {
TresOp(super::TresOpNode),
}
}
#[derive(Clone, Copy, PartialEq, ::prost::Message)]
#[derive(Clone, Copy, PartialEq, prost::Message)]
pub struct SignalDescription {
#[prost(uint32, tag = "1")]
pub offset: u32,
#[prost(uint32, tag = "2")]
pub len: u32,
}
#[derive(Clone, PartialEq, ::prost::Message)]
#[derive(Clone, PartialEq, prost::Message)]
pub struct GraphMetadata {
#[prost(uint32, repeated, tag = "1")]
pub witness_signals: Vec<u32>,
#[prost(map = "string, message", tag = "2")]
pub inputs: HashMap<String, SignalDescription>,
}
#[derive(Clone, Copy, Debug, PartialEq, ::prost::Enumeration)]
#[derive(Clone, Copy, Debug, PartialEq, prost::Enumeration)]
pub enum DuoOp {
Mul = 0,
Div = 1,
@@ -105,13 +105,13 @@ pub enum DuoOp {
Bxor = 19,
}
#[derive(Clone, Copy, Debug, PartialEq, ::prost::Enumeration)]
#[derive(Clone, Copy, Debug, PartialEq, prost::Enumeration)]
pub enum UnoOp {
Neg = 0,
Id = 1,
}
#[derive(Clone, Copy, Debug, PartialEq, ::prost::Enumeration)]
#[derive(Clone, Copy, Debug, PartialEq, prost::Enumeration)]
pub enum TresOp {
TernCond = 0,
}

View File

@@ -1,10 +1,11 @@
// This file is based on the code by iden3. Its preimage can be found here:
// This crate is based on the code by iden3. Its preimage can be found here:
// https://github.com/iden3/circom-witnesscalc/blob/5cb365b6e4d9052ecc69d4567fcf5bc061c20e94/src/storage.rs
use std::io::{Read, Write};
use ark_ff::PrimeField;
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use prost::Message;
use std::io::{Read, Write};
use crate::circuit::{
iden3calc::{
@@ -334,12 +335,14 @@ impl<R: Read> Write for WriteBackReader<R> {
#[cfg(test)]
mod test {
use super::*;
use byteorder::ByteOrder;
use core::str::FromStr;
use graph::{Operation, TresOperation, UnoOperation};
use std::collections::HashMap;
use byteorder::ByteOrder;
use graph::{Operation, TresOperation, UnoOperation};
use super::*;
#[test]
fn test_read_message() {
let mut buf = Vec::new();

View File

@@ -4,22 +4,22 @@ pub mod error;
pub mod iden3calc;
pub mod qap;
#[cfg(not(target_arch = "wasm32"))]
use std::sync::LazyLock;
use ark_bn254::{
Bn254, Fq as ArkFq, Fq2 as ArkFq2, Fr as ArkFr, G1Affine as ArkG1Affine,
G1Projective as ArkG1Projective, G2Affine as ArkG2Affine, G2Projective as ArkG2Projective,
};
use ark_ff::Field;
use ark_groth16::{
Proof as ArkProof, ProvingKey as ArkProvingKey, VerifyingKey as ArkVerifyingKey,
};
use ark_relations::r1cs::ConstraintMatrices;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use crate::circuit::error::ZKeyReadError;
use {ark_ff::Field, ark_serialize::CanonicalDeserialize, ark_serialize::CanonicalSerialize};
#[cfg(not(target_arch = "wasm32"))]
use std::sync::LazyLock;
#[cfg(not(target_arch = "wasm32"))]
const GRAPH_BYTES: &[u8] = include_bytes!("../../resources/tree_depth_20/graph.bin");
@@ -31,25 +31,49 @@ static ARKZKEY: LazyLock<Zkey> = LazyLock::new(|| {
read_arkzkey_from_bytes_uncompressed(ARKZKEY_BYTES).expect("Failed to read arkzkey")
});
pub const TEST_TREE_DEPTH: usize = 20;
pub const DEFAULT_TREE_DEPTH: usize = 20;
pub const COMPRESS_PROOF_SIZE: usize = 128;
// The following types define the pairing-friendly elliptic curve, the underlying finite fields,
// and the groups used by default in this module.
// Note that proofs are serialized assuming Fr to be 4x8 = 32 bytes in size. Hence, changing to a
// curve with a different encoding will make proof verification fail.
/// BN254 pairing-friendly elliptic curve.
pub type Curve = Bn254;
/// Scalar field Fr of the BN254 curve.
pub type Fr = ArkFr;
/// Base field Fq of the BN254 curve.
pub type Fq = ArkFq;
/// Quadratic extension field element for the BN254 curve.
pub type Fq2 = ArkFq2;
/// Affine representation of a G1 group element on the BN254 curve.
pub type G1Affine = ArkG1Affine;
/// Projective representation of a G1 group element on the BN254 curve.
pub type G1Projective = ArkG1Projective;
/// Affine representation of a G2 group element on the BN254 curve.
pub type G2Affine = ArkG2Affine;
/// Projective representation of a G2 group element on the BN254 curve.
pub type G2Projective = ArkG2Projective;
/// Groth16 proof for the BN254 curve.
pub type Proof = ArkProof<Curve>;
/// Proving key for the Groth16 proof system.
pub type ProvingKey = ArkProvingKey<Curve>;
/// Combining the proving key and constraint matrices.
pub type Zkey = (ArkProvingKey<Curve>, ConstraintMatrices<Fr>);
/// Verifying key for the Groth16 proof system.
pub type VerifyingKey = ArkVerifyingKey<Curve>;
// Loads the zkey using a bytes vector
/// Loads the zkey from raw bytes
pub fn zkey_from_raw(zkey_data: &[u8]) -> Result<Zkey, ZKeyReadError> {
if zkey_data.is_empty() {
return Err(ZKeyReadError::EmptyBytes);
@@ -60,21 +84,20 @@ pub fn zkey_from_raw(zkey_data: &[u8]) -> Result<Zkey, ZKeyReadError> {
Ok(proving_key_and_matrices)
}
// Loads the proving key
// Loads default zkey from folder
#[cfg(not(target_arch = "wasm32"))]
pub fn zkey_from_folder() -> &'static Zkey {
&ARKZKEY
}
// Loads the graph data
// Loads default graph from folder
#[cfg(not(target_arch = "wasm32"))]
pub fn graph_from_folder() -> &'static [u8] {
GRAPH_BYTES
}
////////////////////////////////////////////////////////
// Functions and structs from [arkz-key](https://github.com/zkmopro/ark-zkey/blob/main/src/lib.rs#L106)
////////////////////////////////////////////////////////
// The following functions and structs are based on code from ark-zkey:
// https://github.com/zkmopro/ark-zkey/blob/main/src/lib.rs#L106
#[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug, PartialEq)]
struct SerializableProvingKey(ArkProvingKey<Curve>);
@@ -110,7 +133,6 @@ fn read_arkzkey_from_bytes_uncompressed(arkzkey_data: &[u8]) -> Result<Zkey, ZKe
let serialized_constraint_matrices =
SerializableConstraintMatrices::deserialize_uncompressed_unchecked(&mut cursor)?;
// Get on right form for API
let proving_key: ProvingKey = serialized_proving_key.0;
let constraint_matrices: ConstraintMatrices<Fr> = ConstraintMatrices {
num_instance_variables: serialized_constraint_matrices.num_instance_variables,

View File

@@ -1,4 +1,4 @@
// This file is based on the code by arkworks. Its preimage can be found here:
// This crate is based on the code by arkworks. Its preimage can be found here:
// https://github.com/arkworks-rs/circom-compat/blob/3c95ed98e23a408b4d99a53e483a9bba39685a4e/src/circom/qap.rs
use ark_ff::PrimeField;
@@ -6,7 +6,6 @@ use ark_groth16::r1cs_to_qap::{evaluate_constraint, LibsnarkReduction, R1CSToQAP
use ark_poly::EvaluationDomain;
use ark_relations::r1cs::{ConstraintMatrices, ConstraintSystemRef, SynthesisError};
use ark_std::{cfg_into_iter, cfg_iter, cfg_iter_mut, vec};
#[cfg(feature = "parallel")]
use rayon::iter::{
IndexedParallelIterator, IntoParallelIterator, IntoParallelRefIterator,

View File

@@ -1,16 +1,14 @@
use std::{array::TryFromSliceError, num::TryFromIntError};
use ark_relations::r1cs::SynthesisError;
use ark_serialize::SerializationError;
use num_bigint::{BigInt, ParseBigIntError};
use std::array::TryFromSliceError;
use std::num::TryFromIntError;
use std::string::FromUtf8Error;
use thiserror::Error;
use utils::error::{FromConfigError, ZerokitMerkleTreeError};
use crate::circuit::{error::ZKeyReadError, Fr};
#[derive(Debug, thiserror::Error)]
pub enum ConversionError {
pub enum UtilsError {
#[error("Expected radix 10 or 16")]
WrongRadix,
#[error("{0}")]
@@ -23,59 +21,46 @@ pub enum ConversionError {
InsufficientData { expected: usize, actual: usize },
}
#[derive(Error, Debug)]
pub enum ProofError {
#[error("{0}")]
ProtocolError(#[from] ProtocolError),
#[error("Error producing proof: {0}")]
SynthesisError(#[from] SynthesisError),
}
#[derive(Debug, thiserror::Error)]
pub enum ProtocolError {
#[error("Error producing proof: {0}")]
Synthesis(#[from] SynthesisError),
#[error("{0}")]
Conversion(#[from] ConversionError),
Utils(#[from] UtilsError),
#[error("Expected to read {0} bytes but read only {1} bytes")]
InvalidReadLen(usize, usize),
#[error("Cannot convert bigint {0:?} to biguint")]
BigUintConversion(BigInt),
#[error("{0}")]
JsonError(#[from] serde_json::Error),
#[error("Message id ({0}) is not within user_message_limit ({1})")]
InvalidMessageId(Fr, Fr),
#[error("Merkle proof length mismatch: expected {0}, got {1}")]
InvalidMerkleProofLength(usize, usize),
}
#[derive(Debug, thiserror::Error)]
pub enum ComputeIdSecretError {
/// Usually it means that the same signal is used to recover the user secret hash
#[error("External nullifiers mismatch: {0} != {1}")]
ExternalNullifierMismatch(Fr, Fr),
#[error("Cannot recover secret: division by zero")]
DivisionByZero,
}
#[derive(Error, Debug)]
pub enum VerifyError {
#[error("Invalid proof provided")]
InvalidProof,
#[error("Expected one of the provided roots")]
InvalidRoot,
#[error("Signal value does not match")]
InvalidSignal,
}
#[derive(Debug, thiserror::Error)]
pub enum RLNError {
#[error("I/O error: {0}")]
IO(#[from] std::io::Error),
#[error("Utf8 error: {0}")]
Utf8(#[from] FromUtf8Error),
#[error("Serde json error: {0}")]
JSON(#[from] serde_json::Error),
#[error("Config error: {0}")]
Config(#[from] FromConfigError),
#[error("Serialization error: {0}")]
Serialization(#[from] SerializationError),
#[error("Merkle tree error: {0}")]
MerkleTree(#[from] ZerokitMerkleTreeError),
#[error("ZKey error: {0}")]
ZKey(#[from] ZKeyReadError),
#[error("Conversion error: {0}")]
Conversion(#[from] ConversionError),
#[error("Protocol error: {0}")]
Protocol(#[from] ProtocolError),
#[error("Proof error: {0}")]
Proof(#[from] ProofError),
#[error("Unable to extract secret")]
RecoverSecret(#[from] ComputeIdSecretError),
#[error("Verify error: {0}")]
Verify(#[from] VerifyError),
}

View File

@@ -1,23 +1,15 @@
#![allow(non_camel_case_types)]
use super::ffi_utils::{CBoolResult, CFr, CResult};
use crate::{
circuit::{graph_from_folder, zkey_from_folder, zkey_from_raw, Fr, Proof},
protocol::{
compute_id_secret, generate_proof, proof_values_from_witness, verify_proof, RLNProofValues,
RLNWitnessInput, RLN,
},
utils::IdSecret,
};
use num_bigint::BigInt;
use safer_ffi::{boxed::Box_, derive_ReprC, ffi_export, prelude::repr_c};
#[cfg(not(feature = "stateless"))]
use {safer_ffi::prelude::char_p, std::fs::File, std::io::Read};
use super::ffi_utils::{CBoolResult, CFr, CResult};
use crate::prelude::*;
#[cfg(not(feature = "stateless"))]
use {
crate::poseidon_tree::PoseidonTree,
safer_ffi::prelude::char_p,
std::{fs::File, io::Read, str::FromStr},
utils::{Hasher, ZerokitMerkleProof, ZerokitMerkleTree},
};
const MAX_CONFIG_SIZE: u64 = 1024 * 1024; // 1MB
// FFI_RLN
@@ -33,67 +25,49 @@ pub fn ffi_rln_new(
tree_depth: usize,
config_path: char_p::Ref<'_>,
) -> CResult<repr_c::Box<FFI_RLN>, repr_c::String> {
let tree_config = match File::open(config_path.to_str()).and_then(|mut file| {
let mut config_str = String::new();
file.read_to_string(&mut config_str)?;
Ok(config_str)
}) {
Ok(config_str) if !config_str.is_empty() => {
match <PoseidonTree as ZerokitMerkleTree>::Config::from_str(&config_str) {
Ok(config) => config,
Err(err) => {
return CResult {
ok: None,
err: Some(err.to_string().into()),
};
}
let config_str = File::open(config_path.to_str())
.and_then(|mut file| {
let metadata = file.metadata()?;
if metadata.len() > MAX_CONFIG_SIZE {
return Err(std::io::Error::new(
std::io::ErrorKind::InvalidData,
format!(
"Config file too large: {} bytes (max {} bytes)",
metadata.len(),
MAX_CONFIG_SIZE
),
));
}
}
_ => <PoseidonTree as ZerokitMerkleTree>::Config::default(),
};
let mut s = String::new();
file.read_to_string(&mut s)?;
Ok(s)
})
.unwrap_or_default();
let zkey = zkey_from_folder().to_owned();
let graph_data = graph_from_folder().to_owned();
// We compute a default empty tree
let tree = match PoseidonTree::new(
tree_depth,
<PoseidonTree as ZerokitMerkleTree>::Hasher::default_leaf(),
tree_config,
) {
Ok(tree) => tree,
Err(err) => {
return CResult {
ok: None,
err: Some(err.to_string().into()),
};
}
};
let rln = RLN {
zkey,
graph_data,
#[cfg(not(feature = "stateless"))]
tree,
};
CResult {
ok: Some(Box_::new(FFI_RLN(rln))),
err: None,
match RLN::new(tree_depth, config_str.as_str()) {
Ok(rln) => CResult {
ok: Some(Box_::new(FFI_RLN(rln))),
err: None,
},
Err(err) => CResult {
ok: None,
err: Some(err.to_string().into()),
},
}
}
#[cfg(feature = "stateless")]
#[ffi_export]
pub fn ffi_rln_new() -> CResult<repr_c::Box<FFI_RLN>, repr_c::String> {
let zkey = zkey_from_folder().to_owned();
let graph_data = graph_from_folder().to_owned();
let rln = RLN { zkey, graph_data };
CResult {
ok: Some(Box_::new(FFI_RLN(rln))),
err: None,
match RLN::new() {
Ok(rln) => CResult {
ok: Some(Box_::new(FFI_RLN(rln))),
err: None,
},
Err(err) => CResult {
ok: None,
err: Some(err.to_string().into()),
},
}
}
@@ -101,90 +75,61 @@ pub fn ffi_rln_new() -> CResult<repr_c::Box<FFI_RLN>, repr_c::String> {
#[ffi_export]
pub fn ffi_rln_new_with_params(
tree_depth: usize,
zkey_buffer: &repr_c::Vec<u8>,
zkey_data: &repr_c::Vec<u8>,
graph_data: &repr_c::Vec<u8>,
config_path: char_p::Ref<'_>,
) -> CResult<repr_c::Box<FFI_RLN>, repr_c::String> {
let tree_config = match File::open(config_path.to_str()).and_then(|mut file| {
let mut config_str = String::new();
file.read_to_string(&mut config_str)?;
Ok(config_str)
}) {
Ok(config_str) if !config_str.is_empty() => {
match <PoseidonTree as ZerokitMerkleTree>::Config::from_str(&config_str) {
Ok(config) => config,
Err(err) => {
return CResult {
ok: None,
err: Some(err.to_string().into()),
};
}
let config_str = File::open(config_path.to_str())
.and_then(|mut file| {
let metadata = file.metadata()?;
if metadata.len() > MAX_CONFIG_SIZE {
return Err(std::io::Error::new(
std::io::ErrorKind::InvalidData,
format!(
"Config file too large: {} bytes (max {} bytes)",
metadata.len(),
MAX_CONFIG_SIZE
),
));
}
}
_ => <PoseidonTree as ZerokitMerkleTree>::Config::default(),
};
let mut s = String::new();
file.read_to_string(&mut s)?;
Ok(s)
})
.unwrap_or_default();
let zkey = match zkey_from_raw(zkey_buffer) {
Ok(pk) => pk,
Err(err) => {
return CResult {
ok: None,
err: Some(err.to_string().into()),
};
}
};
let graph_data = graph_data.to_vec();
// We compute a default empty tree
let tree = match PoseidonTree::new(
match RLN::new_with_params(
tree_depth,
<PoseidonTree as ZerokitMerkleTree>::Hasher::default_leaf(),
tree_config,
zkey_data.to_vec(),
graph_data.to_vec(),
config_str.as_str(),
) {
Ok(tree) => tree,
Err(err) => {
return CResult {
ok: None,
err: Some(err.to_string().into()),
};
}
};
let rln = RLN {
zkey,
graph_data,
#[cfg(not(feature = "stateless"))]
tree,
};
CResult {
ok: Some(Box_::new(FFI_RLN(rln))),
err: None,
Ok(rln) => CResult {
ok: Some(Box_::new(FFI_RLN(rln))),
err: None,
},
Err(err) => CResult {
ok: None,
err: Some(err.to_string().into()),
},
}
}
#[cfg(feature = "stateless")]
#[ffi_export]
pub fn ffi_new_with_params(
zkey_buffer: &repr_c::Vec<u8>,
pub fn ffi_rln_new_with_params(
zkey_data: &repr_c::Vec<u8>,
graph_data: &repr_c::Vec<u8>,
) -> CResult<repr_c::Box<FFI_RLN>, repr_c::String> {
let zkey = match zkey_from_raw(zkey_buffer) {
Ok(pk) => pk,
Err(err) => {
return CResult {
ok: None,
err: Some(err.to_string().into()),
};
}
};
let graph_data = graph_data.to_vec();
let rln = RLN { zkey, graph_data };
CResult {
ok: Some(Box_::new(FFI_RLN(rln))),
err: None,
match RLN::new_with_params(zkey_data.to_vec(), graph_data.to_vec()) {
Ok(rln) => CResult {
ok: Some(Box_::new(FFI_RLN(rln))),
err: None,
},
Err(err) => CResult {
ok: None,
err: Some(err.to_string().into()),
},
}
}
@@ -197,9 +142,55 @@ pub fn ffi_rln_free(rln: repr_c::Box<FFI_RLN>) {
#[derive_ReprC]
#[repr(opaque)]
pub struct FFI_RLNProof {
pub proof: Proof,
pub proof_values: RLNProofValues,
pub struct FFI_RLNProof(pub(crate) RLNProof);
/// Extracts the public proof values carried by an [`FFI_RLNProof`] into a
/// standalone boxed handle.
#[ffi_export]
pub fn ffi_rln_proof_get_values(
    rln_proof: &repr_c::Box<FFI_RLNProof>,
) -> repr_c::Box<FFI_RLNProofValues> {
    // `proof_values` is read out by value from the borrowed handle and
    // rewrapped in a fresh opaque box owned by the caller.
    let values = rln_proof.0.proof_values;
    FFI_RLNProofValues(values).into()
}
/// Serializes the full RLN proof (Groth16 proof plus public values) to
/// little-endian bytes.
#[ffi_export]
pub fn ffi_rln_proof_to_bytes_le(rln_proof: &repr_c::Box<FFI_RLNProof>) -> repr_c::Vec<u8> {
    let bytes = rln_proof_to_bytes_le(&rln_proof.0);
    repr_c::Vec::from(bytes)
}
/// Serializes the full RLN proof (Groth16 proof plus public values) to
/// big-endian bytes.
#[ffi_export]
pub fn ffi_rln_proof_to_bytes_be(rln_proof: &repr_c::Box<FFI_RLNProof>) -> repr_c::Vec<u8> {
    let bytes = rln_proof_to_bytes_be(&rln_proof.0);
    repr_c::Vec::from(bytes)
}
/// Deserializes an RLN proof from little-endian bytes.
///
/// Returns an error string on malformed input; the count of bytes consumed
/// reported by the parser is discarded.
#[ffi_export]
pub fn ffi_bytes_le_to_rln_proof(
    bytes: &repr_c::Vec<u8>,
) -> CResult<repr_c::Box<FFI_RLNProof>, repr_c::String> {
    let parsed = bytes_le_to_rln_proof(bytes).map(|(rln_proof, _consumed)| rln_proof);
    match parsed {
        Ok(rln_proof) => CResult {
            ok: Some(Box_::new(FFI_RLNProof(rln_proof))),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}
/// Deserializes an RLN proof from big-endian bytes.
///
/// Returns an error string on malformed input; the count of bytes consumed
/// reported by the parser is discarded.
#[ffi_export]
pub fn ffi_bytes_be_to_rln_proof(
    bytes: &repr_c::Vec<u8>,
) -> CResult<repr_c::Box<FFI_RLNProof>, repr_c::String> {
    let parsed = bytes_be_to_rln_proof(bytes).map(|(rln_proof, _consumed)| rln_proof);
    match parsed {
        Ok(rln_proof) => CResult {
            ok: Some(Box_::new(FFI_RLNProof(rln_proof))),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}
#[ffi_export]
@@ -207,84 +198,14 @@ pub fn ffi_rln_proof_free(rln_proof: repr_c::Box<FFI_RLNProof>) {
drop(rln_proof);
}
// Proof generation APIs
// RLNWitnessInput
/// Opaque FFI handle wrapping the core [`RLNWitnessInput`]: the identity
/// secret, user message limit, message id, Merkle path data, signal `x`, and
/// external nullifier used to build an RLN proof. Constructed via
/// `ffi_rln_witness_input_new` or the `ffi_bytes_*_to_rln_witness`
/// deserializers, and released with `ffi_rln_witness_input_free`.
#[derive_ReprC]
#[repr(opaque)]
pub struct FFI_RLNWitnessInput(pub(crate) RLNWitnessInput);
#[cfg(not(feature = "stateless"))]
#[ffi_export]
pub fn ffi_generate_rln_proof(
rln: &repr_c::Box<FFI_RLN>,
identity_secret: &CFr,
user_message_limit: &CFr,
message_id: &CFr,
x: &CFr,
external_nullifier: &CFr,
leaf_index: usize,
) -> CResult<repr_c::Box<FFI_RLNProof>, repr_c::String> {
let proof = match rln.0.tree.proof(leaf_index) {
Ok(proof) => proof,
Err(err) => {
return CResult {
ok: None,
err: Some(err.to_string().into()),
};
}
};
let path_elements: Vec<Fr> = proof.get_path_elements();
let identity_path_index: Vec<u8> = proof.get_path_index();
let mut identity_secret_fr = identity_secret.0;
let rln_witness = match RLNWitnessInput::new(
IdSecret::from(&mut identity_secret_fr),
user_message_limit.0,
message_id.0,
path_elements,
identity_path_index,
x.0,
external_nullifier.0,
) {
Ok(witness) => witness,
Err(err) => {
return CResult {
ok: None,
err: Some(err.to_string().into()),
};
}
};
let proof_values = match proof_values_from_witness(&rln_witness) {
Ok(pv) => pv,
Err(err) => {
return CResult {
ok: None,
err: Some(err.to_string().into()),
};
}
};
let proof = match generate_proof(&rln.0.zkey, &rln_witness, &rln.0.graph_data) {
Ok(proof) => proof,
Err(err) => {
return CResult {
ok: None,
err: Some(err.to_string().into()),
};
}
};
CResult {
ok: Some(Box_::new(FFI_RLNProof {
proof_values,
proof,
})),
err: None,
}
}
#[cfg(feature = "stateless")]
#[ffi_export]
pub fn ffi_generate_rln_proof_stateless(
rln: &repr_c::Box<FFI_RLN>,
pub fn ffi_rln_witness_input_new(
identity_secret: &CFr,
user_message_limit: &CFr,
message_id: &CFr,
@@ -292,11 +213,11 @@ pub fn ffi_generate_rln_proof_stateless(
identity_path_index: &repr_c::Vec<u8>,
x: &CFr,
external_nullifier: &CFr,
) -> CResult<repr_c::Box<FFI_RLNProof>, repr_c::String> {
) -> CResult<repr_c::Box<FFI_RLNWitnessInput>, repr_c::String> {
let mut identity_secret_fr = identity_secret.0;
let path_elements: Vec<Fr> = path_elements.iter().map(|cfr| cfr.0).collect();
let identity_path_index: Vec<u8> = identity_path_index.iter().copied().collect();
let rln_witness = match RLNWitnessInput::new(
match RLNWitnessInput::new(
IdSecret::from(&mut identity_secret_fr),
user_message_limit.0,
message_id.0,
@@ -305,41 +226,251 @@ pub fn ffi_generate_rln_proof_stateless(
x.0,
external_nullifier.0,
) {
Ok(witness) => witness,
Ok(witness) => CResult {
ok: Some(Box_::new(FFI_RLNWitnessInput(witness))),
err: None,
},
Err(err) => CResult {
ok: None,
err: Some(err.to_string().into()),
},
}
}
/// Serializes a witness to little-endian bytes, returning an error string if
/// serialization fails.
#[ffi_export]
pub fn ffi_rln_witness_to_bytes_le(
    witness: &repr_c::Box<FFI_RLNWitnessInput>,
) -> CResult<repr_c::Vec<u8>, repr_c::String> {
    // Convert both arms to FFI-safe types up front, then build the CResult.
    let serialized = rln_witness_to_bytes_le(&witness.0)
        .map(repr_c::Vec::from)
        .map_err(|err| repr_c::String::from(err.to_string()));
    match serialized {
        Ok(bytes) => CResult {
            ok: Some(bytes),
            err: None,
        },
        Err(msg) => CResult {
            ok: None,
            err: Some(msg),
        },
    }
}
/// Serializes a witness to big-endian bytes, returning an error string if
/// serialization fails.
#[ffi_export]
pub fn ffi_rln_witness_to_bytes_be(
    witness: &repr_c::Box<FFI_RLNWitnessInput>,
) -> CResult<repr_c::Vec<u8>, repr_c::String> {
    // Convert both arms to FFI-safe types up front, then build the CResult.
    let serialized = rln_witness_to_bytes_be(&witness.0)
        .map(repr_c::Vec::from)
        .map_err(|err| repr_c::String::from(err.to_string()));
    match serialized {
        Ok(bytes) => CResult {
            ok: Some(bytes),
            err: None,
        },
        Err(msg) => CResult {
            ok: None,
            err: Some(msg),
        },
    }
}
/// Deserializes a witness from little-endian bytes.
///
/// The byte count consumed by the parser is discarded; only the witness
/// handle is returned.
#[ffi_export]
pub fn ffi_bytes_le_to_rln_witness(
    bytes: &repr_c::Vec<u8>,
) -> CResult<repr_c::Box<FFI_RLNWitnessInput>, repr_c::String> {
    let parsed = bytes_le_to_rln_witness(bytes).map(|(witness, _consumed)| witness);
    match parsed {
        Ok(witness) => CResult {
            ok: Some(Box_::new(FFI_RLNWitnessInput(witness))),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}
/// Deserializes a witness from big-endian bytes.
///
/// The byte count consumed by the parser is discarded; only the witness
/// handle is returned.
#[ffi_export]
pub fn ffi_bytes_be_to_rln_witness(
    bytes: &repr_c::Vec<u8>,
) -> CResult<repr_c::Box<FFI_RLNWitnessInput>, repr_c::String> {
    let parsed = bytes_be_to_rln_witness(bytes).map(|(witness, _consumed)| witness);
    match parsed {
        Ok(witness) => CResult {
            ok: Some(Box_::new(FFI_RLNWitnessInput(witness))),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}
/// Renders a witness as a JSON document of big-integer strings, suitable for
/// external witness calculators.
#[ffi_export]
pub fn ffi_rln_witness_to_bigint_json(
    witness: &repr_c::Box<FFI_RLNWitnessInput>,
) -> CResult<repr_c::String, repr_c::String> {
    // Stringify both outcomes first, then translate into the C result type.
    let rendered = rln_witness_to_bigint_json(&witness.0)
        .map(|json| json.to_string())
        .map_err(|err| err.to_string());
    match rendered {
        Ok(json_text) => CResult {
            ok: Some(json_text.into()),
            err: None,
        },
        Err(msg) => CResult {
            ok: None,
            err: Some(msg.into()),
        },
    }
}
/// Frees a witness handle previously returned by `ffi_rln_witness_input_new`
/// or the `ffi_bytes_*_to_rln_witness` deserializers. Ownership is transferred
/// in, so each handle must be freed at most once.
#[ffi_export]
pub fn ffi_rln_witness_input_free(witness: repr_c::Box<FFI_RLNWitnessInput>) {
    // Taking the box by value and dropping it releases the allocation.
    drop(witness);
}
// RLNProofValues
/// Opaque FFI handle wrapping [`RLNProofValues`] — the public values of an RLN
/// proof (`y`, `nullifier`, `root`, `x`, `external_nullifier`), exposed to C
/// callers through the `ffi_rln_proof_values_get_*` accessors and released
/// with `ffi_rln_proof_values_free`.
#[derive_ReprC]
#[repr(opaque)]
pub struct FFI_RLNProofValues(pub(crate) RLNProofValues);
#[ffi_export]
pub fn ffi_rln_proof_values_get_y(pv: &repr_c::Box<FFI_RLNProofValues>) -> repr_c::Box<CFr> {
CFr::from(pv.0.y).into()
}
#[ffi_export]
pub fn ffi_rln_proof_values_get_nullifier(
pv: &repr_c::Box<FFI_RLNProofValues>,
) -> repr_c::Box<CFr> {
CFr::from(pv.0.nullifier).into()
}
#[ffi_export]
pub fn ffi_rln_proof_values_get_root(pv: &repr_c::Box<FFI_RLNProofValues>) -> repr_c::Box<CFr> {
CFr::from(pv.0.root).into()
}
#[ffi_export]
pub fn ffi_rln_proof_values_get_x(pv: &repr_c::Box<FFI_RLNProofValues>) -> repr_c::Box<CFr> {
CFr::from(pv.0.x).into()
}
#[ffi_export]
pub fn ffi_rln_proof_values_get_external_nullifier(
pv: &repr_c::Box<FFI_RLNProofValues>,
) -> repr_c::Box<CFr> {
CFr::from(pv.0.external_nullifier).into()
}
/// Serializes the proof values to little-endian bytes.
#[ffi_export]
pub fn ffi_rln_proof_values_to_bytes_le(pv: &repr_c::Box<FFI_RLNProofValues>) -> repr_c::Vec<u8> {
    let bytes = rln_proof_values_to_bytes_le(&pv.0);
    repr_c::Vec::from(bytes)
}
/// Serializes the proof values to big-endian bytes.
#[ffi_export]
pub fn ffi_rln_proof_values_to_bytes_be(pv: &repr_c::Box<FFI_RLNProofValues>) -> repr_c::Vec<u8> {
    let bytes = rln_proof_values_to_bytes_be(&pv.0);
    repr_c::Vec::from(bytes)
}
/// Deserializes RLN proof values from little-endian bytes.
///
/// Returns an error string on malformed input; the byte count consumed by the
/// parser is discarded.
#[ffi_export]
pub fn ffi_bytes_le_to_rln_proof_values(
    bytes: &repr_c::Vec<u8>,
) -> CResult<repr_c::Box<FFI_RLNProofValues>, repr_c::String> {
    match bytes_le_to_rln_proof_values(bytes) {
        Ok((pv, _)) => CResult {
            ok: Some(Box_::new(FFI_RLNProofValues(pv))),
            err: None,
        },
        // Use Display (`to_string`) rather than Debug (`format!("{:?}")`) so the
        // error text matches every other FFI entry point in this module.
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}
/// Deserializes RLN proof values from big-endian bytes.
///
/// Returns an error string on malformed input; the byte count consumed by the
/// parser is discarded.
#[ffi_export]
pub fn ffi_bytes_be_to_rln_proof_values(
    bytes: &repr_c::Vec<u8>,
) -> CResult<repr_c::Box<FFI_RLNProofValues>, repr_c::String> {
    match bytes_be_to_rln_proof_values(bytes) {
        Ok((pv, _)) => CResult {
            ok: Some(Box_::new(FFI_RLNProofValues(pv))),
            err: None,
        },
        // Use Display (`to_string`) rather than Debug (`format!("{:?}")`) so the
        // error text matches every other FFI entry point in this module.
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}
/// Frees proof values previously returned by `ffi_rln_proof_get_values` or
/// the `ffi_bytes_*_to_rln_proof_values` deserializers. Ownership is
/// transferred in, so each handle must be freed at most once.
#[ffi_export]
pub fn ffi_rln_proof_values_free(proof_values: repr_c::Box<FFI_RLNProofValues>) {
    // Taking the box by value and dropping it releases the allocation.
    drop(proof_values);
}
// Proof generation APIs
/// Generates an RLN proof from a prepared witness.
///
/// Thin FFI wrapper around `RLN::generate_rln_proof`; on success the proof and
/// its public values are bundled into a single opaque [`FFI_RLNProof`] handle.
#[ffi_export]
pub fn ffi_generate_rln_proof(
    rln: &repr_c::Box<FFI_RLN>,
    witness: &repr_c::Box<FFI_RLNWitnessInput>,
) -> CResult<repr_c::Box<FFI_RLNProof>, repr_c::String> {
    let outcome = rln
        .0
        .generate_rln_proof(&witness.0)
        .map(|(proof, proof_values)| RLNProof {
            proof_values,
            proof,
        });
    match outcome {
        Ok(rln_proof) => CResult {
            ok: Some(Box_::new(FFI_RLNProof(rln_proof))),
            err: None,
        },
        Err(err) => CResult {
            ok: None,
            err: Some(err.to_string().into()),
        },
    }
}
#[ffi_export]
pub fn ffi_generate_rln_proof_with_witness(
rln: &repr_c::Box<FFI_RLN>,
calculated_witness: &repr_c::Vec<repr_c::String>,
witness: &repr_c::Box<FFI_RLNWitnessInput>,
) -> CResult<repr_c::Box<FFI_RLNProof>, repr_c::String> {
let calculated_witness_bigint: Result<Vec<BigInt>, _> = calculated_witness
.iter()
.map(|s| {
let s_str = unsafe { std::str::from_utf8_unchecked(s.as_bytes()) };
s_str.parse::<BigInt>()
})
.collect();
let calculated_witness_bigint = match calculated_witness_bigint {
Ok(w) => w,
Err(err) => {
return CResult {
ok: None,
err: Some(err.to_string().into()),
};
err: Some(format!("Failed to parse witness: {}", err).into()),
}
}
};
let proof_values = match proof_values_from_witness(&rln_witness) {
Ok(pv) => pv,
Err(err) => {
return CResult {
ok: None,
err: Some(err.to_string().into()),
match rln
.0
.generate_rln_proof_with_witness(calculated_witness_bigint, &witness.0)
{
Ok((proof, proof_values)) => {
let rln_proof = RLNProof {
proof_values,
proof,
};
CResult {
ok: Some(Box_::new(FFI_RLNProof(rln_proof))),
err: None,
}
}
};
let proof = match generate_proof(&rln.0.zkey, &rln_witness, &rln.0.graph_data) {
Ok(proof) => proof,
Err(err) => {
return CResult {
ok: None,
err: Some(err.to_string().into()),
};
}
};
CResult {
ok: Some(Box_::new(FFI_RLNProof {
proof_values,
proof,
})),
err: None,
Err(err) => CResult {
ok: None,
err: Some(err.to_string().into()),
},
}
}
@@ -349,95 +480,47 @@ pub fn ffi_generate_rln_proof_stateless(
#[ffi_export]
pub fn ffi_verify_rln_proof(
rln: &repr_c::Box<FFI_RLN>,
proof: &repr_c::Box<FFI_RLNProof>,
rln_proof: &repr_c::Box<FFI_RLNProof>,
x: &CFr,
) -> CBoolResult {
// Verify the root
if rln.0.tree.root() != proof.proof_values.root {
return CBoolResult {
match rln
.0
.verify_rln_proof(&rln_proof.0.proof, &rln_proof.0.proof_values, &x.0)
{
Ok(verified) => CBoolResult {
ok: verified,
err: None,
},
Err(err) => CBoolResult {
ok: false,
err: Some("Invalid root".to_string().into()),
};
}
// Verify the signal
if *x != proof.proof_values.x {
return CBoolResult {
ok: false,
err: Some("Invalid signal".to_string().into()),
};
}
// Verify the proof
match verify_proof(&rln.0.zkey.0.vk, &proof.proof, &proof.proof_values) {
Ok(proof_verified) => {
if !proof_verified {
return CBoolResult {
ok: false,
err: Some("Invalid proof".to_string().into()),
};
}
}
Err(err) => {
return CBoolResult {
ok: false,
err: Some(err.to_string().into()),
};
}
};
// All verifications passed
CBoolResult {
ok: true,
err: None,
err: Some(err.to_string().into()),
},
}
}
#[ffi_export]
pub fn ffi_verify_with_roots(
rln: &repr_c::Box<FFI_RLN>,
proof: &repr_c::Box<FFI_RLNProof>,
rln_proof: &repr_c::Box<FFI_RLNProof>,
roots: &repr_c::Vec<CFr>,
x: &CFr,
) -> CBoolResult {
// Verify the root
if !roots.is_empty() && !roots.iter().any(|root| root.0 == proof.proof_values.root) {
return CBoolResult {
let roots_fr: Vec<Fr> = roots.iter().map(|cfr| cfr.0).collect();
match rln.0.verify_with_roots(
&rln_proof.0.proof,
&rln_proof.0.proof_values,
&x.0,
&roots_fr,
) {
Ok(verified) => CBoolResult {
ok: verified,
err: None,
},
Err(err) => CBoolResult {
ok: false,
err: Some("Invalid root".to_string().into()),
};
}
// Verify the signal
if *x != proof.proof_values.x {
return CBoolResult {
ok: false,
err: Some("Invalid signal".to_string().into()),
};
}
// Verify the proof
match verify_proof(&rln.0.zkey.0.vk, &proof.proof, &proof.proof_values) {
Ok(proof_verified) => {
if !proof_verified {
return CBoolResult {
ok: false,
err: Some("Invalid proof".to_string().into()),
};
}
}
Err(err) => {
return CBoolResult {
ok: false,
err: Some(err.to_string().into()),
};
}
};
// All verifications passed
CBoolResult {
ok: true,
err: None,
err: Some(err.to_string().into()),
},
}
}
@@ -445,37 +528,17 @@ pub fn ffi_verify_with_roots(
#[ffi_export]
pub fn ffi_recover_id_secret(
proof_1: &repr_c::Box<FFI_RLNProof>,
proof_2: &repr_c::Box<FFI_RLNProof>,
proof_values_1: &repr_c::Box<FFI_RLNProofValues>,
proof_values_2: &repr_c::Box<FFI_RLNProofValues>,
) -> CResult<repr_c::Box<CFr>, repr_c::String> {
let external_nullifier_1 = proof_1.proof_values.external_nullifier;
let external_nullifier_2 = proof_2.proof_values.external_nullifier;
// We continue only if the proof values are for the same external nullifier
if external_nullifier_1 != external_nullifier_2 {
return CResult {
match recover_id_secret(&proof_values_1.0, &proof_values_2.0) {
Ok(secret) => CResult {
ok: Some(Box_::new(CFr::from(*secret))),
err: None,
},
Err(err) => CResult {
ok: None,
err: Some("External nullifiers do not match".to_string().into()),
};
}
// We extract the two shares
let share1 = (proof_1.proof_values.x, proof_1.proof_values.y);
let share2 = (proof_2.proof_values.x, proof_2.proof_values.y);
// We recover the secret
let recovered_identity_secret_hash = match compute_id_secret(share1, share2) {
Ok(secret) => secret,
Err(err) => {
return CResult {
ok: None,
err: Some(err.to_string().into()),
};
}
};
CResult {
ok: Some(CFr::from(*recovered_identity_secret_hash).into()),
err: None,
err: Some(err.to_string().into()),
},
}
}

View File

@@ -1,12 +1,11 @@
#![allow(non_camel_case_types)]
#![cfg(not(feature = "stateless"))]
use {
super::ffi_rln::FFI_RLN,
super::ffi_utils::{CBoolResult, CFr, CResult},
crate::poseidon_tree::PoseidonTree,
safer_ffi::{boxed::Box_, derive_ReprC, ffi_export, prelude::repr_c},
utils::{ZerokitMerkleProof, ZerokitMerkleTree},
use safer_ffi::{boxed::Box_, derive_ReprC, ffi_export, prelude::repr_c};
use super::{
ffi_rln::FFI_RLN,
ffi_utils::{CBoolResult, CFr, CResult},
};
// MerkleProof
@@ -27,15 +26,11 @@ pub fn ffi_merkle_proof_free(merkle_proof: repr_c::Box<FFI_MerkleProof>) {
#[ffi_export]
pub fn ffi_set_tree(rln: &mut repr_c::Box<FFI_RLN>, tree_depth: usize) -> CBoolResult {
// We compute a default empty tree of desired depth
match PoseidonTree::default(tree_depth) {
Ok(tree) => {
rln.0.tree = tree;
CBoolResult {
ok: true,
err: None,
}
}
match rln.0.set_tree(tree_depth) {
Ok(_) => CBoolResult {
ok: true,
err: None,
},
Err(err) => CBoolResult {
ok: false,
err: Some(err.to_string().into()),
@@ -47,7 +42,7 @@ pub fn ffi_set_tree(rln: &mut repr_c::Box<FFI_RLN>, tree_depth: usize) -> CBoolR
#[ffi_export]
pub fn ffi_delete_leaf(rln: &mut repr_c::Box<FFI_RLN>, index: usize) -> CBoolResult {
match rln.0.tree.delete(index) {
match rln.0.delete_leaf(index) {
Ok(_) => CBoolResult {
ok: true,
err: None,
@@ -60,12 +55,8 @@ pub fn ffi_delete_leaf(rln: &mut repr_c::Box<FFI_RLN>, index: usize) -> CBoolRes
}
#[ffi_export]
pub fn ffi_set_leaf(
rln: &mut repr_c::Box<FFI_RLN>,
index: usize,
leaf: &repr_c::Box<CFr>,
) -> CBoolResult {
match rln.0.tree.set(index, leaf.0) {
pub fn ffi_set_leaf(rln: &mut repr_c::Box<FFI_RLN>, index: usize, leaf: &CFr) -> CBoolResult {
match rln.0.set_leaf(index, leaf.0) {
Ok(_) => CBoolResult {
ok: true,
err: None,
@@ -82,7 +73,7 @@ pub fn ffi_get_leaf(
rln: &repr_c::Box<FFI_RLN>,
index: usize,
) -> CResult<repr_c::Box<CFr>, repr_c::String> {
match rln.0.tree.get(index) {
match rln.0.get_leaf(index) {
Ok(leaf) => CResult {
ok: Some(CFr::from(leaf).into()),
err: None,
@@ -96,12 +87,12 @@ pub fn ffi_get_leaf(
#[ffi_export]
pub fn ffi_leaves_set(rln: &repr_c::Box<FFI_RLN>) -> usize {
rln.0.tree.leaves_set()
rln.0.leaves_set()
}
#[ffi_export]
pub fn ffi_set_next_leaf(rln: &mut repr_c::Box<FFI_RLN>, leaf: &repr_c::Box<CFr>) -> CBoolResult {
match rln.0.tree.update_next(leaf.0) {
pub fn ffi_set_next_leaf(rln: &mut repr_c::Box<FFI_RLN>, leaf: &CFr) -> CBoolResult {
match rln.0.set_next_leaf(leaf.0) {
Ok(_) => CBoolResult {
ok: true,
err: None,
@@ -119,11 +110,8 @@ pub fn ffi_set_leaves_from(
index: usize,
leaves: &repr_c::Vec<CFr>,
) -> CBoolResult {
match rln
.0
.tree
.override_range(index, leaves.iter().map(|cfr| cfr.0), [].into_iter())
{
let leaves_vec: Vec<_> = leaves.iter().map(|cfr| cfr.0).collect();
match rln.0.set_leaves_from(index, leaves_vec) {
Ok(_) => CBoolResult {
ok: true,
err: None,
@@ -140,20 +128,8 @@ pub fn ffi_init_tree_with_leaves(
rln: &mut repr_c::Box<FFI_RLN>,
leaves: &repr_c::Vec<CFr>,
) -> CBoolResult {
// Reset tree to default
let tree_depth = rln.0.tree.depth();
if let Err(err) = PoseidonTree::default(tree_depth) {
return CBoolResult {
ok: false,
err: Some(err.to_string().into()),
};
};
match rln
.0
.tree
.override_range(0, leaves.iter().map(|cfr| cfr.0), [].into_iter())
{
let leaves_vec: Vec<_> = leaves.iter().map(|cfr| cfr.0).collect();
match rln.0.init_tree_with_leaves(leaves_vec) {
Ok(_) => CBoolResult {
ok: true,
err: None,
@@ -174,11 +150,9 @@ pub fn ffi_atomic_operation(
leaves: &repr_c::Vec<CFr>,
indices: &repr_c::Vec<usize>,
) -> CBoolResult {
match rln.0.tree.override_range(
index,
leaves.iter().map(|cfr| cfr.0),
indices.iter().copied(),
) {
let leaves_vec: Vec<_> = leaves.iter().map(|cfr| cfr.0).collect();
let indices_vec: Vec<_> = indices.iter().copied().collect();
match rln.0.atomic_operation(index, leaves_vec, indices_vec) {
Ok(_) => CBoolResult {
ok: true,
err: None,
@@ -196,12 +170,10 @@ pub fn ffi_seq_atomic_operation(
leaves: &repr_c::Vec<CFr>,
indices: &repr_c::Vec<u8>,
) -> CBoolResult {
let index = rln.0.tree.leaves_set();
match rln.0.tree.override_range(
index,
leaves.iter().map(|cfr| cfr.0),
indices.iter().map(|x| *x as usize),
) {
let index = rln.0.leaves_set();
let leaves_vec: Vec<_> = leaves.iter().map(|cfr| cfr.0).collect();
let indices_vec: Vec<_> = indices.iter().map(|x| *x as usize).collect();
match rln.0.atomic_operation(index, leaves_vec, indices_vec) {
Ok(_) => CBoolResult {
ok: true,
err: None,
@@ -217,24 +189,23 @@ pub fn ffi_seq_atomic_operation(
#[ffi_export]
pub fn ffi_get_root(rln: &repr_c::Box<FFI_RLN>) -> repr_c::Box<CFr> {
CFr::from(rln.0.tree.root()).into()
CFr::from(rln.0.get_root()).into()
}
#[ffi_export]
pub fn ffi_get_proof(
pub fn ffi_get_merkle_proof(
rln: &repr_c::Box<FFI_RLN>,
index: usize,
) -> CResult<repr_c::Box<FFI_MerkleProof>, repr_c::String> {
match rln.0.tree.proof(index) {
Ok(proof) => {
let path_elements: repr_c::Vec<CFr> = proof
.get_path_elements()
match rln.0.get_merkle_proof(index) {
Ok((path_elements, path_index)) => {
let path_elements: repr_c::Vec<CFr> = path_elements
.iter()
.map(|fr| CFr::from(*fr))
.collect::<Vec<_>>()
.into();
let path_index: repr_c::Vec<u8> = proof.get_path_index().into();
let path_index: repr_c::Vec<u8> = path_index.into();
let merkle_proof = FFI_MerkleProof {
path_elements,
@@ -257,7 +228,7 @@ pub fn ffi_get_proof(
#[ffi_export]
pub fn ffi_set_metadata(rln: &mut repr_c::Box<FFI_RLN>, metadata: &repr_c::Vec<u8>) -> CBoolResult {
match rln.0.tree.set_metadata(metadata) {
match rln.0.set_metadata(metadata) {
Ok(_) => CBoolResult {
ok: true,
err: None,
@@ -271,7 +242,7 @@ pub fn ffi_set_metadata(rln: &mut repr_c::Box<FFI_RLN>, metadata: &repr_c::Vec<u
#[ffi_export]
pub fn ffi_get_metadata(rln: &repr_c::Box<FFI_RLN>) -> CResult<repr_c::Vec<u8>, repr_c::String> {
match rln.0.tree.metadata() {
match rln.0.get_metadata() {
Ok(metadata) => CResult {
ok: Some(metadata.into()),
err: None,
@@ -285,7 +256,7 @@ pub fn ffi_get_metadata(rln: &repr_c::Box<FFI_RLN>) -> CResult<repr_c::Vec<u8>,
#[ffi_export]
pub fn ffi_flush(rln: &mut repr_c::Box<FFI_RLN>) -> CBoolResult {
match rln.0.tree.close_db_connection() {
match rln.0.flush() {
Ok(_) => CBoolResult {
ok: true,
err: None,

View File

@@ -1,15 +1,15 @@
#![allow(non_camel_case_types)]
use crate::{
circuit::Fr,
hashers::{hash_to_field_be, hash_to_field_le, poseidon_hash},
protocol::{extended_keygen, extended_seeded_keygen, keygen, seeded_keygen},
utils::{bytes_be_to_fr, bytes_le_to_fr, fr_to_bytes_be, fr_to_bytes_le},
};
use safer_ffi::{boxed::Box_, prelude::ReprC};
use safer_ffi::{derive_ReprC, ffi_export, prelude::repr_c};
use std::ops::Deref;
use safer_ffi::{
boxed::Box_,
derive_ReprC, ffi_export,
prelude::{repr_c, ReprC},
};
use crate::prelude::*;
// CResult
#[derive_ReprC]
@@ -67,44 +67,60 @@ impl PartialEq<Fr> for CFr {
}
#[ffi_export]
pub fn cfr_zero() -> repr_c::Box<CFr> {
pub fn ffi_cfr_zero() -> repr_c::Box<CFr> {
CFr::from(Fr::from(0)).into()
}
#[ffi_export]
pub fn cfr_one() -> repr_c::Box<CFr> {
pub fn ffi_cfr_one() -> repr_c::Box<CFr> {
CFr::from(Fr::from(1)).into()
}
#[ffi_export]
pub fn cfr_to_bytes_le(cfr: &CFr) -> repr_c::Vec<u8> {
pub fn ffi_cfr_to_bytes_le(cfr: &CFr) -> repr_c::Vec<u8> {
fr_to_bytes_le(&cfr.0).into()
}
#[ffi_export]
pub fn cfr_to_bytes_be(cfr: &CFr) -> repr_c::Vec<u8> {
pub fn ffi_cfr_to_bytes_be(cfr: &CFr) -> repr_c::Vec<u8> {
fr_to_bytes_be(&cfr.0).into()
}
#[ffi_export]
pub fn bytes_le_to_cfr(bytes: &repr_c::Vec<u8>) -> repr_c::Box<CFr> {
let (cfr, _) = bytes_le_to_fr(bytes);
CFr(cfr).into()
pub fn ffi_bytes_le_to_cfr(bytes: &repr_c::Vec<u8>) -> CResult<repr_c::Box<CFr>, repr_c::String> {
match bytes_le_to_fr(bytes) {
Ok((cfr, _)) => CResult {
ok: Some(CFr(cfr).into()),
err: None,
},
Err(e) => CResult {
ok: None,
err: Some(format!("{:?}", e).into()),
},
}
}
#[ffi_export]
pub fn bytes_be_to_cfr(bytes: &repr_c::Vec<u8>) -> repr_c::Box<CFr> {
let (cfr, _) = bytes_be_to_fr(bytes);
CFr(cfr).into()
pub fn ffi_bytes_be_to_cfr(bytes: &repr_c::Vec<u8>) -> CResult<repr_c::Box<CFr>, repr_c::String> {
match bytes_be_to_fr(bytes) {
Ok((cfr, _)) => CResult {
ok: Some(CFr(cfr).into()),
err: None,
},
Err(e) => CResult {
ok: None,
err: Some(format!("{:?}", e).into()),
},
}
}
#[ffi_export]
pub fn uint_to_cfr(value: u32) -> repr_c::Box<CFr> {
pub fn ffi_uint_to_cfr(value: u32) -> repr_c::Box<CFr> {
CFr::from(Fr::from(value)).into()
}
#[ffi_export]
pub fn cfr_debug(cfr: Option<&CFr>) -> repr_c::String {
pub fn ffi_cfr_debug(cfr: Option<&CFr>) -> repr_c::String {
match cfr {
Some(cfr) => format!("{:?}", cfr.0).into(),
None => "None".into(),
@@ -112,24 +128,24 @@ pub fn cfr_debug(cfr: Option<&CFr>) -> repr_c::String {
}
#[ffi_export]
pub fn cfr_free(cfr: repr_c::Box<CFr>) {
pub fn ffi_cfr_free(cfr: repr_c::Box<CFr>) {
drop(cfr);
}
// Vec<CFr>
#[ffi_export]
pub fn vec_cfr_new(capacity: usize) -> repr_c::Vec<CFr> {
pub fn ffi_vec_cfr_new(capacity: usize) -> repr_c::Vec<CFr> {
Vec::with_capacity(capacity).into()
}
#[ffi_export]
pub fn vec_cfr_from_cfr(cfr: &CFr) -> repr_c::Vec<CFr> {
pub fn ffi_vec_cfr_from_cfr(cfr: &CFr) -> repr_c::Vec<CFr> {
vec![*cfr].into()
}
#[ffi_export]
pub fn vec_cfr_push(v: &mut safer_ffi::Vec<CFr>, cfr: &CFr) {
pub fn ffi_vec_cfr_push(v: &mut safer_ffi::Vec<CFr>, cfr: &CFr) {
let mut new: Vec<CFr> = std::mem::replace(v, Vec::new().into()).into();
if new.len() == new.capacity() {
new.reserve_exact(1);
@@ -139,30 +155,32 @@ pub fn vec_cfr_push(v: &mut safer_ffi::Vec<CFr>, cfr: &CFr) {
}
#[ffi_export]
pub fn vec_cfr_len(v: &repr_c::Vec<CFr>) -> usize {
pub fn ffi_vec_cfr_len(v: &repr_c::Vec<CFr>) -> usize {
v.len()
}
#[ffi_export]
pub fn vec_cfr_get(v: &repr_c::Vec<CFr>, i: usize) -> Option<&CFr> {
pub fn ffi_vec_cfr_get(v: &repr_c::Vec<CFr>, i: usize) -> Option<&CFr> {
v.get(i)
}
#[ffi_export]
pub fn vec_cfr_to_bytes_le(vec: &repr_c::Vec<CFr>) -> repr_c::Vec<u8> {
pub fn ffi_vec_cfr_to_bytes_le(vec: &repr_c::Vec<CFr>) -> repr_c::Vec<u8> {
let vec_fr: Vec<Fr> = vec.iter().map(|cfr| cfr.0).collect();
crate::utils::vec_fr_to_bytes_le(&vec_fr).into()
vec_fr_to_bytes_le(&vec_fr).into()
}
#[ffi_export]
pub fn vec_cfr_to_bytes_be(vec: &repr_c::Vec<CFr>) -> repr_c::Vec<u8> {
pub fn ffi_vec_cfr_to_bytes_be(vec: &repr_c::Vec<CFr>) -> repr_c::Vec<u8> {
let vec_fr: Vec<Fr> = vec.iter().map(|cfr| cfr.0).collect();
crate::utils::vec_fr_to_bytes_be(&vec_fr).into()
vec_fr_to_bytes_be(&vec_fr).into()
}
#[ffi_export]
pub fn bytes_le_to_vec_cfr(bytes: &repr_c::Vec<u8>) -> CResult<repr_c::Vec<CFr>, repr_c::String> {
match crate::utils::bytes_le_to_vec_fr(bytes) {
pub fn ffi_bytes_le_to_vec_cfr(
bytes: &repr_c::Vec<u8>,
) -> CResult<repr_c::Vec<CFr>, repr_c::String> {
match bytes_le_to_vec_fr(bytes) {
Ok((vec_fr, _)) => {
let vec_cfr: Vec<CFr> = vec_fr.into_iter().map(CFr).collect();
CResult {
@@ -178,8 +196,10 @@ pub fn bytes_le_to_vec_cfr(bytes: &repr_c::Vec<u8>) -> CResult<repr_c::Vec<CFr>,
}
#[ffi_export]
pub fn bytes_be_to_vec_cfr(bytes: &repr_c::Vec<u8>) -> CResult<repr_c::Vec<CFr>, repr_c::String> {
match crate::utils::bytes_be_to_vec_fr(bytes) {
pub fn ffi_bytes_be_to_vec_cfr(
bytes: &repr_c::Vec<u8>,
) -> CResult<repr_c::Vec<CFr>, repr_c::String> {
match bytes_be_to_vec_fr(bytes) {
Ok((vec_fr, _)) => {
let vec_cfr: Vec<CFr> = vec_fr.into_iter().map(CFr).collect();
CResult {
@@ -195,7 +215,7 @@ pub fn bytes_be_to_vec_cfr(bytes: &repr_c::Vec<u8>) -> CResult<repr_c::Vec<CFr>,
}
#[ffi_export]
pub fn vec_cfr_debug(v: Option<&repr_c::Vec<CFr>>) -> repr_c::String {
pub fn ffi_vec_cfr_debug(v: Option<&repr_c::Vec<CFr>>) -> repr_c::String {
match v {
Some(v) => {
let vec_fr: Vec<Fr> = v.iter().map(|cfr| cfr.0).collect();
@@ -206,25 +226,25 @@ pub fn vec_cfr_debug(v: Option<&repr_c::Vec<CFr>>) -> repr_c::String {
}
#[ffi_export]
pub fn vec_cfr_free(v: repr_c::Vec<CFr>) {
pub fn ffi_vec_cfr_free(v: repr_c::Vec<CFr>) {
drop(v);
}
// Vec<u8>
#[ffi_export]
pub fn vec_u8_to_bytes_le(vec: &repr_c::Vec<u8>) -> repr_c::Vec<u8> {
crate::utils::vec_u8_to_bytes_le(vec).into()
pub fn ffi_vec_u8_to_bytes_le(vec: &repr_c::Vec<u8>) -> repr_c::Vec<u8> {
vec_u8_to_bytes_le(vec).into()
}
#[ffi_export]
pub fn vec_u8_to_bytes_be(vec: &repr_c::Vec<u8>) -> repr_c::Vec<u8> {
crate::utils::vec_u8_to_bytes_be(vec).into()
pub fn ffi_vec_u8_to_bytes_be(vec: &repr_c::Vec<u8>) -> repr_c::Vec<u8> {
vec_u8_to_bytes_be(vec).into()
}
#[ffi_export]
pub fn bytes_le_to_vec_u8(bytes: &repr_c::Vec<u8>) -> CResult<repr_c::Vec<u8>, repr_c::String> {
match crate::utils::bytes_le_to_vec_u8(bytes) {
pub fn ffi_bytes_le_to_vec_u8(bytes: &repr_c::Vec<u8>) -> CResult<repr_c::Vec<u8>, repr_c::String> {
match bytes_le_to_vec_u8(bytes) {
Ok((vec, _)) => CResult {
ok: Some(vec.into()),
err: None,
@@ -237,8 +257,8 @@ pub fn bytes_le_to_vec_u8(bytes: &repr_c::Vec<u8>) -> CResult<repr_c::Vec<u8>, r
}
#[ffi_export]
pub fn bytes_be_to_vec_u8(bytes: &repr_c::Vec<u8>) -> CResult<repr_c::Vec<u8>, repr_c::String> {
match crate::utils::bytes_be_to_vec_u8(bytes) {
pub fn ffi_bytes_be_to_vec_u8(bytes: &repr_c::Vec<u8>) -> CResult<repr_c::Vec<u8>, repr_c::String> {
match bytes_be_to_vec_u8(bytes) {
Ok((vec, _)) => CResult {
ok: Some(vec.into()),
err: None,
@@ -251,15 +271,15 @@ pub fn bytes_be_to_vec_u8(bytes: &repr_c::Vec<u8>) -> CResult<repr_c::Vec<u8>, r
}
#[ffi_export]
pub fn vec_u8_debug(v: Option<&repr_c::Vec<u8>>) -> repr_c::String {
pub fn ffi_vec_u8_debug(v: Option<&repr_c::Vec<u8>>) -> repr_c::String {
match v {
Some(v) => format!("{:?}", v.deref()).into(),
Some(v) => format!("{:x?}", v.deref()).into(),
None => "None".into(),
}
}
#[ffi_export]
pub fn vec_u8_free(v: repr_c::Vec<u8>) {
pub fn ffi_vec_u8_free(v: repr_c::Vec<u8>) {
drop(v);
}
@@ -285,24 +305,23 @@ pub fn ffi_poseidon_hash_pair(a: &CFr, b: &CFr) -> repr_c::Box<CFr> {
#[ffi_export]
pub fn ffi_key_gen() -> repr_c::Vec<CFr> {
let (identity_secret_hash, id_commitment) = keygen();
vec![CFr(*identity_secret_hash), CFr(id_commitment)].into()
let (identity_secret, id_commitment) = keygen();
vec![CFr(*identity_secret), CFr(id_commitment)].into()
}
#[ffi_export]
pub fn ffi_seeded_key_gen(seed: &repr_c::Vec<u8>) -> repr_c::Vec<CFr> {
let (identity_secret_hash, id_commitment) = seeded_keygen(seed);
vec![CFr(identity_secret_hash), CFr(id_commitment)].into()
let (identity_secret, id_commitment) = seeded_keygen(seed);
vec![CFr(identity_secret), CFr(id_commitment)].into()
}
#[ffi_export]
pub fn ffi_extended_key_gen() -> repr_c::Vec<CFr> {
let (identity_trapdoor, identity_nullifier, identity_secret_hash, id_commitment) =
extended_keygen();
let (identity_trapdoor, identity_nullifier, identity_secret, id_commitment) = extended_keygen();
vec![
CFr(identity_trapdoor),
CFr(identity_nullifier),
CFr(identity_secret_hash),
CFr(identity_secret),
CFr(id_commitment),
]
.into()
@@ -310,18 +329,18 @@ pub fn ffi_extended_key_gen() -> repr_c::Vec<CFr> {
#[ffi_export]
pub fn ffi_seeded_extended_key_gen(seed: &repr_c::Vec<u8>) -> repr_c::Vec<CFr> {
let (identity_trapdoor, identity_nullifier, identity_secret_hash, id_commitment) =
let (identity_trapdoor, identity_nullifier, identity_secret, id_commitment) =
extended_seeded_keygen(seed);
vec![
CFr(identity_trapdoor),
CFr(identity_nullifier),
CFr(identity_secret_hash),
CFr(identity_secret),
CFr(id_commitment),
]
.into()
}
#[ffi_export]
pub fn c_string_free(s: repr_c::String) {
pub fn ffi_c_string_free(s: repr_c::String) {
drop(s);
}

View File

@@ -5,6 +5,6 @@ pub mod ffi_tree;
pub mod ffi_utils;
#[cfg(feature = "headers")]
pub fn generate_headers() -> ::std::io::Result<()> {
::safer_ffi::headers::builder().to_file("rln.h")?.generate()
pub fn generate_headers() -> std::io::Result<()> {
safer_ffi::headers::builder().to_file("rln.h")?.generate()
}

View File

@@ -1,17 +1,18 @@
// This crate instantiates the Poseidon hash algorithm.
use once_cell::sync::Lazy;
use tiny_keccak::{Hasher, Keccak};
use utils::poseidon::Poseidon;
use crate::{
circuit::Fr,
utils::{bytes_be_to_fr, bytes_le_to_fr},
};
use once_cell::sync::Lazy;
use tiny_keccak::{Hasher, Keccak};
use utils::poseidon::Poseidon;
/// These indexed constants hardcode the supported round parameters tuples (t, RF, RN, SKIP_MATRICES) for the Bn254 scalar field.
/// SKIP_MATRICES is the index of the randomly generated secure MDS matrix.
/// TODO: generate these parameters
pub const ROUND_PARAMS: [(usize, usize, usize, usize); 8] = [
const ROUND_PARAMS: [(usize, usize, usize, usize); 8] = [
(2, 8, 56, 0),
(3, 8, 57, 0),
(4, 8, 56, 0),
@@ -57,7 +58,7 @@ pub fn hash_to_field_le(signal: &[u8]) -> Fr {
hasher.finalize(&mut hash);
// We export the hash as a field element
let (el, _) = bytes_le_to_fr(hash.as_ref());
let (el, _) = bytes_le_to_fr(hash.as_ref()).expect("Keccak256 hash is always 32 bytes");
el
}
@@ -73,6 +74,6 @@ pub fn hash_to_field_be(signal: &[u8]) -> Fr {
hash.reverse();
// We export the hash as a field element
let (el, _) = bytes_be_to_fr(hash.as_ref());
let (el, _) = bytes_be_to_fr(hash.as_ref()).expect("Keccak256 hash is always 32 bytes");
el
}

View File

@@ -4,10 +4,9 @@ pub mod ffi;
pub mod hashers;
pub mod pm_tree_adapter;
pub mod poseidon_tree;
pub mod prelude;
pub mod protocol;
pub mod public;
#[cfg(test)]
pub mod public_api_tests;
pub mod utils;
// Ensure that only one Merkle tree feature is enabled at a time

View File

@@ -1,18 +1,21 @@
#![cfg(feature = "pmtree-ft")]
use serde_json::Value;
use std::fmt::Debug;
use std::path::PathBuf;
use std::str::FromStr;
use tempfile::Builder;
use std::{fmt::Debug, path::PathBuf, str::FromStr};
use crate::circuit::Fr;
use crate::hashers::{poseidon_hash, PoseidonHash};
use crate::utils::{bytes_le_to_fr, fr_to_bytes_le};
use utils::error::{FromConfigError, ZerokitMerkleTreeError};
use utils::pmtree::tree::Key;
use utils::pmtree::{Database, Hasher, PmtreeErrorKind};
use utils::{pmtree, Config, Mode, SledDB, ZerokitMerkleProof, ZerokitMerkleTree};
use serde_json::Value;
use tempfile::Builder;
use utils::{
error::{FromConfigError, ZerokitMerkleTreeError},
pmtree,
pmtree::{tree::Key, Database, Hasher, PmtreeErrorKind},
Config, Mode, SledDB, ZerokitMerkleProof, ZerokitMerkleTree,
};
use crate::{
circuit::Fr,
hashers::{poseidon_hash, PoseidonHash},
utils::{bytes_le_to_fr, fr_to_bytes_le},
};
const METADATA_KEY: [u8; 8] = *b"metadata";
@@ -40,7 +43,7 @@ impl Hasher for PoseidonHash {
}
fn deserialize(value: pmtree::Value) -> Self::Fr {
let (fr, _) = bytes_le_to_fr(&value);
let (fr, _) = bytes_le_to_fr(&value).expect("pmtree value should be valid Fr bytes");
fr
}
@@ -77,8 +80,14 @@ pub struct PmtreeConfigBuilder {
use_compression: bool,
}
impl Default for PmtreeConfigBuilder {
fn default() -> Self {
Self::new()
}
}
impl PmtreeConfigBuilder {
fn new() -> Self {
pub fn new() -> Self {
PmtreeConfigBuilder {
path: None,
temporary: DEFAULT_TEMPORARY,
@@ -290,11 +299,10 @@ impl ZerokitMerkleTree for PmTree {
} else if n == self.depth() {
self.get(index)
} else {
let node = self
.tree
.get_elem(Key::new(n, index >> (self.depth() - n)))
.unwrap();
Ok(node)
match self.tree.get_elem(Key::new(n, index >> (self.depth() - n))) {
Ok(value) => Ok(value),
Err(_) => Err(ZerokitMerkleTreeError::InvalidSubTreeIndex),
}
}
}
@@ -360,12 +368,12 @@ impl ZerokitMerkleTree for PmTree {
fn verify(
&self,
leaf: &FrOf<Self::Hasher>,
witness: &Self::Proof,
merkle_proof: &Self::Proof,
) -> Result<bool, ZerokitMerkleTreeError> {
if self.tree.verify(leaf, &witness.proof) {
if self.tree.verify(leaf, &merkle_proof.proof) {
Ok(true)
} else {
Err(ZerokitMerkleTreeError::InvalidWitness)
Err(ZerokitMerkleTreeError::InvalidMerkleProof)
}
}

40
rln/src/prelude.rs Normal file
View File

@@ -0,0 +1,40 @@
// This module re-exports the most commonly used types and functions from the RLN library
#[cfg(not(feature = "stateless"))]
pub use utils::{Hasher, ZerokitMerkleProof, ZerokitMerkleTree};
#[cfg(not(target_arch = "wasm32"))]
pub use crate::circuit::{graph_from_folder, zkey_from_folder};
#[cfg(not(feature = "stateless"))]
pub use crate::pm_tree_adapter::{FrOf, PmTree, PmTreeProof, PmtreeConfig, PmtreeConfigBuilder};
#[cfg(not(feature = "stateless"))]
pub use crate::poseidon_tree::{MerkleProof, PoseidonTree};
#[cfg(not(feature = "stateless"))]
pub use crate::protocol::compute_tree_root;
#[cfg(not(target_arch = "wasm32"))]
pub use crate::protocol::{generate_zk_proof, verify_zk_proof};
pub use crate::{
circuit::{
zkey_from_raw, Curve, Fq, Fq2, Fr, G1Affine, G1Projective, G2Affine, G2Projective, Proof,
VerifyingKey, Zkey, COMPRESS_PROOF_SIZE, DEFAULT_TREE_DEPTH,
},
error::{ProtocolError, RLNError, UtilsError, VerifyError},
hashers::{hash_to_field_be, hash_to_field_le, poseidon_hash, PoseidonHash},
protocol::{
bytes_be_to_rln_proof, bytes_be_to_rln_proof_values, bytes_be_to_rln_witness,
bytes_le_to_rln_proof, bytes_le_to_rln_proof_values, bytes_le_to_rln_witness,
extended_keygen, extended_seeded_keygen, generate_zk_proof_with_witness, keygen,
proof_values_from_witness, recover_id_secret, rln_proof_to_bytes_be, rln_proof_to_bytes_le,
rln_proof_values_to_bytes_be, rln_proof_values_to_bytes_le, rln_witness_to_bigint_json,
rln_witness_to_bytes_be, rln_witness_to_bytes_le, seeded_keygen, RLNProof, RLNProofValues,
RLNWitnessInput,
},
public::RLN,
utils::{
bytes_be_to_fr, bytes_be_to_vec_fr, bytes_be_to_vec_u8, bytes_be_to_vec_usize,
bytes_le_to_fr, bytes_le_to_vec_fr, bytes_le_to_vec_u8, bytes_le_to_vec_usize,
fr_to_bytes_be, fr_to_bytes_le, normalize_usize_be, normalize_usize_le, str_to_fr,
to_bigint, vec_fr_to_bytes_be, vec_fr_to_bytes_le, vec_u8_to_bytes_be, vec_u8_to_bytes_le,
IdSecret, FR_BYTE_SIZE,
},
};

View File

@@ -1,840 +0,0 @@
// This crate collects all the underlying primitives used to implement RLN
#[cfg(not(feature = "stateless"))]
use {
crate::error::ConversionError,
crate::poseidon_tree::PoseidonTree,
utils::{ZerokitMerkleProof, ZerokitMerkleTree},
};
use crate::circuit::{
iden3calc::calc_witness, qap::CircomReduction, Curve, Fr, Proof, VerifyingKey, Zkey,
};
use crate::error::{ComputeIdSecretError, ProofError, ProtocolError};
use crate::hashers::poseidon_hash;
use crate::utils::{
bytes_be_to_fr, bytes_le_to_fr, bytes_le_to_vec_fr, bytes_le_to_vec_u8, fr_byte_size,
fr_to_bytes_le, normalize_usize_le, to_bigint, vec_fr_to_bytes_le, vec_u8_to_bytes_le,
FrOrSecret, IdSecret,
};
use ark_ff::AdditiveGroup;
use ark_groth16::{prepare_verifying_key, Groth16};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_std::{rand::thread_rng, UniformRand};
use num_bigint::BigInt;
use rand::SeedableRng;
use rand_chacha::ChaCha20Rng;
use serde::{Deserialize, Serialize};
#[cfg(test)]
use std::time::Instant;
use tiny_keccak::{Hasher as _, Keccak};
use zeroize::Zeroize;
/// Top-level RLN context bundling the proving key material and, when the
/// `stateless` feature is disabled, the membership Merkle tree.
pub struct RLN {
    /// Groth16 zkey (proving/verifying key) for the RLN circuit.
    pub zkey: Zkey,
    /// Serialized witness-calculation graph; not available on wasm32 targets.
    #[cfg(not(target_arch = "wasm32"))]
    pub graph_data: Vec<u8>,
    /// Poseidon Merkle tree of member leaves; each leaf is
    /// poseidon(id_commitment, user_message_limit) (see `compute_tree_root`).
    #[cfg(not(feature = "stateless"))]
    pub tree: PoseidonTree,
}
///////////////////////////////////////////////////////
// RLN Witness data structure and utility functions
///////////////////////////////////////////////////////
/// Private and public inputs used to build the RLN circuit witness.
///
/// Construct via [`RLNWitnessInput::new`], which validates the message-id
/// range and that the two Merkle-path vectors have equal length.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub struct RLNWitnessInput {
    /// Identity secret a_0 (wrapped in `IdSecret`).
    #[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
    identity_secret: IdSecret,
    /// Upper bound that `message_id` is checked against in `new`.
    #[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
    user_message_limit: Fr,
    /// Message index; must not exceed `user_message_limit`.
    #[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
    message_id: Fr,
    /// Sibling hashes of the Merkle authentication path.
    #[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
    path_elements: Vec<Fr>,
    /// Direction bits of the Merkle path; 0 means the running node is the
    /// left child (see `compute_tree_root`).
    identity_path_index: Vec<u8>,
    /// Public input x (the signal hashed to a field element — see
    /// `proof_inputs_to_rln_witness`).
    #[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
    x: Fr,
    /// External nullifier; mixed into a_1 = poseidon(a_0, external_nullifier,
    /// message_id) in `proof_values_from_witness`.
    #[serde(serialize_with = "ark_se", deserialize_with = "ark_de")]
    external_nullifier: Fr,
}
impl RLNWitnessInput {
    /// Validating constructor for [`RLNWitnessInput`].
    ///
    /// # Errors
    ///
    /// - [`ProtocolError::InvalidMessageId`] if `message_id > user_message_limit`
    ///   (note: `message_id == user_message_limit` passes this check).
    /// - [`ProtocolError::InvalidMerkleProofLength`] if `path_elements` and
    ///   `identity_path_index` differ in length.
    pub fn new(
        identity_secret: IdSecret,
        user_message_limit: Fr,
        message_id: Fr,
        path_elements: Vec<Fr>,
        identity_path_index: Vec<u8>,
        x: Fr,
        external_nullifier: Fr,
    ) -> Result<Self, ProtocolError> {
        // Message ID range check
        if message_id > user_message_limit {
            return Err(ProtocolError::InvalidMessageId(
                message_id,
                user_message_limit,
            ));
        }
        // Merkle proof length check: both vectors describe the same path, so
        // they must have one entry per tree level.
        let path_elements_len = path_elements.len();
        let identity_path_index_len = identity_path_index.len();
        if path_elements_len != identity_path_index_len {
            return Err(ProtocolError::InvalidMerkleProofLength(
                path_elements_len,
                identity_path_index_len,
            ));
        }
        Ok(Self {
            identity_secret,
            user_message_limit,
            message_id,
            path_elements,
            identity_path_index,
            x,
            external_nullifier,
        })
    }
}
/// Circuit outputs and public inputs that accompany an RLN zk proof
/// (see `proof_values_from_witness` for how each value is derived).
#[derive(Debug, PartialEq)]
pub struct RLNProofValues {
    // Public outputs:
    /// Share y = a_0 + x * a_1 of the identity-secret polynomial.
    pub y: Fr,
    /// Nullifier = poseidon(a_1).
    pub nullifier: Fr,
    /// Merkle root of the membership tree.
    pub root: Fr,
    // Public Inputs:
    /// Signal hashed to a field element.
    pub x: Fr,
    /// External nullifier the proof was generated for.
    pub external_nullifier: Fr,
}
/// Serializes a single field element to its little-endian byte encoding.
pub fn serialize_field_element(element: Fr) -> Vec<u8> {
    fr_to_bytes_le(&element)
}
/// Deserializes a single field element from little-endian bytes.
/// Any bytes beyond the first encoded element are ignored.
pub fn deserialize_field_element(serialized: Vec<u8>) -> Fr {
    bytes_le_to_fr(&serialized).0
}
/// Deserializes an identity pair from little-endian bytes laid out as
/// [ identity_secret_hash | id_commitment ].
pub fn deserialize_identity_pair_le(serialized: Vec<u8>) -> (Fr, Fr) {
    let (secret_hash, consumed) = bytes_le_to_fr(&serialized);
    let commitment = bytes_le_to_fr(&serialized[consumed..]).0;
    (secret_hash, commitment)
}
/// Deserializes an identity pair from big-endian bytes laid out as
/// [ identity_secret_hash | id_commitment ].
pub fn deserialize_identity_pair_be(serialized: Vec<u8>) -> (Fr, Fr) {
    let (secret_hash, consumed) = bytes_be_to_fr(&serialized);
    let commitment = bytes_be_to_fr(&serialized[consumed..]).0;
    (secret_hash, commitment)
}
/// Deserializes an extended identity tuple from little-endian bytes laid out
/// as [ trapdoor | nullifier | secret_hash | commitment ].
pub fn deserialize_identity_tuple_le(serialized: Vec<u8>) -> (Fr, Fr, Fr, Fr) {
    let mut offset = 0;
    // Sequential cursor: each call decodes one field element and advances.
    let mut next_fr = || {
        let (value, read) = bytes_le_to_fr(&serialized[offset..]);
        offset += read;
        value
    };
    let identity_trapdoor = next_fr();
    let identity_nullifier = next_fr();
    let identity_secret_hash = next_fr();
    let identity_commitment = next_fr();
    (
        identity_trapdoor,
        identity_nullifier,
        identity_secret_hash,
        identity_commitment,
    )
}
/// Deserializes an extended identity tuple from big-endian bytes laid out
/// as [ trapdoor | nullifier | secret_hash | commitment ].
pub fn deserialize_identity_tuple_be(serialized: Vec<u8>) -> (Fr, Fr, Fr, Fr) {
    let mut offset = 0;
    // Sequential cursor: each call decodes one field element and advances.
    let mut next_fr = || {
        let (value, read) = bytes_be_to_fr(&serialized[offset..]);
        offset += read;
        value
    };
    let identity_trapdoor = next_fr();
    let identity_nullifier = next_fr();
    let identity_secret_hash = next_fr();
    let identity_commitment = next_fr();
    (
        identity_trapdoor,
        identity_nullifier,
        identity_secret_hash,
        identity_commitment,
    )
}
/// Serializes a witness as
/// [ identity_secret<32> | user_message_limit<32> | message_id<32> | path_elements<32> | identity_path_index<8> | x<32> | external_nullifier<32> ]
/// (field elements little-endian).
///
/// # Errors
///
/// Currently never returns `Err`: the message-id / path-length validation is
/// performed by [`RLNWitnessInput::new`], not here. The `Result` return type
/// is kept for API stability.
pub fn serialize_witness(rln_witness: &RLNWitnessInput) -> Result<Vec<u8>, ProtocolError> {
    // Calculate capacity for Vec:
    // - 5 fixed field elements: identity_secret, user_message_limit, message_id, x, external_nullifier
    // - variable number of path elements
    // - identity_path_index (variable size)
    let mut serialized: Vec<u8> = Vec::with_capacity(
        fr_byte_size() * (5 + rln_witness.path_elements.len())
            + rln_witness.identity_path_index.len(),
    );

    serialized.extend_from_slice(&rln_witness.identity_secret.to_bytes_le());
    serialized.extend_from_slice(&fr_to_bytes_le(&rln_witness.user_message_limit));
    serialized.extend_from_slice(&fr_to_bytes_le(&rln_witness.message_id));
    serialized.extend_from_slice(&vec_fr_to_bytes_le(&rln_witness.path_elements));
    serialized.extend_from_slice(&vec_u8_to_bytes_le(&rln_witness.identity_path_index));
    serialized.extend_from_slice(&fr_to_bytes_le(&rln_witness.x));
    serialized.extend_from_slice(&fr_to_bytes_le(&rln_witness.external_nullifier));

    Ok(serialized)
}
/// Deserializes a witness produced by [`serialize_witness`]; read order must
/// mirror the serialization order exactly.
///
/// Returns the witness and the number of bytes consumed.
///
/// # Errors
///
/// Returns an error if `message_id` is not within `user_message_limit`, if a
/// variable-length section is malformed, or if trailing bytes remain after
/// all fields were read.
pub fn deserialize_witness(serialized: &[u8]) -> Result<(RLNWitnessInput, usize), ProtocolError> {
    let mut all_read: usize = 0;

    let (identity_secret, read) = IdSecret::from_bytes_le(&serialized[all_read..]);
    all_read += read;

    let (user_message_limit, read) = bytes_le_to_fr(&serialized[all_read..]);
    all_read += read;

    let (message_id, read) = bytes_le_to_fr(&serialized[all_read..]);
    all_read += read;

    let (path_elements, read) = bytes_le_to_vec_fr(&serialized[all_read..])?;
    all_read += read;

    let (identity_path_index, read) = bytes_le_to_vec_u8(&serialized[all_read..])?;
    all_read += read;

    let (x, read) = bytes_le_to_fr(&serialized[all_read..]);
    all_read += read;

    let (external_nullifier, read) = bytes_le_to_fr(&serialized[all_read..]);
    all_read += read;

    // Reject inputs with trailing garbage: every byte must be accounted for.
    if serialized.len() != all_read {
        return Err(ProtocolError::InvalidReadLen(serialized.len(), all_read));
    }

    // `new` re-validates the message-id range and path lengths.
    Ok((
        RLNWitnessInput::new(
            identity_secret,
            user_message_limit,
            message_id,
            path_elements,
            identity_path_index,
            x,
            external_nullifier,
        )?,
        all_read,
    ))
}
// This function deserializes input for kilic's rln generate_proof public API
// https://github.com/kilic/rln/blob/7ac74183f8b69b399e3bc96c1ae8ab61c026dc43/src/public.rs#L148
// input_data is [ identity_secret<32> | id_index<8> | user_message_limit<32> | message_id<32> | external_nullifier<32> | signal_len<8> | signal<var> ]
// return value is a rln witness populated according to this information
//
// NOTE(review): the fixed-width slice reads (`serialized[all_read..all_read + 8]`
// and the signal slice) panic if `serialized` is shorter than the declared
// layout; consider length checks before indexing.
#[cfg(not(feature = "stateless"))]
pub fn proof_inputs_to_rln_witness(
    tree: &mut PoseidonTree,
    serialized: &[u8],
) -> Result<(RLNWitnessInput, usize), ProtocolError> {
    use crate::hashers::hash_to_field_le;

    let mut all_read: usize = 0;
    let (identity_secret, read) = IdSecret::from_bytes_le(&serialized[all_read..]);
    all_read += read;

    // id_index is an 8-byte little-endian u64, converted to usize.
    let id_index = usize::try_from(u64::from_le_bytes(
        serialized[all_read..all_read + 8]
            .try_into()
            .map_err(ConversionError::FromSlice)?,
    ))
    .map_err(ConversionError::ToUsize)?;
    all_read += 8;

    let (user_message_limit, read) = bytes_le_to_fr(&serialized[all_read..]);
    all_read += read;

    let (message_id, read) = bytes_le_to_fr(&serialized[all_read..]);
    all_read += read;

    let (external_nullifier, read) = bytes_le_to_fr(&serialized[all_read..]);
    all_read += read;

    // signal_len is an 8-byte little-endian u64 prefix for the raw signal.
    let signal_len = usize::try_from(u64::from_le_bytes(
        serialized[all_read..all_read + 8]
            .try_into()
            .map_err(ConversionError::FromSlice)?,
    ))
    .map_err(ConversionError::ToUsize)?;
    all_read += 8;

    let signal: Vec<u8> = serialized[all_read..all_read + signal_len].to_vec();

    // NOTE(review): panics if `id_index` has no proof in the tree — consider
    // propagating the error instead of `.expect`.
    let merkle_proof = tree.proof(id_index).expect("proof should exist");
    let path_elements = merkle_proof.get_path_elements();
    let identity_path_index = merkle_proof.get_path_index();

    // Public input x is the Keccak-derived field hash of the signal.
    let x = hash_to_field_le(&signal);

    Ok((
        RLNWitnessInput::new(
            identity_secret,
            user_message_limit,
            message_id,
            path_elements,
            identity_path_index,
            x,
            external_nullifier,
        )?,
        all_read,
    ))
}
/// Computes the public proof values from a witness:
/// y = a_0 + x * a_1 with a_1 = poseidon(a_0, external_nullifier, message_id),
/// nullifier = poseidon(a_1), and the Merkle root recomputed from the path.
///
/// The temporary copy of the identity secret placed in the hash buffer is
/// zeroized immediately after use.
pub fn proof_values_from_witness(
    rln_witness: &RLNWitnessInput,
) -> Result<RLNProofValues, ProtocolError> {
    // y share
    let a_0 = &rln_witness.identity_secret;
    let mut to_hash = [
        *(a_0.clone()),
        rln_witness.external_nullifier,
        rln_witness.message_id,
    ];
    let a_1 = poseidon_hash(&to_hash);
    let y = *(a_0.clone()) + rln_witness.x * a_1;

    // Nullifier
    let nullifier = poseidon_hash(&[a_1]);

    // Wipe the secret copy from the hash input buffer.
    to_hash[0].zeroize();

    // Merkle tree root computations
    let root = compute_tree_root(
        &rln_witness.identity_secret,
        &rln_witness.user_message_limit,
        &rln_witness.path_elements,
        &rln_witness.identity_path_index,
    );

    Ok(RLNProofValues {
        y,
        nullifier,
        root,
        x: rln_witness.x,
        external_nullifier: rln_witness.external_nullifier,
    })
}
/// Serializes proof values as
/// [ root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]
/// (each field element little-endian).
pub fn serialize_proof_values(rln_proof_values: &RLNProofValues) -> Vec<u8> {
    // Fixed wire order; must stay in sync with `deserialize_proof_values`.
    let fields = [
        &rln_proof_values.root,
        &rln_proof_values.external_nullifier,
        &rln_proof_values.x,
        &rln_proof_values.y,
        &rln_proof_values.nullifier,
    ];
    let mut serialized = Vec::with_capacity(fr_byte_size() * fields.len());
    for field in fields {
        serialized.extend_from_slice(&fr_to_bytes_le(field));
    }
    serialized
}
// Note: don't forget to skip the 128 bytes ZK proof, if serialized contains it.
// This proc deserializes only proof _values_, i.e. circuit outputs, not the zk proof.
// Wire order: [ root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ].
// Returns the decoded values and the number of bytes consumed.
pub fn deserialize_proof_values(serialized: &[u8]) -> (RLNProofValues, usize) {
    let mut offset: usize = 0;
    // Sequential cursor over the buffer; each call decodes one field element.
    let mut next_fr = || {
        let (value, read) = bytes_le_to_fr(&serialized[offset..]);
        offset += read;
        value
    };

    let root = next_fr();
    let external_nullifier = next_fr();
    let x = next_fr();
    let y = next_fr();
    let nullifier = next_fr();

    (
        RLNProofValues {
            y,
            nullifier,
            root,
            x,
            external_nullifier,
        },
        offset,
    )
}
// Builds the prove-input buffer:
// [ identity_secret<32> | id_index<8> | user_message_limit<32> | message_id<32> | external_nullifier<32> | signal_len<8> | signal<var> ]
pub fn prepare_prove_input(
    identity_secret: IdSecret,
    id_index: usize,
    user_message_limit: Fr,
    message_id: Fr,
    external_nullifier: Fr,
    signal: &[u8],
) -> Vec<u8> {
    // Capacity: 4 field elements + two 8-byte length/index words + the signal.
    let capacity = fr_byte_size() * 4 + 16 + signal.len();
    let mut buffer = Vec::with_capacity(capacity);

    buffer.extend_from_slice(&identity_secret.to_bytes_le());
    buffer.extend_from_slice(&normalize_usize_le(id_index));
    for fr in [&user_message_limit, &message_id, &external_nullifier] {
        buffer.extend_from_slice(&fr_to_bytes_le(fr));
    }
    buffer.extend_from_slice(&normalize_usize_le(signal.len()));
    buffer.extend_from_slice(signal);

    buffer
}
// Builds the verify-input buffer:
// [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> | signal_len<8> | signal<var> ]
// `proof_data` already carries the zk proof followed by the proof values; the
// 8-byte signal length and the raw signal are appended after it.
pub fn prepare_verify_input(proof_data: Vec<u8>, signal: &[u8]) -> Vec<u8> {
    let mut buffer = Vec::with_capacity(proof_data.len() + 8 + signal.len());
    buffer.extend(proof_data);
    buffer.extend_from_slice(&normalize_usize_le(signal.len()));
    buffer.extend_from_slice(signal);
    buffer
}
///////////////////////////////////////////////////////
// Merkle tree utility functions
///////////////////////////////////////////////////////
/// Recomputes the Merkle root from an identity secret, its message limit, and
/// an authentication path. The leaf is
/// poseidon(poseidon(identity_secret), user_message_limit); a path bit of 0
/// means the running node is the left child at that level.
///
/// The temporary copy of the secret is zeroized right after hashing.
/// Panics if `path_elements` is shorter than `identity_path_index`.
pub fn compute_tree_root(
    identity_secret: &IdSecret,
    user_message_limit: &Fr,
    path_elements: &[Fr],
    identity_path_index: &[u8],
) -> Fr {
    let mut secret_buf = [*identity_secret.clone()];
    let id_commitment = poseidon_hash(&secret_buf);
    secret_buf[0].zeroize();

    let leaf = poseidon_hash(&[id_commitment, *user_message_limit]);
    identity_path_index
        .iter()
        .enumerate()
        .fold(leaf, |node, (i, &bit)| {
            if bit == 0 {
                poseidon_hash(&[node, path_elements[i]])
            } else {
                poseidon_hash(&[path_elements[i], node])
            }
        })
}
///////////////////////////////////////////////////////
// Protocol utility functions
///////////////////////////////////////////////////////
// Generates a random identity pair (identity_secret_hash, id_commitment),
// where id_commitment = PoseidonHash(identity_secret_hash).
// RNG is instantiated using thread_rng(); the temporary secret copy used as
// hash input is zeroized before returning.
pub fn keygen() -> (IdSecret, Fr) {
    let mut rng = thread_rng();
    let identity_secret = IdSecret::rand(&mut rng);
    let mut hash_input = [*identity_secret.clone()];
    let commitment = poseidon_hash(&hash_input);
    hash_input[0].zeroize();
    (identity_secret, commitment)
}
// Generates a Semaphore-compatible tuple
// (identity_trapdoor, identity_nullifier, identity_secret_hash, id_commitment):
// trapdoor and nullifier are random,
// identity_secret_hash = PoseidonHash(trapdoor, nullifier),
// id_commitment = PoseidonHash(identity_secret_hash).
// RNG is instantiated using thread_rng().
pub fn extended_keygen() -> (Fr, Fr, Fr, Fr) {
    let mut rng = thread_rng();
    let trapdoor = Fr::rand(&mut rng);
    let nullifier = Fr::rand(&mut rng);
    let secret_hash = poseidon_hash(&[trapdoor, nullifier]);
    let commitment = poseidon_hash(&[secret_hash]);
    (trapdoor, nullifier, secret_hash, commitment)
}
// Generates a deterministic identity pair (identity_secret_hash, id_commitment)
// where id_commitment = PoseidonHash(identity_secret_hash).
// RNG is 20-round ChaCha seeded with the Keccak256 hash of the input signal,
// so equal seeds always yield equal credentials.
pub fn seeded_keygen(signal: &[u8]) -> (Fr, Fr) {
    // ChaCha20 requires exactly 32 bytes of seed material: derive it by
    // hashing the caller-provided signal with Keccak256.
    let mut seed = [0u8; 32];
    let mut keccak = Keccak::v256();
    keccak.update(signal);
    keccak.finalize(&mut seed);

    let mut rng = ChaCha20Rng::from_seed(seed);
    let secret_hash = Fr::rand(&mut rng);
    (secret_hash, poseidon_hash(&[secret_hash]))
}
// Generates a deterministic, Semaphore-compatible tuple
// (identity_trapdoor, identity_nullifier, identity_secret_hash, id_commitment):
// identity_secret_hash = PoseidonHash(trapdoor, nullifier),
// id_commitment = PoseidonHash(identity_secret_hash).
// RNG is 20-round ChaCha seeded with the Keccak256 hash of the input signal.
pub fn extended_seeded_keygen(signal: &[u8]) -> (Fr, Fr, Fr, Fr) {
    // ChaCha20 requires exactly 32 bytes of seed material: derive it by
    // hashing the caller-provided signal with Keccak256.
    let mut seed = [0u8; 32];
    let mut keccak = Keccak::v256();
    keccak.update(signal);
    keccak.finalize(&mut seed);

    let mut rng = ChaCha20Rng::from_seed(seed);
    let trapdoor = Fr::rand(&mut rng);
    let nullifier = Fr::rand(&mut rng);
    let secret_hash = poseidon_hash(&[trapdoor, nullifier]);
    let commitment = poseidon_hash(&[secret_hash]);
    (trapdoor, nullifier, secret_hash, commitment)
}
/// Recovers the identity secret `a_0` from two (x, y) shares of the line
/// `y = a_0 + x * a_1`, where `a_1 = poseidonHash([a_0, external_nullifier])`.
///
/// # Errors
///
/// Returns [`ComputeIdSecretError::DivisionByZero`] when both shares have the
/// same x-coordinate, since the line is then underdetermined.
pub fn compute_id_secret(
    share1: (Fr, Fr),
    share2: (Fr, Fr),
) -> Result<IdSecret, ComputeIdSecretError> {
    let (x1, y1) = share1;
    let (x2, y2) = share2;
    // Equal x-coordinates make the slope computation a division by zero.
    if x1 - x2 == Fr::ZERO {
        return Err(ComputeIdSecretError::DivisionByZero);
    }
    // Slope of the line through the two points.
    let a_1 = (y1 - y2) / (x1 - x2);
    // Intercept: if the shares come from the same polynomial this is the secret.
    let mut a_0 = y1 - x1 * a_1;
    Ok(IdSecret::from(&mut a_0))
}
///////////////////////////////////////////////////////
// zkSNARK utility functions
///////////////////////////////////////////////////////
/// Maps a calculated witness (signed big integers) into scalar-field elements.
/// Negative entries are folded into the field as `modulus - |w|`.
fn calculate_witness_element<E: ark_ec::pairing::Pairing>(
    witness: Vec<BigInt>,
) -> Result<Vec<E::ScalarField>, ProtocolError> {
    use ark_ff::PrimeField;
    use num_traits::Signed;
    let modulus = <E::ScalarField as PrimeField>::MODULUS;
    witness
        .into_iter()
        .map(|w| {
            let unsigned = if w.sign() == num_bigint::Sign::Minus {
                // Need to negate the witness element if negative
                modulus.into()
                    - w.abs()
                        .to_biguint()
                        .ok_or(ProtocolError::BigUintConversion(w))?
            } else {
                w.to_biguint().ok_or(ProtocolError::BigUintConversion(w))?
            };
            Ok(E::ScalarField::from(unsigned))
        })
        .collect()
}
/// Generates a Groth16 proof from an externally pre-calculated witness.
///
/// `calculated_witness` is the full circuit assignment as signed big integers;
/// it is converted to field elements before proving.
///
/// # Errors
///
/// Returns a [`ProofError`] if witness conversion or Groth16 proving fails.
pub fn generate_proof_with_witness(
    calculated_witness: Vec<BigInt>,
    zkey: &Zkey,
) -> Result<Proof, ProofError> {
    // If in debug mode, we measure and later print time take to compute witness
    #[cfg(test)]
    let now = Instant::now();
    let full_assignment = calculate_witness_element::<Curve>(calculated_witness)?;
    #[cfg(test)]
    println!("witness generation took: {:.2?}", now.elapsed());
    // Random Values
    // r and s are the Groth16 blinding factors, so repeated proofs over the
    // same witness are distinct.
    let mut rng = thread_rng();
    let r = Fr::rand(&mut rng);
    let s = Fr::rand(&mut rng);
    // If in debug mode, we measure and later print time take to compute proof
    #[cfg(test)]
    let now = Instant::now();
    let proof = Groth16::<_, CircomReduction>::create_proof_with_reduction_and_matrices(
        &zkey.0,
        r,
        s,
        &zkey.1,
        zkey.1.num_instance_variables,
        zkey.1.num_constraints,
        full_assignment.as_slice(),
    )?;
    #[cfg(test)]
    println!("proof generation took: {:.2?}", now.elapsed());
    Ok(proof)
}
/// Formats inputs for witness calculation
///
/// # Errors
///
/// Returns an error if `rln_witness.message_id` is not within `rln_witness.user_message_limit`.
pub fn inputs_for_witness_calculation(
rln_witness: &RLNWitnessInput,
) -> Result<[(&str, Vec<FrOrSecret>); 7], ProtocolError> {
let mut identity_path_index = Vec::with_capacity(rln_witness.identity_path_index.len());
rln_witness
.identity_path_index
.iter()
.for_each(|v| identity_path_index.push(Fr::from(*v)));
Ok([
(
"identitySecret",
vec![rln_witness.identity_secret.clone().into()],
),
(
"userMessageLimit",
vec![rln_witness.user_message_limit.into()],
),
("messageId", vec![rln_witness.message_id.into()]),
(
"pathElements",
rln_witness
.path_elements
.iter()
.cloned()
.map(Into::into)
.collect(),
),
(
"identityPathIndex",
identity_path_index.into_iter().map(Into::into).collect(),
),
("x", vec![rln_witness.x.into()]),
(
"externalNullifier",
vec![rln_witness.external_nullifier.into()],
),
])
}
/// Generates a RLN proof
///
/// Calculates the witness internally from `rln_witness` using `graph_data`,
/// then runs Groth16 proving with the provided `zkey`.
///
/// # Errors
///
/// Returns a [`ProofError`] if proving fails.
pub fn generate_proof(
    zkey: &Zkey,
    rln_witness: &RLNWitnessInput,
    graph_data: &[u8],
) -> Result<Proof, ProofError> {
    // Name/value pairs for the witness calculator.
    let inputs = inputs_for_witness_calculation(rln_witness)?
        .into_iter()
        .map(|(name, values)| (name.to_string(), values));
    // If in debug mode, we measure and later print time take to compute witness
    #[cfg(test)]
    let now = Instant::now();
    let full_assignment = calc_witness(inputs, graph_data);
    #[cfg(test)]
    println!("witness generation took: {:.2?}", now.elapsed());
    // Random Values
    // r and s are the Groth16 blinding factors.
    let mut rng = thread_rng();
    let r = Fr::rand(&mut rng);
    let s = Fr::rand(&mut rng);
    // If in debug mode, we measure and later print time take to compute proof
    #[cfg(test)]
    let now = Instant::now();
    let proof = Groth16::<_, CircomReduction>::create_proof_with_reduction_and_matrices(
        &zkey.0,
        r,
        s,
        &zkey.1,
        zkey.1.num_instance_variables,
        zkey.1.num_constraints,
        full_assignment.as_slice(),
    )?;
    #[cfg(test)]
    println!("proof generation took: {:.2?}", now.elapsed());
    Ok(proof)
}
/// Verifies a given RLN proof
///
/// Returns `Ok(true)` when the proof verifies against `proof_values`,
/// `Ok(false)` when it does not.
///
/// # Errors
///
/// Returns a [`ProofError`] if verifying fails. Verification failure does not
/// necessarily mean the proof is incorrect.
pub fn verify_proof(
    verifying_key: &VerifyingKey,
    proof: &Proof,
    proof_values: &RLNProofValues,
) -> Result<bool, ProofError> {
    // We re-arrange proof-values according to the circuit specification
    // (public input order: y, root, nullifier, x, external_nullifier)
    let inputs = vec![
        proof_values.y,
        proof_values.root,
        proof_values.nullifier,
        proof_values.x,
        proof_values.external_nullifier,
    ];
    // Check that the proof is valid
    let pvk = prepare_verifying_key(verifying_key);
    // If in debug mode, we measure and later print time take to verify proof
    #[cfg(test)]
    let now = Instant::now();
    let verified = Groth16::<_, CircomReduction>::verify_proof(&pvk, proof, &inputs)?;
    #[cfg(test)]
    println!("verify took: {:.2?}", now.elapsed());
    Ok(verified)
}
// Auxiliary serde serializer: encodes an arkworks type (e.g. Fr) as its
// compressed byte representation.
fn ark_se<S, A: CanonicalSerialize>(a: &A, s: S) -> Result<S::Ok, S::Error>
where
    S: serde::Serializer,
{
    let mut buf = Vec::new();
    a.serialize_compressed(&mut buf)
        .map_err(serde::ser::Error::custom)?;
    s.serialize_bytes(&buf)
}
// auxiliary function for deserialisation Fr to json using ark serilize
fn ark_de<'de, D, A: CanonicalDeserialize>(data: D) -> Result<A, D::Error>
where
D: serde::de::Deserializer<'de>,
{
let s: Vec<u8> = serde::de::Deserialize::deserialize(data)?;
let a = A::deserialize_compressed_unchecked(s.as_slice());
a.map_err(serde::de::Error::custom)
}
/// Converts a JSON value into [`RLNWitnessInput`] object.
///
/// # Errors
///
/// Returns an error if `rln_witness.message_id` is not within `rln_witness.user_message_limit`.
pub fn rln_witness_from_json(
input_json: serde_json::Value,
) -> Result<RLNWitnessInput, ProtocolError> {
Ok(serde_json::from_value(input_json)?)
}
/// Converts a [`RLNWitnessInput`] object to the corresponding JSON serialization.
///
/// # Errors
///
/// Returns an error if `message_id` is not within `user_message_limit`.
pub fn rln_witness_to_json(
rln_witness: &RLNWitnessInput,
) -> Result<serde_json::Value, ProtocolError> {
Ok(serde_json::to_value(rln_witness)?)
}
/// Converts a [`RLNWitnessInput`] object to the corresponding JSON serialization.
/// Before serialisation the data should be translated into big int for further calculation in the witness calculator.
///
/// # Errors
///
/// Returns an error if `message_id` is not within `user_message_limit`.
pub fn rln_witness_to_bigint_json(
    rln_witness: &RLNWitnessInput,
) -> Result<serde_json::Value, ProtocolError> {
    // All values are rendered as decimal strings, the format the witness
    // calculator expects.
    let path_elements: Vec<String> = rln_witness
        .path_elements
        .iter()
        .map(|v| to_bigint(v).to_str_radix(10))
        .collect();
    let identity_path_index: Vec<String> = rln_witness
        .identity_path_index
        .iter()
        .map(|v| BigInt::from(*v).to_str_radix(10))
        .collect();
    Ok(serde_json::json!({
        "identitySecret": to_bigint(&rln_witness.identity_secret).to_str_radix(10),
        "userMessageLimit": to_bigint(&rln_witness.user_message_limit).to_str_radix(10),
        "messageId": to_bigint(&rln_witness.message_id).to_str_radix(10),
        "pathElements": path_elements,
        "identityPathIndex": identity_path_index,
        "x": to_bigint(&rln_witness.x).to_str_radix(10),
        "externalNullifier": to_bigint(&rln_witness.external_nullifier).to_str_radix(10),
    }))
}

View File

@@ -0,0 +1,79 @@
use ark_std::{rand::thread_rng, UniformRand};
use rand::SeedableRng;
use rand_chacha::ChaCha20Rng;
use tiny_keccak::{Hasher as _, Keccak};
use crate::{circuit::Fr, hashers::poseidon_hash, utils::IdSecret};
/// Generates a random RLN identity using a cryptographically secure RNG.
///
/// Returns `(identity_secret, id_commitment)` where the commitment is `PoseidonHash(identity_secret)`.
pub fn keygen() -> (IdSecret, Fr) {
let mut rng = thread_rng();
let identity_secret = IdSecret::rand(&mut rng);
let id_commitment = poseidon_hash(&[*identity_secret.clone()]);
(identity_secret, id_commitment)
}
/// Generates an extended RLN identity compatible with Semaphore.
///
/// Returns `(identity_trapdoor, identity_nullifier, identity_secret, id_commitment)` where:
/// - `identity_secret = PoseidonHash(identity_trapdoor, identity_nullifier)`
/// - `id_commitment = PoseidonHash(identity_secret)`
pub fn extended_keygen() -> (Fr, Fr, Fr, Fr) {
    let mut rng = thread_rng();
    // Draw the two independent random components first, in a fixed order.
    let trapdoor = Fr::rand(&mut rng);
    let nullifier = Fr::rand(&mut rng);
    // Derive the secret and its public commitment from them.
    let secret = poseidon_hash(&[trapdoor, nullifier]);
    let commitment = poseidon_hash(&[secret]);
    (trapdoor, nullifier, secret, commitment)
}
/// Generates a deterministic RLN identity from a seed.
///
/// Uses ChaCha20 RNG seeded with Keccak-256 hash of the input.
/// Returns `(identity_secret, id_commitment)`. Same input always produces the same identity.
pub fn seeded_keygen(signal: &[u8]) -> (Fr, Fr) {
    // ChaCha20 needs exactly 32 seed bytes, so the input signal is first
    // compressed into 32 bytes with Keccak-256.
    let mut seed = [0u8; 32];
    let mut keccak = Keccak::v256();
    keccak.update(signal);
    keccak.finalize(&mut seed);
    let mut rng = ChaCha20Rng::from_seed(seed);
    let secret = Fr::rand(&mut rng);
    (secret, poseidon_hash(&[secret]))
}
/// Generates a deterministic extended RLN identity from a seed, compatible with Semaphore.
///
/// Uses ChaCha20 RNG seeded with Keccak-256 hash of the input.
/// Returns `(identity_trapdoor, identity_nullifier, identity_secret, id_commitment)`.
/// Same input always produces the same identity.
pub fn extended_seeded_keygen(signal: &[u8]) -> (Fr, Fr, Fr, Fr) {
    // ChaCha20 needs exactly 32 seed bytes, so the input signal is first
    // compressed into 32 bytes with Keccak-256.
    let mut seed = [0u8; 32];
    let mut keccak = Keccak::v256();
    keccak.update(signal);
    keccak.finalize(&mut seed);
    let mut rng = ChaCha20Rng::from_seed(seed);
    // Deterministic draw order: trapdoor then nullifier.
    let trapdoor = Fr::rand(&mut rng);
    let nullifier = Fr::rand(&mut rng);
    let secret = poseidon_hash(&[trapdoor, nullifier]);
    let commitment = poseidon_hash(&[secret]);
    (trapdoor, nullifier, secret, commitment)
}

19
rln/src/protocol/mod.rs Normal file
View File

@@ -0,0 +1,19 @@
// This module collects all the underlying primitives used to implement RLN:
// - `keygen`: identity generation (random, seeded, Semaphore-compatible)
// - `proof`: RLN proof types, (de)serialization, zkSNARK generation/verification
// - `slashing`: identity-secret recovery from two shares
// - `witness`: witness input type, (de)serialization, proof-value derivation
mod keygen;
mod proof;
mod slashing;
mod witness;
pub use keygen::{extended_keygen, extended_seeded_keygen, keygen, seeded_keygen};
pub use proof::{
    bytes_be_to_rln_proof, bytes_be_to_rln_proof_values, bytes_le_to_rln_proof,
    bytes_le_to_rln_proof_values, generate_zk_proof, generate_zk_proof_with_witness,
    rln_proof_to_bytes_be, rln_proof_to_bytes_le, rln_proof_values_to_bytes_be,
    rln_proof_values_to_bytes_le, verify_zk_proof, RLNProof, RLNProofValues,
};
pub use slashing::recover_id_secret;
pub use witness::{
    bytes_be_to_rln_witness, bytes_le_to_rln_witness, compute_tree_root, proof_values_from_witness,
    rln_witness_to_bigint_json, rln_witness_to_bytes_be, rln_witness_to_bytes_le, RLNWitnessInput,
};

353
rln/src/protocol/proof.rs Normal file
View File

@@ -0,0 +1,353 @@
use ark_ff::PrimeField;
use ark_groth16::{prepare_verifying_key, Groth16};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_std::{rand::thread_rng, UniformRand};
use num_bigint::BigInt;
use num_traits::Signed;
use super::witness::{inputs_for_witness_calculation, RLNWitnessInput};
use crate::{
circuit::{
iden3calc::calc_witness, qap::CircomReduction, Curve, Fr, Proof, VerifyingKey, Zkey,
COMPRESS_PROOF_SIZE,
},
error::ProtocolError,
utils::{bytes_be_to_fr, bytes_le_to_fr, fr_to_bytes_be, fr_to_bytes_le, FR_BYTE_SIZE},
};
/// Complete RLN proof.
///
/// Combines the Groth16 proof with its public values.
#[derive(Debug, PartialEq, Clone)]
pub struct RLNProof {
    /// The Groth16 zkSNARK proof over the RLN circuit
    pub proof: Proof,
    /// The public inputs/outputs the proof is verified against
    pub proof_values: RLNProofValues,
}
/// Public values for RLN proof verification.
///
/// Contains the circuit's public inputs and outputs. Used in proof verification
/// and identity secret recovery when rate limit violations are detected.
#[derive(Debug, PartialEq, Clone, Copy)]
pub struct RLNProofValues {
    // Public outputs:
    /// Share of the identity secret: `y = a_0 + x * a_1`
    pub y: Fr,
    /// Nullifier derived from `a_1`; identifies reuse within an epoch
    pub nullifier: Fr,
    /// Merkle tree root the membership proof was computed against
    pub root: Fr,
    // Public Inputs:
    /// Evaluation point of the share (presumably derived from the signal — confirm with callers)
    pub x: Fr,
    /// External nullifier binding the proof to a rate-limiting epoch/context
    pub external_nullifier: Fr,
}
/// Serializes RLN proof values to little-endian bytes.
///
/// Layout: `[ root | external_nullifier | x | y | nullifier ]`, one field element each.
pub fn rln_proof_values_to_bytes_le(rln_proof_values: &RLNProofValues) -> Vec<u8> {
    let fields = [
        rln_proof_values.root,
        rln_proof_values.external_nullifier,
        rln_proof_values.x,
        rln_proof_values.y,
        rln_proof_values.nullifier,
    ];
    let mut bytes = Vec::with_capacity(FR_BYTE_SIZE * fields.len());
    for field in &fields {
        bytes.extend_from_slice(&fr_to_bytes_le(field));
    }
    bytes
}
/// Serializes RLN proof values to big-endian bytes.
///
/// Layout: `[ root | external_nullifier | x | y | nullifier ]`, one field element each.
pub fn rln_proof_values_to_bytes_be(rln_proof_values: &RLNProofValues) -> Vec<u8> {
    let fields = [
        rln_proof_values.root,
        rln_proof_values.external_nullifier,
        rln_proof_values.x,
        rln_proof_values.y,
        rln_proof_values.nullifier,
    ];
    let mut bytes = Vec::with_capacity(FR_BYTE_SIZE * fields.len());
    for field in &fields {
        bytes.extend_from_slice(&fr_to_bytes_be(field));
    }
    bytes
}
/// Deserializes RLN proof values from little-endian bytes.
///
/// Format: `[ root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]`
///
/// Returns the deserialized proof values and the number of bytes read.
pub fn bytes_le_to_rln_proof_values(
    bytes: &[u8],
) -> Result<(RLNProofValues, usize), ProtocolError> {
    // Sequential decode; each parser reports how many bytes it consumed.
    let mut offset = 0usize;
    let (root, n) = bytes_le_to_fr(&bytes[offset..])?;
    offset += n;
    let (external_nullifier, n) = bytes_le_to_fr(&bytes[offset..])?;
    offset += n;
    let (x, n) = bytes_le_to_fr(&bytes[offset..])?;
    offset += n;
    let (y, n) = bytes_le_to_fr(&bytes[offset..])?;
    offset += n;
    let (nullifier, n) = bytes_le_to_fr(&bytes[offset..])?;
    offset += n;
    let values = RLNProofValues {
        y,
        nullifier,
        root,
        x,
        external_nullifier,
    };
    Ok((values, offset))
}
/// Deserializes RLN proof values from big-endian bytes.
///
/// Format: `[ root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]`
///
/// Returns the deserialized proof values and the number of bytes read.
pub fn bytes_be_to_rln_proof_values(
    bytes: &[u8],
) -> Result<(RLNProofValues, usize), ProtocolError> {
    // Sequential decode; each parser reports how many bytes it consumed.
    let mut offset = 0usize;
    let (root, n) = bytes_be_to_fr(&bytes[offset..])?;
    offset += n;
    let (external_nullifier, n) = bytes_be_to_fr(&bytes[offset..])?;
    offset += n;
    let (x, n) = bytes_be_to_fr(&bytes[offset..])?;
    offset += n;
    let (y, n) = bytes_be_to_fr(&bytes[offset..])?;
    offset += n;
    let (nullifier, n) = bytes_be_to_fr(&bytes[offset..])?;
    offset += n;
    let values = RLNProofValues {
        y,
        nullifier,
        root,
        x,
        external_nullifier,
    };
    Ok((values, offset))
}
/// Serializes RLN proof to little-endian bytes.
///
/// Note: The Groth16 proof is always serialized in LE format (arkworks behavior),
/// while proof_values are serialized in LE format.
pub fn rln_proof_to_bytes_le(rln_proof: &RLNProof) -> Vec<u8> {
    // 128 bytes of compressed Groth16 proof followed by the five 32-byte proof values.
    let mut out = Vec::with_capacity(COMPRESS_PROOF_SIZE + FR_BYTE_SIZE * 5);
    // Writing into a Vec cannot fail, hence the expect.
    rln_proof
        .proof
        .serialize_compressed(&mut out)
        .expect("serialization should not fail");
    out.extend_from_slice(&rln_proof_values_to_bytes_le(&rln_proof.proof_values));
    out
}
/// Serializes RLN proof to big-endian bytes.
///
/// Note: The Groth16 proof is always serialized in LE format (arkworks behavior),
/// while proof_values are serialized in BE format. This creates a mixed-endian format.
pub fn rln_proof_to_bytes_be(rln_proof: &RLNProof) -> Vec<u8> {
    // 128 bytes of compressed Groth16 proof followed by the five 32-byte proof values.
    let mut out = Vec::with_capacity(COMPRESS_PROOF_SIZE + FR_BYTE_SIZE * 5);
    // Writing into a Vec cannot fail, hence the expect.
    rln_proof
        .proof
        .serialize_compressed(&mut out)
        .expect("serialization should not fail");
    out.extend_from_slice(&rln_proof_values_to_bytes_be(&rln_proof.proof_values));
    out
}
/// Deserializes RLN proof from little-endian bytes.
///
/// Format: `[ proof<128,LE> | root<32,LE> | external_nullifier<32,LE> | x<32,LE> | y<32,LE> | nullifier<32,LE> ]`
///
/// Returns the deserialized proof and the number of bytes read.
///
/// # Errors
///
/// Returns [`ProtocolError::InvalidReadLen`] if the input is shorter than a
/// compressed Groth16 proof or the proof bytes are malformed, and propagates
/// any proof-values parsing error.
pub fn bytes_le_to_rln_proof(bytes: &[u8]) -> Result<(RLNProof, usize), ProtocolError> {
    // Guard against short inputs: the slice below would otherwise panic.
    if bytes.len() < COMPRESS_PROOF_SIZE {
        return Err(ProtocolError::InvalidReadLen(
            bytes.len(),
            COMPRESS_PROOF_SIZE,
        ));
    }
    let mut read: usize = 0;
    // Deserialize proof (always LE from arkworks)
    let proof = Proof::deserialize_compressed(&bytes[read..read + COMPRESS_PROOF_SIZE])
        .map_err(|_| ProtocolError::InvalidReadLen(bytes.len(), read + COMPRESS_PROOF_SIZE))?;
    read += COMPRESS_PROOF_SIZE;
    // Deserialize proof values
    let (values, el_size) = bytes_le_to_rln_proof_values(&bytes[read..])?;
    read += el_size;
    Ok((
        RLNProof {
            proof,
            proof_values: values,
        },
        read,
    ))
}
/// Deserializes RLN proof from big-endian bytes.
///
/// Format: `[ proof<128,LE> | root<32,BE> | external_nullifier<32,BE> | x<32,BE> | y<32,BE> | nullifier<32,BE> ]`
///
/// Note: Mixed-endian format - proof is LE (arkworks), proof_values are BE.
///
/// Returns the deserialized proof and the number of bytes read.
///
/// # Errors
///
/// Returns [`ProtocolError::InvalidReadLen`] if the input is shorter than a
/// compressed Groth16 proof or the proof bytes are malformed, and propagates
/// any proof-values parsing error.
pub fn bytes_be_to_rln_proof(bytes: &[u8]) -> Result<(RLNProof, usize), ProtocolError> {
    // Guard against short inputs: the slice below would otherwise panic.
    if bytes.len() < COMPRESS_PROOF_SIZE {
        return Err(ProtocolError::InvalidReadLen(
            bytes.len(),
            COMPRESS_PROOF_SIZE,
        ));
    }
    let mut read: usize = 0;
    // Deserialize proof (always LE from arkworks)
    let proof = Proof::deserialize_compressed(&bytes[read..read + COMPRESS_PROOF_SIZE])
        .map_err(|_| ProtocolError::InvalidReadLen(bytes.len(), read + COMPRESS_PROOF_SIZE))?;
    read += COMPRESS_PROOF_SIZE;
    // Deserialize proof values
    let (values, el_size) = bytes_be_to_rln_proof_values(&bytes[read..])?;
    read += el_size;
    Ok((
        RLNProof {
            proof,
            proof_values: values,
        },
        read,
    ))
}
// zkSNARK proof generation and verification
/// Converts calculated witness (BigInt) to field elements.
/// Negative entries are folded into the field as `modulus - |w|`.
fn calculated_witness_to_field_elements<E: ark_ec::pairing::Pairing>(
    calculated_witness: Vec<BigInt>,
) -> Result<Vec<E::ScalarField>, ProtocolError> {
    let modulus = <E::ScalarField as PrimeField>::MODULUS;
    calculated_witness
        .into_iter()
        .map(|w| {
            let unsigned = if w.sign() == num_bigint::Sign::Minus {
                // Need to negate the witness element if negative
                modulus.into()
                    - w.abs()
                        .to_biguint()
                        .ok_or(ProtocolError::BigUintConversion(w))?
            } else {
                w.to_biguint().ok_or(ProtocolError::BigUintConversion(w))?
            };
            Ok(E::ScalarField::from(unsigned))
        })
        .collect()
}
/// Generates a zkSNARK proof from pre-calculated witness values.
///
/// Use this when witness calculation is performed externally.
///
/// # Errors
///
/// Returns a [`ProtocolError`] if witness conversion or Groth16 proving fails.
pub fn generate_zk_proof_with_witness(
    calculated_witness: Vec<BigInt>,
    zkey: &Zkey,
) -> Result<Proof, ProtocolError> {
    let full_assignment = calculated_witness_to_field_elements::<Curve>(calculated_witness)?;
    // Random Values
    // r and s are the Groth16 blinding factors, so repeated proofs over the
    // same witness are distinct.
    let mut rng = thread_rng();
    let r = Fr::rand(&mut rng);
    let s = Fr::rand(&mut rng);
    let proof = Groth16::<_, CircomReduction>::create_proof_with_reduction_and_matrices(
        &zkey.0,
        r,
        s,
        &zkey.1,
        zkey.1.num_instance_variables,
        zkey.1.num_constraints,
        full_assignment.as_slice(),
    )?;
    Ok(proof)
}
/// Generates a zkSNARK proof from witness input using the provided circuit data.
///
/// Calculates the witness internally from `witness` using `graph_data`, then
/// runs Groth16 proving with the provided `zkey`.
///
/// # Errors
///
/// Returns a [`ProtocolError`] if input formatting or Groth16 proving fails.
pub fn generate_zk_proof(
    zkey: &Zkey,
    witness: &RLNWitnessInput,
    graph_data: &[u8],
) -> Result<Proof, ProtocolError> {
    // Name/value pairs for the witness calculator.
    let inputs = inputs_for_witness_calculation(witness)?
        .into_iter()
        .map(|(name, values)| (name.to_string(), values));
    let full_assignment = calc_witness(inputs, graph_data);
    // Random Values
    // r and s are the Groth16 blinding factors.
    let mut rng = thread_rng();
    let r = Fr::rand(&mut rng);
    let s = Fr::rand(&mut rng);
    let proof = Groth16::<_, CircomReduction>::create_proof_with_reduction_and_matrices(
        &zkey.0,
        r,
        s,
        &zkey.1,
        zkey.1.num_instance_variables,
        zkey.1.num_constraints,
        full_assignment.as_slice(),
    )?;
    Ok(proof)
}
/// Verifies a zkSNARK proof against the verifying key and public values.
///
/// Returns `true` if the proof is cryptographically valid, `false` if verification fails.
/// Note: Verification failure may occur due to proof computation errors, not necessarily malicious proofs.
///
/// # Errors
///
/// Returns a [`ProtocolError`] if the underlying Groth16 verification routine fails.
pub fn verify_zk_proof(
    verifying_key: &VerifyingKey,
    proof: &Proof,
    proof_values: &RLNProofValues,
) -> Result<bool, ProtocolError> {
    // We re-arrange proof-values according to the circuit specification
    // (public input order must be: y, root, nullifier, x, external_nullifier)
    let inputs = vec![
        proof_values.y,
        proof_values.root,
        proof_values.nullifier,
        proof_values.x,
        proof_values.external_nullifier,
    ];
    // Check that the proof is valid
    let pvk = prepare_verifying_key(verifying_key);
    let verified = Groth16::<_, CircomReduction>::verify_proof(&pvk, proof, &inputs)?;
    Ok(verified)
}

View File

@@ -0,0 +1,55 @@
use ark_ff::AdditiveGroup;
use super::proof::RLNProofValues;
use crate::{circuit::Fr, error::ProtocolError, utils::IdSecret};
/// Computes identity secret from two (x, y) shares.
///
/// Each share satisfies `y = a_0 + x * a_1` where `a_0` is the identity secret
/// and `a_1 = poseidonHash([a_0, external_nullifier])`; two distinct shares
/// determine the line and hence `a_0`.
fn compute_id_secret(share1: (Fr, Fr), share2: (Fr, Fr)) -> Result<IdSecret, ProtocolError> {
    let (x1, y1) = share1;
    let (x2, y2) = share2;
    // Equal x-coordinates make the slope computation a division by zero.
    if x1 - x2 == Fr::ZERO {
        return Err(ProtocolError::DivisionByZero);
    }
    // Slope of the line through the two points.
    let a_1 = (y1 - y2) / (x1 - x2);
    // Intercept: if the shares come from the same polynomial this is the secret.
    let mut a_0 = y1 - x1 * a_1;
    Ok(IdSecret::from(&mut a_0))
}
/// Recovers identity secret from two proof shares with the same external nullifier.
///
/// When a user violates rate limits by generating multiple proofs in the same epoch,
/// their shares can be used to recover their identity secret through polynomial interpolation.
pub fn recover_id_secret(
    rln_proof_values_1: &RLNProofValues,
    rln_proof_values_2: &RLNProofValues,
) -> Result<IdSecret, ProtocolError> {
    // Shares are only comparable when produced under the same external nullifier.
    if rln_proof_values_1.external_nullifier != rln_proof_values_2.external_nullifier {
        return Err(ProtocolError::ExternalNullifierMismatch(
            rln_proof_values_1.external_nullifier,
            rln_proof_values_2.external_nullifier,
        ));
    }
    // Interpolate the line through the two (x, y) points to recover the secret.
    compute_id_secret(
        (rln_proof_values_1.x, rln_proof_values_1.y),
        (rln_proof_values_2.x, rln_proof_values_2.y),
    )
}

354
rln/src/protocol/witness.rs Normal file
View File

@@ -0,0 +1,354 @@
use zeroize::Zeroize;
use super::proof::RLNProofValues;
use crate::{
circuit::Fr,
error::ProtocolError,
hashers::poseidon_hash,
utils::{
bytes_be_to_fr, bytes_be_to_vec_fr, bytes_be_to_vec_u8, bytes_le_to_fr, bytes_le_to_vec_fr,
bytes_le_to_vec_u8, fr_to_bytes_be, fr_to_bytes_le, to_bigint, vec_fr_to_bytes_be,
vec_fr_to_bytes_le, vec_u8_to_bytes_be, vec_u8_to_bytes_le, FrOrSecret, IdSecret,
FR_BYTE_SIZE,
},
};
/// Witness input for RLN proof generation.
///
/// Contains the identity credentials, merkle proof, rate-limiting parameters,
/// and signal binding data required to generate a Groth16 proof for the RLN protocol.
///
/// Fields are private; construct via [`RLNWitnessInput::new`], which validates
/// the message-id range and merkle-proof lengths.
#[derive(Debug, PartialEq, Clone)]
pub struct RLNWitnessInput {
    // Zeroizing wrapper around the user's identity secret
    identity_secret: IdSecret,
    // Maximum number of messages allowed per epoch
    user_message_limit: Fr,
    // Index of this message; must not exceed `user_message_limit`
    message_id: Fr,
    // Sibling hashes of the merkle membership proof
    path_elements: Vec<Fr>,
    // Left/right bits of the merkle path; same length as `path_elements`
    identity_path_index: Vec<u8>,
    // Evaluation point of the share (presumably the signal hash — confirm with callers)
    x: Fr,
    // External nullifier binding the proof to a rate-limiting epoch/context
    external_nullifier: Fr,
}
impl RLNWitnessInput {
    /// Creates a validated witness.
    ///
    /// # Errors
    ///
    /// - [`ProtocolError::InvalidMessageId`] if `message_id > user_message_limit`.
    /// - [`ProtocolError::InvalidMerkleProofLength`] if `path_elements` and
    ///   `identity_path_index` have different lengths.
    pub fn new(
        identity_secret: IdSecret,
        user_message_limit: Fr,
        message_id: Fr,
        path_elements: Vec<Fr>,
        identity_path_index: Vec<u8>,
        x: Fr,
        external_nullifier: Fr,
    ) -> Result<Self, ProtocolError> {
        // Message ID range check
        if message_id > user_message_limit {
            return Err(ProtocolError::InvalidMessageId(
                message_id,
                user_message_limit,
            ));
        }
        // Merkle proof length check: one path bit per sibling element
        let path_elements_len = path_elements.len();
        let identity_path_index_len = identity_path_index.len();
        if path_elements_len != identity_path_index_len {
            return Err(ProtocolError::InvalidMerkleProofLength(
                path_elements_len,
                identity_path_index_len,
            ));
        }
        Ok(Self {
            identity_secret,
            user_message_limit,
            message_id,
            path_elements,
            identity_path_index,
            x,
            external_nullifier,
        })
    }
    /// Borrows the identity secret.
    pub fn identity_secret(&self) -> &IdSecret {
        &self.identity_secret
    }
    /// Borrows the per-epoch message limit.
    pub fn user_message_limit(&self) -> &Fr {
        &self.user_message_limit
    }
    /// Borrows the message id.
    pub fn message_id(&self) -> &Fr {
        &self.message_id
    }
    /// Borrows the merkle proof sibling elements.
    pub fn path_elements(&self) -> &[Fr] {
        &self.path_elements
    }
    /// Borrows the merkle path left/right bits.
    pub fn identity_path_index(&self) -> &[u8] {
        &self.identity_path_index
    }
    /// Borrows the share evaluation point.
    pub fn x(&self) -> &Fr {
        &self.x
    }
    /// Borrows the external nullifier.
    pub fn external_nullifier(&self) -> &Fr {
        &self.external_nullifier
    }
}
/// Serializes an RLN witness to little-endian bytes.
///
/// Layout: `[ identity_secret | user_message_limit | message_id | path_elements | identity_path_index | x | external_nullifier ]`
pub fn rln_witness_to_bytes_le(witness: &RLNWitnessInput) -> Result<Vec<u8>, ProtocolError> {
    // Five fixed field elements plus the variable-length merkle-proof parts.
    let capacity =
        FR_BYTE_SIZE * (5 + witness.path_elements.len()) + witness.identity_path_index.len();
    let mut out: Vec<u8> = Vec::with_capacity(capacity);
    out.extend_from_slice(&witness.identity_secret.to_bytes_le());
    out.extend_from_slice(&fr_to_bytes_le(&witness.user_message_limit));
    out.extend_from_slice(&fr_to_bytes_le(&witness.message_id));
    out.extend_from_slice(&vec_fr_to_bytes_le(&witness.path_elements));
    out.extend_from_slice(&vec_u8_to_bytes_le(&witness.identity_path_index));
    out.extend_from_slice(&fr_to_bytes_le(&witness.x));
    out.extend_from_slice(&fr_to_bytes_le(&witness.external_nullifier));
    Ok(out)
}
/// Serializes an RLN witness to big-endian bytes.
///
/// Layout: `[ identity_secret | user_message_limit | message_id | path_elements | identity_path_index | x | external_nullifier ]`
pub fn rln_witness_to_bytes_be(witness: &RLNWitnessInput) -> Result<Vec<u8>, ProtocolError> {
    // Five fixed field elements plus the variable-length merkle-proof parts.
    let capacity =
        FR_BYTE_SIZE * (5 + witness.path_elements.len()) + witness.identity_path_index.len();
    let mut out: Vec<u8> = Vec::with_capacity(capacity);
    out.extend_from_slice(&witness.identity_secret.to_bytes_be());
    out.extend_from_slice(&fr_to_bytes_be(&witness.user_message_limit));
    out.extend_from_slice(&fr_to_bytes_be(&witness.message_id));
    out.extend_from_slice(&vec_fr_to_bytes_be(&witness.path_elements));
    out.extend_from_slice(&vec_u8_to_bytes_be(&witness.identity_path_index));
    out.extend_from_slice(&fr_to_bytes_be(&witness.x));
    out.extend_from_slice(&fr_to_bytes_be(&witness.external_nullifier));
    Ok(out)
}
/// Deserializes an RLN witness from little-endian bytes.
///
/// Format: `[ identity_secret<32> | user_message_limit<32> | message_id<32> | path_elements<var> | identity_path_index<var> | x<32> | external_nullifier<32> ]`
///
/// Returns the deserialized witness and the number of bytes read.
///
/// # Errors
///
/// Returns a [`ProtocolError`] if any field fails to parse, if trailing bytes
/// remain after parsing, or if the parsed values violate the
/// [`RLNWitnessInput::new`] invariants.
pub fn bytes_le_to_rln_witness(bytes: &[u8]) -> Result<(RLNWitnessInput, usize), ProtocolError> {
    let mut read: usize = 0;
    let (identity_secret, el_size) = IdSecret::from_bytes_le(&bytes[read..])?;
    read += el_size;
    let (user_message_limit, el_size) = bytes_le_to_fr(&bytes[read..])?;
    read += el_size;
    let (message_id, el_size) = bytes_le_to_fr(&bytes[read..])?;
    read += el_size;
    let (path_elements, el_size) = bytes_le_to_vec_fr(&bytes[read..])?;
    read += el_size;
    let (identity_path_index, el_size) = bytes_le_to_vec_u8(&bytes[read..])?;
    read += el_size;
    let (x, el_size) = bytes_le_to_fr(&bytes[read..])?;
    read += el_size;
    let (external_nullifier, el_size) = bytes_le_to_fr(&bytes[read..])?;
    read += el_size;
    // Strict framing: the witness must consume the entire input buffer.
    if bytes.len() != read {
        return Err(ProtocolError::InvalidReadLen(bytes.len(), read));
    }
    Ok((
        // `new` re-validates the message-id range and merkle-proof lengths.
        RLNWitnessInput::new(
            identity_secret,
            user_message_limit,
            message_id,
            path_elements,
            identity_path_index,
            x,
            external_nullifier,
        )?,
        read,
    ))
}
/// Deserializes an RLN witness from big-endian bytes.
///
/// Format: `[ identity_secret<32> | user_message_limit<32> | message_id<32> | path_elements<var> | identity_path_index<var> | x<32> | external_nullifier<32> ]`
///
/// Returns the deserialized witness and the number of bytes read.
///
/// # Errors
///
/// Returns a [`ProtocolError`] if any field fails to parse, if trailing bytes
/// remain after parsing, or if the parsed values violate the
/// [`RLNWitnessInput::new`] invariants.
pub fn bytes_be_to_rln_witness(bytes: &[u8]) -> Result<(RLNWitnessInput, usize), ProtocolError> {
    let mut read: usize = 0;
    let (identity_secret, el_size) = IdSecret::from_bytes_be(&bytes[read..])?;
    read += el_size;
    let (user_message_limit, el_size) = bytes_be_to_fr(&bytes[read..])?;
    read += el_size;
    let (message_id, el_size) = bytes_be_to_fr(&bytes[read..])?;
    read += el_size;
    let (path_elements, el_size) = bytes_be_to_vec_fr(&bytes[read..])?;
    read += el_size;
    let (identity_path_index, el_size) = bytes_be_to_vec_u8(&bytes[read..])?;
    read += el_size;
    let (x, el_size) = bytes_be_to_fr(&bytes[read..])?;
    read += el_size;
    let (external_nullifier, el_size) = bytes_be_to_fr(&bytes[read..])?;
    read += el_size;
    // Strict framing: the witness must consume the entire input buffer.
    if bytes.len() != read {
        return Err(ProtocolError::InvalidReadLen(bytes.len(), read));
    }
    Ok((
        // `new` re-validates the message-id range and merkle-proof lengths.
        RLNWitnessInput::new(
            identity_secret,
            user_message_limit,
            message_id,
            path_elements,
            identity_path_index,
            x,
            external_nullifier,
        )?,
        read,
    ))
}
/// Converts RLN witness to JSON with BigInt string representation for witness calculator.
pub fn rln_witness_to_bigint_json(
    witness: &RLNWitnessInput,
) -> Result<serde_json::Value, ProtocolError> {
    use num_bigint::BigInt;

    // All values are rendered as decimal strings, the format the witness
    // calculator expects.
    let path_elements: Vec<String> = witness
        .path_elements
        .iter()
        .map(|v| to_bigint(v).to_str_radix(10))
        .collect();
    let identity_path_index: Vec<String> = witness
        .identity_path_index
        .iter()
        .map(|v| BigInt::from(*v).to_str_radix(10))
        .collect();
    Ok(serde_json::json!({
        "identitySecret": to_bigint(&witness.identity_secret).to_str_radix(10),
        "userMessageLimit": to_bigint(&witness.user_message_limit).to_str_radix(10),
        "messageId": to_bigint(&witness.message_id).to_str_radix(10),
        "pathElements": path_elements,
        "identityPathIndex": identity_path_index,
        "x": to_bigint(&witness.x).to_str_radix(10),
        "externalNullifier": to_bigint(&witness.external_nullifier).to_str_radix(10),
    }))
}
/// Computes RLN proof values from witness input.
///
/// Calculates the public outputs (y, nullifier, root) that will be part of the proof.
pub fn proof_values_from_witness(
    witness: &RLNWitnessInput,
) -> Result<RLNProofValues, ProtocolError> {
    // y share
    // a_1 = Poseidon(identity_secret, external_nullifier, message_id) couples the
    // share to this epoch/app and message slot.
    let a_0 = &witness.identity_secret;
    let mut to_hash = [**a_0, witness.external_nullifier, witness.message_id];
    let a_1 = poseidon_hash(&to_hash);
    // Shamir-style share of the secret: y = a_0 + x * a_1, so two shares for the
    // same (external_nullifier, message_id) reveal a_0.
    let y = *(a_0.clone()) + witness.x * a_1;
    // Nullifier
    let nullifier = poseidon_hash(&[a_1]);
    // Wipe the plain-Fr copy of the identity secret held in to_hash[0].
    // NOTE(review): the temporary Fr produced by `*(a_0.clone())` above is not
    // zeroized here — presumably acceptable since it is Copy-moved into y; confirm.
    to_hash[0].zeroize();
    // Merkle tree root computations
    let root = compute_tree_root(
        &witness.identity_secret,
        &witness.user_message_limit,
        &witness.path_elements,
        &witness.identity_path_index,
    );
    Ok(RLNProofValues {
        y,
        nullifier,
        root,
        x: witness.x,
        external_nullifier: witness.external_nullifier,
    })
}
/// Computes the Merkle tree root from identity credentials and Merkle membership proof.
pub fn compute_tree_root(
    identity_secret: &IdSecret,
    user_message_limit: &Fr,
    path_elements: &[Fr],
    identity_path_index: &[u8],
) -> Fr {
    // Derive the identity commitment, then wipe the plain copy of the secret.
    let mut secret_buf = [*identity_secret.clone()];
    let id_commitment = poseidon_hash(&secret_buf);
    secret_buf[0].zeroize();

    // The leaf is the rate commitment: Poseidon(id_commitment, user_message_limit).
    let mut current = poseidon_hash(&[id_commitment, *user_message_limit]);

    // Walk up the tree; each path-index bit says which side the sibling is on.
    for (level, bit) in identity_path_index.iter().enumerate() {
        let sibling = path_elements[level];
        current = if *bit == 0 {
            poseidon_hash(&[current, sibling])
        } else {
            poseidon_hash(&[sibling, current])
        };
    }
    current
}
/// Prepares inputs for witness calculation from RLN witness input.
pub(super) fn inputs_for_witness_calculation(
    witness: &RLNWitnessInput,
) -> Result<[(&str, Vec<FrOrSecret>); 7], ProtocolError> {
    // Path-index bits lifted to field elements, then wrapped as FrOrSecret.
    let path_index_inputs: Vec<FrOrSecret> = witness
        .identity_path_index
        .iter()
        .map(|bit| Fr::from(*bit).into())
        .collect();

    // Merkle path elements wrapped as FrOrSecret.
    let path_element_inputs: Vec<FrOrSecret> = witness
        .path_elements
        .iter()
        .cloned()
        .map(Into::into)
        .collect();

    // Names must match the circuit's signal identifiers exactly.
    Ok([
        (
            "identitySecret",
            vec![witness.identity_secret.clone().into()],
        ),
        ("userMessageLimit", vec![witness.user_message_limit.into()]),
        ("messageId", vec![witness.message_id.into()]),
        ("pathElements", path_element_inputs),
        ("identityPathIndex", path_index_inputs),
        ("x", vec![witness.x.into()]),
        ("externalNullifier", vec![witness.external_nullifier.into()]),
    ])
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -1,7 +1,7 @@
// This crate provides cross-module useful utilities (mainly type conversions) not necessarily specific to RLN
use crate::circuit::Fr;
use crate::error::ConversionError;
use std::ops::Deref;
use ark_ff::PrimeField;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_std::UniformRand;
@@ -9,26 +9,29 @@ use num_bigint::{BigInt, BigUint};
use num_traits::Num;
use rand::Rng;
use ruint::aliases::U256;
use serde_json::json;
use std::io::Cursor;
use std::ops::Deref;
use zeroize::{Zeroize, ZeroizeOnDrop, Zeroizing};
use crate::{circuit::Fr, error::UtilsError};
/// Byte size of a field element aligned to 64-bit boundary, computed once at compile time.
pub const FR_BYTE_SIZE: usize = {
// Get the modulus bit size of the field
let modulus_bits: u32 = Fr::MODULUS_BIT_SIZE;
// Alignment boundary in bits for field element serialization
let alignment_bits: u32 = 64;
// Align to the next multiple of alignment_bits and convert to bytes
((modulus_bits + alignment_bits - (modulus_bits % alignment_bits)) / 8) as usize
};
#[inline(always)]
pub fn to_bigint(el: &Fr) -> BigInt {
BigUint::from(*el).into()
}
#[inline(always)]
pub const fn fr_byte_size() -> usize {
let mbs = <Fr as PrimeField>::MODULUS_BIT_SIZE;
((mbs + 64 - (mbs % 64)) / 8) as usize
}
#[inline(always)]
pub fn str_to_fr(input: &str, radix: u32) -> Result<Fr, ConversionError> {
pub fn str_to_fr(input: &str, radix: u32) -> Result<Fr, UtilsError> {
if !(radix == 10 || radix == 16) {
return Err(ConversionError::WrongRadix);
return Err(UtilsError::WrongRadix);
}
// We remove any quote present and we trim
@@ -45,21 +48,33 @@ pub fn str_to_fr(input: &str, radix: u32) -> Result<Fr, ConversionError> {
}
#[inline(always)]
pub fn bytes_le_to_fr(input: &[u8]) -> (Fr, usize) {
let el_size = fr_byte_size();
(
pub fn bytes_le_to_fr(input: &[u8]) -> Result<(Fr, usize), UtilsError> {
let el_size = FR_BYTE_SIZE;
if input.len() < el_size {
return Err(UtilsError::InsufficientData {
expected: el_size,
actual: input.len(),
});
}
Ok((
Fr::from(BigUint::from_bytes_le(&input[0..el_size])),
el_size,
)
))
}
#[inline(always)]
pub fn bytes_be_to_fr(input: &[u8]) -> (Fr, usize) {
let el_size = fr_byte_size();
(
pub fn bytes_be_to_fr(input: &[u8]) -> Result<(Fr, usize), UtilsError> {
let el_size = FR_BYTE_SIZE;
if input.len() < el_size {
return Err(UtilsError::InsufficientData {
expected: el_size,
actual: input.len(),
});
}
Ok((
Fr::from(BigUint::from_bytes_be(&input[0..el_size])),
el_size,
)
))
}
#[inline(always)]
@@ -67,7 +82,7 @@ pub fn fr_to_bytes_le(input: &Fr) -> Vec<u8> {
let input_biguint: BigUint = (*input).into();
let mut res = input_biguint.to_bytes_le();
//BigUint conversion ignores most significant zero bytes. We restore them otherwise serialization will fail (length % 8 != 0)
res.resize(fr_byte_size(), 0);
res.resize(FR_BYTE_SIZE, 0);
res
}
@@ -76,7 +91,7 @@ pub fn fr_to_bytes_be(input: &Fr) -> Vec<u8> {
let input_biguint: BigUint = (*input).into();
let mut res = input_biguint.to_bytes_be();
// For BE, insert 0 at the start of the Vec (see also fr_to_bytes_le comments)
let to_insert_count = fr_byte_size().saturating_sub(res.len());
let to_insert_count = FR_BYTE_SIZE.saturating_sub(res.len());
if to_insert_count > 0 {
// Insert multi 0 at index 0
res.splice(0..0, std::iter::repeat_n(0, to_insert_count));
@@ -88,8 +103,8 @@ pub fn fr_to_bytes_be(input: &Fr) -> Vec<u8> {
pub fn vec_fr_to_bytes_le(input: &[Fr]) -> Vec<u8> {
// Calculate capacity for Vec:
// - 8 bytes for normalized vector length (usize)
// - each Fr element requires fr_byte_size() bytes (typically 32 bytes)
let mut bytes = Vec::with_capacity(8 + input.len() * fr_byte_size());
// - each Fr element requires FR_BYTE_SIZE bytes (typically 32 bytes)
let mut bytes = Vec::with_capacity(8 + input.len() * FR_BYTE_SIZE);
// We store the vector length
bytes.extend_from_slice(&normalize_usize_le(input.len()));
@@ -106,8 +121,8 @@ pub fn vec_fr_to_bytes_le(input: &[Fr]) -> Vec<u8> {
pub fn vec_fr_to_bytes_be(input: &[Fr]) -> Vec<u8> {
// Calculate capacity for Vec:
// - 8 bytes for normalized vector length (usize)
// - each Fr element requires fr_byte_size() bytes (typically 32 bytes)
let mut bytes = Vec::with_capacity(8 + input.len() * fr_byte_size());
// - each Fr element requires FR_BYTE_SIZE bytes (typically 32 bytes)
let mut bytes = Vec::with_capacity(8 + input.len() * FR_BYTE_SIZE);
// We store the vector length
bytes.extend_from_slice(&normalize_usize_be(input.len()));
@@ -153,10 +168,10 @@ pub fn vec_u8_to_bytes_be(input: &[u8]) -> Vec<u8> {
}
#[inline(always)]
pub fn bytes_le_to_vec_u8(input: &[u8]) -> Result<(Vec<u8>, usize), ConversionError> {
pub fn bytes_le_to_vec_u8(input: &[u8]) -> Result<(Vec<u8>, usize), UtilsError> {
let mut read: usize = 0;
if input.len() < 8 {
return Err(ConversionError::InsufficientData {
return Err(UtilsError::InsufficientData {
expected: 8,
actual: input.len(),
});
@@ -164,7 +179,7 @@ pub fn bytes_le_to_vec_u8(input: &[u8]) -> Result<(Vec<u8>, usize), ConversionEr
let len = usize::try_from(u64::from_le_bytes(input[0..8].try_into()?))?;
read += 8;
if input.len() < 8 + len {
return Err(ConversionError::InsufficientData {
return Err(UtilsError::InsufficientData {
expected: 8 + len,
actual: input.len(),
});
@@ -175,10 +190,10 @@ pub fn bytes_le_to_vec_u8(input: &[u8]) -> Result<(Vec<u8>, usize), ConversionEr
}
#[inline(always)]
pub fn bytes_be_to_vec_u8(input: &[u8]) -> Result<(Vec<u8>, usize), ConversionError> {
pub fn bytes_be_to_vec_u8(input: &[u8]) -> Result<(Vec<u8>, usize), UtilsError> {
let mut read: usize = 0;
if input.len() < 8 {
return Err(ConversionError::InsufficientData {
return Err(UtilsError::InsufficientData {
expected: 8,
actual: input.len(),
});
@@ -186,7 +201,7 @@ pub fn bytes_be_to_vec_u8(input: &[u8]) -> Result<(Vec<u8>, usize), ConversionEr
let len = usize::try_from(u64::from_be_bytes(input[0..8].try_into()?))?;
read += 8;
if input.len() < 8 + len {
return Err(ConversionError::InsufficientData {
return Err(UtilsError::InsufficientData {
expected: 8 + len,
actual: input.len(),
});
@@ -197,26 +212,26 @@ pub fn bytes_be_to_vec_u8(input: &[u8]) -> Result<(Vec<u8>, usize), ConversionEr
}
#[inline(always)]
pub fn bytes_le_to_vec_fr(input: &[u8]) -> Result<(Vec<Fr>, usize), ConversionError> {
pub fn bytes_le_to_vec_fr(input: &[u8]) -> Result<(Vec<Fr>, usize), UtilsError> {
let mut read: usize = 0;
if input.len() < 8 {
return Err(ConversionError::InsufficientData {
return Err(UtilsError::InsufficientData {
expected: 8,
actual: input.len(),
});
}
let len = usize::try_from(u64::from_le_bytes(input[0..8].try_into()?))?;
read += 8;
let el_size = fr_byte_size();
let el_size = FR_BYTE_SIZE;
if input.len() < 8 + len * el_size {
return Err(ConversionError::InsufficientData {
return Err(UtilsError::InsufficientData {
expected: 8 + len * el_size,
actual: input.len(),
});
}
let mut res: Vec<Fr> = Vec::with_capacity(len);
for i in 0..len {
let (curr_el, _) = bytes_le_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)]);
let (curr_el, _) = bytes_le_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)])?;
res.push(curr_el);
read += el_size;
}
@@ -224,26 +239,26 @@ pub fn bytes_le_to_vec_fr(input: &[u8]) -> Result<(Vec<Fr>, usize), ConversionEr
}
#[inline(always)]
pub fn bytes_be_to_vec_fr(input: &[u8]) -> Result<(Vec<Fr>, usize), ConversionError> {
pub fn bytes_be_to_vec_fr(input: &[u8]) -> Result<(Vec<Fr>, usize), UtilsError> {
let mut read: usize = 0;
if input.len() < 8 {
return Err(ConversionError::InsufficientData {
return Err(UtilsError::InsufficientData {
expected: 8,
actual: input.len(),
});
}
let len = usize::try_from(u64::from_be_bytes(input[0..8].try_into()?))?;
read += 8;
let el_size = fr_byte_size();
let el_size = FR_BYTE_SIZE;
if input.len() < 8 + len * el_size {
return Err(ConversionError::InsufficientData {
return Err(UtilsError::InsufficientData {
expected: 8 + len * el_size,
actual: input.len(),
});
}
let mut res: Vec<Fr> = Vec::with_capacity(len);
for i in 0..len {
let (curr_el, _) = bytes_be_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)]);
let (curr_el, _) = bytes_be_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)])?;
res.push(curr_el);
read += el_size;
}
@@ -251,9 +266,9 @@ pub fn bytes_be_to_vec_fr(input: &[u8]) -> Result<(Vec<Fr>, usize), ConversionEr
}
#[inline(always)]
pub fn bytes_le_to_vec_usize(input: &[u8]) -> Result<Vec<usize>, ConversionError> {
pub fn bytes_le_to_vec_usize(input: &[u8]) -> Result<Vec<usize>, UtilsError> {
if input.len() < 8 {
return Err(ConversionError::InsufficientData {
return Err(UtilsError::InsufficientData {
expected: 8,
actual: input.len(),
});
@@ -263,7 +278,7 @@ pub fn bytes_le_to_vec_usize(input: &[u8]) -> Result<Vec<usize>, ConversionError
Ok(vec![])
} else {
if input.len() < 8 + nof_elem * 8 {
return Err(ConversionError::InsufficientData {
return Err(UtilsError::InsufficientData {
expected: 8 + nof_elem * 8,
actual: input.len(),
});
@@ -278,9 +293,9 @@ pub fn bytes_le_to_vec_usize(input: &[u8]) -> Result<Vec<usize>, ConversionError
}
#[inline(always)]
pub fn bytes_be_to_vec_usize(input: &[u8]) -> Result<Vec<usize>, ConversionError> {
pub fn bytes_be_to_vec_usize(input: &[u8]) -> Result<Vec<usize>, UtilsError> {
if input.len() < 8 {
return Err(ConversionError::InsufficientData {
return Err(UtilsError::InsufficientData {
expected: 8,
actual: input.len(),
});
@@ -290,7 +305,7 @@ pub fn bytes_be_to_vec_usize(input: &[u8]) -> Result<Vec<usize>, ConversionError
Ok(vec![])
} else {
if input.len() < 8 + nof_elem * 8 {
return Err(ConversionError::InsufficientData {
return Err(UtilsError::InsufficientData {
expected: 8 + nof_elem * 8,
actual: input.len(),
});
@@ -327,11 +342,6 @@ pub fn normalize_usize_be(input: usize) -> [u8; 8] {
bytes
}
#[inline(always)] // using for test
pub fn generate_input_buffer() -> Cursor<String> {
Cursor::new(json!({}).to_string())
}
#[derive(
Debug, Zeroize, ZeroizeOnDrop, Clone, PartialEq, CanonicalSerialize, CanonicalDeserialize,
)]
@@ -346,26 +356,47 @@ impl IdSecret {
res
}
pub fn from_bytes_le(input: &[u8]) -> (Self, usize) {
let el_size = fr_byte_size();
pub fn from_bytes_le(input: &[u8]) -> Result<(Self, usize), UtilsError> {
let el_size = FR_BYTE_SIZE;
if input.len() < el_size {
return Err(UtilsError::InsufficientData {
expected: el_size,
actual: input.len(),
});
}
let b_uint = BigUint::from_bytes_le(&input[0..el_size]);
let mut fr = Fr::from(b_uint);
let res = IdSecret::from(&mut fr);
// Note: no zeroize on b_uint as it has been moved
(res, el_size)
Ok((res, el_size))
}
pub fn from_bytes_be(input: &[u8]) -> Result<(Self, usize), UtilsError> {
let el_size = FR_BYTE_SIZE;
if input.len() < el_size {
return Err(UtilsError::InsufficientData {
expected: el_size,
actual: input.len(),
});
}
let b_uint = BigUint::from_bytes_be(&input[0..el_size]);
let mut fr = Fr::from(b_uint);
let res = IdSecret::from(&mut fr);
// Note: no zeroize on b_uint as it has been moved
Ok((res, el_size))
}
pub(crate) fn to_bytes_le(&self) -> Zeroizing<Vec<u8>> {
let input_biguint: BigUint = self.0.into();
let mut res = input_biguint.to_bytes_le();
res.resize(fr_byte_size(), 0);
res.resize(FR_BYTE_SIZE, 0);
Zeroizing::new(res)
}
pub(crate) fn to_bytes_be(&self) -> Zeroizing<Vec<u8>> {
let input_biguint: BigUint = self.0.into();
let mut res = input_biguint.to_bytes_be();
let to_insert_count = fr_byte_size().saturating_sub(res.len());
let to_insert_count = FR_BYTE_SIZE.saturating_sub(res.len());
if to_insert_count > 0 {
// Insert multi 0 at index 0
res.splice(0..0, std::iter::repeat_n(0, to_insert_count));
@@ -404,7 +435,7 @@ impl Deref for IdSecret {
}
#[derive(Debug, Zeroize, ZeroizeOnDrop)]
pub enum FrOrSecret {
pub(crate) enum FrOrSecret {
IdSecret(IdSecret),
Fr(Fr),
}

View File

@@ -1,25 +1,21 @@
#[cfg(test)]
#[cfg(not(feature = "stateless"))]
mod test {
use std::{fs::File, io::Read};
use ark_std::{rand::thread_rng, UniformRand};
use rand::Rng;
use rln::circuit::{Fr, TEST_TREE_DEPTH};
use rln::ffi::{ffi_rln::*, ffi_tree::*, ffi_utils::*};
use rln::hashers::{hash_to_field_le, poseidon_hash as utils_poseidon_hash};
use rln::protocol::*;
use rln::utils::*;
use rln::{
ffi::{ffi_rln::*, ffi_tree::*, ffi_utils::*},
prelude::*,
};
use safer_ffi::prelude::repr_c;
use serde_json::json;
use std::fs::File;
use std::io::Read;
use zeroize::Zeroize;
const NO_OF_LEAVES: usize = 256;
fn create_rln_instance() -> repr_c::Box<FFI_RLN> {
let input_config = json!({}).to_string();
let c_str = std::ffi::CString::new(input_config).unwrap();
match ffi_rln_new(TEST_TREE_DEPTH, c_str.as_c_str().into()) {
match ffi_rln_new(DEFAULT_TREE_DEPTH, c"".into()) {
CResult {
ok: Some(rln),
err: None,
@@ -68,15 +64,42 @@ mod test {
external_nullifier: &CFr,
leaf_index: usize,
) -> repr_c::Box<FFI_RLNProof> {
match ffi_generate_rln_proof(
ffi_rln_instance,
// Get merkle proof for the leaf index
let merkle_proof = match ffi_get_merkle_proof(ffi_rln_instance, leaf_index) {
CResult {
ok: Some(proof),
err: None,
} => proof,
CResult {
ok: None,
err: Some(err),
} => panic!("get merkle proof call failed: {}", err),
_ => unreachable!(),
};
// Create witness input
let witness = match ffi_rln_witness_input_new(
identity_secret,
user_message_limit,
message_id,
&merkle_proof.path_elements,
&merkle_proof.path_index,
x,
external_nullifier,
leaf_index,
) {
CResult {
ok: Some(witness),
err: None,
} => witness,
CResult {
ok: None,
err: Some(err),
} => panic!("witness creation call failed: {}", err),
_ => unreachable!(),
};
// Generate proof from witness
let proof = match ffi_generate_rln_proof(ffi_rln_instance, &witness) {
CResult {
ok: Some(proof),
err: None,
@@ -86,7 +109,9 @@ mod test {
err: Some(err),
} => panic!("generate rln proof call failed: {}", err),
_ => unreachable!(),
}
};
proof
}
#[test]
@@ -100,7 +125,7 @@ mod test {
// We first add leaves one by one specifying the index
for (i, leaf) in leaves.iter().enumerate() {
// We prepare the rate_commitment and we set the leaf at provided index
let result = ffi_set_leaf(&mut ffi_rln_instance, i, &CFr::from(*leaf).into());
let result = ffi_set_leaf(&mut ffi_rln_instance, i, &CFr::from(*leaf));
if !result.ok {
panic!("set leaf call failed: {:?}", result.err);
}
@@ -110,14 +135,14 @@ mod test {
let root_single = get_tree_root(&ffi_rln_instance);
// We reset the tree to default
let result = ffi_set_tree(&mut ffi_rln_instance, TEST_TREE_DEPTH);
let result = ffi_set_tree(&mut ffi_rln_instance, DEFAULT_TREE_DEPTH);
if !result.ok {
panic!("set tree call failed: {:?}", result.err);
}
// We add leaves one by one using the internal index (new leaves goes in next available position)
for leaf in &leaves {
let result = ffi_set_next_leaf(&mut ffi_rln_instance, &CFr::from(*leaf).into());
let result = ffi_set_next_leaf(&mut ffi_rln_instance, &CFr::from(*leaf));
if !result.ok {
panic!("set next leaf call failed: {:?}", result.err);
}
@@ -130,7 +155,7 @@ mod test {
assert_eq!(root_single, root_next);
// We reset the tree to default
let result = ffi_set_tree(&mut ffi_rln_instance, TEST_TREE_DEPTH);
let result = ffi_set_tree(&mut ffi_rln_instance, DEFAULT_TREE_DEPTH);
if !result.ok {
panic!("set tree call failed: {:?}", result.err);
}
@@ -157,7 +182,7 @@ mod test {
let root_delete = get_tree_root(&ffi_rln_instance);
// We reset the tree to default
let result = ffi_set_tree(&mut ffi_rln_instance, TEST_TREE_DEPTH);
let result = ffi_set_tree(&mut ffi_rln_instance, DEFAULT_TREE_DEPTH);
if !result.ok {
panic!("set tree call failed: {:?}", result.err);
}
@@ -170,7 +195,6 @@ mod test {
}
#[test]
// This test is similar to the one in public.rs but it uses the RLN object as a pointer
// Uses `set_leaves_from` to set leaves in a batch
fn test_leaf_setting_with_index_ffi() {
// We create a RLN instance
@@ -216,14 +240,14 @@ mod test {
);
// We reset the tree to default
let result = ffi_set_tree(&mut ffi_rln_instance, TEST_TREE_DEPTH);
let result = ffi_set_tree(&mut ffi_rln_instance, DEFAULT_TREE_DEPTH);
if !result.ok {
panic!("set tree call failed: {:?}", result.err);
}
// We add leaves one by one using the internal index (new leaves goes in next available position)
for leaf in &leaves {
let result = ffi_set_next_leaf(&mut ffi_rln_instance, &CFr::from(*leaf).into());
let result = ffi_set_next_leaf(&mut ffi_rln_instance, &CFr::from(*leaf));
if !result.ok {
panic!("set next leaf call failed: {:?}", result.err);
}
@@ -238,7 +262,6 @@ mod test {
}
#[test]
// This test is similar to the one in public.rs but it uses the RLN object as a pointer
fn test_atomic_operation_ffi() {
// We generate a vector of random leaves
let leaves = get_random_leaves();
@@ -272,7 +295,6 @@ mod test {
}
#[test]
// This test is similar to the one in public.rs but it uses the RLN object as a pointer
fn test_set_leaves_bad_index_ffi() {
// We generate a vector of random leaves
let leaves = get_random_leaves();
@@ -280,7 +302,7 @@ mod test {
let mut ffi_rln_instance = create_rln_instance();
let mut rng = thread_rng();
let bad_index = (1 << TEST_TREE_DEPTH) - rng.gen_range(0..NO_OF_LEAVES) as usize;
let bad_index = (1 << DEFAULT_TREE_DEPTH) - rng.gen_range(0..NO_OF_LEAVES) as usize;
// Get root of empty tree
let root_empty = get_tree_root(&ffi_rln_instance);
@@ -306,19 +328,19 @@ mod test {
let mut ffi_rln_instance = create_rln_instance();
// generate identity
let mut identity_secret_hash_ = hash_to_field_le(b"test-merkle-proof");
let identity_secret_hash = IdSecret::from(&mut identity_secret_hash_);
let mut to_hash = [*identity_secret_hash.clone()];
let id_commitment = utils_poseidon_hash(&to_hash);
let mut identity_secret_ = hash_to_field_le(b"test-merkle-proof");
let identity_secret = IdSecret::from(&mut identity_secret_);
let mut to_hash = [*identity_secret.clone()];
let id_commitment = poseidon_hash(&to_hash);
to_hash[0].zeroize();
let user_message_limit = Fr::from(100);
let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit]);
let rate_commitment = poseidon_hash(&[id_commitment, user_message_limit]);
// We prepare id_commitment and we set the leaf at provided index
let result = ffi_set_leaf(
&mut ffi_rln_instance,
leaf_index,
&CFr::from(rate_commitment).into(),
&CFr::from(rate_commitment),
);
if !result.ok {
panic!("set leaf call failed: {:?}", result.err);
@@ -340,7 +362,7 @@ mod test {
);
// We obtain the Merkle proof
let proof = match ffi_get_proof(&ffi_rln_instance, leaf_index) {
let proof = match ffi_get_merkle_proof(&ffi_rln_instance, leaf_index) {
CResult {
ok: Some(proof),
err: None,
@@ -389,7 +411,7 @@ mod test {
// We double check that the proof computed from public API is correct
let root_from_proof = compute_tree_root(
&identity_secret_hash,
&identity_secret,
&user_message_limit,
&path_elements,
&identity_path_index,
@@ -410,9 +432,9 @@ mod test {
let zkey_path = "./resources/tree_depth_20/rln_final.arkzkey";
let mut zkey_file = File::open(zkey_path).expect("no file found");
let metadata = std::fs::metadata(zkey_path).expect("unable to read metadata");
let mut zkey_buffer = vec![0; metadata.len() as usize];
let mut zkey_data = vec![0; metadata.len() as usize];
zkey_file
.read_exact(&mut zkey_buffer)
.read_exact(&mut zkey_data)
.expect("buffer overflow");
let graph_data = "./resources/tree_depth_20/graph.bin";
@@ -427,8 +449,8 @@ mod test {
let tree_config = "".to_string();
let c_str = std::ffi::CString::new(tree_config).unwrap();
let ffi_rln_instance2 = match ffi_rln_new_with_params(
TEST_TREE_DEPTH,
&zkey_buffer.into(),
DEFAULT_TREE_DEPTH,
&zkey_data.into(),
&graph_buffer.into(),
c_str.as_c_str().into(),
) {
@@ -457,7 +479,7 @@ mod test {
// We generate a vector of random leaves
let mut rng = thread_rng();
let leaves: Vec<Fr> = (0..NO_OF_LEAVES)
.map(|_| utils_poseidon_hash(&[Fr::rand(&mut rng), Fr::from(100)]))
.map(|_| poseidon_hash(&[Fr::rand(&mut rng), Fr::from(100)]))
.collect();
// We create a RLN instance
@@ -467,7 +489,7 @@ mod test {
set_leaves_init(&mut ffi_rln_instance, &leaves);
// We generate a new identity pair
let (identity_secret_hash, id_commitment) = identity_pair_gen();
let (identity_secret, id_commitment) = identity_pair_gen();
let identity_index: usize = NO_OF_LEAVES;
// We generate a random signal
@@ -479,38 +501,24 @@ mod test {
// We generate a random rln_identifier
let rln_identifier = hash_to_field_le(b"test-rln-identifier");
// We generate a external nullifier
let external_nullifier = utils_poseidon_hash(&[epoch, rln_identifier]);
let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);
// We choose a message_id satisfy 0 <= message_id < MESSAGE_LIMIT
let message_id = Fr::from(1);
let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit]);
let rate_commitment = poseidon_hash(&[id_commitment, user_message_limit]);
// We set as leaf rate_commitment, its index would be equal to no_of_leaves
let result = ffi_set_next_leaf(&mut ffi_rln_instance, &CFr::from(rate_commitment).into());
let result = ffi_set_next_leaf(&mut ffi_rln_instance, &CFr::from(rate_commitment));
if !result.ok {
panic!("set next leaf call failed: {:?}", result.err);
}
// Get the merkle proof for the identity
let _merkle_proof = match ffi_get_proof(&ffi_rln_instance, identity_index) {
CResult {
ok: Some(proof),
err: None,
} => proof,
CResult {
ok: None,
err: Some(err),
} => panic!("get merkle proof call failed: {}", err),
_ => unreachable!(),
};
// Hash the signal to get x
let x = hash_to_field_le(&signal);
// path_elements and identity_path_index are not needed in non-stateless mode
let rln_proof = rln_proof_gen(
&ffi_rln_instance,
&CFr::from(*identity_secret_hash),
&CFr::from(*identity_secret),
&CFr::from(user_message_limit),
&CFr::from(message_id),
&CFr::from(x),
@@ -535,8 +543,8 @@ mod test {
set_leaves_init(&mut ffi_rln_instance, &leaves);
// We generate a new identity pair
let (identity_secret_hash, id_commitment) = identity_pair_gen();
let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit]);
let (identity_secret, id_commitment) = identity_pair_gen();
let rate_commitment = poseidon_hash(&[id_commitment, user_message_limit]);
let identity_index: usize = NO_OF_LEAVES;
// We generate a random signal
@@ -548,38 +556,22 @@ mod test {
// We generate a random rln_identifier
let rln_identifier = hash_to_field_le(b"test-rln-identifier");
// We generate a external nullifier
let external_nullifier = utils_poseidon_hash(&[epoch, rln_identifier]);
let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);
// We choose a message_id satisfy 0 <= message_id < MESSAGE_LIMIT
let message_id = Fr::from(1);
// We set as leaf rate_commitment, its index would be equal to no_of_leaves
let result = ffi_set_next_leaf(&mut ffi_rln_instance, &CFr::from(rate_commitment).into());
let result = ffi_set_next_leaf(&mut ffi_rln_instance, &CFr::from(rate_commitment));
if !result.ok {
panic!("set next leaf call failed: {:?}", result.err);
}
// Get the merkle proof for the identity
let _merkle_proof = match ffi_get_proof(&ffi_rln_instance, identity_index) {
CResult {
ok: Some(proof),
err: None,
} => proof,
CResult {
ok: None,
err: Some(err),
} => panic!("get merkle proof call failed: {}", err),
_ => unreachable!(),
};
// Hash the signal to get x
let x = hash_to_field_le(&signal);
// path_elements and identity_path_index are not needed in non-stateless mode
// witness input is now passed directly as parameters
let rln_proof = rln_proof_gen(
&ffi_rln_instance,
&CFr::from(*identity_secret_hash),
&CFr::from(*identity_secret),
&CFr::from(user_message_limit),
&CFr::from(message_id),
&CFr::from(x),
@@ -644,13 +636,13 @@ mod test {
let mut ffi_rln_instance = create_rln_instance();
// We generate a new identity pair
let (identity_secret_hash, id_commitment) = identity_pair_gen();
let (identity_secret, id_commitment) = identity_pair_gen();
let user_message_limit = Fr::from(100);
let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit]);
let rate_commitment = poseidon_hash(&[id_commitment, user_message_limit]);
// We set as leaf rate_commitment, its index would be equal to 0 since tree is empty
let result = ffi_set_next_leaf(&mut ffi_rln_instance, &CFr::from(rate_commitment).into());
let result = ffi_set_next_leaf(&mut ffi_rln_instance, &CFr::from(rate_commitment));
if !result.ok {
panic!("set next leaf call failed: {:?}", result.err);
}
@@ -669,34 +661,19 @@ mod test {
// We generate a random rln_identifier
let rln_identifier = hash_to_field_le(b"test-rln-identifier");
// We generate a external nullifier
let external_nullifier = utils_poseidon_hash(&[epoch, rln_identifier]);
let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);
// We choose a message_id satisfy 0 <= message_id < MESSAGE_LIMIT
let message_id = Fr::from(1);
// Get the merkle proof for the identity
let _merkle_proof = match ffi_get_proof(&ffi_rln_instance, identity_index) {
CResult {
ok: Some(proof),
err: None,
} => proof,
CResult {
ok: None,
err: Some(err),
} => panic!("get merkle proof call failed: {}", err),
_ => unreachable!(),
};
// Hash the signals to get x
let x1 = hash_to_field_le(&signal1);
let x2 = hash_to_field_le(&signal2);
// path_elements and identity_path_index are not needed in non-stateless mode
// witness input is now passed directly as parameters
// Generate proofs using witness-based API
// We call generate_rln_proof for first proof values
let rln_proof1 = rln_proof_gen(
&ffi_rln_instance,
&CFr::from(*identity_secret_hash.clone()),
&CFr::from(*identity_secret.clone()),
&CFr::from(user_message_limit),
&CFr::from(message_id),
&CFr::from(x1),
@@ -707,7 +684,7 @@ mod test {
// We call generate_rln_proof for second proof values
let rln_proof2 = rln_proof_gen(
&ffi_rln_instance,
&CFr::from(*identity_secret_hash.clone()),
&CFr::from(*identity_secret.clone()),
&CFr::from(user_message_limit),
&CFr::from(message_id),
&CFr::from(x2),
@@ -715,7 +692,10 @@ mod test {
identity_index,
);
let recovered_id_secret_cfr = match ffi_recover_id_secret(&rln_proof1, &rln_proof2) {
let recovered_id_secret_cfr = match ffi_recover_id_secret(
&ffi_rln_proof_get_values(&rln_proof1),
&ffi_rln_proof_get_values(&rln_proof2),
) {
CResult {
ok: Some(secret),
err: None,
@@ -727,21 +707,18 @@ mod test {
_ => unreachable!(),
};
// We check if the recovered identity secret hash corresponds to the original one
let recovered_identity_secret_hash = *recovered_id_secret_cfr;
assert_eq!(recovered_identity_secret_hash, *identity_secret_hash);
// We check if the recovered identity secret corresponds to the original one
let recovered_identity_secret = *recovered_id_secret_cfr;
assert_eq!(recovered_identity_secret, *identity_secret);
// We now test that computing identity_secret_hash is unsuccessful if shares computed from two different identity secret hashes but within same epoch are passed
// We now test that computing identity_secret is unsuccessful if shares computed from two different identity secret but within same epoch are passed
// We generate a new identity pair
let (identity_secret_hash_new, id_commitment_new) = identity_pair_gen();
let rate_commitment_new = utils_poseidon_hash(&[id_commitment_new, user_message_limit]);
let (identity_secret_new, id_commitment_new) = identity_pair_gen();
let rate_commitment_new = poseidon_hash(&[id_commitment_new, user_message_limit]);
// We set as leaf id_commitment, its index would be equal to 1 since at 0 there is id_commitment
let result = ffi_set_next_leaf(
&mut ffi_rln_instance,
&CFr::from(rate_commitment_new).into(),
);
let result = ffi_set_next_leaf(&mut ffi_rln_instance, &CFr::from(rate_commitment_new));
if !result.ok {
panic!("set next leaf call failed: {:?}", result.err);
}
@@ -752,26 +729,9 @@ mod test {
let signal3: [u8; 32] = rng.gen();
let x3 = hash_to_field_le(&signal3);
// Get the merkle proof for the new identity
let _merkle_proof_new = match ffi_get_proof(&ffi_rln_instance, identity_index_new) {
CResult {
ok: Some(proof),
err: None,
} => proof,
CResult {
ok: None,
err: Some(err),
} => panic!("get merkle proof call failed: {}", err),
_ => unreachable!(),
};
// path_elements_new and identity_path_index_new are not needed in non-stateless mode
// witness input is now passed directly as parameters
// We call generate_rln_proof
let rln_proof3 = rln_proof_gen(
&ffi_rln_instance,
&CFr::from(*identity_secret_hash_new.clone()),
&CFr::from(*identity_secret_new.clone()),
&CFr::from(user_message_limit),
&CFr::from(message_id),
&CFr::from(x3),
@@ -779,9 +739,12 @@ mod test {
identity_index_new,
);
// We attempt to recover the secret using share1 (coming from identity_secret_hash) and share3 (coming from identity_secret_hash_new)
// We attempt to recover the secret using share1 (coming from identity_secret) and share3 (coming from identity_secret_new)
let recovered_id_secret_new_cfr = match ffi_recover_id_secret(&rln_proof1, &rln_proof3) {
let recovered_id_secret_new_cfr = match ffi_recover_id_secret(
&ffi_rln_proof_get_values(&rln_proof1),
&ffi_rln_proof_get_values(&rln_proof3),
) {
CResult {
ok: Some(secret),
err: None,
@@ -793,20 +756,17 @@ mod test {
_ => unreachable!(),
};
let recovered_identity_secret_hash_new = recovered_id_secret_new_cfr;
let recovered_identity_secret_new = recovered_id_secret_new_cfr;
// ensure that the recovered secret does not match with either of the
// used secrets in proof generation
assert_ne!(
*recovered_identity_secret_hash_new,
*identity_secret_hash_new
);
assert_ne!(*recovered_identity_secret_new, *identity_secret_new);
}
#[test]
fn test_get_leaf_ffi() {
// We create a RLN instance
let no_of_leaves = 1 << TEST_TREE_DEPTH;
let no_of_leaves = 1 << DEFAULT_TREE_DEPTH;
// We create a RLN instance
let mut ffi_rln_instance = create_rln_instance();
@@ -820,11 +780,7 @@ mod test {
// We insert the id_commitment into the tree at a random index
let mut rng = thread_rng();
let index = rng.gen_range(0..no_of_leaves) as usize;
let result = ffi_set_leaf(
&mut ffi_rln_instance,
index,
&CFr::from(id_commitment).into(),
);
let result = ffi_set_leaf(&mut ffi_rln_instance, index, &CFr::from(id_commitment));
if !result.ok {
panic!("set leaf call failed: {:?}", result.err);
}

View File

@@ -1,319 +0,0 @@
#[cfg(test)]
#[cfg(feature = "stateless")]
mod test {
use ark_std::{rand::thread_rng, UniformRand};
use rand::Rng;
use rln::circuit::{Fr, TEST_TREE_DEPTH};
use rln::ffi::{ffi_rln::*, ffi_utils::*};
use rln::hashers::{hash_to_field_le, poseidon_hash as utils_poseidon_hash, PoseidonHash};
use rln::utils::*;
use safer_ffi::prelude::repr_c;
use utils::{OptimalMerkleTree, ZerokitMerkleProof, ZerokitMerkleTree};
type ConfigOf<T> = <T as ZerokitMerkleTree>::Config;
/// Builds a fresh FFI RLN handle for the tests, aborting the test run
/// (via `panic!`) if construction reports an error.
fn create_rln_instance() -> repr_c::Box<FFI_RLN> {
    let CResult { ok, err } = ffi_rln_new();
    match (ok, err) {
        (Some(instance), None) => instance,
        (None, Some(message)) => panic!("RLN object creation failed: {}", message),
        // ffi_rln_new never returns both-Some or both-None.
        _ => unreachable!(),
    }
}
/// Generates a fresh identity pair through the FFI keygen.
///
/// Index 0 of the keygen output is the identity secret (wrapped in the
/// `IdSecret` newtype), index 1 is the identity commitment.
fn identity_pair_gen() -> (IdSecret, Fr) {
    let key_gen = ffi_key_gen();
    let mut secret_fr = *key_gen[0];
    let commitment = *key_gen[1];
    let secret = IdSecret::from(&mut secret_fr);
    (secret, commitment)
}
// ...existing code...
#[test]
// Slashing test over the stateless FFI API: two proofs produced by the same
// identity secret under the same external nullifier must allow the secret to
// be recovered, while proofs from two *different* secrets must not.
fn test_recover_id_secret_stateless_ffi() {
    let default_leaf = Fr::from(0);
    // In stateless mode the caller maintains the Merkle tree locally and
    // passes the merkle path to the proof-generation call explicitly.
    let mut tree: OptimalMerkleTree<PoseidonHash> = OptimalMerkleTree::new(
        TEST_TREE_DEPTH,
        default_leaf,
        ConfigOf::<OptimalMerkleTree<PoseidonHash>>::default(),
    )
    .unwrap();
    let ffi_rln_instance = create_rln_instance();
    // We generate a new identity pair
    let (identity_secret_hash, id_commitment) = identity_pair_gen();
    let user_message_limit = Fr::from(100);
    // Leaves hold rate commitments: Poseidon(id_commitment, user_message_limit).
    let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit]);
    tree.update_next(rate_commitment).unwrap();
    // We generate a random epoch and derive the external nullifier from it
    let epoch = hash_to_field_le(b"test-epoch");
    let rln_identifier = hash_to_field_le(b"test-rln-identifier");
    let external_nullifier = utils_poseidon_hash(&[epoch, rln_identifier]);
    // We generate two proofs using same epoch but different signals.
    // We generate a random signal
    let mut rng = thread_rng();
    let signal1: [u8; 32] = rng.gen();
    let x1 = hash_to_field_le(&signal1);
    let signal2: [u8; 32] = rng.gen();
    let x2 = hash_to_field_le(&signal2);
    let identity_index = tree.leaves_set();
    let merkle_proof = tree.proof(identity_index).expect("proof should exist");
    // Convert the merkle path into the FFI-safe repr_c representation.
    let path_elements: repr_c::Vec<CFr> = merkle_proof
        .get_path_elements()
        .iter()
        .map(|fr| CFr::from(*fr))
        .collect::<Vec<_>>()
        .into();
    let identity_path_index: repr_c::Vec<u8> = merkle_proof.get_path_index().to_vec().into();
    // We call generate_rln_proof for first proof values.
    // NOTE(review): message_id is fixed to Fr::from(1) for both proofs —
    // presumably reusing the same message slot is what makes the two shares
    // recoverable; confirm against the RLN spec.
    let rln_proof1 = match ffi_generate_rln_proof_stateless(
        &ffi_rln_instance,
        &CFr::from(*identity_secret_hash.clone()),
        &CFr::from(user_message_limit),
        &CFr::from(Fr::from(1)),
        &path_elements,
        &identity_path_index,
        &CFr::from(x1),
        &CFr::from(external_nullifier),
    ) {
        CResult {
            ok: Some(proof),
            err: None,
        } => proof,
        CResult {
            ok: None,
            err: Some(err),
        } => panic!("generate rln proof with witness call failed: {}", err),
        _ => unreachable!(),
    };
    // We call generate_rln_proof for second proof values (same identity and
    // nullifier, different signal x2).
    let rln_proof2 = match ffi_generate_rln_proof_stateless(
        &ffi_rln_instance,
        &CFr::from(*identity_secret_hash.clone()),
        &CFr::from(user_message_limit),
        &CFr::from(Fr::from(1)),
        &path_elements,
        &identity_path_index,
        &CFr::from(x2),
        &CFr::from(external_nullifier),
    ) {
        CResult {
            ok: Some(proof),
            err: None,
        } => proof,
        CResult {
            ok: None,
            err: Some(err),
        } => panic!("generate rln proof with witness call failed: {}", err),
        _ => unreachable!(),
    };
    // Two shares from the same secret under the same nullifier: recovery
    // must succeed and yield the original secret.
    let recovered_id_secret_cfr = match ffi_recover_id_secret(&rln_proof1, &rln_proof2) {
        CResult {
            ok: Some(secret),
            err: None,
        } => secret,
        CResult {
            ok: None,
            err: Some(err),
        } => panic!("recover id secret call failed: {}", err),
        _ => unreachable!(),
    };
    // We check if the recovered identity secret hash corresponds to the original one
    let recovered_identity_secret_hash = *recovered_id_secret_cfr;
    assert_eq!(recovered_identity_secret_hash, *identity_secret_hash);
    // We now test that computing identity_secret_hash is unsuccessful if shares computed from two different identity secret hashes but within same epoch are passed
    // We generate a new identity pair
    let (identity_secret_hash_new, id_commitment_new) = identity_pair_gen();
    let rate_commitment_new = utils_poseidon_hash(&[id_commitment_new, user_message_limit]);
    tree.update_next(rate_commitment_new).unwrap();
    // We generate a random signal
    let signal3: [u8; 32] = rng.gen();
    let x3 = hash_to_field_le(&signal3);
    let identity_index_new = tree.leaves_set();
    let merkle_proof_new = tree.proof(identity_index_new).expect("proof should exist");
    let path_elements_new: repr_c::Vec<CFr> = merkle_proof_new
        .get_path_elements()
        .iter()
        .map(|fr| CFr::from(*fr))
        .collect::<Vec<_>>()
        .into();
    let identity_path_index_new: repr_c::Vec<u8> =
        merkle_proof_new.get_path_index().to_vec().into();
    // We call generate_rln_proof for the new (different) identity
    let rln_proof3 = match ffi_generate_rln_proof_stateless(
        &ffi_rln_instance,
        &CFr::from(*identity_secret_hash_new.clone()),
        &CFr::from(user_message_limit),
        &CFr::from(Fr::from(1)),
        &path_elements_new,
        &identity_path_index_new,
        &CFr::from(x3),
        &CFr::from(external_nullifier),
    ) {
        CResult {
            ok: Some(proof),
            err: None,
        } => proof,
        CResult {
            ok: None,
            err: Some(err),
        } => panic!("generate rln proof with witness call failed: {}", err),
        _ => unreachable!(),
    };
    // We attempt to recover the secret using share1 (coming from identity_secret_hash) and share3 (coming from identity_secret_hash_new)
    let recovered_id_secret_new_cfr = match ffi_recover_id_secret(&rln_proof1, &rln_proof3) {
        CResult {
            ok: Some(secret),
            err: None,
        } => secret,
        CResult {
            ok: None,
            err: Some(err),
        } => panic!("recover id secret call failed: {}", err),
        _ => unreachable!(),
    };
    let recovered_identity_secret_hash_new = recovered_id_secret_new_cfr;
    // ensure that the recovered secret does not match with either of the
    // used secrets in proof generation
    assert_ne!(
        *recovered_identity_secret_hash_new,
        *identity_secret_hash_new
    );
}
#[test]
// Verifies the behavior of ffi_verify_with_roots in stateless mode:
// an empty root set skips root validation, a set of wrong roots rejects
// the proof, and a set containing the true tree root accepts it.
fn test_verify_with_roots_stateless_ffi() {
    let default_leaf = Fr::from(0);
    // Local Merkle tree maintained by the caller (stateless mode).
    let mut tree: OptimalMerkleTree<PoseidonHash> = OptimalMerkleTree::new(
        TEST_TREE_DEPTH,
        default_leaf,
        ConfigOf::<OptimalMerkleTree<PoseidonHash>>::default(),
    )
    .unwrap();
    let ffi_rln_instance = create_rln_instance();
    // We generate a new identity pair
    let (identity_secret_hash, id_commitment) = identity_pair_gen();
    let identity_index = tree.leaves_set();
    let user_message_limit = Fr::from(100);
    // Leaves hold rate commitments: Poseidon(id_commitment, user_message_limit).
    let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit]);
    tree.update_next(rate_commitment).unwrap();
    // We generate a random epoch and derive the external nullifier from it
    let epoch = hash_to_field_le(b"test-epoch");
    let rln_identifier = hash_to_field_le(b"test-rln-identifier");
    let external_nullifier = utils_poseidon_hash(&[epoch, rln_identifier]);
    // We generate a random signal
    let mut rng = thread_rng();
    let signal: [u8; 32] = rng.gen();
    let x = hash_to_field_le(&signal);
    let merkle_proof = tree.proof(identity_index).expect("proof should exist");
    // We prepare input for generate_rln_proof API
    let path_elements: repr_c::Vec<CFr> = merkle_proof
        .get_path_elements()
        .iter()
        .map(|fr| CFr::from(*fr))
        .collect::<Vec<_>>()
        .into();
    let identity_path_index: repr_c::Vec<u8> = merkle_proof.get_path_index().to_vec().into();
    let rln_proof = match ffi_generate_rln_proof_stateless(
        &ffi_rln_instance,
        &CFr::from(*identity_secret_hash.clone()),
        &CFr::from(user_message_limit),
        &CFr::from(Fr::from(1)),
        &path_elements,
        &identity_path_index,
        &CFr::from(x),
        &CFr::from(external_nullifier),
    ) {
        CResult {
            ok: Some(proof),
            err: None,
        } => proof,
        CResult {
            ok: None,
            err: Some(err),
        } => panic!("generate rln proof with witness call failed: {}", err),
        _ => unreachable!(),
    };
    // If no roots are provided, root validation is skipped and, if the remaining
    // proof values are valid, the proof is correctly verified.
    let roots_empty: repr_c::Vec<CFr> = vec![].into();
    assert!(
        ffi_verify_with_roots(&ffi_rln_instance, &rln_proof, &roots_empty, &CFr::from(x)).ok
    );
    // We pass some random roots and check that the proof is NOT verified,
    // since the set does not contain the root the proof refers to.
    let mut roots_random: Vec<CFr> = Vec::new();
    for _ in 0..5 {
        roots_random.push(CFr::from(Fr::rand(&mut rng)));
    }
    let roots_random_vec: repr_c::Vec<CFr> = roots_random.into();
    assert!(
        !ffi_verify_with_roots(
            &ffi_rln_instance,
            &rln_proof,
            &roots_random_vec,
            &CFr::from(x),
        )
        .ok
    );
    // We get the root of the tree obtained adding one leaf per time
    let root = tree.root();
    // We add the real root and we check if now the proof is verified
    let mut roots_with_correct: Vec<CFr> = Vec::new();
    for _ in 0..5 {
        roots_with_correct.push(CFr::from(Fr::rand(&mut rng)));
    }
    roots_with_correct.push(CFr::from(root));
    let roots_correct_vec: repr_c::Vec<CFr> = roots_with_correct.into();
    assert!(
        ffi_verify_with_roots(
            &ffi_rln_instance,
            &rln_proof,
            &roots_correct_vec,
            &CFr::from(x)
        )
        .ok
    );
}
}

View File

@@ -1,10 +1,7 @@
#[cfg(test)]
mod test {
use rand::Rng;
use rln::circuit::Fr;
use rln::ffi::ffi_utils::*;
use rln::hashers::poseidon_hash;
use rln::utils::{fr_to_bytes_be, fr_to_bytes_le, str_to_fr, IdSecret};
use rln::{ffi::ffi_utils::*, prelude::*};
#[test]
// Tests hash to field using FFI APIs
@@ -13,11 +10,11 @@ mod test {
let seed_bytes: Vec<u8> = vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
let res = ffi_seeded_key_gen(&seed_bytes.into());
assert_eq!(res.len(), 2, "seeded key gen call failed");
let identity_secret_hash = res.first().unwrap();
let identity_secret = res.first().unwrap();
let id_commitment = res.get(1).unwrap();
// We check against expected values
let expected_identity_secret_hash_seed_bytes = str_to_fr(
let expected_identity_secret_seed_bytes = str_to_fr(
"0x766ce6c7e7a01bdf5b3f257616f603918c30946fa23480f2859c597817e6716",
16,
)
@@ -28,10 +25,7 @@ mod test {
)
.unwrap();
assert_eq!(
*identity_secret_hash,
expected_identity_secret_hash_seed_bytes
);
assert_eq!(*identity_secret, expected_identity_secret_seed_bytes);
assert_eq!(*id_commitment, expected_id_commitment_seed_bytes);
}
@@ -44,7 +38,7 @@ mod test {
assert_eq!(key_gen.len(), 4, "seeded extended key gen call failed");
let identity_trapdoor = *key_gen[0];
let identity_nullifier = *key_gen[1];
let identity_secret_hash = *key_gen[2];
let identity_secret = *key_gen[2];
let id_commitment = *key_gen[3];
// We check against expected values
@@ -58,7 +52,7 @@ mod test {
16,
)
.unwrap();
let expected_identity_secret_hash_seed_bytes = str_to_fr(
let expected_identity_secret_seed_bytes = str_to_fr(
"0x2aca62aaa7abaf3686fff2caf00f55ab9462dc12db5b5d4bcf3994e671f8e521",
16,
)
@@ -71,38 +65,35 @@ mod test {
assert_eq!(identity_trapdoor, expected_identity_trapdoor_seed_bytes);
assert_eq!(identity_nullifier, expected_identity_nullifier_seed_bytes);
assert_eq!(
identity_secret_hash,
expected_identity_secret_hash_seed_bytes
);
assert_eq!(identity_secret, expected_identity_secret_seed_bytes);
assert_eq!(id_commitment, expected_id_commitment_seed_bytes);
}
#[test]
// Test CFr FFI functions
fn test_cfr_ffi() {
let cfr_zero = cfr_zero();
let cfr_zero = ffi_cfr_zero();
let fr_zero = rln::circuit::Fr::from(0u8);
assert_eq!(*cfr_zero, fr_zero);
let cfr_one = cfr_one();
let cfr_one = ffi_cfr_one();
let fr_one = rln::circuit::Fr::from(1u8);
assert_eq!(*cfr_one, fr_one);
let cfr_int = uint_to_cfr(42);
let cfr_int = ffi_uint_to_cfr(42);
let fr_int = rln::circuit::Fr::from(42u8);
assert_eq!(*cfr_int, fr_int);
let cfr_debug_str = cfr_debug(Some(&cfr_int));
let cfr_debug_str = ffi_cfr_debug(Some(&cfr_int));
assert_eq!(cfr_debug_str.to_string(), "42");
let key_gen = ffi_key_gen();
let mut id_secret_fr = *key_gen[0];
let id_secret_hash = IdSecret::from(&mut id_secret_fr);
let id_commitment = *key_gen[1];
let cfr_id_secret_hash = vec_cfr_get(&key_gen, 0).unwrap();
let cfr_id_secret_hash = ffi_vec_cfr_get(&key_gen, 0).unwrap();
assert_eq!(*cfr_id_secret_hash, *id_secret_hash);
let cfr_id_commitment = vec_cfr_get(&key_gen, 1).unwrap();
let cfr_id_commitment = ffi_vec_cfr_get(&key_gen, 1).unwrap();
assert_eq!(*cfr_id_commitment, id_commitment);
}
@@ -113,15 +104,15 @@ mod test {
let signal_gen: [u8; 32] = rng.gen();
let signal: Vec<u8> = signal_gen.to_vec();
let bytes_le = vec_u8_to_bytes_le(&signal.clone().into());
let expected_le = rln::utils::vec_u8_to_bytes_le(&signal);
let bytes_le = ffi_vec_u8_to_bytes_le(&signal.clone().into());
let expected_le = vec_u8_to_bytes_le(&signal);
assert_eq!(bytes_le.iter().copied().collect::<Vec<_>>(), expected_le);
let bytes_be = vec_u8_to_bytes_be(&signal.clone().into());
let expected_be = rln::utils::vec_u8_to_bytes_be(&signal);
let bytes_be = ffi_vec_u8_to_bytes_be(&signal.clone().into());
let expected_be = vec_u8_to_bytes_be(&signal);
assert_eq!(bytes_be.iter().copied().collect::<Vec<_>>(), expected_be);
let signal_from_le = match bytes_le_to_vec_u8(&bytes_le) {
let signal_from_le = match ffi_bytes_le_to_vec_u8(&bytes_le) {
CResult {
ok: Some(vec_u8),
err: None,
@@ -129,12 +120,12 @@ mod test {
CResult {
ok: None,
err: Some(err),
} => panic!("bytes_le_to_vec_u8 call failed: {}", err),
} => panic!("ffi_bytes_le_to_vec_u8 call failed: {}", err),
_ => unreachable!(),
};
assert_eq!(signal_from_le.iter().copied().collect::<Vec<_>>(), signal);
let signal_from_be = match bytes_be_to_vec_u8(&bytes_be) {
let signal_from_be = match ffi_bytes_be_to_vec_u8(&bytes_be) {
CResult {
ok: Some(vec_u8),
err: None,
@@ -142,7 +133,7 @@ mod test {
CResult {
ok: None,
err: Some(err),
} => panic!("bytes_be_to_vec_u8 call failed: {}", err),
} => panic!("ffi_bytes_be_to_vec_u8 call failed: {}", err),
_ => unreachable!(),
};
assert_eq!(signal_from_be.iter().copied().collect::<Vec<_>>(), signal);
@@ -154,15 +145,15 @@ mod test {
let vec_fr = [Fr::from(1u8), Fr::from(2u8), Fr::from(3u8), Fr::from(4u8)];
let vec_cfr: Vec<CFr> = vec_fr.iter().map(|fr| CFr::from(*fr)).collect();
let bytes_le = vec_cfr_to_bytes_le(&vec_cfr.clone().into());
let expected_le = rln::utils::vec_fr_to_bytes_le(&vec_fr);
let bytes_le = ffi_vec_cfr_to_bytes_le(&vec_cfr.clone().into());
let expected_le = vec_fr_to_bytes_le(&vec_fr);
assert_eq!(bytes_le.iter().copied().collect::<Vec<_>>(), expected_le);
let bytes_be = vec_cfr_to_bytes_be(&vec_cfr.clone().into());
let expected_be = rln::utils::vec_fr_to_bytes_be(&vec_fr);
let bytes_be = ffi_vec_cfr_to_bytes_be(&vec_cfr.clone().into());
let expected_be = vec_fr_to_bytes_be(&vec_fr);
assert_eq!(bytes_be.iter().copied().collect::<Vec<_>>(), expected_be);
let vec_cfr_from_le = match bytes_le_to_vec_cfr(&bytes_le) {
let vec_cfr_from_le = match ffi_bytes_le_to_vec_cfr(&bytes_le) {
CResult {
ok: Some(vec_cfr),
err: None,
@@ -170,12 +161,12 @@ mod test {
CResult {
ok: None,
err: Some(err),
} => panic!("bytes_le_to_vec_cfr call failed: {}", err),
} => panic!("ffi_bytes_le_to_vec_cfr call failed: {}", err),
_ => unreachable!(),
};
assert_eq!(vec_cfr_from_le.iter().copied().collect::<Vec<_>>(), vec_cfr);
let vec_cfr_from_be = match bytes_be_to_vec_cfr(&bytes_be) {
let vec_cfr_from_be = match ffi_bytes_be_to_vec_cfr(&bytes_be) {
CResult {
ok: Some(vec_cfr),
err: None,
@@ -183,7 +174,7 @@ mod test {
CResult {
ok: None,
err: Some(err),
} => panic!("bytes_be_to_vec_cfr call failed: {}", err),
} => panic!("ffi_bytes_be_to_vec_cfr call failed: {}", err),
_ => unreachable!(),
};
assert_eq!(vec_cfr_from_be.iter().copied().collect::<Vec<_>>(), vec_cfr);
@@ -197,24 +188,24 @@ mod test {
let signal: Vec<u8> = signal_gen.to_vec();
let cfr_le_1 = ffi_hash_to_field_le(&signal.clone().into());
let fr_le_2 = rln::hashers::hash_to_field_le(&signal);
let fr_le_2 = hash_to_field_le(&signal);
assert_eq!(*cfr_le_1, fr_le_2);
let cfr_be_1 = ffi_hash_to_field_be(&signal.clone().into());
let fr_be_2 = rln::hashers::hash_to_field_be(&signal);
let fr_be_2 = hash_to_field_be(&signal);
assert_eq!(*cfr_be_1, fr_be_2);
assert_eq!(*cfr_le_1, *cfr_be_1);
assert_eq!(fr_le_2, fr_be_2);
let hash_cfr_le_1 = cfr_to_bytes_le(&cfr_le_1)
let hash_cfr_le_1 = ffi_cfr_to_bytes_le(&cfr_le_1)
.iter()
.copied()
.collect::<Vec<_>>();
let hash_fr_le_2 = fr_to_bytes_le(&fr_le_2);
assert_eq!(hash_cfr_le_1, hash_fr_le_2);
let hash_cfr_be_1 = cfr_to_bytes_be(&cfr_be_1)
let hash_cfr_be_1 = ffi_cfr_to_bytes_be(&cfr_be_1)
.iter()
.copied()
.collect::<Vec<_>>();

View File

@@ -1,16 +1,10 @@
////////////////////////////////////////////////////////////
// Tests
////////////////////////////////////////////////////////////
#![cfg(not(feature = "stateless"))]
#[cfg(test)]
mod test {
use rln::hashers::{poseidon_hash, PoseidonHash};
use rln::{
circuit::{Fr, TEST_TREE_DEPTH},
poseidon_tree::PoseidonTree,
};
use rln::prelude::*;
use utils::{FullMerkleTree, OptimalMerkleTree, ZerokitMerkleProof, ZerokitMerkleTree};
#[test]
@@ -19,8 +13,8 @@ mod test {
let sample_size = 100;
let leaves: Vec<Fr> = (0..sample_size).map(Fr::from).collect();
let mut tree_full = FullMerkleTree::<PoseidonHash>::default(TEST_TREE_DEPTH).unwrap();
let mut tree_opt = OptimalMerkleTree::<PoseidonHash>::default(TEST_TREE_DEPTH).unwrap();
let mut tree_full = FullMerkleTree::<PoseidonHash>::default(DEFAULT_TREE_DEPTH).unwrap();
let mut tree_opt = OptimalMerkleTree::<PoseidonHash>::default(DEFAULT_TREE_DEPTH).unwrap();
for (i, leave) in leaves
.into_iter()

View File

@@ -3,16 +3,7 @@
#[cfg(test)]
mod test {
use ark_ff::BigInt;
use rln::circuit::{graph_from_folder, zkey_from_folder};
use rln::circuit::{Fr, TEST_TREE_DEPTH};
use rln::hashers::{hash_to_field_le, poseidon_hash};
use rln::poseidon_tree::PoseidonTree;
use rln::protocol::{
deserialize_proof_values, deserialize_witness, generate_proof, keygen,
proof_values_from_witness, rln_witness_from_json, rln_witness_to_json, seeded_keygen,
serialize_proof_values, serialize_witness, verify_proof, RLNWitnessInput,
};
use rln::utils::str_to_fr;
use rln::prelude::*;
use utils::{ZerokitMerkleProof, ZerokitMerkleTree};
type ConfigOf<T> = <T as ZerokitMerkleTree>::Config;
@@ -23,14 +14,14 @@ mod test {
let leaf_index = 3;
// generate identity
let identity_secret_hash = hash_to_field_le(b"test-merkle-proof");
let id_commitment = poseidon_hash(&[identity_secret_hash]);
let identity_secret = hash_to_field_le(b"test-merkle-proof");
let id_commitment = poseidon_hash(&[identity_secret]);
let rate_commitment = poseidon_hash(&[id_commitment, 100.into()]);
// generate merkle tree
let default_leaf = Fr::from(0);
let mut tree = PoseidonTree::new(
TEST_TREE_DEPTH,
DEFAULT_TREE_DEPTH,
default_leaf,
ConfigOf::<PoseidonTree>::default(),
)
@@ -94,14 +85,14 @@ mod test {
fn get_test_witness() -> RLNWitnessInput {
let leaf_index = 3;
// Generate identity pair
let (identity_secret_hash, id_commitment) = keygen();
let (identity_secret, id_commitment) = keygen();
let user_message_limit = Fr::from(100);
let rate_commitment = poseidon_hash(&[id_commitment, user_message_limit]);
//// generate merkle tree
let default_leaf = Fr::from(0);
let mut tree = PoseidonTree::new(
TEST_TREE_DEPTH,
DEFAULT_TREE_DEPTH,
default_leaf,
ConfigOf::<PoseidonTree>::default(),
)
@@ -121,7 +112,7 @@ mod test {
let message_id = Fr::from(1);
RLNWitnessInput::new(
identity_secret_hash,
identity_secret,
user_message_limit,
message_id,
merkle_proof.get_path_elements(),
@@ -132,70 +123,39 @@ mod test {
.unwrap()
}
#[test]
// We test a RLN proof generation and verification
fn test_witness_from_json() {
// We generate all relevant keys
let proving_key = zkey_from_folder();
let verifying_key = &proving_key.0.vk;
let graph_data = graph_from_folder();
// We compute witness from the json input
let rln_witness = get_test_witness();
let rln_witness_json = rln_witness_to_json(&rln_witness).unwrap();
let rln_witness_deser = rln_witness_from_json(rln_witness_json).unwrap();
assert_eq!(rln_witness_deser, rln_witness);
// Let's generate a zkSNARK proof
let proof = generate_proof(proving_key, &rln_witness_deser, graph_data).unwrap();
let proof_values = proof_values_from_witness(&rln_witness_deser).unwrap();
// Let's verify the proof
let verified = verify_proof(verifying_key, &proof, &proof_values);
assert!(verified.unwrap());
}
#[test]
// We test a RLN proof generation and verification
fn test_end_to_end() {
let rln_witness = get_test_witness();
let rln_witness_json = rln_witness_to_json(&rln_witness).unwrap();
let rln_witness_deser = rln_witness_from_json(rln_witness_json).unwrap();
assert_eq!(rln_witness_deser, rln_witness);
let witness = get_test_witness();
// We generate all relevant keys
let proving_key = zkey_from_folder();
let verifying_key = &proving_key.0.vk;
let graph_data = graph_from_folder();
// Let's generate a zkSNARK proof
let proof = generate_proof(proving_key, &rln_witness_deser, graph_data).unwrap();
let proof = generate_zk_proof(proving_key, &witness, graph_data).unwrap();
let proof_values = proof_values_from_witness(&rln_witness_deser).unwrap();
let proof_values = proof_values_from_witness(&witness).unwrap();
// Let's verify the proof
let success = verify_proof(verifying_key, &proof, &proof_values).unwrap();
let success = verify_zk_proof(&proving_key.0.vk, &proof, &proof_values).unwrap();
assert!(success);
}
#[test]
fn test_witness_serialization() {
// We test witness JSON serialization
let rln_witness = get_test_witness();
let rln_witness_json = rln_witness_to_json(&rln_witness).unwrap();
let rln_witness_deser = rln_witness_from_json(rln_witness_json).unwrap();
assert_eq!(rln_witness_deser, rln_witness);
let witness = get_test_witness();
// We test witness serialization
let ser = serialize_witness(&rln_witness).unwrap();
let (deser, _) = deserialize_witness(&ser).unwrap();
assert_eq!(rln_witness, deser);
let ser = rln_witness_to_bytes_le(&witness).unwrap();
let (deser, _) = bytes_le_to_rln_witness(&ser).unwrap();
assert_eq!(witness, deser);
// We test Proof values serialization
let proof_values = proof_values_from_witness(&rln_witness).unwrap();
let ser = serialize_proof_values(&proof_values);
let (deser, _) = deserialize_proof_values(&ser);
let proof_values = proof_values_from_witness(&witness).unwrap();
let ser = rln_proof_values_to_bytes_le(&proof_values);
let (deser, _) = bytes_le_to_rln_proof_values(&ser).unwrap();
assert_eq!(proof_values, deser);
}
@@ -205,10 +165,10 @@ mod test {
fn test_seeded_keygen() {
// Generate identity pair using a seed phrase
let seed_phrase: &str = "A seed phrase example";
let (identity_secret_hash, id_commitment) = seeded_keygen(seed_phrase.as_bytes());
let (identity_secret, id_commitment) = seeded_keygen(seed_phrase.as_bytes());
// We check against expected values
let expected_identity_secret_hash_seed_phrase = str_to_fr(
let expected_identity_secret_seed_phrase = str_to_fr(
"0x20df38f3f00496f19fe7c6535492543b21798ed7cb91aebe4af8012db884eda3",
16,
)
@@ -219,18 +179,15 @@ mod test {
)
.unwrap();
assert_eq!(
identity_secret_hash,
expected_identity_secret_hash_seed_phrase
);
assert_eq!(identity_secret, expected_identity_secret_seed_phrase);
assert_eq!(id_commitment, expected_id_commitment_seed_phrase);
// Generate identity pair using an byte array
let seed_bytes: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
let (identity_secret_hash, id_commitment) = seeded_keygen(seed_bytes);
let (identity_secret, id_commitment) = seeded_keygen(seed_bytes);
// We check against expected values
let expected_identity_secret_hash_seed_bytes = str_to_fr(
let expected_identity_secret_seed_bytes = str_to_fr(
"0x766ce6c7e7a01bdf5b3f257616f603918c30946fa23480f2859c597817e6716",
16,
)
@@ -241,19 +198,13 @@ mod test {
)
.unwrap();
assert_eq!(
identity_secret_hash,
expected_identity_secret_hash_seed_bytes
);
assert_eq!(identity_secret, expected_identity_secret_seed_bytes);
assert_eq!(id_commitment, expected_id_commitment_seed_bytes);
// We check again if the identity pair generated with the same seed phrase corresponds to the previously generated one
let (identity_secret_hash, id_commitment) = seeded_keygen(seed_phrase.as_bytes());
let (identity_secret, id_commitment) = seeded_keygen(seed_phrase.as_bytes());
assert_eq!(
identity_secret_hash,
expected_identity_secret_hash_seed_phrase
);
assert_eq!(identity_secret, expected_identity_secret_seed_phrase);
assert_eq!(id_commitment, expected_id_commitment_seed_phrase);
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,14 +1,7 @@
#[cfg(test)]
mod test {
use rln::utils::{
bytes_be_to_fr, bytes_be_to_vec_fr, bytes_be_to_vec_u8, bytes_be_to_vec_usize,
bytes_le_to_fr, bytes_le_to_vec_fr, bytes_le_to_vec_u8, bytes_le_to_vec_usize,
fr_to_bytes_be, fr_to_bytes_le, normalize_usize_be, normalize_usize_le, str_to_fr,
vec_fr_to_bytes_be, vec_fr_to_bytes_le, vec_u8_to_bytes_be, vec_u8_to_bytes_le,
};
use ark_std::{rand::thread_rng, UniformRand};
use rln::circuit::Fr;
use rln::prelude::*;
#[test]
fn test_normalize_usize_le() {
@@ -194,12 +187,12 @@ mod test {
// Test little-endian roundtrip
let le_bytes = fr_to_bytes_le(&fr);
let (reconstructed_le, _) = bytes_le_to_fr(&le_bytes);
let (reconstructed_le, _) = bytes_le_to_fr(&le_bytes).unwrap();
assert_eq!(fr, reconstructed_le);
// Test big-endian roundtrip
let be_bytes = fr_to_bytes_be(&fr);
let (reconstructed_be, _) = bytes_be_to_fr(&be_bytes);
let (reconstructed_be, _) = bytes_be_to_fr(&be_bytes).unwrap();
assert_eq!(fr, reconstructed_be);
}
}
@@ -320,8 +313,8 @@ mod test {
// They should be different (unless the value is symmetric)
if le_bytes != be_bytes {
// Verify they can both be reconstructed correctly
let (reconstructed_le, _) = bytes_le_to_fr(&le_bytes);
let (reconstructed_be, _) = bytes_be_to_fr(&be_bytes);
let (reconstructed_le, _) = bytes_le_to_fr(&le_bytes).unwrap();
let (reconstructed_be, _) = bytes_be_to_fr(&be_bytes).unwrap();
assert_eq!(fr, reconstructed_le);
assert_eq!(fr, reconstructed_be);
}
@@ -329,7 +322,27 @@ mod test {
#[test]
fn test_error_handling() {
// Test with valid length but insufficient data
// Test bytes_le_to_fr and bytes_be_to_fr with insufficient data
let short_bytes = vec![0u8; 10]; // Less than FR_BYTE_SIZE (32 bytes)
assert!(bytes_le_to_fr(&short_bytes).is_err());
assert!(bytes_be_to_fr(&short_bytes).is_err());
// Test with empty bytes
let empty_bytes = vec![];
assert!(bytes_le_to_fr(&empty_bytes).is_err());
assert!(bytes_be_to_fr(&empty_bytes).is_err());
// Test with exact size - should succeed
let exact_bytes = vec![0u8; FR_BYTE_SIZE];
assert!(bytes_le_to_fr(&exact_bytes).is_ok());
assert!(bytes_be_to_fr(&exact_bytes).is_ok());
// Test with more than enough data - should succeed
let extra_bytes = vec![0u8; FR_BYTE_SIZE + 10];
assert!(bytes_le_to_fr(&extra_bytes).is_ok());
assert!(bytes_be_to_fr(&extra_bytes).is_ok());
// Test with valid length but insufficient data for vector deserialization
let valid_length_invalid_data = vec![0u8; 8]; // Length 0, but no data
assert!(bytes_le_to_vec_u8(&valid_length_invalid_data).is_ok());
assert!(bytes_be_to_vec_u8(&valid_length_invalid_data).is_ok());

6
rustfmt.toml Normal file
View File

@@ -0,0 +1,6 @@
# Run cargo +nightly fmt to format with this configuration
edition = "2021" # use Rust 2021 edition
unstable_features = true # needed for group_imports
reorder_imports = true # sort imports alphabetically
imports_granularity = "Crate" # keep items from the same crate grouped together
group_imports = "StdExternalCrate" # group std, external, and local imports separately

View File

@@ -16,17 +16,17 @@ ark-ff = { version = "0.5.0", default-features = false }
num-bigint = { version = "0.4.6", default-features = false }
pmtree = { package = "vacp2p_pmtree", version = "2.0.3", optional = true }
sled = "0.34.7"
serde_json = "1.0.141"
hex = "0.4.3"
rayon = "1.10.0"
serde_json = "1.0.145"
rayon = "1.11.0"
thiserror = "2.0"
[dev-dependencies]
hex = "0.4.3"
hex-literal = "1.1.0"
ark-bn254 = { version = "0.5.0", features = ["std"] }
num-traits = "0.2.19"
hex-literal = "0.4.1"
tiny-keccak = { version = "2.0.2", features = ["keccak"] }
criterion = { version = "0.7.0", features = ["html_reports"] }
criterion = { version = "0.8.0", features = ["html_reports"] }
[features]
default = []

View File

@@ -1,5 +1,6 @@
use criterion::{criterion_group, criterion_main, Criterion};
use std::{fmt::Display, str::FromStr, sync::LazyLock};
use criterion::{criterion_group, criterion_main, Criterion};
use tiny_keccak::{Hasher as _, Keccak};
use zerokit_utils::{
FullMerkleConfig, FullMerkleTree, Hasher, OptimalMerkleConfig, OptimalMerkleTree,

View File

@@ -15,8 +15,8 @@ pub enum ZerokitMerkleTreeError {
TooManySet,
#[error("Unknown error while computing merkle proof")]
ComputingProofError,
#[error("Invalid witness length (!= tree depth)")]
InvalidWitness,
#[error("Invalid merkle proof length (!= tree depth)")]
InvalidMerkleProof,
#[cfg(feature = "pmtree-ft")]
#[error("Pmtree error: {0}")]
PmtreeErrorKind(#[from] pmtree::PmtreeErrorKind),

View File

@@ -11,9 +11,8 @@ use crate::merkle_tree::{
error::{FromConfigError, ZerokitMerkleTreeError},
FrOf, Hasher, ZerokitMerkleProof, ZerokitMerkleTree, MIN_PARALLEL_NODES,
};
////////////////////////////////////////////////////////////
///// Full Merkle Tree Implementation
////////////////////////////////////////////////////////////
// Full Merkle Tree Implementation
/// Merkle tree with all leaf and intermediate hashes stored
#[derive(Clone, PartialEq, Eq, Debug)]
@@ -290,9 +289,9 @@ where
fn verify(
&self,
hash: &FrOf<Self::Hasher>,
proof: &FullMerkleProof<H>,
merkle_proof: &FullMerkleProof<H>,
) -> Result<bool, ZerokitMerkleTreeError> {
Ok(proof.compute_root_from(hash) == self.root())
Ok(merkle_proof.compute_root_from(hash) == self.root())
}
fn set_metadata(&mut self, metadata: &[u8]) -> Result<(), ZerokitMerkleTreeError> {

View File

@@ -13,12 +13,13 @@
//! * Disk based storage backend (using mmaped files should be easy)
//! * Implement serialization for tree and Merkle proof
use crate::merkle_tree::error::ZerokitMerkleTreeError;
use std::{
fmt::{Debug, Display},
str::FromStr,
};
use crate::merkle_tree::error::ZerokitMerkleTreeError;
/// Enables parallel hashing when there are at least 8 nodes (4 pairs to hash), justifying the overhead.
pub const MIN_PARALLEL_NODES: usize = 8;
@@ -85,7 +86,7 @@ pub trait ZerokitMerkleTree {
fn verify(
&self,
leaf: &FrOf<Self::Hasher>,
witness: &Self::Proof,
merkle_proof: &Self::Proof,
) -> Result<bool, ZerokitMerkleTreeError>;
fn set_metadata(&mut self, metadata: &[u8]) -> Result<(), ZerokitMerkleTreeError>;
fn metadata(&self) -> Result<Vec<u8>, ZerokitMerkleTreeError>;

View File

@@ -4,8 +4,8 @@ pub mod full_merkle_tree;
pub mod merkle_tree;
pub mod optimal_merkle_tree;
pub use self::full_merkle_tree::{FullMerkleConfig, FullMerkleProof, FullMerkleTree};
pub use self::merkle_tree::{
FrOf, Hasher, ZerokitMerkleProof, ZerokitMerkleTree, MIN_PARALLEL_NODES,
pub use self::{
full_merkle_tree::{FullMerkleConfig, FullMerkleProof, FullMerkleTree},
merkle_tree::{FrOf, Hasher, ZerokitMerkleProof, ZerokitMerkleTree, MIN_PARALLEL_NODES},
optimal_merkle_tree::{OptimalMerkleConfig, OptimalMerkleProof, OptimalMerkleTree},
};
pub use self::optimal_merkle_tree::{OptimalMerkleConfig, OptimalMerkleProof, OptimalMerkleTree};

View File

@@ -6,9 +6,8 @@ use crate::merkle_tree::{
error::{FromConfigError, ZerokitMerkleTreeError},
FrOf, Hasher, ZerokitMerkleProof, ZerokitMerkleTree, MIN_PARALLEL_NODES,
};
////////////////////////////////////////////////////////////
///// Optimal Merkle Tree Implementation
////////////////////////////////////////////////////////////
// Optimal Merkle Tree Implementation
/// The Merkle tree structure
#[derive(Clone, PartialEq, Eq, Debug)]
@@ -267,11 +266,15 @@ where
}
/// Verifies a Merkle proof with respect to the input leaf and the tree root
fn verify(&self, leaf: &H::Fr, witness: &Self::Proof) -> Result<bool, ZerokitMerkleTreeError> {
if witness.length() != self.depth {
return Err(ZerokitMerkleTreeError::InvalidWitness);
fn verify(
&self,
leaf: &H::Fr,
merkle_proof: &Self::Proof,
) -> Result<bool, ZerokitMerkleTreeError> {
if merkle_proof.length() != self.depth {
return Err(ZerokitMerkleTreeError::InvalidMerkleProof);
}
let expected_root = witness.compute_root_from(leaf);
let expected_root = merkle_proof.compute_root_from(leaf);
Ok(expected_root.eq(&self.root()))
}

View File

@@ -1,4 +1,5 @@
pub mod sled_adapter;
pub use self::sled_adapter::SledDB;
pub use pmtree;
pub use sled::{Config, Mode};
pub use self::sled_adapter::SledDB;

View File

@@ -1,9 +1,7 @@
use pmtree::*;
use std::{collections::HashMap, thread, time::Duration};
use pmtree::*;
use sled::Db as Sled;
use std::collections::HashMap;
use std::thread;
use std::time::Duration;
pub struct SledDB(Sled);

View File

@@ -3,9 +3,10 @@
// Implementation partially taken from https://github.com/arnaucube/poseidon-rs/blob/233027d6075a637c29ad84a8a44f5653b81f0410/src/lib.rs
// and adapted to work over arkworks field traits and custom data structures
use crate::poseidon_constants::find_poseidon_ark_and_mds;
use ark_ff::PrimeField;
use crate::poseidon_constants::find_poseidon_ark_and_mds;
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct RoundParameters<F: PrimeField> {
pub t: usize,

View File

@@ -1,8 +1,9 @@
// Tests adapted from https://github.com/worldcoin/semaphore-rs/blob/d462a4372f1fd9c27610f2acfe4841fab1d396aa/src/merkle_tree.rs
#[cfg(test)]
mod test {
use hex_literal::hex;
use std::{fmt::Display, str::FromStr};
use hex_literal::hex;
use tiny_keccak::{Hasher as _, Keccak};
use zerokit_utils::{
FullMerkleConfig, FullMerkleTree, Hasher, OptimalMerkleConfig, OptimalMerkleTree,

View File

@@ -1,9 +1,9 @@
#[cfg(test)]
mod test {
use std::{collections::HashMap, str::FromStr};
use ark_bn254::Fr;
use ark_ff::{AdditiveGroup, Field};
use std::collections::HashMap;
use std::str::FromStr;
use zerokit_utils::poseidon_hash::Poseidon;
const ROUND_PARAMS: [(usize, usize, usize, usize); 8] = [