Compare commits

9 Commits

| Author | SHA1 | Message | Date |
|---|---|---|---|
| Rahul Ghangas | 936b7fc3cb | test: add helper function to wrap init | 2023-02-24 11:56:01 +05:30 |
| Rahul Ghangas | cf7e2de380 | test: call init and init_thread_pool before tests start | 2023-02-24 11:55:46 +05:30 |
| Rahul Ghangas | 4fbb1525ea | chore: use browser for testing, since node is not supported | 2023-02-24 11:54:44 +05:30 |
| Rahul Ghangas | a7f6d63060 | chore: add new line at the end of rust-toolchain file | 2023-02-02 01:07:08 +05:30 |
| Rahul Ghangas | 95f895c64c | feat: target toolchain and add rustflags | 2023-02-02 01:03:48 +05:30 |
| Rahul Ghangas | 228ccba90a | chore: remove unused dependency in rln that's causing wasm issues | 2023-02-02 01:02:16 +05:30 |
| Rahul Ghangas | e04fa2c7a5 | feat: rexport init_thread_pool | 2023-02-02 01:01:50 +05:30 |
| Rahul Ghangas | 6539bb4d5e | chore: enable parallel feature for rln | 2023-02-02 00:59:32 +05:30 |
| Rahul Ghangas | 5394050c0b | chore: add wasm rayon for threading | 2023-02-02 00:58:46 +05:30 |
47 changed files with 963 additions and 1509 deletions

View File

@@ -46,9 +46,10 @@ jobs:
profile: minimal
toolchain: stable
override: true
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
run: make installdeps
- name: Update git submodules
run: git submodule update --init --recursive
- name: Install cargo-make
run: cargo install cargo-make
- name: cargo-make test
run: |
cargo make test --release
@@ -71,9 +72,10 @@ jobs:
profile: minimal
toolchain: stable
override: true
- uses: Swatinem/rust-cache@v2
- name: Install Dependencies
run: make installdeps
- name: Update git submodules
run: git submodule update --init --recursive
- name: Install cargo-make
run: cargo install cargo-make
- name: cargo-make test
run: |
cargo make test --release
@@ -95,11 +97,10 @@ jobs:
profile: minimal
toolchain: stable
override: true
- uses: Swatinem/rust-cache@v2
- name: Install Dependencies
run: make installdeps
- run: git submodule update --init --recursive
- name: Install wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
- run: cargo install cargo-make
- run: cargo make build
working-directory: rln-wasm
- run: cargo-make test
@@ -122,9 +123,10 @@ jobs:
profile: minimal
toolchain: stable
override: true
- uses: Swatinem/rust-cache@v2
- name: Install Dependencies
run: make installdeps
- name: Update git submodules
run: git submodule update --init --recursive
- name: Install cargo-make
run: cargo install cargo-make
- name: cargo-make test
run: |
cargo make test --release
@@ -147,9 +149,10 @@ jobs:
profile: minimal
toolchain: stable
override: true
- uses: Swatinem/rust-cache@v2
- name: Install Dependencies
run: make installdeps
- name: Update git submodules
run: git submodule update --init --recursive
- name: Install cargo-make
run: cargo install cargo-make
- name: cargo-make test
run: |
cargo make test --release
@@ -174,31 +177,30 @@ jobs:
toolchain: stable
override: true
components: rustfmt, clippy
- uses: Swatinem/rust-cache@v2
- name: Install Dependencies
run: make installdeps
- name: Update git submodules
run: git submodule update --init --recursive
- name: cargo fmt
if: success() || failure()
run: cargo fmt --all -- --check
- name: multiplier - cargo clippy
if: success() || failure()
run: |
cargo clippy --release -- -D warnings
cargo clippy --release
working-directory: multiplier
- name: semaphore - cargo clippy
if: success() || failure()
run: |
cargo clippy --release -- -D warnings
cargo clippy --release
working-directory: semaphore
- name: rln - cargo clippy
if: success() || failure()
run: |
cargo clippy --release -- -D warnings
cargo clippy --release
working-directory: rln
- name: utils - cargo clippy
if: success() || failure()
run: |
cargo clippy --release -- -D warnings
cargo clippy --release
working-directory: utils
# We skip clippy on rln-wasm, since wasm target is managed by cargo make
# Currently not treating warnings as errors, too noisy

View File

@@ -6,12 +6,6 @@ on:
jobs:
linux:
strategy:
matrix:
target:
- x86_64-unknown-linux-gnu
- aarch64-unknown-linux-gnu
- i686-unknown-linux-gnu
name: Linux build
runs-on: ubuntu-latest
steps:
@@ -23,57 +17,50 @@ jobs:
profile: minimal
toolchain: stable
override: true
target: ${{ matrix.target }}
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
run: make installdeps
- name: cross build
- name: Update git submodules
run: git submodule update --init --recursive
- name: cargo build
run: |
cross build --release --target ${{ matrix.target }} --workspace --exclude rln-wasm
cargo build --release --workspace --exclude rln-wasm
mkdir release
cp target/${{ matrix.target }}/release/librln* release/
tar -czvf ${{ matrix.target }}-rln.tar.gz release/
cp target/release/librln* release/
tar -czvf linux-rln.tar.gz release/
- name: Upload archive artifact
uses: actions/upload-artifact@v2
with:
name: ${{ matrix.target }}-archive
path: ${{ matrix.target }}-rln.tar.gz
name: linux-archive
path: linux-rln.tar.gz
retention-days: 2
macos:
name: MacOS build
runs-on: macos-latest
strategy:
matrix:
target:
- x86_64-apple-darwin
- aarch64-apple-darwin
steps:
- name: Checkout sources
uses: actions/checkout@v2
with:
ref: master
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
target: ${{ matrix.target }}
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
run: make installdeps
- name: cross build
- name: Update git submodules
run: git submodule update --init --recursive
- name: cargo build
run: |
cross build --release --target ${{ matrix.target }} --workspace --exclude rln-wasm
cargo build --release --workspace --exclude rln-wasm
mkdir release
cp target/${{ matrix.target }}/release/librln* release/
tar -czvf ${{ matrix.target }}-rln.tar.gz release/
cp target/release/librln* release/
tar -czvf macos-rln.tar.gz release/
- name: Upload archive artifact
uses: actions/upload-artifact@v2
with:
name: ${{ matrix.target }}-archive
path: ${{ matrix.target }}-rln.tar.gz
name: macos-archive
path: macos-rln.tar.gz
retention-days: 2
browser-rln-wasm:
@@ -82,20 +69,22 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v2
with:
ref: master
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
run: make installdeps
- run: git submodule update --init --recursive
- name: Install wasm-pack
uses: jetli/wasm-pack-action@v0.3.0
- name: cross make build
- name: Install cargo-make
run: cargo install cargo-make
- name: cargo make build
run: |
cross make build
cargo make build
mkdir release
cp pkg/** release/
tar -czvf browser-rln-wasm.tar.gz release/
@@ -122,7 +111,7 @@ jobs:
uses: actions/download-artifact@v2
- name: Delete tag
uses: dev-drprasad/delete-tag-and-release@v0.2.1
uses: dev-drprasad/delete-tag-and-release@v0.2.0
with:
delete_release: true
tag_name: nightly
@@ -131,12 +120,14 @@ jobs:
- name: Create prerelease
run: |
start_tag=$(gh release list -L 2 --exclude-drafts | grep -v nightly | cut -d$'\t' -f3 | sed -n '1p')
start_tag=$(gh release list -L 2 --exclude-drafts | grep -v nightly | cut -d$'\t' -f3)
gh release create nightly --prerelease --target master \
--title 'Nightly build ("master" branch)' \
--generate-notes \
--notes-start-tag $start_tag \
*-archive/*.tar.gz \
linux-archive/linux-rln.tar.gz \
macos-archive/macos-rln.tar.gz \
browser-rln-wasm-archive/browser-rln-wasm.tar.gz
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -145,4 +136,6 @@ jobs:
with:
failOnError: false
name: |
*-archive
linux-archive
macos-archive
browser-rln-wasm-archive

View File

@@ -1,15 +1,18 @@
## 2023-02-28 v0.2
## Upcoming release
This release contains:
- Improved code quality
Release highlights:
- Allows consumers of zerokit RLN to set leaves of the Merkle Tree starting from an arbitrary index, which is useful for batching updates to the Merkle Tree.
- Improved performance for proof generation and verification
- rln_wasm which allows for the consumption of RLN through a WebAssembly interface
- Refactored to generate Semaphore-compatible credentials
- Dual License under Apache 2.0 and MIT
- RLN compiles as a static library, which can be consumed through a C FFI
The full list of changes is below.
### Features
- Creation of `set_leaves_from`, which allows consumers to add leaves to a tree from a given starting index (see the sketch below). `init_tree_with_leaves` internally uses `set_leaves_from`, with index 0.
### Changes
- Renaming of `set_leaves` to `init_tree_with_leaves`, which is a more accurate representation of the function's utility.
### Fixes
- None
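A minimal sketch of how the two calls fit together. The method names are taken from the notes above; the trait and signatures are hypothetical stand-ins, not the crate's confirmed API:
```rust
// Hypothetical trait standing in for the concrete Merkle tree type;
// method names come from this changelog, signatures are assumed.
trait BatchedTree<Leaf> {
    fn init_tree_with_leaves(&mut self, leaves: &[Leaf]);
    fn set_leaves_from(&mut self, start: usize, leaves: &[Leaf]);
}

fn populate<L, T: BatchedTree<L>>(tree: &mut T, first: &[L], next: &[L]) {
    // init_tree_with_leaves(leaves) is set_leaves_from(0, leaves).
    tree.init_tree_with_leaves(first);
    // Batch the follow-up chunk starting right after the first one,
    // instead of setting each leaf individually.
    tree.set_leaves_from(first.len(), next);
}
```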
## 2022-09-19 v0.1

View File

@@ -7,16 +7,3 @@ members = [
"rln-wasm",
"utils",
]
# Compilation profile for any non-workspace member.
# Dependencies are optimized, even in a dev build. This improves dev performance
# while having negligible impact on incremental build times.
[profile.dev.package."*"]
opt-level = 3
[profile.release.package."rln-wasm"]
# Tell `rustc` to optimize for small code size.
opt-level = "s"
[profile.release.package."semaphore"]
codegen-units = 1

View File

@@ -1,32 +0,0 @@
[target.x86_64-pc-windows-gnu]
image = "ghcr.io/cross-rs/x86_64-pc-windows-gnu:latest"
[target.aarch64-unknown-linux-gnu]
image = "ghcr.io/cross-rs/aarch64-unknown-linux-gnu:latest"
[target.x86_64-unknown-linux-gnu]
image = "ghcr.io/cross-rs/x86_64-unknown-linux-gnu:latest"
[target.arm-unknown-linux-gnueabi]
image = "ghcr.io/cross-rs/arm-unknown-linux-gnueabi:latest"
[target.i686-pc-windows-gnu]
image = "ghcr.io/cross-rs/i686-pc-windows-gnu:latest"
[target.i686-unknown-linux-gnu]
image = "ghcr.io/cross-rs/i686-unknown-linux-gnu:latest"
[target.arm-unknown-linux-gnueabihf]
image = "ghcr.io/cross-rs/arm-unknown-linux-gnueabihf:latest"
[target.mips-unknown-linux-gnu]
image = "ghcr.io/cross-rs/mips-unknown-linux-gnu:latest"
[target.mips64-unknown-linux-gnuabi64]
image = "ghcr.io/cross-rs/mips64-unknown-linux-gnuabi64:latest"
[target.mips64el-unknown-linux-gnuabi64]
image = "ghcr.io/cross-rs/mips64el-unknown-linux-gnuabi64:latest"
[target.mipsel-unknown-linux-gnu]
image = "ghcr.io/cross-rs/mipsel-unknown-linux-gnu:latest"

View File

@@ -1,203 +0,0 @@
Copyright (c) 2022 Vac Research
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@@ -1,25 +0,0 @@
Copyright (c) 2022 Vac Research
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

View File

@@ -1,21 +1,13 @@
.PHONY: all installdeps build test clean
.PHONY: all test clean
all: .pre-build build
.fetch-submodules:
@git submodule update --init --recursive
.pre-build: .fetch-submodules
@cargo install cargo-make
ifdef CI
@cargo install cross --git https://github.com/cross-rs/cross --branch main
endif
installdeps: .pre-build
build: .pre-build
all: .pre-build
@cargo make build
.pre-build:
ifndef $(cargo make --help)
@cargo install --force cargo-make
endif
test: .pre-build
@cargo make test

View File

@@ -17,19 +17,3 @@ in Rust.
- [RLN library](https://github.com/kilic/rln) written in Rust based on Bellman.
- [semaphore-rs](https://github.com/worldcoin/semaphore-rs) written in Rust based on ark-circom.
## Build and Test
To install missing dependencies, run the following command from the root folder
```bash
make installdeps
```
To build and test all crates, run the following commands from the root folder
```bash
make build
make test
```
## Release assets
We use [`cross-rs`](https://github.com/cross-rs/cross) to cross-compile and generate release assets for rln.

View File

@@ -2,7 +2,6 @@
name = "multiplier"
version = "0.1.0"
edition = "2018"
license = "MIT OR Apache-2.0"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -13,19 +12,23 @@ license = "MIT OR Apache-2.0"
# fnv = { version = "1.0.3", default-features = false }
# num = { version = "0.4.0" }
# num-traits = { version = "0.2.0", default-features = false }
num-bigint = { version = "0.4", default-features = false, features = ["rand"] }
# ZKP Generation
ark-ec = { version = "0.3.0", default-features = false, features = ["parallel"] }
# ark-ff = { version = "0.3.0", default-features = false, features = ["parallel", "asm"] }
ark-std = { version = "0.3.0", default-features = false, features = ["parallel"] }
ark-bn254 = { version = "0.3.0" }
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", features = ["parallel"] }
# ark-poly = { version = "^0.3.0", default-features = false, features = ["parallel"] }
ark-relations = { version = "0.3.0", default-features = false }
ark-serialize = { version = "0.3.0", default-features = false }
ark-circom = { git = "https://github.com/gakonst/ark-circom", features = ["circom-2"], rev = "35ce5a9" }
ark-circom = { git = "https://github.com/gakonst/ark-circom", features = ["circom-2"] }
# error handling
color-eyre = "0.6.1"
# thiserror = "1.0.26"
color-eyre = "0.5"
# decoding of data
# hex = "0.4.3"

View File

@@ -3,15 +3,7 @@
Example wrapper around a basic Circom circuit to test Circom 2 integration
through ark-circom and FFI.
## Build and Test
To build and test, run the following commands within the module folder
```bash
cargo make build
cargo make test
```
## FFI
# FFI
To generate C or Nim bindings from Rust FFI, use `cbindgen` or `nbindgen`:
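For `cbindgen`, one option is to drive it from a `build.rs` via its library API; a sketch, assuming `cbindgen` is listed under `[build-dependencies]` and with an illustrative output path:
```rust
// build.rs — sketch: generate a C header for this crate's #[no_mangle]
// FFI items using cbindgen's library API. The output file name is
// illustrative, not part of the repository.
fn main() {
    let crate_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap();
    cbindgen::Builder::new()
        .with_crate(crate_dir)
        .with_language(cbindgen::Language::C)
        .generate()
        .expect("unable to generate C bindings")
        .write_to_file("multiplier.h");
}
```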

View File

@@ -31,12 +31,12 @@ impl<'a> From<&Buffer> for &'a [u8] {
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn new_circuit(ctx: *mut *mut Multiplier) -> bool {
if let Ok(mul) = Multiplier::new() {
unsafe { *ctx = Box::into_raw(Box::new(mul)) };
true
} else {
false
}
println!("multiplier ffi: new");
let mul = Multiplier::new();
unsafe { *ctx = Box::into_raw(Box::new(mul)) };
true
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]

View File

@@ -1,6 +1,6 @@
use ark_circom::{CircomBuilder, CircomConfig};
use ark_std::rand::thread_rng;
use color_eyre::{Report, Result};
use color_eyre::Result;
use ark_bn254::Bn254;
use ark_groth16::{
@@ -25,18 +25,17 @@ fn groth16_proof_example() -> Result<()> {
let circom = builder.build()?;
let inputs = circom
.get_public_inputs()
.ok_or(Report::msg("no public inputs"))?;
let inputs = circom.get_public_inputs().unwrap();
let proof = prove(circom, &params, &mut rng)?;
let pvk = prepare_verifying_key(&params.vk);
match verify_proof(&pvk, &proof, &inputs) {
Ok(_) => Ok(()),
Err(_) => Err(Report::msg("not verified")),
}
let verified = verify_proof(&pvk, &proof, &inputs)?;
assert!(verified);
Ok(())
}
fn main() {

View File

@@ -7,8 +7,9 @@ use ark_groth16::{
Proof, ProvingKey,
};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use color_eyre::{Report, Result};
use std::io::{Read, Write};
// , SerializationError};
use std::io::{self, Read, Write};
pub struct Multiplier {
circom: CircomCircuit<Bn254>,
@@ -17,11 +18,12 @@ pub struct Multiplier {
impl Multiplier {
// TODO Break this apart here
pub fn new() -> Result<Multiplier> {
pub fn new() -> Multiplier {
let cfg = CircomConfig::<Bn254>::new(
"./resources/circom2_multiplier2.wasm",
"./resources/circom2_multiplier2.r1cs",
)?;
)
.unwrap();
let mut builder = CircomBuilder::new(cfg);
builder.push_input("a", 3);
@@ -32,41 +34,40 @@ impl Multiplier {
let mut rng = thread_rng();
let params = generate_random_parameters::<Bn254, _, _>(circom, &mut rng)?;
let params = generate_random_parameters::<Bn254, _, _>(circom, &mut rng).unwrap();
let circom = builder.build()?;
let circom = builder.build().unwrap();
Ok(Multiplier { circom, params })
//let inputs = circom.get_public_inputs().unwrap();
Multiplier { circom, params }
}
// TODO Input Read
pub fn prove<W: Write>(&self, result_data: W) -> Result<()> {
pub fn prove<W: Write>(&self, result_data: W) -> io::Result<()> {
let mut rng = thread_rng();
// XXX: There's probably a better way to do this
let circom = self.circom.clone();
let params = self.params.clone();
let proof = prove(circom, &params, &mut rng)?;
let proof = prove(circom, &params, &mut rng).unwrap();
// XXX: Unclear if this is different from other serialization(s)
proof.serialize(result_data)?;
let _ = proof.serialize(result_data).unwrap();
Ok(())
}
pub fn verify<R: Read>(&self, input_data: R) -> Result<bool> {
let proof = Proof::deserialize(input_data)?;
pub fn verify<R: Read>(&self, input_data: R) -> io::Result<bool> {
let proof = Proof::deserialize(input_data).unwrap();
let pvk = prepare_verifying_key(&self.params.vk);
// XXX Part of input data?
let inputs = self
.circom
.get_public_inputs()
.ok_or(Report::msg("no public inputs"))?;
let inputs = self.circom.get_public_inputs().unwrap();
let verified = verify_proof(&pvk, &proof, &inputs)?;
let verified = verify_proof(&pvk, &proof, &inputs).unwrap();
Ok(verified)
}
@@ -74,6 +75,6 @@ impl Multiplier {
impl Default for Multiplier {
fn default() -> Self {
Self::new().unwrap()
Self::new()
}
}

View File

@@ -4,7 +4,8 @@ mod tests {
#[test]
fn multiplier_proof() {
let mul = Multiplier::new().unwrap();
let mul = Multiplier::new();
//let inputs = mul.circom.get_public_inputs().unwrap();
let mut output_data: Vec<u8> = Vec::new();
let _ = mul.prove(&mut output_data);

View File

@@ -2,7 +2,6 @@
name = "private-settlement"
version = "0.1.0"
edition = "2021"
license = "MIT OR Apache-2.0"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

View File

@@ -2,10 +2,3 @@
This module provides APIs to manage, compute and verify [Private Settlement](https://rfc.vac.dev/spec/44/) zkSNARK proofs and primitives.
## Build and Test
To build and test, run the following commands within the module folder
```bash
cargo make build
cargo make test
```

rln-wasm/.cargo/config Normal file
View File

@@ -0,0 +1,5 @@
[target.wasm32-unknown-unknown]
rustflags = ["-C", "target-feature=+atomics,+bulk-memory,+mutable-globals"]
[unstable]
build-std = ["panic_abort", "std"]

View File

@@ -11,12 +11,13 @@ crate-type = ["cdylib", "rlib"]
default = ["console_error_panic_hook"]
[dependencies]
rln = { path = "../rln", default-features = false, features = ["wasm"] }
rln = { path = "../rln", default-features = false, features = ["wasm", "parallel"] }
num-bigint = { version = "0.4", default-features = false, features = ["rand", "serde"] }
wasmer = { version = "2.3", default-features = false, features = ["js", "std"] }
wasmer = { version = "3.1.1", default-features = false, features = ["js", "std"] }
web-sys = {version = "0.3", features=["console"]}
getrandom = { version = "0.2.7", default-features = false, features = ["js"] }
wasm-bindgen = "0.2.63"
wasm-bindgen-rayon = "1.0.3"
serde-wasm-bindgen = "0.4"
js-sys = "0.3.59"
serde_json = "1.0.85"
@@ -31,3 +32,6 @@ console_error_panic_hook = { version = "0.1.7", optional = true }
wasm-bindgen-test = "0.3.13"
wasm-bindgen-futures = "0.4.33"
[profile.release]
# Tell `rustc` to optimize for small code size.
opt-level = "s"

View File

@@ -14,7 +14,7 @@ dependencies = [
[tasks.test]
command = "wasm-pack"
args = ["test", "--release", "--node"]
args = ["test", "--release", "--firefox", "--headless"]
dependencies = ["build"]
[tasks.login]

View File

@@ -10,12 +10,6 @@ curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh
```
cargo install cargo-make
```
OR
```
make installdeps
```
3. Compile zerokit for `wasm32-unknown-unknown`:
```
cd rln-wasm

rln-wasm/rust-toolchain Normal file
View File

@@ -0,0 +1 @@
nightly-2022-12-12

View File

@@ -8,6 +8,8 @@ use num_bigint::BigInt;
use rln::public::RLN;
use wasm_bindgen::prelude::*;
pub use wasm_bindgen_rayon::init_thread_pool;
#[wasm_bindgen]
pub fn init_panic_hook() {
console_error_panic_hook::set_once();
@@ -22,30 +24,21 @@ pub struct RLNWrapper {
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = newRLN)]
pub fn wasm_new(
tree_height: usize,
zkey: Uint8Array,
vk: Uint8Array,
) -> Result<*mut RLNWrapper, String> {
let instance = RLN::new_with_params(tree_height, zkey.to_vec(), vk.to_vec())
.map_err(|err| format!("{:#?}", err))?;
pub fn wasm_new(tree_height: usize, zkey: Uint8Array, vk: Uint8Array) -> *mut RLNWrapper {
let instance = RLN::new_with_params(tree_height, zkey.to_vec(), vk.to_vec());
let wrapper = RLNWrapper { instance };
Ok(Box::into_raw(Box::new(wrapper)))
Box::into_raw(Box::new(wrapper))
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = getSerializedRLNWitness)]
pub fn wasm_get_serialized_rln_witness(
ctx: *mut RLNWrapper,
input: Uint8Array,
) -> Result<Uint8Array, String> {
pub fn wasm_get_serialized_rln_witness(ctx: *mut RLNWrapper, input: Uint8Array) -> Uint8Array {
let wrapper = unsafe { &mut *ctx };
let rln_witness = wrapper
.instance
.get_serialized_rln_witness(&input.to_vec()[..])
.map_err(|err| format!("{:#?}", err))?;
.get_serialized_rln_witness(&input.to_vec()[..]);
Ok(Uint8Array::from(&rln_witness[..]))
Uint8Array::from(&rln_witness[..])
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
@@ -95,18 +88,16 @@ pub fn wasm_init_tree_with_leaves(ctx: *mut RLNWrapper, input: Uint8Array) -> Re
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = RLNWitnessToJson)]
pub fn rln_witness_to_json(
ctx: *mut RLNWrapper,
serialized_witness: Uint8Array,
) -> Result<Object, String> {
pub fn rln_witness_to_json(ctx: *mut RLNWrapper, serialized_witness: Uint8Array) -> Object {
let wrapper = unsafe { &mut *ctx };
let inputs = wrapper
.instance
.get_rln_witness_json(&serialized_witness.to_vec()[..])
.map_err(|err| err.to_string())?;
.unwrap();
let js_value = serde_wasm_bindgen::to_value(&inputs).map_err(|err| err.to_string())?;
Object::from_entries(&js_value).map_err(|err| format!("{:#?}", err))
let js_value = serde_wasm_bindgen::to_value(&inputs).unwrap();
let obj = Object::from_entries(&js_value);
obj.unwrap()
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
@@ -118,18 +109,17 @@ pub fn generate_rln_proof_with_witness(
) -> Result<Uint8Array, String> {
let wrapper = unsafe { &mut *ctx };
let mut witness_vec: Vec<BigInt> = vec![];
for v in calculated_witness {
witness_vec.push(
let witness_vec: Vec<BigInt> = calculated_witness
.iter()
.map(|v| {
v.to_string(10)
.map_err(|err| format!("{:#?}", err))?
.unwrap()
.as_string()
.ok_or("not a string error")?
.unwrap()
.parse::<BigInt>()
.map_err(|err| format!("{:#?}", err))?,
);
}
.unwrap()
})
.collect();
let mut output_data: Vec<u8> = Vec::new();

View File

@@ -1,4 +1,5 @@
const fs = require("fs");
const rln_wasm = require("/pkg/rln_wasm.js");
// Utils functions for loading circom witness calculator and reading files from test
@@ -7,8 +8,12 @@ module.exports = {
return fs.readFileSync(path);
},
calculateWitness: async function(circom_path, inputs){
const wc = require("resources/witness_calculator.js");
initWasm: async function() {
await rln_wasm();
},
calculateWitness: async function(circom_path, inputs) {
const wc = require("/resources/witness_calculator.js");
const wasmFile = fs.readFileSync(circom_path);
const wasmFileBuffer = wasmFile.slice(wasmFile.byteOffset, wasmFile.byteOffset + wasmFile.byteLength);
const witnessCalculator = await wc(wasmFileBuffer);

View File

@@ -4,23 +4,31 @@
mod tests {
use js_sys::{BigInt as JsBigInt, Object, Uint8Array};
use rln::circuit::TEST_TREE_HEIGHT;
use rln::utils::normalize_usize;
use rln_wasm::*;
use wasm_bindgen::prelude::*;
use wasm_bindgen::JsValue;
use wasm_bindgen_test::wasm_bindgen_test;
use wasm_bindgen_rayon::init_thread_pool;
#[wasm_bindgen(module = "src/utils.js")]
wasm_bindgen_test::wasm_bindgen_test_configure!(run_in_browser);
#[wasm_bindgen(module = "/src/utils.js")]
extern "C" {
#[wasm_bindgen(catch)]
fn read_file(path: &str) -> Result<Uint8Array, JsValue>;
#[wasm_bindgen(catch)]
async fn calculateWitness(circom_path: &str, input: Object) -> Result<JsValue, JsValue>;
async fn initWasm() -> Result<(), JsValue>;
#[wasm_bindgen(catch)]
async fn calculateWitness(circom_path: &str, inputs: Object) -> Result<JsValue, JsValue>;
}
#[wasm_bindgen_test]
pub async fn test_basic_flow() {
initWasm().await.unwrap();
wasm_bindgen_futures::JsFuture::from(init_thread_pool(4)).await.unwrap();
let tree_height = TEST_TREE_HEIGHT;
let circom_path = format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/rln.wasm");
let zkey_path = format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/rln_final.zkey");
@@ -42,19 +50,20 @@ mod tests {
// Prepare the message
let signal = "Hello World".as_bytes();
let signal_len: u64 = signal.len() as u64;
// Setting up the epoch (With 0s for the test)
let epoch = Uint8Array::new_with_length(32);
epoch.fill(0, 0, 32);
let identity_index: usize = 0;
let identity_index: u64 = 0;
// Serializing the message
let mut serialized_vec: Vec<u8> = Vec::new();
serialized_vec.append(&mut idkey.to_vec());
serialized_vec.append(&mut normalize_usize(identity_index));
serialized_vec.append(&mut identity_index.to_le_bytes().to_vec());
serialized_vec.append(&mut epoch.to_vec());
serialized_vec.append(&mut normalize_usize(signal.len()));
serialized_vec.append(&mut signal_len.to_le_bytes().to_vec());
serialized_vec.append(&mut signal.to_vec());
let serialized_message = Uint8Array::from(&serialized_vec[..]);
@@ -88,7 +97,7 @@ mod tests {
// Add signal_len | signal
let mut proof_bytes = proof.to_vec();
proof_bytes.append(&mut normalize_usize(signal.len()));
proof_bytes.append(&mut signal_len.to_le_bytes().to_vec());
proof_bytes.append(&mut signal.to_vec());
let proof_with_signal = Uint8Array::from(&proof_bytes[..]);

View File

@@ -2,10 +2,9 @@
name = "rln"
version = "0.1.0"
edition = "2021"
license = "MIT OR Apache-2.0"
[lib]
crate-type = ["rlib", "staticlib"]
crate-type = ["cdylib", "rlib", "staticlib"]
# This flag disables cargo doctests, i.e. testing example code snippets in documentation
doctest = false
@@ -13,40 +12,38 @@ doctest = false
[dependencies]
# ZKP Generation
ark-ec = { version = "=0.3.0", default-features = false }
ark-ff = { version = "=0.3.0", default-features = false, features = [ "asm"] }
ark-std = { version = "=0.3.0", default-features = false }
ark-bn254 = { version = "=0.3.0" }
ark-ec = { version = "0.3.0", default-features = false }
ark-ff = { version = "0.3.0", default-features = false, features = [ "asm"] }
ark-std = { version = "0.3.0", default-features = false }
ark-bn254 = { version = "0.3.0" }
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", default-features = false }
ark-relations = { version = "=0.3.0", default-features = false, features = [ "std" ] }
ark-serialize = { version = "=0.3.0", default-features = false }
ark-circom = { git = "https://github.com/vacp2p/ark-circom", rev = "0e587145cb05e08b2d1a01509eb578670088eb2f", default-features = false, features = ["circom-2"] }
ark-relations = { version = "0.3.0", default-features = false, features = [ "std" ] }
ark-serialize = { version = "0.3.0", default-features = false }
ark-circom = { git = "https://github.com/vacp2p/ark-circom", branch = "wasm", default-features = false, features = ["circom-2"] }
#ark-circom = { git = "https://github.com/vacp2p/ark-circom", branch = "no-ethers-core", features = ["circom-2"] }
# WASM
wasmer = { version = "2.3.0", default-features = false }
# error handling
color-eyre = "=0.6.2"
thiserror = "=1.0.38"
color-eyre = "0.5.11"
thiserror = "1.0.0"
# utilities
cfg-if = "=1.0"
num-bigint = { version = "=0.4.3", default-features = false, features = ["rand"] }
cfg-if = "1.0"
num-bigint = { version = "0.4.3", default-features = false, features = ["rand"] }
num-traits = "0.2.11"
once_cell = "1.14.0"
rand = "=0.8.5"
rand_chacha = "=0.3.1"
tiny-keccak = { version = "=2.0.2", features = ["keccak"] }
utils = { path = "../utils/", default-features = false }
pmtree = { git = "https://github.com/Rate-Limiting-Nullifier/pmtree", rev = "f6d1a1fecad72cd39e6808e78085091d541dc882", optional = true}
rand = "0.8"
rand_chacha = "0.3.1"
tiny-keccak = { version = "2.0.2", features = ["keccak"] }
utils = { path = "../utils", default-features = false }
# serialization
serde_json = "1.0.48"
include_dir = "=0.7.3"
[dev-dependencies]
sled = "=0.34.7"
pmtree = { git = "https://github.com/Rate-Limiting-Nullifier/pmtree" }
[features]
default = ["parallel", "wasmer/sys-default"]
@@ -55,4 +52,4 @@ wasm = ["wasmer/js", "wasmer/std"]
fullmerkletree = ["default"]
# Note: pmtree feature is still experimental
pmtree-ft = ["default", "pmtree"]
pmtree = ["default"]

View File

@@ -3,21 +3,15 @@
This module provides APIs to manage, compute and verify [RLN](https://rfc.vac.dev/spec/32/) zkSNARK proofs and RLN primitives.
## Pre-requisites
### Install dependencies and clone repo
### Install
```sh
make installdeps
git clone https://github.com/vacp2p/zerokit.git
cd zerokit/rln
```
Implemented tests can be executed by running the following command within the module folder
### Build and Test
To build and test, run the following commands within the module folder
```bash
cargo make build
cargo make test
```
`cargo test --release`
### Compile ZK circuits

View File

@@ -8,10 +8,10 @@ use ark_circom::read_zkey;
use ark_groth16::{ProvingKey, VerifyingKey};
use ark_relations::r1cs::ConstraintMatrices;
use cfg_if::cfg_if;
use color_eyre::{Report, Result};
use num_bigint::BigUint;
use serde_json::Value;
use std::io::Cursor;
use std::fs::File;
use std::io::{Cursor, Error, ErrorKind, Result};
use std::path::Path;
use std::str::FromStr;
@@ -21,12 +21,11 @@ cfg_if! {
use once_cell::sync::OnceCell;
use std::sync::Mutex;
use wasmer::{Module, Store};
use include_dir::{include_dir, Dir};
}
}
const ZKEY_FILENAME: &str = "rln_final.zkey";
const VK_FILENAME: &str = "verification_key.json";
const VK_FILENAME: &str = "verifying_key.json";
const WASM_FILENAME: &str = "rln.wasm";
// These parameters are used for tests
@@ -34,11 +33,11 @@ const WASM_FILENAME: &str = "rln.wasm";
// Changing these parameters to values other than these defaults will cause zkSNARK proof verification to fail
pub const TEST_PARAMETERS_INDEX: usize = 2;
pub const TEST_TREE_HEIGHT: usize = [15, 19, 20][TEST_PARAMETERS_INDEX];
pub const TEST_RESOURCES_FOLDER: &str =
["tree_height_15", "tree_height_19", "tree_height_20"][TEST_PARAMETERS_INDEX];
#[cfg(not(target_arch = "wasm32"))]
static RESOURCES_DIR: Dir<'_> = include_dir!("$CARGO_MANIFEST_DIR/resources");
pub const TEST_RESOURCES_FOLDER: &str = [
"./resources/tree_height_15/",
"./resources/tree_height_19/",
"./resources/tree_height_20/",
][TEST_PARAMETERS_INDEX];
// The following types define the pairing-friendly elliptic curve and the underlying finite fields and groups this module defaults to
// Note that proofs are serialized assuming Fr to be 4x8 = 32 bytes in size. Hence, changing to a curve with a different encoding will cause proof verification to fail
@@ -58,22 +57,21 @@ pub fn zkey_from_raw(zkey_data: &Vec<u8>) -> Result<(ProvingKey<Curve>, Constrai
let proving_key_and_matrices = read_zkey(&mut c)?;
Ok(proving_key_and_matrices)
} else {
Err(Report::msg("No proving key found!"))
Err(Error::new(ErrorKind::NotFound, "No proving key found!"))
}
}
// Loads the proving key
#[cfg(not(target_arch = "wasm32"))]
pub fn zkey_from_folder(
resources_folder: &str,
) -> Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)> {
let zkey = RESOURCES_DIR.get_file(Path::new(resources_folder).join(ZKEY_FILENAME));
if let Some(zkey) = zkey {
let mut c = Cursor::new(zkey.contents());
let proving_key_and_matrices = read_zkey(&mut c)?;
let zkey_path = format!("{resources_folder}{ZKEY_FILENAME}");
if Path::new(&zkey_path).exists() {
let mut file = File::open(&zkey_path)?;
let proving_key_and_matrices = read_zkey(&mut file)?;
Ok(proving_key_and_matrices)
} else {
Err(Report::msg("No proving key found!"))
Err(Error::new(ErrorKind::NotFound, "No proving key found!"))
}
}
@@ -82,35 +80,39 @@ pub fn vk_from_raw(vk_data: &Vec<u8>, zkey_data: &Vec<u8>) -> Result<VerifyingKe
let verifying_key: VerifyingKey<Curve>;
if !vk_data.is_empty() {
verifying_key = vk_from_vector(vk_data)?;
verifying_key = vk_from_vector(vk_data);
Ok(verifying_key)
} else if !zkey_data.is_empty() {
let (proving_key, _matrices) = zkey_from_raw(zkey_data)?;
verifying_key = proving_key.vk;
Ok(verifying_key)
} else {
Err(Report::msg("No proving/verification key found!"))
Err(Error::new(
ErrorKind::NotFound,
"No proving/verification key found!",
))
}
}
// Loads the verification key
#[cfg(not(target_arch = "wasm32"))]
pub fn vk_from_folder(resources_folder: &str) -> Result<VerifyingKey<Curve>> {
let vk = RESOURCES_DIR.get_file(Path::new(resources_folder).join(VK_FILENAME));
let zkey = RESOURCES_DIR.get_file(Path::new(resources_folder).join(ZKEY_FILENAME));
let vk_path = format!("{resources_folder}{VK_FILENAME}");
let zkey_path = format!("{resources_folder}{ZKEY_FILENAME}");
let verifying_key: VerifyingKey<Curve>;
if let Some(vk) = vk {
verifying_key = vk_from_json(vk.contents_utf8().ok_or(Report::msg(
"Could not read verification key from JSON file!",
))?)?;
if Path::new(&vk_path).exists() {
verifying_key = vk_from_json(&vk_path);
Ok(verifying_key)
} else if let Some(_zkey) = zkey {
} else if Path::new(&zkey_path).exists() {
let (proving_key, _matrices) = zkey_from_folder(resources_folder)?;
verifying_key = proving_key.vk;
Ok(verifying_key)
} else {
Err(Report::msg("No proving/verification key found!"))
Err(Error::new(
ErrorKind::NotFound,
"No proving/verification key found!",
))
}
}
@@ -119,150 +121,129 @@ static WITNESS_CALCULATOR: OnceCell<Mutex<WitnessCalculator>> = OnceCell::new();
// Initializes the witness calculator using a bytes vector
#[cfg(not(target_arch = "wasm32"))]
pub fn circom_from_raw(wasm_buffer: Vec<u8>) -> Result<&'static Mutex<WitnessCalculator>> {
WITNESS_CALCULATOR.get_or_try_init(|| {
pub fn circom_from_raw(wasm_buffer: Vec<u8>) -> &'static Mutex<WitnessCalculator> {
WITNESS_CALCULATOR.get_or_init(|| {
let store = Store::default();
let module = Module::new(&store, wasm_buffer)?;
let result = WitnessCalculator::from_module(module)?;
Ok::<Mutex<WitnessCalculator>, Report>(Mutex::new(result))
let module = Module::new(&store, wasm_buffer).unwrap();
let result =
WitnessCalculator::from_module(module).expect("Failed to create witness calculator");
Mutex::new(result)
})
}
// Initializes the witness calculator
#[cfg(not(target_arch = "wasm32"))]
pub fn circom_from_folder(resources_folder: &str) -> Result<&'static Mutex<WitnessCalculator>> {
pub fn circom_from_folder(resources_folder: &str) -> &'static Mutex<WitnessCalculator> {
// We read the wasm file
let wasm = RESOURCES_DIR.get_file(Path::new(resources_folder).join(WASM_FILENAME));
if let Some(wasm) = wasm {
let wasm_buffer = wasm.contents();
circom_from_raw(wasm_buffer.to_vec())
} else {
Err(Report::msg("No wasm file found!"))
}
let wasm_path = format!("{resources_folder}{WASM_FILENAME}");
let wasm_buffer = std::fs::read(&wasm_path).unwrap();
circom_from_raw(wasm_buffer)
}
// The following function implementations are taken/adapted from https://github.com/gakonst/ark-circom/blob/1732e15d6313fe176b0b1abb858ac9e095d0dbd7/src/zkey.rs
// Utilities to convert a json verification key in a groth16::VerificationKey
fn fq_from_str(s: &str) -> Result<Fq> {
Ok(Fq::try_from(BigUint::from_str(s)?)?)
fn fq_from_str(s: &str) -> Fq {
Fq::try_from(BigUint::from_str(s).unwrap()).unwrap()
}
// Extracts the element in G1 corresponding to its JSON serialization
fn json_to_g1(json: &Value, key: &str) -> Result<G1Affine> {
fn json_to_g1(json: &Value, key: &str) -> G1Affine {
let els: Vec<String> = json
.get(key)
.ok_or(Report::msg("no json value"))?
.unwrap()
.as_array()
.ok_or(Report::msg("value not an array"))?
.unwrap()
.iter()
.map(|i| i.as_str().ok_or(Report::msg("element is not a string")))
.map(|x| x.map(|v| v.to_owned()))
.collect::<Result<Vec<String>>>()?;
Ok(G1Affine::from(G1Projective::new(
fq_from_str(&els[0])?,
fq_from_str(&els[1])?,
fq_from_str(&els[2])?,
)))
.map(|i| i.as_str().unwrap().to_string())
.collect();
G1Affine::from(G1Projective::new(
fq_from_str(&els[0]),
fq_from_str(&els[1]),
fq_from_str(&els[2]),
))
}
// Extracts the vector of G1 elements corresponding to its JSON serialization
fn json_to_g1_vec(json: &Value, key: &str) -> Result<Vec<G1Affine>> {
fn json_to_g1_vec(json: &Value, key: &str) -> Vec<G1Affine> {
let els: Vec<Vec<String>> = json
.get(key)
.ok_or(Report::msg("no json value"))?
.unwrap()
.as_array()
.ok_or(Report::msg("value not an array"))?
.unwrap()
.iter()
.map(|i| {
i.as_array()
.ok_or(Report::msg("element is not an array"))
.and_then(|array| {
array
.iter()
.map(|x| x.as_str().ok_or(Report::msg("element is not a string")))
.map(|x| x.map(|v| v.to_owned()))
.collect::<Result<Vec<String>>>()
})
.unwrap()
.iter()
.map(|x| x.as_str().unwrap().to_string())
.collect::<Vec<String>>()
})
.collect::<Result<Vec<Vec<String>>>>()?;
.collect();
let mut res = vec![];
for coords in els {
res.push(G1Affine::from(G1Projective::new(
fq_from_str(&coords[0])?,
fq_from_str(&coords[1])?,
fq_from_str(&coords[2])?,
)))
}
Ok(res)
els.iter()
.map(|coords| {
G1Affine::from(G1Projective::new(
fq_from_str(&coords[0]),
fq_from_str(&coords[1]),
fq_from_str(&coords[2]),
))
})
.collect()
}
// Extracts the element in G2 corresponding to its JSON serialization
fn json_to_g2(json: &Value, key: &str) -> Result<G2Affine> {
fn json_to_g2(json: &Value, key: &str) -> G2Affine {
let els: Vec<Vec<String>> = json
.get(key)
.ok_or(Report::msg("no json value"))?
.unwrap()
.as_array()
.ok_or(Report::msg("value not an array"))?
.unwrap()
.iter()
.map(|i| {
i.as_array()
.ok_or(Report::msg("element is not an array"))
.and_then(|array| {
array
.iter()
.map(|x| x.as_str().ok_or(Report::msg("element is not a string")))
.map(|x| x.map(|v| v.to_owned()))
.collect::<Result<Vec<String>>>()
})
.unwrap()
.iter()
.map(|x| x.as_str().unwrap().to_string())
.collect::<Vec<String>>()
})
.collect::<Result<Vec<Vec<String>>>>()?;
.collect();
let x = Fq2::new(fq_from_str(&els[0][0])?, fq_from_str(&els[0][1])?);
let y = Fq2::new(fq_from_str(&els[1][0])?, fq_from_str(&els[1][1])?);
let z = Fq2::new(fq_from_str(&els[2][0])?, fq_from_str(&els[2][1])?);
Ok(G2Affine::from(G2Projective::new(x, y, z)))
let x = Fq2::new(fq_from_str(&els[0][0]), fq_from_str(&els[0][1]));
let y = Fq2::new(fq_from_str(&els[1][0]), fq_from_str(&els[1][1]));
let z = Fq2::new(fq_from_str(&els[2][0]), fq_from_str(&els[2][1]));
G2Affine::from(G2Projective::new(x, y, z))
}
// Converts JSON to a VerifyingKey
fn to_verifying_key(json: serde_json::Value) -> Result<VerifyingKey<Curve>> {
Ok(VerifyingKey {
alpha_g1: json_to_g1(&json, "vk_alpha_1")?,
beta_g2: json_to_g2(&json, "vk_beta_2")?,
gamma_g2: json_to_g2(&json, "vk_gamma_2")?,
delta_g2: json_to_g2(&json, "vk_delta_2")?,
gamma_abc_g1: json_to_g1_vec(&json, "IC")?,
})
fn to_verifying_key(json: serde_json::Value) -> VerifyingKey<Curve> {
VerifyingKey {
alpha_g1: json_to_g1(&json, "vk_alpha_1"),
beta_g2: json_to_g2(&json, "vk_beta_2"),
gamma_g2: json_to_g2(&json, "vk_gamma_2"),
delta_g2: json_to_g2(&json, "vk_delta_2"),
gamma_abc_g1: json_to_g1_vec(&json, "IC"),
}
}
// Computes the verification key from its JSON serialization
fn vk_from_json(vk: &str) -> Result<VerifyingKey<Curve>> {
let json: Value = serde_json::from_str(vk)?;
fn vk_from_json(vk_path: &str) -> VerifyingKey<Curve> {
let json = std::fs::read_to_string(vk_path).unwrap();
let json: Value = serde_json::from_str(&json).unwrap();
to_verifying_key(json)
}
// Computes the verification key from a bytes vector containing its JSON serialization
fn vk_from_vector(vk: &[u8]) -> Result<VerifyingKey<Curve>> {
let json = String::from_utf8(vk.to_vec())?;
let json: Value = serde_json::from_str(&json)?;
fn vk_from_vector(vk: &[u8]) -> VerifyingKey<Curve> {
let json = String::from_utf8(vk.to_vec()).expect("Found invalid UTF-8");
let json: Value = serde_json::from_str(&json).unwrap();
to_verifying_key(json)
}
// Checks verification key to be correct with respect to proving key
#[cfg(not(target_arch = "wasm32"))]
pub fn check_vk_from_zkey(
resources_folder: &str,
verifying_key: VerifyingKey<Curve>,
) -> Result<()> {
let (proving_key, _matrices) = zkey_from_folder(resources_folder)?;
if proving_key.vk == verifying_key {
Ok(())
} else {
Err(Report::msg("verifying_keys are not equal"))
}
pub fn check_vk_from_zkey(resources_folder: &str, verifying_key: VerifyingKey<Curve>) {
let (proving_key, _matrices) = zkey_from_folder(resources_folder).unwrap();
assert_eq!(proving_key.vk, verifying_key);
}

View File

@@ -2,7 +2,7 @@
use std::slice;
use crate::public::{hash as public_hash, poseidon_hash as public_poseidon_hash, RLN};
use crate::public::RLN;
// Macro to call methods with an arbitrary number of arguments,
// First argument to the macro is context,
@@ -53,28 +53,6 @@ macro_rules! call_with_output_arg {
false
}
}
};
}
// Macro to call methods with an arbitrary number of arguments,
// which are not implemented in a ctx RLN object
// First argument is the method to call
// Second argument is the output buffer argument
// The remaining arguments are all other inputs to the method
macro_rules! no_ctx_call_with_output_arg {
($method:ident, $output_arg:expr, $( $arg:expr ),* ) => {
{
let mut output_data: Vec<u8> = Vec::new();
if $method($($arg.process()),*, &mut output_data).is_ok() {
unsafe { *$output_arg = Buffer::from(&output_data[..]) };
std::mem::forget(output_data);
true
} else {
std::mem::forget(output_data);
false
}
}
}
}
@@ -171,12 +149,9 @@ impl<'a> From<&Buffer> for &'a [u8] {
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn new(tree_height: usize, input_buffer: *const Buffer, ctx: *mut *mut RLN) -> bool {
if let Ok(rln) = RLN::new(tree_height, input_buffer.process()) {
unsafe { *ctx = Box::into_raw(Box::new(rln)) };
true
} else {
false
}
let rln = RLN::new(tree_height, input_buffer.process());
unsafe { *ctx = Box::into_raw(Box::new(rln)) };
true
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
@@ -188,17 +163,14 @@ pub extern "C" fn new_with_params(
vk_buffer: *const Buffer,
ctx: *mut *mut RLN,
) -> bool {
if let Ok(rln) = RLN::new_with_params(
let rln = RLN::new_with_params(
tree_height,
circom_buffer.process().to_vec(),
zkey_buffer.process().to_vec(),
vk_buffer.process().to_vec(),
) {
unsafe { *ctx = Box::into_raw(Box::new(rln)) };
true
} else {
false
}
);
unsafe { *ctx = Box::into_raw(Box::new(rln)) };
true
}
////////////////////////////////////////////////////////
@@ -370,12 +342,10 @@ pub extern "C" fn recover_id_secret(
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn hash(input_buffer: *const Buffer, output_buffer: *mut Buffer) -> bool {
no_ctx_call_with_output_arg!(public_hash, output_buffer, input_buffer)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn poseidon_hash(input_buffer: *const Buffer, output_buffer: *mut Buffer) -> bool {
no_ctx_call_with_output_arg!(public_poseidon_hash, output_buffer, input_buffer)
pub extern "C" fn hash(
ctx: *mut RLN,
input_buffer: *const Buffer,
output_buffer: *mut Buffer,
) -> bool {
call_with_output_arg!(ctx, hash, output_buffer, input_buffer)
}

View File

@@ -7,11 +7,6 @@ use crate::poseidon_hash::poseidon_hash;
use cfg_if::cfg_if;
use utils::merkle_tree::*;
#[cfg(feature = "pmtree-ft")]
use crate::utils::{bytes_le_to_fr, fr_to_bytes_le};
#[cfg(feature = "pmtree-ft")]
use pmtree::*;
// The zerokit RLN default Merkle tree implementation is the OptimalMerkleTree.
// To switch to FullMerkleTree implementation, it is enough to enable the fullmerkletree feature
@@ -41,26 +36,3 @@ impl utils::merkle_tree::Hasher for PoseidonHash {
poseidon_hash(inputs)
}
}
#[cfg(feature = "pmtree-ft")]
// The pmtree Hasher trait used by pmtree Merkle tree
impl pmtree::Hasher for PoseidonHash {
type Fr = Fr;
fn default_leaf() -> Self::Fr {
Fr::from(0)
}
fn serialize(value: Self::Fr) -> Value {
fr_to_bytes_le(&value)
}
fn deserialize(value: Value) -> Self::Fr {
let (fr, _) = bytes_le_to_fr(&value);
fr
}
fn hash(inputs: &[Self::Fr]) -> Self::Fr {
poseidon_hash(inputs)
}
}

View File

@@ -8,7 +8,7 @@ use ark_groth16::{
use ark_relations::r1cs::ConstraintMatrices;
use ark_relations::r1cs::SynthesisError;
use ark_std::{rand::thread_rng, UniformRand};
use color_eyre::{Report, Result};
use color_eyre::Result;
use num_bigint::BigInt;
use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha20Rng;
@@ -53,84 +53,82 @@ pub struct RLNProofValues {
}
pub fn serialize_field_element(element: Fr) -> Vec<u8> {
fr_to_bytes_le(&element)
return fr_to_bytes_le(&element);
}
pub fn deserialize_field_element(serialized: Vec<u8>) -> Fr {
let (element, _) = bytes_le_to_fr(&serialized);
element
return element;
}
pub fn deserialize_identity_pair(serialized: Vec<u8>) -> (Fr, Fr) {
let (identity_secret_hash, read) = bytes_le_to_fr(&serialized);
let (id_commitment, _) = bytes_le_to_fr(&serialized[read..]);
let (id_commitment, _) = bytes_le_to_fr(&serialized[read..].to_vec());
(identity_secret_hash, id_commitment)
return (identity_secret_hash, id_commitment);
}
pub fn deserialize_identity_tuple(serialized: Vec<u8>) -> (Fr, Fr, Fr, Fr) {
let mut all_read = 0;
let (identity_trapdoor, read) = bytes_le_to_fr(&serialized[all_read..]);
let (identity_trapdoor, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (identity_nullifier, read) = bytes_le_to_fr(&serialized[all_read..]);
let (identity_nullifier, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (identity_secret_hash, read) = bytes_le_to_fr(&serialized[all_read..]);
let (identity_secret_hash, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (identity_commitment, _) = bytes_le_to_fr(&serialized[all_read..]);
let (identity_commitment, _) = bytes_le_to_fr(&serialized[all_read..].to_vec());
(
return (
identity_trapdoor,
identity_nullifier,
identity_secret_hash,
identity_commitment,
)
);
}
pub fn serialize_witness(rln_witness: &RLNWitnessInput) -> Result<Vec<u8>> {
pub fn serialize_witness(rln_witness: &RLNWitnessInput) -> Vec<u8> {
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&rln_witness.identity_secret));
serialized.append(&mut vec_fr_to_bytes_le(&rln_witness.path_elements)?);
serialized.append(&mut vec_u8_to_bytes_le(&rln_witness.identity_path_index)?);
serialized.append(&mut vec_fr_to_bytes_le(&rln_witness.path_elements));
serialized.append(&mut vec_u8_to_bytes_le(&rln_witness.identity_path_index));
serialized.append(&mut fr_to_bytes_le(&rln_witness.x));
serialized.append(&mut fr_to_bytes_le(&rln_witness.epoch));
serialized.append(&mut fr_to_bytes_le(&rln_witness.rln_identifier));
Ok(serialized)
serialized
}
pub fn deserialize_witness(serialized: &[u8]) -> Result<(RLNWitnessInput, usize)> {
pub fn deserialize_witness(serialized: &[u8]) -> (RLNWitnessInput, usize) {
let mut all_read: usize = 0;
let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..]);
let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (path_elements, read) = bytes_le_to_vec_fr(&serialized[all_read..])?;
let (path_elements, read) = bytes_le_to_vec_fr(&serialized[all_read..].to_vec());
all_read += read;
let (identity_path_index, read) = bytes_le_to_vec_u8(&serialized[all_read..])?;
let (identity_path_index, read) = bytes_le_to_vec_u8(&serialized[all_read..].to_vec());
all_read += read;
let (x, read) = bytes_le_to_fr(&serialized[all_read..]);
let (x, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..]);
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (rln_identifier, read) = bytes_le_to_fr(&serialized[all_read..]);
let (rln_identifier, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
// TODO: check rln_identifier against public::RLN_IDENTIFIER
if serialized.len() != all_read {
return Err(Report::msg("serialized length is not equal to all_read"));
}
assert_eq!(serialized.len(), all_read);
Ok((
(
RLNWitnessInput {
identity_secret,
path_elements,
@@ -140,7 +138,7 @@ pub fn deserialize_witness(serialized: &[u8]) -> Result<(RLNWitnessInput, usize)
rln_identifier,
},
all_read,
))
)
}
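A hedged smoke test for the pair of functions above, following the post-change signatures that panic instead of returning Result:
fn witness_roundtrip(witness: &RLNWitnessInput) {
    let bytes = serialize_witness(witness);
    let (_witness_back, read) = deserialize_witness(&bytes);
    // deserialize_witness asserts that every byte is consumed,
    // so read must match the serialized length exactly.
    assert_eq!(read, bytes.len());
}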
// This function deserializes input for kilic's rln generate_proof public API
@@ -150,24 +148,24 @@ pub fn deserialize_witness(serialized: &[u8]) -> Result<(RLNWitnessInput, usize)
pub fn proof_inputs_to_rln_witness(
tree: &mut PoseidonTree,
serialized: &[u8],
) -> Result<(RLNWitnessInput, usize)> {
) -> (RLNWitnessInput, usize) {
let mut all_read: usize = 0;
let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..]);
let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let id_index = usize::from_le_bytes(serialized[all_read..all_read + 8].try_into()?);
let id_index = u64::from_le_bytes(serialized[all_read..all_read + 8].try_into().unwrap());
all_read += 8;
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..]);
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let signal_len = usize::from_le_bytes(serialized[all_read..all_read + 8].try_into()?);
let signal_len = u64::from_le_bytes(serialized[all_read..all_read + 8].try_into().unwrap());
all_read += 8;
let signal: Vec<u8> = serialized[all_read..all_read + signal_len].to_vec();
let signal: Vec<u8> = serialized[all_read..all_read + (signal_len as usize)].to_vec();
let merkle_proof = tree.proof(id_index).expect("proof should exist");
let merkle_proof = tree.proof(id_index as usize).expect("proof should exist");
let path_elements = merkle_proof.get_path_elements();
let identity_path_index = merkle_proof.get_path_index();
@@ -175,7 +173,7 @@ pub fn proof_inputs_to_rln_witness(
let rln_identifier = hash_to_field(RLN_IDENTIFIER);
Ok((
(
RLNWitnessInput {
identity_secret,
path_elements,
@@ -185,48 +183,45 @@ pub fn proof_inputs_to_rln_witness(
rln_identifier,
},
all_read,
))
)
}
pub fn rln_witness_from_json(input_json_str: &str) -> Result<RLNWitnessInput> {
pub fn rln_witness_from_json(input_json_str: &str) -> RLNWitnessInput {
let input_json: serde_json::Value =
serde_json::from_str(input_json_str).expect("JSON was not well-formatted");
let identity_secret = str_to_fr(&input_json["identity_secret"].to_string(), 10)?;
let identity_secret = str_to_fr(&input_json["identity_secret"].to_string(), 10);
let path_elements = input_json["path_elements"]
.as_array()
.ok_or(Report::msg("not an array"))?
.unwrap()
.iter()
.map(|v| str_to_fr(&v.to_string(), 10))
.collect::<Result<_>>()?;
.collect();
let identity_path_index_array = input_json["identity_path_index"]
let identity_path_index = input_json["identity_path_index"]
.as_array()
.ok_or(Report::msg("not an arrray"))?;
.unwrap()
.iter()
.map(|v| v.as_u64().unwrap() as u8)
.collect();
let mut identity_path_index: Vec<u8> = vec![];
let x = str_to_fr(&input_json["x"].to_string(), 10);
for v in identity_path_index_array {
identity_path_index.push(v.as_u64().ok_or(Report::msg("not a u64 value"))? as u8);
}
let epoch = str_to_fr(&input_json["epoch"].to_string(), 16);
let x = str_to_fr(&input_json["x"].to_string(), 10)?;
let epoch = str_to_fr(&input_json["epoch"].to_string(), 16)?;
let rln_identifier = str_to_fr(&input_json["rln_identifier"].to_string(), 10)?;
let rln_identifier = str_to_fr(&input_json["rln_identifier"].to_string(), 10);
// TODO: check rln_identifier against public::RLN_IDENTIFIER
Ok(RLNWitnessInput {
RLNWitnessInput {
identity_secret,
path_elements,
identity_path_index,
x,
epoch,
rln_identifier,
})
}
}
pub fn rln_witness_from_values(
@@ -322,22 +317,22 @@ pub fn serialize_proof_values(rln_proof_values: &RLNProofValues) -> Vec<u8> {
pub fn deserialize_proof_values(serialized: &[u8]) -> (RLNProofValues, usize) {
let mut all_read: usize = 0;
let (root, read) = bytes_le_to_fr(&serialized[all_read..]);
let (root, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..]);
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (x, read) = bytes_le_to_fr(&serialized[all_read..]);
let (x, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (y, read) = bytes_le_to_fr(&serialized[all_read..]);
let (y, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (nullifier, read) = bytes_le_to_fr(&serialized[all_read..]);
let (nullifier, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
let (rln_identifier, read) = bytes_le_to_fr(&serialized[all_read..]);
let (rln_identifier, read) = bytes_le_to_fr(&serialized[all_read..].to_vec());
all_read += read;
(
@@ -359,26 +354,29 @@ pub fn prepare_prove_input(
epoch: Fr,
signal: &[u8],
) -> Vec<u8> {
let signal_len = u64::try_from(signal.len()).unwrap();
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret));
serialized.append(&mut normalize_usize(id_index));
serialized.append(&mut id_index.to_le_bytes().to_vec());
serialized.append(&mut fr_to_bytes_le(&epoch));
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut signal_len.to_le_bytes().to_vec());
serialized.append(&mut signal.to_vec());
serialized
return serialized;
}
#[allow(clippy::redundant_clone)]
pub fn prepare_verify_input(proof_data: Vec<u8>, signal: &[u8]) -> Vec<u8> {
let signal_len = u64::try_from(signal.len()).unwrap();
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut proof_data.clone());
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut signal_len.to_le_bytes().to_vec());
serialized.append(&mut signal.to_vec());
serialized
return serialized;
}
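Both helpers pin the length prefix to eight bytes by converting through u64 instead of writing usize directly; a minimal sketch of the rationale, assuming a 32-bit target such as wasm32:
// usize::to_le_bytes() yields 4 bytes on wasm32 but 8 on x86_64;
// going through u64 fixes the wire format at 8 bytes everywhere.
fn length_prefix(len: usize) -> Vec<u8> {
    let len = u64::try_from(len).expect("length fits in u64");
    len.to_le_bytes().to_vec()
}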
///////////////////////////////////////////////////////
@@ -521,9 +519,9 @@ pub fn compute_id_secret(
if a_1 == computed_a_1 {
// We successfully recovered the identity secret
Ok(a_0)
return Ok(a_0);
} else {
Err("Cannot recover identity_secret_hash from provided shares".into())
return Err("Cannot recover identity_secret_hash from provided shares".into());
}
}
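For context, this is the standard two-share recovery on a degree-one polynomial y = a_1 * x + a_0 over the field; a hedged sketch of the arithmetic, not necessarily the crate's exact code:
fn recover_line(x1: Fr, y1: Fr, x2: Fr, y2: Fr) -> (Fr, Fr) {
    let a_1 = (y2 - y1) / (x2 - x1); // slope; undefined if x1 == x2
    let a_0 = y1 - a_1 * x1;         // intercept, i.e. the identity secret
    (a_0, a_1)
}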
@@ -534,9 +532,9 @@ pub fn compute_id_secret(
#[derive(Error, Debug)]
pub enum ProofError {
#[error("Error reading circuit key: {0}")]
CircuitKeyError(#[from] Report),
CircuitKeyError(#[from] std::io::Error),
#[error("Error producing witness: {0}")]
WitnessError(Report),
WitnessError(color_eyre::Report),
#[error("Error producing proof: {0}")]
SynthesisError(#[from] SynthesisError),
}
@@ -547,21 +545,20 @@ fn calculate_witness_element<E: ark_ec::PairingEngine>(witness: Vec<BigInt>) ->
// convert it to field elements
use num_traits::Signed;
let mut witness_vec = vec![];
for w in witness.into_iter() {
let w = if w.sign() == num_bigint::Sign::Minus {
// Need to negate the witness element if negative
modulus.into()
- w.abs()
.to_biguint()
.ok_or(Report::msg("not a biguint value"))?
} else {
w.to_biguint().ok_or(Report::msg("not a biguint value"))?
};
witness_vec.push(E::Fr::from(w))
}
let witness = witness
.into_iter()
.map(|w| {
let w = if w.sign() == num_bigint::Sign::Minus {
// Need to negate the witness element if negative
modulus.into() - w.abs().to_biguint().unwrap()
} else {
w.to_biguint().unwrap()
};
E::Fr::from(w)
})
.collect::<Vec<_>>();
Ok(witness_vec)
Ok(witness)
}
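A toy illustration of the sign normalization applied above, using a small modulus in place of the curve's scalar field:
// Map a signed witness value into [0, p), assuming |w| < p.
fn into_field(w: i64, p: u64) -> u64 {
    if w < 0 {
        p - w.unsigned_abs() // e.g. with p = 97, -3 maps to 94
    } else {
        w as u64
    }
}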
pub fn generate_proof_with_witness(
@@ -572,8 +569,9 @@ pub fn generate_proof_with_witness(
#[cfg(debug_assertions)]
let now = Instant::now();
let full_assignment =
calculate_witness_element::<Curve>(witness).map_err(ProofError::WitnessError)?;
let full_assignment = calculate_witness_element::<Curve>(witness)
.map_err(ProofError::WitnessError)
.unwrap();
#[cfg(debug_assertions)]
println!("witness generation took: {:.2?}", now.elapsed());
@@ -595,7 +593,8 @@ pub fn generate_proof_with_witness(
proving_key.1.num_instance_variables,
proving_key.1.num_constraints,
full_assignment.as_slice(),
)?;
)
.unwrap();
#[cfg(debug_assertions)]
println!("proof generation took: {:.2?}", now.elapsed());
@@ -603,16 +602,14 @@ pub fn generate_proof_with_witness(
Ok(proof)
}
pub fn inputs_for_witness_calculation(
rln_witness: &RLNWitnessInput,
) -> Result<[(&str, Vec<BigInt>); 6]> {
pub fn inputs_for_witness_calculation(rln_witness: &RLNWitnessInput) -> [(&str, Vec<BigInt>); 6] {
// We convert the path indexes to field elements
// TODO: check if necessary
let mut path_elements = Vec::new();
for v in rln_witness.path_elements.iter() {
path_elements.push(to_bigint(v)?);
}
rln_witness
.path_elements
.iter()
.for_each(|v| path_elements.push(to_bigint(v)));
let mut identity_path_index = Vec::new();
rln_witness
@@ -620,20 +617,20 @@ pub fn inputs_for_witness_calculation(
.iter()
.for_each(|v| identity_path_index.push(BigInt::from(*v)));
Ok([
[
(
"identity_secret",
vec![to_bigint(&rln_witness.identity_secret)?],
vec![to_bigint(&rln_witness.identity_secret)],
),
("path_elements", path_elements),
("identity_path_index", identity_path_index),
("x", vec![to_bigint(&rln_witness.x)?]),
("epoch", vec![to_bigint(&rln_witness.epoch)?]),
("x", vec![to_bigint(&rln_witness.x)]),
("epoch", vec![to_bigint(&rln_witness.epoch)]),
(
"rln_identifier",
vec![to_bigint(&rln_witness.rln_identifier)?],
vec![to_bigint(&rln_witness.rln_identifier)],
),
])
]
}
/// Generates a RLN proof
@@ -647,7 +644,7 @@ pub fn generate_proof(
proving_key: &(ProvingKey<Curve>, ConstraintMatrices<Fr>),
rln_witness: &RLNWitnessInput,
) -> Result<ArkProof<Curve>, ProofError> {
let inputs = inputs_for_witness_calculation(rln_witness)?
let inputs = inputs_for_witness_calculation(rln_witness)
.into_iter()
.map(|(name, values)| (name.to_string(), values));
@@ -738,12 +735,12 @@ pub fn verify_proof(
///
/// Returns a JSON object containing the inputs necessary to calculate
/// the witness with CIRCOM on javascript
pub fn get_json_inputs(rln_witness: &RLNWitnessInput) -> Result<serde_json::Value> {
pub fn get_json_inputs(rln_witness: &RLNWitnessInput) -> serde_json::Value {
let mut path_elements = Vec::new();
for v in rln_witness.path_elements.iter() {
path_elements.push(to_bigint(v)?.to_str_radix(10));
}
rln_witness
.path_elements
.iter()
.for_each(|v| path_elements.push(to_bigint(v).to_str_radix(10)));
let mut identity_path_index = Vec::new();
rln_witness
@@ -752,13 +749,13 @@ pub fn get_json_inputs(rln_witness: &RLNWitnessInput) -> Result<serde_json::Valu
.for_each(|v| identity_path_index.push(BigInt::from(*v).to_str_radix(10)));
let inputs = serde_json::json!({
"identity_secret": to_bigint(&rln_witness.identity_secret)?.to_str_radix(10),
"identity_secret": to_bigint(&rln_witness.identity_secret).to_str_radix(10),
"path_elements": path_elements,
"identity_path_index": identity_path_index,
"x": to_bigint(&rln_witness.x)?.to_str_radix(10),
"epoch": format!("0x{:064x}", to_bigint(&rln_witness.epoch)?),
"rln_identifier": to_bigint(&rln_witness.rln_identifier)?.to_str_radix(10),
"x": to_bigint(&rln_witness.x).to_str_radix(10),
"epoch": format!("0x{:064x}", to_bigint(&rln_witness.epoch)),
"rln_identifier": to_bigint(&rln_witness.rln_identifier).to_str_radix(10),
});
Ok(inputs)
inputs
}

View File

@@ -1,5 +1,5 @@
use crate::circuit::{vk_from_raw, zkey_from_raw, Curve, Fr};
use crate::poseidon_hash::poseidon_hash as utils_poseidon_hash;
use crate::poseidon_hash::poseidon_hash;
use crate::poseidon_tree::PoseidonTree;
use crate::protocol::*;
use crate::utils::*;
@@ -10,9 +10,9 @@ use ark_groth16::{ProvingKey, VerifyingKey};
use ark_relations::r1cs::ConstraintMatrices;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Read, Write};
use cfg_if::cfg_if;
use color_eyre::Result;
use num_bigint::BigInt;
use std::io::Cursor;
use std::io::{self, Result};
cfg_if! {
if #[cfg(not(target_arch = "wasm32"))] {
@@ -36,8 +36,8 @@ pub const RLN_IDENTIFIER: &[u8] = b"zerokit/rln/010203040506070809";
///
/// I/O is mostly done using writers and readers implementing `std::io::Write` and `std::io::Read`, respectively.
pub struct RLN<'a> {
proving_key: (ProvingKey<Curve>, ConstraintMatrices<Fr>),
verification_key: VerifyingKey<Curve>,
proving_key: Result<(ProvingKey<Curve>, ConstraintMatrices<Fr>)>,
verification_key: Result<VerifyingKey<Curve>>,
tree: PoseidonTree,
// The witness calculator can't be loaded in zerokit. Since this struct
@@ -61,35 +61,35 @@ impl RLN<'_> {
/// use std::io::Cursor;
///
/// let tree_height = 20;
/// let resources = Cursor::new("tree_height_20");
/// let resources = Cursor::new("./resources/tree_height_20/");
///
/// // We create a new RLN instance
/// let mut rln = RLN::new(tree_height, resources);
/// ```
#[cfg(not(target_arch = "wasm32"))]
pub fn new<R: Read>(tree_height: usize, mut input_data: R) -> Result<RLN<'static>> {
pub fn new<R: Read>(tree_height: usize, mut input_data: R) -> RLN<'static> {
// We read input
let mut input: Vec<u8> = Vec::new();
input_data.read_to_end(&mut input)?;
input_data.read_to_end(&mut input).unwrap();
let resources_folder = String::from_utf8(input)?;
let resources_folder = String::from_utf8(input).expect("Found invalid UTF-8");
let witness_calculator = circom_from_folder(&resources_folder)?;
let witness_calculator = circom_from_folder(&resources_folder);
let proving_key = zkey_from_folder(&resources_folder)?;
let verification_key = vk_from_folder(&resources_folder)?;
let proving_key = zkey_from_folder(&resources_folder);
let verification_key = vk_from_folder(&resources_folder);
// We compute a default empty tree
let tree = PoseidonTree::default(tree_height);
Ok(RLN {
RLN {
witness_calculator,
proving_key,
verification_key,
tree,
#[cfg(target_arch = "wasm32")]
_marker: PhantomData,
})
}
}
/// Creates a new RLN object by passing circuit resources as byte vectors.
@@ -130,17 +130,17 @@ impl RLN<'_> {
#[cfg(not(target_arch = "wasm32"))] circom_vec: Vec<u8>,
zkey_vec: Vec<u8>,
vk_vec: Vec<u8>,
) -> Result<RLN<'static>> {
) -> RLN<'static> {
#[cfg(not(target_arch = "wasm32"))]
let witness_calculator = circom_from_raw(circom_vec)?;
let witness_calculator = circom_from_raw(circom_vec);
let proving_key = zkey_from_raw(&zkey_vec)?;
let verification_key = vk_from_raw(&vk_vec, &zkey_vec)?;
let proving_key = zkey_from_raw(&zkey_vec);
let verification_key = vk_from_raw(&vk_vec, &zkey_vec);
// We compute a default empty tree
let tree = PoseidonTree::default(tree_height);
Ok(RLN {
RLN {
#[cfg(not(target_arch = "wasm32"))]
witness_calculator,
proving_key,
@@ -148,7 +148,7 @@ impl RLN<'_> {
tree,
#[cfg(target_arch = "wasm32")]
_marker: PhantomData,
})
}
}
////////////////////////////////////////////////////////
@@ -160,7 +160,7 @@ impl RLN<'_> {
///
/// Input values are:
/// - `tree_height`: the height of the Merkle tree.
pub fn set_tree(&mut self, tree_height: usize) -> Result<()> {
pub fn set_tree(&mut self, tree_height: usize) -> io::Result<()> {
// We compute a default empty tree of desired height
self.tree = PoseidonTree::default(tree_height);
@@ -187,7 +187,7 @@ impl RLN<'_> {
/// let mut buffer = Cursor::new(serialize_field_element(id_commitment));
/// rln.set_leaf(id_index, &mut buffer).unwrap();
/// ```
pub fn set_leaf<R: Read>(&mut self, index: usize, mut input_data: R) -> Result<()> {
pub fn set_leaf<R: Read>(&mut self, index: usize, mut input_data: R) -> io::Result<()> {
// We read input
let mut leaf_byte: Vec<u8> = Vec::new();
input_data.read_to_end(&mut leaf_byte)?;
@@ -229,15 +229,15 @@ impl RLN<'_> {
/// let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves));
/// rln.set_leaves_from(index, &mut buffer).unwrap();
/// ```
pub fn set_leaves_from<R: Read>(&mut self, index: usize, mut input_data: R) -> Result<()> {
pub fn set_leaves_from<R: Read>(&mut self, index: usize, mut input_data: R) -> io::Result<()> {
// We read input
let mut leaves_byte: Vec<u8> = Vec::new();
input_data.read_to_end(&mut leaves_byte)?;
let (leaves, _) = bytes_le_to_vec_fr(&leaves_byte)?;
let (leaves, _) = bytes_le_to_vec_fr(&leaves_byte);
// We set the leaves
self.tree.set_range(index, leaves)
return self.tree.set_range(index, leaves);
}
/// Resets the tree state to default and sets multiple leaves starting from index 0.
@@ -246,12 +246,12 @@ impl RLN<'_> {
///
/// Input values are:
/// - `input_data`: a reader for the serialization of multiple leaf values (serialization done with [`rln::utils::vec_fr_to_bytes_le`](crate::utils::vec_fr_to_bytes_le))
pub fn init_tree_with_leaves<R: Read>(&mut self, input_data: R) -> Result<()> {
pub fn init_tree_with_leaves<R: Read>(&mut self, input_data: R) -> io::Result<()> {
// reset the tree
// NOTE: this requires the tree to be initialized with the correct height initially
// TODO: accept tree_height as a parameter and initialize the tree with that height
self.set_tree(self.tree.depth())?;
self.set_leaves_from(0, input_data)
return self.set_leaves_from(0, input_data);
}
/// Sets a leaf value at the next available never-set leaf index.
@@ -295,7 +295,7 @@ impl RLN<'_> {
/// let mut buffer = Cursor::new(fr_to_bytes_le(&id_commitment));
/// rln.set_next_leaf(&mut buffer).unwrap();
/// ```
pub fn set_next_leaf<R: Read>(&mut self, mut input_data: R) -> Result<()> {
pub fn set_next_leaf<R: Read>(&mut self, mut input_data: R) -> io::Result<()> {
// We read input
let mut leaf_byte: Vec<u8> = Vec::new();
input_data.read_to_end(&mut leaf_byte)?;
@@ -320,7 +320,7 @@ impl RLN<'_> {
/// let index = 10;
/// rln.delete_leaf(index).unwrap();
/// ```
pub fn delete_leaf(&mut self, index: usize) -> Result<()> {
pub fn delete_leaf(&mut self, index: usize) -> io::Result<()> {
self.tree.delete(index)?;
Ok(())
}
@@ -338,7 +338,7 @@ impl RLN<'_> {
/// rln.get_root(&mut buffer).unwrap();
/// let (root, _) = bytes_le_to_fr(&buffer.into_inner());
/// ```
pub fn get_root<W: Write>(&self, mut output_data: W) -> Result<()> {
pub fn get_root<W: Write>(&self, mut output_data: W) -> io::Result<()> {
let root = self.tree.root();
output_data.write_all(&fr_to_bytes_le(&root))?;
@@ -366,13 +366,13 @@ impl RLN<'_> {
/// let (path_elements, read) = bytes_le_to_vec_fr(&buffer_inner);
/// let (identity_path_index, _) = bytes_le_to_vec_u8(&buffer_inner[read..].to_vec());
/// ```
pub fn get_proof<W: Write>(&self, index: usize, mut output_data: W) -> Result<()> {
pub fn get_proof<W: Write>(&self, index: usize, mut output_data: W) -> io::Result<()> {
let merkle_proof = self.tree.proof(index).expect("proof should exist");
let path_elements = merkle_proof.get_path_elements();
let identity_path_index = merkle_proof.get_path_index();
output_data.write_all(&vec_fr_to_bytes_le(&path_elements)?)?;
output_data.write_all(&vec_u8_to_bytes_le(&identity_path_index)?)?;
output_data.write_all(&vec_fr_to_bytes_le(&path_elements))?;
output_data.write_all(&vec_u8_to_bytes_le(&identity_path_index))?;
Ok(())
}
@@ -406,11 +406,11 @@ impl RLN<'_> {
&mut self,
mut input_data: R,
mut output_data: W,
) -> Result<()> {
) -> io::Result<()> {
// We read input RLN witness and we deserialize it
let mut serialized: Vec<u8> = Vec::new();
input_data.read_to_end(&mut serialized)?;
let (rln_witness, _) = deserialize_witness(&serialized)?;
let (rln_witness, _) = deserialize_witness(&serialized);
/*
if self.witness_calculator.is_none() {
@@ -418,10 +418,15 @@ impl RLN<'_> {
}
*/
let proof = generate_proof(self.witness_calculator, &self.proving_key, &rln_witness)?;
let proof = generate_proof(
&mut self.witness_calculator,
self.proving_key.as_ref().unwrap(),
&rln_witness,
)
.unwrap();
// Note: we export a serialization of ark-groth16::Proof not semaphore::Proof
proof.serialize(&mut output_data)?;
proof.serialize(&mut output_data).unwrap();
Ok(())
}
@@ -461,17 +466,22 @@ impl RLN<'_> {
///
/// assert!(verified);
/// ```
pub fn verify<R: Read>(&self, mut input_data: R) -> Result<bool> {
pub fn verify<R: Read>(&self, mut input_data: R) -> io::Result<bool> {
// Input data is serialized for Curve as:
// serialized_proof (compressed, 4*32 bytes) || serialized_proof_values (6*32 bytes), i.e.
// [ proof<128> | root<32> | epoch<32> | share_x<32> | share_y<32> | nullifier<32> | rln_identifier<32> ]
let mut input_byte: Vec<u8> = Vec::new();
input_data.read_to_end(&mut input_byte)?;
let proof = ArkProof::deserialize(&mut Cursor::new(&input_byte[..128]))?;
let proof = ArkProof::deserialize(&mut Cursor::new(&input_byte[..128].to_vec())).unwrap();
let (proof_values, _) = deserialize_proof_values(&input_byte[128..]);
let (proof_values, _) = deserialize_proof_values(&input_byte[128..].to_vec());
let verified = verify_proof(&self.verification_key, &proof, &proof_values)?;
let verified = verify_proof(
self.verification_key.as_ref().unwrap(),
&proof,
&proof_values,
)
.unwrap();
Ok(verified)
}
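A hedged helper making the fixed offsets in the documented layout explicit (names are illustrative, not from the crate):
const PROOF_LEN: usize = 128;           // compressed ark-groth16 proof
const PROOF_VALUES_LEN: usize = 6 * 32; // root, epoch, share_x, share_y, nullifier, rln_identifier

fn split_verify_input(input: &[u8]) -> (&[u8], &[u8]) {
    assert!(input.len() >= PROOF_LEN + PROOF_VALUES_LEN);
    input.split_at(PROOF_LEN) // (proof bytes, proof-values bytes)
}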
@@ -500,6 +510,8 @@ impl RLN<'_> {
/// // We generate a random signal
/// let mut rng = rand::thread_rng();
/// let signal: [u8; 32] = rng.gen();
/// let signal_len = u64::try_from(signal.len()).unwrap();
///
/// // We generate a random epoch
/// let epoch = hash_to_field(b"test-epoch");
///
@@ -507,9 +519,9 @@ impl RLN<'_> {
/// // input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
/// let mut serialized: Vec<u8> = Vec::new();
/// serialized.append(&mut fr_to_bytes_le(&identity_secret_hash));
/// serialized.append(&mut normalize_usize(identity_index));
/// serialized.append(&mut identity_index.to_le_bytes().to_vec());
/// serialized.append(&mut fr_to_bytes_le(&epoch));
/// serialized.append(&mut normalize_usize(signal_len).resize(8,0));
/// serialized.append(&mut signal_len.to_le_bytes().to_vec());
/// serialized.append(&mut signal.to_vec());
///
/// let mut input_buffer = Cursor::new(serialized);
@@ -525,18 +537,23 @@ impl RLN<'_> {
&mut self,
mut input_data: R,
mut output_data: W,
) -> Result<()> {
) -> io::Result<()> {
// We read input RLN witness and we deserialize it
let mut witness_byte: Vec<u8> = Vec::new();
input_data.read_to_end(&mut witness_byte)?;
let (rln_witness, _) = proof_inputs_to_rln_witness(&mut self.tree, &witness_byte)?;
let (rln_witness, _) = proof_inputs_to_rln_witness(&mut self.tree, &witness_byte);
let proof_values = proof_values_from_witness(&rln_witness);
let proof = generate_proof(self.witness_calculator, &self.proving_key, &rln_witness)?;
let proof = generate_proof(
self.witness_calculator,
self.proving_key.as_ref().unwrap(),
&rln_witness,
)
.unwrap();
// Note: we export a serialization of ark-groth16::Proof not semaphore::Proof
// This proof is compressed, i.e. 128 bytes long
proof.serialize(&mut output_data)?;
proof.serialize(&mut output_data).unwrap();
output_data.write_all(&serialize_proof_values(&proof_values))?;
Ok(())
@@ -553,15 +570,17 @@ impl RLN<'_> {
calculated_witness: Vec<BigInt>,
rln_witness_vec: Vec<u8>,
mut output_data: W,
) -> Result<()> {
let (rln_witness, _) = deserialize_witness(&rln_witness_vec[..])?;
) -> io::Result<()> {
let (rln_witness, _) = deserialize_witness(&rln_witness_vec[..]);
let proof_values = proof_values_from_witness(&rln_witness);
let proof = generate_proof_with_witness(calculated_witness, &self.proving_key).unwrap();
let proof =
generate_proof_with_witness(calculated_witness, self.proving_key.as_ref().unwrap())
.unwrap();
// Note: we export a serialization of ark-groth16::Proof not semaphore::Proof
// This proof is compressed, i.e. 128 bytes long
proof.serialize(&mut output_data)?;
proof.serialize(&mut output_data).unwrap();
output_data.write_all(&serialize_proof_values(&proof_values))?;
Ok(())
}
@@ -585,7 +604,7 @@ impl RLN<'_> {
/// // We prepare input for verify_rln_proof API
/// // input_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
/// // that is [ proof_data || signal_len<8> | signal<var> ]
/// proof_data.append(&mut normalize_usize(signal_len));
/// proof_data.append(&mut signal_len.to_le_bytes().to_vec());
/// proof_data.append(&mut signal.to_vec());
///
/// let mut input_buffer = Cursor::new(proof_data);
@@ -593,21 +612,27 @@ impl RLN<'_> {
///
/// assert!(verified);
/// ```
pub fn verify_rln_proof<R: Read>(&self, mut input_data: R) -> Result<bool> {
pub fn verify_rln_proof<R: Read>(&self, mut input_data: R) -> io::Result<bool> {
let mut serialized: Vec<u8> = Vec::new();
input_data.read_to_end(&mut serialized)?;
let mut all_read = 0;
let proof = ArkProof::deserialize(&mut Cursor::new(&serialized[..128].to_vec()))?;
let proof = ArkProof::deserialize(&mut Cursor::new(&serialized[..128].to_vec())).unwrap();
all_read += 128;
let (proof_values, read) = deserialize_proof_values(&serialized[all_read..]);
let (proof_values, read) = deserialize_proof_values(&serialized[all_read..].to_vec());
all_read += read;
let signal_len = usize::from_le_bytes(serialized[all_read..all_read + 8].try_into()?);
let signal_len =
u64::from_le_bytes(serialized[all_read..all_read + 8].try_into().unwrap()) as usize;
all_read += 8;
let signal: Vec<u8> = serialized[all_read..all_read + signal_len].to_vec();
let verified = verify_proof(&self.verification_key, &proof, &proof_values)?;
let verified = verify_proof(
self.verification_key.as_ref().unwrap(),
&proof,
&proof_values,
)
.unwrap();
// Consistency checks to counter proof tampering
let x = hash_to_field(&signal);
@@ -668,21 +693,31 @@ impl RLN<'_> {
///
/// assert!(verified);
/// ```
pub fn verify_with_roots<R: Read>(&self, mut input_data: R, mut roots_data: R) -> Result<bool> {
pub fn verify_with_roots<R: Read>(
&self,
mut input_data: R,
mut roots_data: R,
) -> io::Result<bool> {
let mut serialized: Vec<u8> = Vec::new();
input_data.read_to_end(&mut serialized)?;
let mut all_read = 0;
let proof = ArkProof::deserialize(&mut Cursor::new(&serialized[..128].to_vec()))?;
let proof = ArkProof::deserialize(&mut Cursor::new(&serialized[..128].to_vec())).unwrap();
all_read += 128;
let (proof_values, read) = deserialize_proof_values(&serialized[all_read..]);
let (proof_values, read) = deserialize_proof_values(&serialized[all_read..].to_vec());
all_read += read;
let signal_len = usize::from_le_bytes(serialized[all_read..all_read + 8].try_into()?);
let signal_len =
u64::from_le_bytes(serialized[all_read..all_read + 8].try_into().unwrap()) as usize;
all_read += 8;
let signal: Vec<u8> = serialized[all_read..all_read + signal_len].to_vec();
let verified = verify_proof(&self.verification_key, &proof, &proof_values)?;
let verified = verify_proof(
self.verification_key.as_ref().unwrap(),
&proof,
&proof_values,
)
.unwrap();
// First consistency checks to counter proof tampering
let x = hash_to_field(&signal);
@@ -691,7 +726,7 @@ impl RLN<'_> {
&& (proof_values.rln_identifier == hash_to_field(RLN_IDENTIFIER));
// We skip root validation if proof is already invalid
if !partial_result {
if partial_result == false {
return Ok(partial_result);
}
@@ -714,13 +749,14 @@ impl RLN<'_> {
}
// We validate the root
let roots_verified: bool = if roots.is_empty() {
let roots_verified: bool;
if roots.is_empty() {
// If no root is passed in roots_buffer, we skip proof's root check
true
roots_verified = true;
} else {
// Otherwise we check if proof's root is contained in the passed buffer
roots.contains(&proof_values.root)
};
roots_verified = roots.contains(&proof_values.root);
}
// We combine all checks
Ok(partial_result && roots_verified)
@@ -748,7 +784,7 @@ impl RLN<'_> {
/// // We deserialize the keygen output
/// let (identity_secret_hash, id_commitment) = deserialize_identity_pair(buffer.into_inner());
/// ```
pub fn key_gen<W: Write>(&self, mut output_data: W) -> Result<()> {
pub fn key_gen<W: Write>(&self, mut output_data: W) -> io::Result<()> {
let (identity_secret_hash, id_commitment) = keygen();
output_data.write_all(&fr_to_bytes_le(&identity_secret_hash))?;
output_data.write_all(&fr_to_bytes_le(&id_commitment))?;
@@ -778,7 +814,7 @@ impl RLN<'_> {
/// // We deserialize the keygen output
/// let (identity_trapdoor, identity_nullifier, identity_secret_hash, id_commitment) = deserialize_identity_tuple(buffer.into_inner());
/// ```
pub fn extended_key_gen<W: Write>(&self, mut output_data: W) -> Result<()> {
pub fn extended_key_gen<W: Write>(&self, mut output_data: W) -> io::Result<()> {
let (identity_trapdoor, identity_nullifier, identity_secret_hash, id_commitment) =
extended_keygen();
output_data.write_all(&fr_to_bytes_le(&identity_trapdoor))?;
@@ -817,7 +853,7 @@ impl RLN<'_> {
&self,
mut input_data: R,
mut output_data: W,
) -> Result<()> {
) -> io::Result<()> {
let mut serialized: Vec<u8> = Vec::new();
input_data.read_to_end(&mut serialized)?;
@@ -860,7 +896,7 @@ impl RLN<'_> {
&self,
mut input_data: R,
mut output_data: W,
) -> Result<()> {
) -> io::Result<()> {
let mut serialized: Vec<u8> = Vec::new();
input_data.read_to_end(&mut serialized)?;
@@ -911,21 +947,21 @@ impl RLN<'_> {
mut input_proof_data_1: R,
mut input_proof_data_2: R,
mut output_data: W,
) -> Result<()> {
) -> io::Result<()> {
// We deserialize the two proofs and we get the corresponding RLNProofValues objects
let mut serialized: Vec<u8> = Vec::new();
input_proof_data_1.read_to_end(&mut serialized)?;
// We skip deserialization of the zk-proof at the beginning
let (proof_values_1, _) = deserialize_proof_values(&serialized[128..]);
let (proof_values_1, _) = deserialize_proof_values(&serialized[128..].to_vec());
let external_nullifier_1 =
utils_poseidon_hash(&[proof_values_1.epoch, proof_values_1.rln_identifier]);
poseidon_hash(&[proof_values_1.epoch, proof_values_1.rln_identifier]);
let mut serialized: Vec<u8> = Vec::new();
input_proof_data_2.read_to_end(&mut serialized)?;
// We skip deserialization of the zk-proof at the beginning
let (proof_values_2, _) = deserialize_proof_values(&serialized[128..]);
let (proof_values_2, _) = deserialize_proof_values(&serialized[128..].to_vec());
let external_nullifier_2 =
utils_poseidon_hash(&[proof_values_2.epoch, proof_values_2.rln_identifier]);
poseidon_hash(&[proof_values_2.epoch, proof_values_2.rln_identifier]);
// We continue only if the proof values are for the same epoch
// The idea is that proof values that go as input to this function are verified first (with zk-proof verify), hence ensuring validity of epoch and other fields.
@@ -941,7 +977,8 @@ impl RLN<'_> {
compute_id_secret(share1, share2, external_nullifier_1);
// If an identity secret hash is recovered, we write it to output_data, otherwise nothing will be written.
if let Ok(identity_secret_hash) = recovered_identity_secret_hash {
if recovered_identity_secret_hash.is_ok() {
let identity_secret_hash = recovered_identity_secret_hash.unwrap();
output_data.write_all(&fr_to_bytes_le(&identity_secret_hash))?;
}
}
@@ -949,17 +986,49 @@ impl RLN<'_> {
Ok(())
}
/// Hashes an input signal to an element in the working prime field.
///
/// The result is computed as the Keccak256 of the input signal modulo the prime field characteristic.
///
/// Input values are:
/// - `input_data`: a reader for the byte vector containing the input signal.
///
/// Output values are:
/// - `output_data`: a writer receiving the serialization of the resulting field element (serialization done with [`rln::utils::fr_to_bytes_le`](crate::utils::fr_to_bytes_le))
///
/// Example
/// ```
/// let signal: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
///
/// let mut input_buffer = Cursor::new(&signal);
/// let mut output_buffer = Cursor::new(Vec::<u8>::new());
/// rln.hash(&mut input_buffer, &mut output_buffer)
/// .unwrap();
///
/// // We deserialize the hash output
/// let field_element = deserialize_field_element(output_buffer.into_inner());
/// ```
pub fn hash<R: Read, W: Write>(&self, mut input_data: R, mut output_data: W) -> io::Result<()> {
let mut serialized: Vec<u8> = Vec::new();
input_data.read_to_end(&mut serialized)?;
let hash = hash_to_field(&serialized);
output_data.write_all(&fr_to_bytes_le(&hash))?;
Ok(())
}
/// Returns the serialization of a [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) populated from the identity secret, the Merkle tree index, the epoch and signal.
///
/// Input values are:
/// - `input_data`: a reader for the serialization of `[ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]`
///
/// The function returns the corresponding [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object serialized using [`rln::protocol::serialize_witness`](crate::protocol::serialize_witness)).
pub fn get_serialized_rln_witness<R: Read>(&mut self, mut input_data: R) -> Result<Vec<u8>> {
pub fn get_serialized_rln_witness<R: Read>(&mut self, mut input_data: R) -> Vec<u8> {
// We read input RLN witness and we deserialize it
let mut witness_byte: Vec<u8> = Vec::new();
input_data.read_to_end(&mut witness_byte)?;
let (rln_witness, _) = proof_inputs_to_rln_witness(&mut self.tree, &witness_byte)?;
input_data.read_to_end(&mut witness_byte).unwrap();
let (rln_witness, _) = proof_inputs_to_rln_witness(&mut self.tree, &witness_byte);
serialize_witness(&rln_witness)
}
@@ -970,9 +1039,12 @@ impl RLN<'_> {
/// - `serialized_witness`: the byte serialization of a [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object (serialization done with [`rln::protocol::serialize_witness`](crate::protocol::serialize_witness)).
///
/// The function returns the corresponding JSON encoding of the input [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object.
pub fn get_rln_witness_json(&mut self, serialized_witness: &[u8]) -> Result<serde_json::Value> {
let (rln_witness, _) = deserialize_witness(serialized_witness)?;
get_json_inputs(&rln_witness)
pub fn get_rln_witness_json(
&mut self,
serialized_witness: &[u8],
) -> io::Result<serde_json::Value> {
let (rln_witness, _) = deserialize_witness(serialized_witness);
Ok(get_json_inputs(&rln_witness))
}
}
@@ -981,76 +1053,10 @@ impl Default for RLN<'_> {
fn default() -> Self {
let tree_height = TEST_TREE_HEIGHT;
let buffer = Cursor::new(TEST_RESOURCES_FOLDER);
Self::new(tree_height, buffer).unwrap()
Self::new(tree_height, buffer)
}
}
/// Hashes an input signal to an element in the working prime field.
///
/// The result is computed as the Keccak256 of the input signal modulo the prime field characteristic.
///
/// Input values are:
/// - `input_data`: a reader for the byte vector containing the input signal.
///
/// Output values are:
/// - `output_data`: a writer receiving the serialization of the resulting field element (serialization done with [`rln::utils::fr_to_bytes_le`](crate::utils::fr_to_bytes_le))
///
/// Example
/// ```
/// let signal: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
///
/// let mut input_buffer = Cursor::new(&signal);
/// let mut output_buffer = Cursor::new(Vec::<u8>::new());
/// hash(&mut input_buffer, &mut output_buffer)
/// .unwrap();
///
/// // We deserialize the hash output
/// let field_element = deserialize_field_element(output_buffer.into_inner());
/// ```
pub fn hash<R: Read, W: Write>(mut input_data: R, mut output_data: W) -> Result<()> {
let mut serialized: Vec<u8> = Vec::new();
input_data.read_to_end(&mut serialized)?;
let hash = hash_to_field(&serialized);
output_data.write_all(&fr_to_bytes_le(&hash))?;
Ok(())
}
/// Hashes a set of elements to a single element in the working prime field, using Poseidon.
///
/// The result is computed as the Poseidon Hash of the input signal.
///
/// Input values are:
/// - `input_data`: a reader for the byte vector containing the input signal.
///
/// Output values are:
/// - `output_data`: a writer receiving the serialization of the resulting field element (serialization done with [`rln::utils::fr_to_bytes_le`](crate::utils::fr_to_bytes_le))
///
/// Example
/// ```
/// let data = vec![hash_to_field(b"foo")];
/// let signal = vec_fr_to_bytes_le(&data);
///
/// let mut input_buffer = Cursor::new(&signal);
/// let mut output_buffer = Cursor::new(Vec::<u8>::new());
/// poseidon_hash(&mut input_buffer, &mut output_buffer)
/// .unwrap();
///
/// // We deserialize the hash output
/// let hash_result = deserialize_field_element(output_buffer.into_inner());
/// ```
pub fn poseidon_hash<R: Read, W: Write>(mut input_data: R, mut output_data: W) -> Result<()> {
let mut serialized: Vec<u8> = Vec::new();
input_data.read_to_end(&mut serialized)?;
let (inputs, _) = bytes_le_to_vec_fr(&serialized)?;
let hash = utils_poseidon_hash(inputs.as_ref());
output_data.write_all(&fr_to_bytes_le(&hash))?;
Ok(())
}
#[cfg(test)]
mod test {
use super::*;
@@ -1072,7 +1078,7 @@ mod test {
// We create a new tree
let input_buffer = Cursor::new(TEST_RESOURCES_FOLDER);
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
let mut rln = RLN::new(tree_height, input_buffer);
// We first add leaves one by one specifying the index
for (i, leaf) in leaves.iter().enumerate() {
@@ -1111,7 +1117,7 @@ mod test {
rln.set_tree(tree_height).unwrap();
// We add leaves in a batch into the tree
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves));
rln.init_tree_with_leaves(&mut buffer).unwrap();
// We check if number of leaves set is consistent
@@ -1167,10 +1173,10 @@ mod test {
// We create a new tree
let input_buffer = Cursor::new(TEST_RESOURCES_FOLDER);
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
let mut rln = RLN::new(tree_height, input_buffer);
// We add leaves in a batch into the tree
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves));
rln.init_tree_with_leaves(&mut buffer).unwrap();
// We check if number of leaves set is consistent
@@ -1184,11 +1190,11 @@ mod test {
// `init_tree_with_leaves` resets the tree to the height it was initialized with, using `set_tree`
// We add leaves in a batch starting from index 0..set_index
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves[0..set_index]).unwrap());
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves[0..set_index]));
rln.init_tree_with_leaves(&mut buffer).unwrap();
// We add the remaining n leaves in a batch starting from index m
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves[set_index..]).unwrap());
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves[set_index..]));
rln.set_leaves_from(set_index, &mut buffer).unwrap();
// We check if number of leaves set is consistent
@@ -1221,7 +1227,6 @@ mod test {
assert_eq!(root_batch_with_init, root_single_additions);
}
#[allow(unused_must_use)]
#[test]
// This test checks if `set_leaves_from` throws an error when the index is out of bounds
fn test_set_leaves_bad_index() {
@@ -1238,7 +1243,7 @@ mod test {
// We create a new tree
let input_buffer = Cursor::new(TEST_RESOURCES_FOLDER);
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
let mut rln = RLN::new(tree_height, input_buffer);
// Get root of empty tree
let mut buffer = Cursor::new(Vec::<u8>::new());
@@ -1246,7 +1251,7 @@ mod test {
let (root_empty, _) = bytes_le_to_fr(&buffer.into_inner());
// We add leaves in a batch into the tree
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves));
rln.set_leaves_from(bad_index, &mut buffer)
.expect_err("Should throw an error");
@@ -1267,21 +1272,25 @@ mod test {
let tree_height = TEST_TREE_HEIGHT;
let input_buffer = Cursor::new(TEST_RESOURCES_FOLDER);
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
let mut rln = RLN::new(tree_height, input_buffer);
// Note: we only test Groth16 proof generation, so we ignore setting the tree in the RLN object
let rln_witness = random_rln_witness(tree_height);
let proof_values = proof_values_from_witness(&rln_witness);
// We compute a Groth16 proof
let mut input_buffer = Cursor::new(serialize_witness(&rln_witness).unwrap());
let mut input_buffer = Cursor::new(serialize_witness(&rln_witness));
let mut output_buffer = Cursor::new(Vec::<u8>::new());
rln.prove(&mut input_buffer, &mut output_buffer).unwrap();
let serialized_proof = output_buffer.into_inner();
// Before checking public verify API, we check that the (deserialized) proof generated by prove is actually valid
let proof = ArkProof::deserialize(&mut Cursor::new(&serialized_proof)).unwrap();
let verified = verify_proof(&rln.verification_key, &proof, &proof_values);
let verified = verify_proof(
&rln.verification_key.as_ref().unwrap(),
&proof,
&proof_values,
);
assert!(verified.unwrap());
// We prepare the input to prove API, consisting of serialized_proof (compressed, 4*32 bytes) || serialized_proof_values (6*32 bytes)
@@ -1311,23 +1320,24 @@ mod test {
// We create a new RLN instance
let input_buffer = Cursor::new(TEST_RESOURCES_FOLDER);
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
let mut rln = RLN::new(tree_height, input_buffer);
// We add leaves in a batch into the tree
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves));
rln.init_tree_with_leaves(&mut buffer).unwrap();
// Generate identity pair
let (identity_secret_hash, id_commitment) = keygen();
// We set as leaf id_commitment after storing its index
let identity_index = rln.tree.leaves_set();
let identity_index = u64::try_from(rln.tree.leaves_set()).unwrap();
let mut buffer = Cursor::new(fr_to_bytes_le(&id_commitment));
rln.set_next_leaf(&mut buffer).unwrap();
// We generate a random signal
let mut rng = rand::thread_rng();
let signal: [u8; 32] = rng.gen();
let signal_len = u64::try_from(signal.len()).unwrap();
// We generate a random epoch
let epoch = hash_to_field(b"test-epoch");
@@ -1336,9 +1346,9 @@ mod test {
// input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret_hash));
serialized.append(&mut normalize_usize(identity_index));
serialized.append(&mut identity_index.to_le_bytes().to_vec());
serialized.append(&mut fr_to_bytes_le(&epoch));
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut signal_len.to_le_bytes().to_vec());
serialized.append(&mut signal.to_vec());
let mut input_buffer = Cursor::new(serialized);
@@ -1352,7 +1362,7 @@ mod test {
// We prepare input for verify_rln_proof API
// input_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
// that is [ proof_data || signal_len<8> | signal<var> ]
proof_data.append(&mut normalize_usize(signal.len()));
proof_data.append(&mut signal_len.to_le_bytes().to_vec());
proof_data.append(&mut signal.to_vec());
let mut input_buffer = Cursor::new(proof_data);
@@ -1375,23 +1385,24 @@ mod test {
// We create a new RLN instance
let input_buffer = Cursor::new(TEST_RESOURCES_FOLDER);
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
let mut rln = RLN::new(tree_height, input_buffer);
// We add leaves in a batch into the tree
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves));
rln.init_tree_with_leaves(&mut buffer).unwrap();
// Generate identity pair
let (identity_secret_hash, id_commitment) = keygen();
// We set as leaf id_commitment after storing its index
let identity_index = rln.tree.leaves_set();
let identity_index = u64::try_from(rln.tree.leaves_set()).unwrap();
let mut buffer = Cursor::new(fr_to_bytes_le(&id_commitment));
rln.set_next_leaf(&mut buffer).unwrap();
// We generate a random signal
let mut rng = rand::thread_rng();
let signal: [u8; 32] = rng.gen();
let signal_len = u64::try_from(signal.len()).unwrap();
// We generate a random epoch
let epoch = hash_to_field(b"test-epoch");
@@ -1400,9 +1411,9 @@ mod test {
// input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret_hash));
serialized.append(&mut normalize_usize(identity_index));
serialized.append(&mut identity_index.to_le_bytes().to_vec());
serialized.append(&mut fr_to_bytes_le(&epoch));
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut signal_len.to_le_bytes().to_vec());
serialized.append(&mut signal.to_vec());
let mut input_buffer = Cursor::new(serialized);
@@ -1410,13 +1421,12 @@ mod test {
// We read input RLN witness and we deserialize it
let mut witness_byte: Vec<u8> = Vec::new();
input_buffer.read_to_end(&mut witness_byte).unwrap();
let (rln_witness, _) = proof_inputs_to_rln_witness(&mut rln.tree, &witness_byte).unwrap();
let (rln_witness, _) = proof_inputs_to_rln_witness(&mut rln.tree, &witness_byte);
let serialized_witness = serialize_witness(&rln_witness).unwrap();
let serialized_witness = serialize_witness(&rln_witness);
// Calculate witness outside zerokit (simulating what JS is doing)
let inputs = inputs_for_witness_calculation(&rln_witness)
.unwrap()
.into_iter()
.map(|(name, values)| (name.to_string(), values));
let calculated_witness = rln
@@ -1429,7 +1439,7 @@ mod test {
let calculated_witness_vec: Vec<BigInt> = calculated_witness
.into_iter()
.map(|v| to_bigint(&v).unwrap())
.map(|v| to_bigint(&v))
.collect();
// Generating the proof
@@ -1447,7 +1457,7 @@ mod test {
// We prepare input for verify_rln_proof API
// input_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
// that is [ proof_data || signal_len<8> | signal<var> ]
proof_data.append(&mut normalize_usize(signal.len()));
proof_data.append(&mut signal_len.to_le_bytes().to_vec());
proof_data.append(&mut signal.to_vec());
let mut input_buffer = Cursor::new(proof_data);
@@ -1471,23 +1481,24 @@ mod test {
// We create a new RLN instance
let input_buffer = Cursor::new(TEST_RESOURCES_FOLDER);
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
let mut rln = RLN::new(tree_height, input_buffer);
// We add leaves in a batch into the tree
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves));
rln.init_tree_with_leaves(&mut buffer).unwrap();
// Generate identity pair
let (identity_secret_hash, id_commitment) = keygen();
// We set as leaf id_commitment after storing its index
let identity_index = rln.tree.leaves_set();
let identity_index = u64::try_from(rln.tree.leaves_set()).unwrap();
let mut buffer = Cursor::new(fr_to_bytes_le(&id_commitment));
rln.set_next_leaf(&mut buffer).unwrap();
// We generate a random signal
let mut rng = rand::thread_rng();
let signal: [u8; 32] = rng.gen();
let signal_len = u64::try_from(signal.len()).unwrap();
// We generate a random epoch
let epoch = hash_to_field(b"test-epoch");
@@ -1496,9 +1507,9 @@ mod test {
// input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret_hash));
serialized.append(&mut normalize_usize(identity_index));
serialized.append(&mut identity_index.to_le_bytes().to_vec());
serialized.append(&mut fr_to_bytes_le(&epoch));
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut signal_len.to_le_bytes().to_vec());
serialized.append(&mut signal.to_vec());
let mut input_buffer = Cursor::new(serialized);
@@ -1512,7 +1523,7 @@ mod test {
// We prepare input for verify_rln_proof API
// input_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
// that is [ proof_data || signal_len<8> | signal<var> ]
proof_data.append(&mut normalize_usize(signal.len()));
proof_data.append(&mut signal_len.to_le_bytes().to_vec());
proof_data.append(&mut signal.to_vec());
let input_buffer = Cursor::new(proof_data);
@@ -1557,21 +1568,23 @@ mod test {
// We create a new RLN instance
let input_buffer = Cursor::new(TEST_RESOURCES_FOLDER);
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
let mut rln = RLN::new(tree_height, input_buffer);
// Generate identity pair
let (identity_secret_hash, id_commitment) = keygen();
// We set as leaf id_commitment after storing its index
let identity_index = rln.tree.leaves_set();
let identity_index = u64::try_from(rln.tree.leaves_set()).unwrap();
let mut buffer = Cursor::new(fr_to_bytes_le(&id_commitment));
rln.set_next_leaf(&mut buffer).unwrap();
// We generate two random signals
let mut rng = rand::thread_rng();
let signal1: [u8; 32] = rng.gen();
let signal1_len = u64::try_from(signal1.len()).unwrap();
let signal2: [u8; 32] = rng.gen();
let signal2_len = u64::try_from(signal2.len()).unwrap();
// We generate a random epoch
let epoch = hash_to_field(b"test-epoch");
@@ -1582,18 +1595,18 @@ mod test {
// input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
let mut serialized1: Vec<u8> = Vec::new();
serialized1.append(&mut fr_to_bytes_le(&identity_secret_hash));
serialized1.append(&mut normalize_usize(identity_index));
serialized1.append(&mut identity_index.to_le_bytes().to_vec());
serialized1.append(&mut fr_to_bytes_le(&epoch));
// The first part is the same for both proof input, so we clone
let mut serialized2 = serialized1.clone();
// We attach the first signal to the first proof input
serialized1.append(&mut normalize_usize(signal1.len()));
serialized1.append(&mut signal1_len.to_le_bytes().to_vec());
serialized1.append(&mut signal1.to_vec());
// We attach the second signal to the second proof input
serialized2.append(&mut normalize_usize(signal2.len()));
serialized2.append(&mut signal2_len.to_le_bytes().to_vec());
serialized2.append(&mut signal2.to_vec());
// We generate the first proof
@@ -1635,20 +1648,21 @@ mod test {
let (identity_secret_hash_new, id_commitment_new) = keygen();
// We add it to the tree
let identity_index_new = rln.tree.leaves_set();
let identity_index_new = u64::try_from(rln.tree.leaves_set()).unwrap();
let mut buffer = Cursor::new(fr_to_bytes_le(&id_commitment_new));
rln.set_next_leaf(&mut buffer).unwrap();
// We generate a random signal
let signal3: [u8; 32] = rng.gen();
let signal3_len = u64::try_from(signal3.len()).unwrap();
// We prepare proof input. Note that epoch is the same as before
// input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
let mut serialized3: Vec<u8> = Vec::new();
serialized3.append(&mut fr_to_bytes_le(&identity_secret_hash_new));
serialized3.append(&mut normalize_usize(identity_index_new));
serialized3.append(&mut identity_index_new.to_le_bytes().to_vec());
serialized3.append(&mut fr_to_bytes_le(&epoch));
serialized3.append(&mut normalize_usize(signal3.len()));
serialized3.append(&mut signal3_len.to_le_bytes().to_vec());
serialized3.append(&mut signal3.to_vec());
// We generate the proof

View File

@@ -2,14 +2,13 @@
use crate::circuit::Fr;
use ark_ff::PrimeField;
use color_eyre::{Report, Result};
use num_bigint::{BigInt, BigUint};
use num_traits::Num;
use std::iter::Extend;
pub fn to_bigint(el: &Fr) -> Result<BigInt> {
let res: BigUint = (*el).try_into()?;
Ok(res.into())
pub fn to_bigint(el: &Fr) -> BigInt {
let res: BigUint = (*el).try_into().unwrap();
res.try_into().unwrap()
}
pub fn fr_byte_size() -> usize {
@@ -17,10 +16,8 @@ pub fn fr_byte_size() -> usize {
(mbs + 64 - (mbs % 64)) / 8
}
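A worked instance of this rounding, assuming BN254's 254-bit scalar field:
fn fr_byte_size_example() {
    let mbs = 254; // assumed modulus bit size for Fr (BN254)
    // 254 % 64 = 62, so (254 + 64 - 62) / 8 = 256 / 8 = 32
    assert_eq!((mbs + 64 - (mbs % 64)) / 8, 32);
}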
pub fn str_to_fr(input: &str, radix: u32) -> Result<Fr> {
if !(radix == 10 || radix == 16) {
return Err(Report::msg("wrong radix"));
}
pub fn str_to_fr(input: &str, radix: u32) -> Fr {
assert!((radix == 10) || (radix == 16));
// We remove any quotes present and trim
let single_quote: char = '\"';
@@ -28,10 +25,16 @@ pub fn str_to_fr(input: &str, radix: u32) -> Result<Fr> {
input_clean = input_clean.trim().to_string();
if radix == 10 {
Ok(BigUint::from_str_radix(&input_clean, radix)?.try_into()?)
BigUint::from_str_radix(&input_clean, radix)
.unwrap()
.try_into()
.unwrap()
} else {
input_clean = input_clean.replace("0x", "");
Ok(BigUint::from_str_radix(&input_clean, radix)?.try_into()?)
BigUint::from_str_radix(&input_clean, radix)
.unwrap()
.try_into()
.unwrap()
}
}
@@ -72,111 +75,99 @@ pub fn fr_to_bytes_be(input: &Fr) -> Vec<u8> {
res
}
pub fn vec_fr_to_bytes_le(input: &[Fr]) -> Result<Vec<u8>> {
pub fn vec_fr_to_bytes_le(input: &[Fr]) -> Vec<u8> {
let mut bytes: Vec<u8> = Vec::new();
// We store the vector length
bytes.extend(input.len().to_le_bytes().to_vec());
bytes.extend(u64::try_from(input.len()).unwrap().to_le_bytes().to_vec());
// We store each element
input.iter().for_each(|el| bytes.extend(fr_to_bytes_le(el)));
Ok(bytes)
bytes
}
pub fn vec_fr_to_bytes_be(input: &[Fr]) -> Result<Vec<u8>> {
pub fn vec_fr_to_bytes_be(input: &[Fr]) -> Vec<u8> {
let mut bytes: Vec<u8> = Vec::new();
// We store the vector length
bytes.extend(input.len().to_be_bytes().to_vec());
bytes.extend(u64::try_from(input.len()).unwrap().to_be_bytes().to_vec());
// We store each element
input.iter().for_each(|el| bytes.extend(fr_to_bytes_be(el)));
Ok(bytes)
bytes
}
pub fn vec_u8_to_bytes_le(input: &[u8]) -> Result<Vec<u8>> {
pub fn vec_u8_to_bytes_le(input: &[u8]) -> Vec<u8> {
let mut bytes: Vec<u8> = Vec::new();
// We store the vector length
bytes.extend(input.len().to_le_bytes().to_vec());
bytes.extend(u64::try_from(input.len()).unwrap().to_le_bytes().to_vec());
bytes.extend(input);
Ok(bytes)
bytes
}
pub fn vec_u8_to_bytes_be(input: Vec<u8>) -> Result<Vec<u8>> {
pub fn vec_u8_to_bytes_be(input: Vec<u8>) -> Vec<u8> {
let mut bytes: Vec<u8> = Vec::new();
// We store the vector length
bytes.extend(input.len().to_be_bytes().to_vec());
bytes.extend(u64::try_from(input.len()).unwrap().to_be_bytes().to_vec());
bytes.extend(input);
Ok(bytes)
bytes
}
pub fn bytes_le_to_vec_u8(input: &[u8]) -> Result<(Vec<u8>, usize)> {
pub fn bytes_le_to_vec_u8(input: &[u8]) -> (Vec<u8>, usize) {
let mut read: usize = 0;
let len = usize::from_le_bytes(input[0..8].try_into()?);
let len = u64::from_le_bytes(input[0..8].try_into().unwrap()) as usize;
read += 8;
let res = input[8..8 + len].to_vec();
read += res.len();
Ok((res, read))
(res, read)
}
pub fn bytes_be_to_vec_u8(input: &[u8]) -> Result<(Vec<u8>, usize)> {
pub fn bytes_be_to_vec_u8(input: &[u8]) -> (Vec<u8>, usize) {
let mut read: usize = 0;
let len = usize::from_be_bytes(input[0..8].try_into()?);
let len = u64::from_be_bytes(input[0..8].try_into().unwrap()) as usize;
read += 8;
let res = input[8..8 + len].to_vec();
read += res.len();
Ok((res, read))
(res, read)
}
pub fn bytes_le_to_vec_fr(input: &[u8]) -> Result<(Vec<Fr>, usize)> {
pub fn bytes_le_to_vec_fr(input: &[u8]) -> (Vec<Fr>, usize) {
let mut read: usize = 0;
let mut res: Vec<Fr> = Vec::new();
let len = usize::from_le_bytes(input[0..8].try_into()?);
let len = u64::from_le_bytes(input[0..8].try_into().unwrap()) as usize;
read += 8;
let el_size = fr_byte_size();
for i in 0..len {
let (curr_el, _) = bytes_le_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)]);
let (curr_el, _) = bytes_le_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)].to_vec());
res.push(curr_el);
read += el_size;
}
Ok((res, read))
(res, read)
}
pub fn bytes_be_to_vec_fr(input: &[u8]) -> Result<(Vec<Fr>, usize)> {
pub fn bytes_be_to_vec_fr(input: &[u8]) -> (Vec<Fr>, usize) {
let mut read: usize = 0;
let mut res: Vec<Fr> = Vec::new();
let len = usize::from_be_bytes(input[0..8].try_into()?);
let len = u64::from_be_bytes(input[0..8].try_into().unwrap()) as usize;
read += 8;
let el_size = fr_byte_size();
for i in 0..len {
let (curr_el, _) = bytes_be_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)]);
let (curr_el, _) = bytes_be_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)].to_vec());
res.push(curr_el);
read += el_size;
}
Ok((res, read))
}
pub fn normalize_usize(input: usize) -> Vec<u8> {
let mut normalized_usize = input.to_le_bytes().to_vec();
normalized_usize.resize(8, 0);
normalized_usize
(res, read)
}
/* Old conversion utilities between different libraries' data types
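Taken together, the hunks above move the vector helpers from `Result` returns to plain values and write the length prefix as an explicit `u64` (8 bytes on every platform) instead of `usize::to_le_bytes`, which also makes the removed `normalize_usize` padding helper unnecessary. A round-trip sketch of the resulting layout, assuming the 32-byte `Fr` encoding used by these helpers (`fr_byte_size() == 32`):

```rust
// Layout: [ len: u64 LE, 8 bytes | element 0: 32 bytes | element 1: 32 bytes | ... ]
let input = vec![Fr::from(1u64), Fr::from(2u64)];
let bytes = vec_fr_to_bytes_le(&input);
assert_eq!(bytes.len(), 8 + 32 * input.len());

// The matching deserializer returns the elements plus the bytes consumed.
let (decoded, read) = bytes_le_to_vec_fr(&bytes);
assert_eq!(decoded, input);
assert_eq!(read, bytes.len());
```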

View File

@@ -3,8 +3,8 @@ mod test {
use ark_std::{rand::thread_rng, UniformRand};
use rand::Rng;
use rln::circuit::*;
use rln::ffi::{hash as ffi_hash, poseidon_hash as ffi_poseidon_hash, *};
use rln::poseidon_hash::{poseidon_hash as utils_poseidon_hash, ROUND_PARAMS};
use rln::ffi::*;
use rln::poseidon_hash::poseidon_hash;
use rln::protocol::*;
use rln::public::RLN;
use rln::utils::*;
@@ -78,7 +78,7 @@ mod test {
assert!(success, "set tree call failed");
// We add leaves in a batch into the tree
let leaves_ser = vec_fr_to_bytes_le(&leaves).unwrap();
let leaves_ser = vec_fr_to_bytes_le(&leaves);
let input_buffer = &Buffer::from(leaves_ser.as_ref());
let success = init_tree_with_leaves(rln_pointer, input_buffer);
assert!(success, "init tree with leaves call failed");
@@ -153,7 +153,7 @@ mod test {
let set_index = rng.gen_range(0..no_of_leaves) as usize;
// We add leaves in a batch into the tree
let leaves_ser = vec_fr_to_bytes_le(&leaves).unwrap();
let leaves_ser = vec_fr_to_bytes_le(&leaves);
let input_buffer = &Buffer::from(leaves_ser.as_ref());
let success = init_tree_with_leaves(rln_pointer, input_buffer);
assert!(success, "init tree with leaves call failed");
@@ -170,13 +170,13 @@ mod test {
// `init_tree_with_leaves` resets the tree to the height it was initialized with, using `set_tree`
// We add leaves in a batch starting from index 0..set_index
let leaves_m = vec_fr_to_bytes_le(&leaves[0..set_index]).unwrap();
let leaves_m = vec_fr_to_bytes_le(&leaves[0..set_index]);
let buffer = &Buffer::from(leaves_m.as_ref());
let success = init_tree_with_leaves(rln_pointer, buffer);
assert!(success, "init tree with leaves call failed");
// We add the remaining n leaves in a batch starting from index set_index
let leaves_n = vec_fr_to_bytes_le(&leaves[set_index..]).unwrap();
let leaves_n = vec_fr_to_bytes_le(&leaves[set_index..]);
let buffer = &Buffer::from(leaves_n.as_ref());
let success = set_leaves_from(rln_pointer, set_index, buffer);
assert!(success, "set leaves from call failed");
@@ -248,7 +248,7 @@ mod test {
let (root_empty, _) = bytes_le_to_fr(&result_data);
// We add leaves in a batch into the tree
let leaves = vec_fr_to_bytes_le(&leaves).unwrap();
let leaves = vec_fr_to_bytes_le(&leaves);
let buffer = &Buffer::from(leaves.as_ref());
let success = set_leaves_from(rln_pointer, bad_index, buffer);
assert!(!success, "set leaves from call succeeded");
@@ -280,7 +280,7 @@ mod test {
// generate identity
let identity_secret_hash = hash_to_field(b"test-merkle-proof");
let id_commitment = utils_poseidon_hash(&vec![identity_secret_hash]);
let id_commitment = poseidon_hash(&vec![identity_secret_hash]);
// We prepare id_commitment and we set the leaf at the provided index
let leaf_ser = fr_to_bytes_le(&id_commitment);
@@ -303,86 +303,71 @@ mod test {
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (path_elements, read) = bytes_le_to_vec_fr(&result_data).unwrap();
let (identity_path_index, _) = bytes_le_to_vec_u8(&result_data[read..].to_vec()).unwrap();
let (path_elements, read) = bytes_le_to_vec_fr(&result_data);
let (identity_path_index, _) = bytes_le_to_vec_u8(&result_data[read..].to_vec());
// We check correct computation of the path and indexes
let mut expected_path_elements = vec![
str_to_fr(
"0x0000000000000000000000000000000000000000000000000000000000000000",
16,
)
.unwrap(),
),
str_to_fr(
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
16,
)
.unwrap(),
),
str_to_fr(
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
16,
)
.unwrap(),
),
str_to_fr(
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
16,
)
.unwrap(),
),
str_to_fr(
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
16,
)
.unwrap(),
),
str_to_fr(
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
16,
)
.unwrap(),
),
str_to_fr(
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
16,
)
.unwrap(),
),
str_to_fr(
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
16,
)
.unwrap(),
),
str_to_fr(
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
16,
)
.unwrap(),
),
str_to_fr(
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
16,
)
.unwrap(),
),
str_to_fr(
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
16,
)
.unwrap(),
),
str_to_fr(
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
16,
)
.unwrap(),
),
str_to_fr(
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
16,
)
.unwrap(),
),
str_to_fr(
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
16,
)
.unwrap(),
),
str_to_fr(
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
16,
)
.unwrap(),
),
];
let mut expected_identity_path_index: Vec<u8> =
@@ -394,23 +379,19 @@ mod test {
str_to_fr(
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
16,
)
.unwrap(),
),
str_to_fr(
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
16,
)
.unwrap(),
),
str_to_fr(
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
16,
)
.unwrap(),
),
str_to_fr(
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
16,
)
.unwrap(),
),
]);
expected_identity_path_index.append(&mut vec![0, 0, 0, 0]);
}
@@ -419,8 +400,7 @@ mod test {
expected_path_elements.append(&mut vec![str_to_fr(
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
16,
)
.unwrap()]);
)]);
expected_identity_path_index.append(&mut vec![0]);
}
@@ -459,7 +439,7 @@ mod test {
let proof_values = proof_values_from_witness(&rln_witness);
// We serialize the RLN witness for the FFI call
let rln_witness_ser = serialize_witness(&rln_witness).unwrap();
let rln_witness_ser = serialize_witness(&rln_witness);
let input_buffer = &Buffer::from(rln_witness_ser.as_ref());
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let now = Instant::now();
@@ -589,7 +569,7 @@ mod test {
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
// We add leaves in a batch into the tree
let leaves_ser = vec_fr_to_bytes_le(&leaves).unwrap();
let leaves_ser = vec_fr_to_bytes_le(&leaves);
let input_buffer = &Buffer::from(leaves_ser.as_ref());
let success = init_tree_with_leaves(rln_pointer, input_buffer);
assert!(success, "init tree with leaves call failed");
@@ -609,11 +589,12 @@ mod test {
let success = set_next_leaf(rln_pointer, input_buffer);
assert!(success, "set next leaf call failed");
let identity_index: usize = no_of_leaves;
let identity_index: u64 = no_of_leaves;
// We generate a random signal
let mut rng = rand::thread_rng();
let signal: [u8; 32] = rng.gen();
let signal_len = u64::try_from(signal.len()).unwrap();
// We generate a random epoch
let epoch = hash_to_field(b"test-epoch");
@@ -622,9 +603,9 @@ mod test {
// input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret_hash));
serialized.append(&mut normalize_usize(identity_index));
serialized.append(&mut identity_index.to_le_bytes().to_vec());
serialized.append(&mut fr_to_bytes_le(&epoch));
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut signal_len.to_le_bytes().to_vec());
serialized.append(&mut signal.to_vec());
// We call generate_rln_proof
@@ -639,7 +620,7 @@ mod test {
// We prepare input for verify_rln_proof API
// input_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
// that is [ proof_data | signal_len<8> | signal<var> ]
proof_data.append(&mut normalize_usize(signal.len()));
proof_data.append(&mut signal_len.to_le_bytes().to_vec());
proof_data.append(&mut signal.to_vec());
// We call verify_rln_proof
@@ -673,7 +654,7 @@ mod test {
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
// We add leaves in a batch into the tree
let leaves_ser = vec_fr_to_bytes_le(&leaves).unwrap();
let leaves_ser = vec_fr_to_bytes_le(&leaves);
let input_buffer = &Buffer::from(leaves_ser.as_ref());
let success = init_tree_with_leaves(rln_pointer, input_buffer);
assert!(success, "set leaves call failed");
@@ -693,11 +674,12 @@ mod test {
let success = set_next_leaf(rln_pointer, input_buffer);
assert!(success, "set next leaf call failed");
let identity_index: usize = no_of_leaves;
let identity_index: u64 = no_of_leaves;
// We generate a random signal
let mut rng = rand::thread_rng();
let signal: [u8; 32] = rng.gen();
let signal_len = u64::try_from(signal.len()).unwrap();
// We generate a random epoch
let epoch = hash_to_field(b"test-epoch");
@@ -706,9 +688,9 @@ mod test {
// input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret_hash));
serialized.append(&mut normalize_usize(identity_index));
serialized.append(&mut identity_index.to_le_bytes().to_vec());
serialized.append(&mut fr_to_bytes_le(&epoch));
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut signal_len.to_le_bytes().to_vec());
serialized.append(&mut signal.to_vec());
// We call generate_rln_proof
@@ -723,7 +705,7 @@ mod test {
// We prepare input for verify_rln_proof API
// input_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
// that is [ proof_data | signal_len<8> | signal<var> ]
proof_data.append(&mut normalize_usize(signal.len()));
proof_data.append(&mut signal_len.to_le_bytes().to_vec());
proof_data.append(&mut signal.to_vec());
// We test verify_with_roots
@@ -805,16 +787,18 @@ mod test {
let success = set_next_leaf(rln_pointer, input_buffer);
assert!(success, "set next leaf call failed");
let identity_index: usize = 0;
let identity_index: u64 = 0;
// We generate two proofs using the same epoch but different signals.
// We generate two random signals
let mut rng = rand::thread_rng();
let signal1: [u8; 32] = rng.gen();
let signal1_len = u64::try_from(signal1.len()).unwrap();
// We generate the second random signal
let signal2: [u8; 32] = rng.gen();
let signal2_len = u64::try_from(signal2.len()).unwrap();
// We generate a random epoch
let epoch = hash_to_field(b"test-epoch");
@@ -823,18 +807,18 @@ mod test {
// input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
let mut serialized1: Vec<u8> = Vec::new();
serialized1.append(&mut fr_to_bytes_le(&identity_secret_hash));
serialized1.append(&mut normalize_usize(identity_index));
serialized1.append(&mut identity_index.to_le_bytes().to_vec());
serialized1.append(&mut fr_to_bytes_le(&epoch));
// The first part is the same for both proof input, so we clone
let mut serialized2 = serialized1.clone();
// We attach the first signal to the first proof input
serialized1.append(&mut normalize_usize(signal1.len()));
serialized1.append(&mut signal1_len.to_le_bytes().to_vec());
serialized1.append(&mut signal1.to_vec());
// We attach the second signal to the first proof input
serialized2.append(&mut normalize_usize(signal2.len()));
serialized2.append(&mut signal2_len.to_le_bytes().to_vec());
serialized2.append(&mut signal2.to_vec());
// We call generate_rln_proof for first proof values
@@ -893,19 +877,20 @@ mod test {
let success = set_next_leaf(rln_pointer, input_buffer);
assert!(success, "set next leaf call failed");
let identity_index_new: usize = 1;
let identity_index_new: u64 = 1;
// We generate a random signal
let signal3: [u8; 32] = rng.gen();
let signal3_len = u64::try_from(signal3.len()).unwrap();
// We prepare input for generate_rln_proof API
// input_data is [ identity_secret<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
// Note that epoch is the same as before
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret_hash_new));
serialized.append(&mut normalize_usize(identity_index_new));
serialized.append(&mut identity_index_new.to_le_bytes().to_vec());
serialized.append(&mut fr_to_bytes_le(&epoch));
serialized.append(&mut normalize_usize(signal3.len()));
serialized.append(&mut signal3_len.to_le_bytes().to_vec());
serialized.append(&mut signal3.to_vec());
// We call generate_rln_proof
@@ -972,15 +957,16 @@ mod test {
assert_eq!(
identity_secret_hash,
expected_identity_secret_hash_seed_bytes.unwrap()
expected_identity_secret_hash_seed_bytes
);
assert_eq!(id_commitment, expected_id_commitment_seed_bytes.unwrap());
assert_eq!(id_commitment, expected_id_commitment_seed_bytes);
}
#[test]
// Tests seeded extended keygen using FFI APIs
fn test_seeded_extended_keygen_ffi() {
let tree_height = TEST_TREE_HEIGHT;
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
@@ -1018,31 +1004,34 @@ mod test {
16,
);
assert_eq!(
identity_trapdoor,
expected_identity_trapdoor_seed_bytes.unwrap()
);
assert_eq!(
identity_nullifier,
expected_identity_nullifier_seed_bytes.unwrap()
);
assert_eq!(identity_trapdoor, expected_identity_trapdoor_seed_bytes);
assert_eq!(identity_nullifier, expected_identity_nullifier_seed_bytes);
assert_eq!(
identity_secret_hash,
expected_identity_secret_hash_seed_bytes.unwrap()
expected_identity_secret_hash_seed_bytes
);
assert_eq!(id_commitment, expected_id_commitment_seed_bytes.unwrap());
assert_eq!(id_commitment, expected_id_commitment_seed_bytes);
}
#[test]
// Tests hash to field using FFI APIs
fn test_hash_to_field_ffi() {
let tree_height = TEST_TREE_HEIGHT;
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
let mut rng = rand::thread_rng();
let signal: [u8; 32] = rng.gen();
// We prepare the random signal as input to the hash call
let input_buffer = &Buffer::from(signal.as_ref());
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = ffi_hash(input_buffer, output_buffer.as_mut_ptr());
let success = hash(rln_pointer, input_buffer, output_buffer.as_mut_ptr());
assert!(success, "hash call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
@@ -1054,30 +1043,4 @@ mod test {
assert_eq!(hash1, hash2);
}
#[test]
// Test Poseidon hash FFI
fn test_poseidon_hash_ffi() {
// generate random number between 1..ROUND_PARAMS.len()
let mut rng = thread_rng();
let number_of_inputs = rng.gen_range(1..ROUND_PARAMS.len());
let mut inputs = Vec::with_capacity(number_of_inputs);
for _ in 0..number_of_inputs {
inputs.push(Fr::rand(&mut rng));
}
let inputs_ser = vec_fr_to_bytes_le(&inputs).unwrap();
let input_buffer = &Buffer::from(inputs_ser.as_ref());
let expected_hash = utils_poseidon_hash(inputs.as_ref());
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = ffi_poseidon_hash(input_buffer, output_buffer.as_mut_ptr());
assert!(success, "poseidon hash call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (received_hash, _) = bytes_le_to_fr(&result_data);
assert_eq!(received_hash, expected_hash);
}
}

View File

@@ -82,7 +82,7 @@ mod test {
// Test module for pmtree integration and features in zerokit
// enabled only if the pmtree feature is enabled
#[cfg(feature = "pmtree-ft")]
#[cfg(feature = "pmtree")]
#[cfg(test)]
mod pmtree_test {
@@ -91,29 +91,47 @@ mod pmtree_test {
use rln::poseidon_hash::poseidon_hash;
use rln::poseidon_tree::PoseidonHash;
use rln::protocol::hash_to_field;
use rln::utils::str_to_fr;
use rln::utils::{bytes_le_to_fr, fr_to_bytes_le, str_to_fr};
use sled::Db as Sled;
use std::collections::HashMap;
use std::fs;
use std::{collections::HashMap, path::PathBuf};
use std::path::Path;
use utils::{FullMerkleTree, OptimalMerkleTree};
// The pmtree Hasher trait used by the pmtree Merkle tree
impl pmtree::Hasher for PoseidonHash {
type Fr = Fr;
fn default_leaf() -> Self::Fr {
Fr::from(0)
}
fn serialize(value: Self::Fr) -> Value {
fr_to_bytes_le(&value)
}
fn deserialize(value: Value) -> Self::Fr {
let (fr, _) = bytes_le_to_fr(&value);
fr
}
fn hash(inputs: &[Self::Fr]) -> Self::Fr {
poseidon_hash(inputs)
}
}
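A quick sanity sketch of the new serialize/deserialize hooks, reusing the `fr_to_bytes_le`/`bytes_le_to_fr` helpers imported above (illustrative only, not part of the test suite):

```rust
let leaf = Fr::from(42u64);
// serialize and deserialize round-trip through the little-endian byte encoding.
let bytes = <PoseidonHash as pmtree::Hasher>::serialize(leaf);
let roundtrip = <PoseidonHash as pmtree::Hasher>::deserialize(bytes);
assert_eq!(roundtrip, leaf);
```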
// pmtree supports in-memory and on-disk databases (Database trait) for storing the Merkle tree state
// We implement Database for HashMap, an in-memory database
struct MemoryDB(HashMap<DBKey, Value>);
#[derive(Default)]
struct MemoryDBConfig {}
impl Database for MemoryDB {
type Config = MemoryDBConfig;
fn new(_config: Self::Config) -> Result<Self> {
fn new(_dbpath: &str) -> Result<Self> {
Ok(MemoryDB(HashMap::new()))
}
fn load(_config: Self::Config) -> Result<Self> {
Err(Box::new(Error("Cannot load in-memory DB".to_string())))
fn load(_dbpath: &str) -> Result<Self> {
Err(Error("Cannot load in-memory DB".to_string()))
}
fn get(&self, key: DBKey) -> Result<Option<Value>> {
@@ -125,46 +143,36 @@ mod pmtree_test {
Ok(())
}
fn put_batch(&mut self, subtree: HashMap<DBKey, Value>) -> Result<()> {
self.0.extend(subtree);
Ok(())
}
}
// We implement Database for sled DB, an on-disk database
struct SledDB(Sled);
impl Database for SledDB {
type Config = sled::Config;
fn new(config: Self::Config) -> Result<Self> {
let dbpath = config.path;
if config.dbpath.exists() {
match fs::remove_dir_all(&config.dbpath) {
fn new(dbpath: &str) -> Result<Self> {
if Path::new(dbpath).exists() {
match fs::remove_dir_all(dbpath) {
Ok(x) => x,
Err(e) => return Err(Box::new(Error(e.to_string()))),
Err(e) => return Err(Error(e.to_string())),
}
}
let db: Sled = match config.open() {
let db: Sled = match sled::open(dbpath) {
Ok(db) => db,
Err(e) => return Err(Box::new(Error(e.to_string()))),
Err(e) => return Err(Error(e.to_string())),
};
Ok(SledDB(db))
}
fn load(config: Self::Config) -> Result<Self> {
let db: Sled = match sled::open(config.dbpath) {
fn load(dbpath: &str) -> Result<Self> {
let db: Sled = match sled::open(dbpath) {
Ok(db) => db,
Err(e) => return Err(Box::new(Error(e.to_string()))),
Err(e) => return Err(Error(e.to_string())),
};
if !db.was_recovered() {
return Err(Box::new(Error(
"Trying to load non-existing database!".to_string(),
)));
return Err(Error("Trying to load non-existing database!".to_string()));
}
Ok(SledDB(db))
@@ -173,27 +181,16 @@ mod pmtree_test {
fn get(&self, key: DBKey) -> Result<Option<Value>> {
match self.0.get(key) {
Ok(value) => Ok(value.map(|val| val.to_vec())),
Err(e) => Err(Box::new(Error(e.to_string()))),
Err(e) => Err(Error(e.to_string())),
}
}
fn put(&mut self, key: DBKey, value: Value) -> Result<()> {
match self.0.insert(key, value) {
Ok(_) => Ok(()),
Err(e) => Err(Box::new(Error(e.to_string()))),
Err(e) => Err(Error(e.to_string())),
}
}
fn put_batch(&mut self, subtree: HashMap<DBKey, Value>) -> Result<()> {
let mut batch = sled::Batch::default();
for (key, value) in subtree {
batch.insert(&key, value);
}
self.0.apply_batch(batch)?;
Ok(())
}
}
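After this change both back-ends are constructed from a plain path string instead of a back-end-specific config type. A usage sketch (the `/tmp/pmtree-db` path is hypothetical):

```rust
// In-memory: `new` ignores the path and always succeeds; `load` is rejected by design.
let mem_db = MemoryDB::new("unused").unwrap();

// On-disk: `new` wipes any existing database at the path, while `load`
// requires one to already exist (it checks sled's was_recovered flag).
let disk_db = SledDB::new("/tmp/pmtree-db").unwrap();
```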
#[test]
@@ -344,7 +341,6 @@ mod pmtree_test {
"0x21947ffd0bce0c385f876e7c97d6a42eec5b1fe935aab2f01c1f8a8cbcc356d2",
16
)
.unwrap()
);
let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
@@ -357,103 +353,83 @@ mod pmtree_test {
str_to_fr(
"0x0000000000000000000000000000000000000000000000000000000000000000",
16,
)
.unwrap(),
),
str_to_fr(
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
16,
)
.unwrap(),
),
str_to_fr(
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
16,
)
.unwrap(),
),
str_to_fr(
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
16,
)
.unwrap(),
),
str_to_fr(
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
16,
)
.unwrap(),
),
str_to_fr(
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
16,
)
.unwrap(),
),
str_to_fr(
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
16,
)
.unwrap(),
),
str_to_fr(
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
16,
)
.unwrap(),
),
str_to_fr(
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
16,
)
.unwrap(),
),
str_to_fr(
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
16,
)
.unwrap(),
),
str_to_fr(
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
16,
)
.unwrap(),
),
str_to_fr(
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
16,
)
.unwrap(),
),
str_to_fr(
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
16,
)
.unwrap(),
),
str_to_fr(
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
16,
)
.unwrap(),
),
str_to_fr(
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
16,
)
.unwrap(),
),
str_to_fr(
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
16,
)
.unwrap(),
),
str_to_fr(
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
16,
)
.unwrap(),
),
str_to_fr(
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
16,
)
.unwrap(),
),
str_to_fr(
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
16,
)
.unwrap(),
),
str_to_fr(
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
16,
)
.unwrap(),
),
];
let expected_identity_path_index: Vec<u8> =
@@ -491,7 +467,6 @@ mod pmtree_test {
"0x21947ffd0bce0c385f876e7c97d6a42eec5b1fe935aab2f01c1f8a8cbcc356d2",
16
)
.unwrap()
);
let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
@@ -504,103 +479,83 @@ mod pmtree_test {
str_to_fr(
"0x0000000000000000000000000000000000000000000000000000000000000000",
16,
)
.unwrap(),
),
str_to_fr(
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
16,
)
.unwrap(),
),
str_to_fr(
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
16,
)
.unwrap(),
),
str_to_fr(
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
16,
)
.unwrap(),
),
str_to_fr(
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
16,
)
.unwrap(),
),
str_to_fr(
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
16,
)
.unwrap(),
),
str_to_fr(
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
16,
)
.unwrap(),
),
str_to_fr(
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
16,
)
.unwrap(),
),
str_to_fr(
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
16,
)
.unwrap(),
),
str_to_fr(
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
16,
)
.unwrap(),
),
str_to_fr(
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
16,
)
.unwrap(),
),
str_to_fr(
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
16,
)
.unwrap(),
),
str_to_fr(
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
16,
)
.unwrap(),
),
str_to_fr(
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
16,
)
.unwrap(),
),
str_to_fr(
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
16,
)
.unwrap(),
),
str_to_fr(
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
16,
)
.unwrap(),
),
str_to_fr(
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
16,
)
.unwrap(),
),
str_to_fr(
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
16,
)
.unwrap(),
),
str_to_fr(
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
16,
)
.unwrap(),
),
str_to_fr(
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
16,
)
.unwrap(),
),
];
let expected_identity_path_index: Vec<u8> =

View File

@@ -184,7 +184,6 @@ mod test {
"0x1984f2e01184aef5cb974640898a5f5c25556554e2b06d99d4841badb8b198cd",
16
)
.unwrap()
);
} else if TEST_TREE_HEIGHT == 19 {
assert_eq!(
@@ -193,7 +192,6 @@ mod test {
"0x219ceb53f2b1b7a6cf74e80d50d44d68ecb4a53c6cc65b25593c8d56343fb1fe",
16
)
.unwrap()
);
} else if TEST_TREE_HEIGHT == 20 {
assert_eq!(
@@ -202,7 +200,6 @@ mod test {
"0x21947ffd0bce0c385f876e7c97d6a42eec5b1fe935aab2f01c1f8a8cbcc356d2",
16
)
.unwrap()
);
}
@@ -216,78 +213,63 @@ mod test {
str_to_fr(
"0x0000000000000000000000000000000000000000000000000000000000000000",
16,
)
.unwrap(),
),
str_to_fr(
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
16,
)
.unwrap(),
),
str_to_fr(
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
16,
)
.unwrap(),
),
str_to_fr(
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
16,
)
.unwrap(),
),
str_to_fr(
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
16,
)
.unwrap(),
),
str_to_fr(
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
16,
)
.unwrap(),
),
str_to_fr(
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
16,
)
.unwrap(),
),
str_to_fr(
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
16,
)
.unwrap(),
),
str_to_fr(
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
16,
)
.unwrap(),
),
str_to_fr(
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
16,
)
.unwrap(),
),
str_to_fr(
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
16,
)
.unwrap(),
),
str_to_fr(
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
16,
)
.unwrap(),
),
str_to_fr(
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
16,
)
.unwrap(),
),
str_to_fr(
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
16,
)
.unwrap(),
),
str_to_fr(
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
16,
)
.unwrap(),
),
];
let mut expected_identity_path_index: Vec<u8> =
@@ -299,23 +281,19 @@ mod test {
str_to_fr(
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
16,
)
.unwrap(),
),
str_to_fr(
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
16,
)
.unwrap(),
),
str_to_fr(
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
16,
)
.unwrap(),
),
str_to_fr(
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
16,
)
.unwrap(),
),
]);
expected_identity_path_index.append(&mut vec![0, 0, 0, 0]);
}
@@ -324,8 +302,7 @@ mod test {
expected_path_elements.append(&mut vec![str_to_fr(
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
16,
)
.unwrap()]);
)]);
expected_identity_path_index.append(&mut vec![0]);
}
@@ -342,7 +319,7 @@ mod test {
// We generate all relevant keys
let proving_key = zkey_from_folder(TEST_RESOURCES_FOLDER).unwrap();
let verification_key = vk_from_folder(TEST_RESOURCES_FOLDER).unwrap();
let builder = circom_from_folder(TEST_RESOURCES_FOLDER).unwrap();
let builder = circom_from_folder(TEST_RESOURCES_FOLDER);
// We compute witness from the json input example
let mut witness_json: &str = "";
@@ -357,12 +334,10 @@ mod test {
let rln_witness = rln_witness_from_json(witness_json);
let rln_witness_unwrapped = rln_witness.unwrap();
// Let's generate a zkSNARK proof
let proof = generate_proof(builder, &proving_key, &rln_witness_unwrapped).unwrap();
let proof = generate_proof(builder, &proving_key, &rln_witness).unwrap();
let proof_values = proof_values_from_witness(&rln_witness_unwrapped);
let proof_values = proof_values_from_witness(&rln_witness);
// Let's verify the proof
let verified = verify_proof(&verification_key, &proof, &proof_values);
@@ -403,7 +378,7 @@ mod test {
// We generate all relevant keys
let proving_key = zkey_from_folder(TEST_RESOURCES_FOLDER).unwrap();
let verification_key = vk_from_folder(TEST_RESOURCES_FOLDER).unwrap();
let builder = circom_from_folder(TEST_RESOURCES_FOLDER).unwrap();
let builder = circom_from_folder(TEST_RESOURCES_FOLDER);
// Let's generate a zkSNARK proof
let proof = generate_proof(builder, &proving_key, &rln_witness).unwrap();
@@ -429,10 +404,10 @@ mod test {
witness_json = WITNESS_JSON_20;
}
let rln_witness = rln_witness_from_json(witness_json).unwrap();
let rln_witness = rln_witness_from_json(witness_json);
let ser = serialize_witness(&rln_witness).unwrap();
let (deser, _) = deserialize_witness(&ser).unwrap();
let ser = serialize_witness(&rln_witness);
let (deser, _) = deserialize_witness(&ser);
assert_eq!(rln_witness, deser);
// We test Proof values serialization
@@ -454,13 +429,11 @@ mod test {
let expected_identity_secret_hash_seed_phrase = str_to_fr(
"0x20df38f3f00496f19fe7c6535492543b21798ed7cb91aebe4af8012db884eda3",
16,
)
.unwrap();
);
let expected_id_commitment_seed_phrase = str_to_fr(
"0x1223a78a5d66043a7f9863e14507dc80720a5602b2a894923e5b5147d5a9c325",
16,
)
.unwrap();
);
assert_eq!(
identity_secret_hash,
@@ -476,13 +449,11 @@ mod test {
let expected_identity_secret_hash_seed_bytes = str_to_fr(
"0x766ce6c7e7a01bdf5b3f257616f603918c30946fa23480f2859c597817e6716",
16,
)
.unwrap();
);
let expected_id_commitment_seed_bytes = str_to_fr(
"0xbf16d2b5c0d6f9d9d561e05bfca16a81b4b873bb063508fae360d8c74cef51f",
16,
)
.unwrap();
);
assert_eq!(
identity_secret_hash,

View File

@@ -1,11 +1,10 @@
#[cfg(test)]
mod test {
use ark_std::{rand::thread_rng, UniformRand};
use rand::Rng;
use rln::circuit::{Fr, TEST_RESOURCES_FOLDER, TEST_TREE_HEIGHT};
use rln::poseidon_hash::{poseidon_hash as utils_poseidon_hash, ROUND_PARAMS};
use rln::circuit::{TEST_RESOURCES_FOLDER, TEST_TREE_HEIGHT};
use rln::poseidon_hash::poseidon_hash;
use rln::protocol::{compute_tree_root, deserialize_identity_tuple, hash_to_field};
use rln::public::{hash as public_hash, poseidon_hash as public_poseidon_hash, RLN};
use rln::public::RLN;
use rln::utils::*;
use std::io::Cursor;
@@ -16,11 +15,11 @@ mod test {
let leaf_index = 3;
let input_buffer = Cursor::new(TEST_RESOURCES_FOLDER);
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
let mut rln = RLN::new(tree_height, input_buffer);
// generate identity
let identity_secret_hash = hash_to_field(b"test-merkle-proof");
let id_commitment = utils_poseidon_hash(&vec![identity_secret_hash]);
let id_commitment = poseidon_hash(&vec![identity_secret_hash]);
// We pass id_commitment as Read buffer to RLN's set_leaf
let mut buffer = Cursor::new(fr_to_bytes_le(&id_commitment));
@@ -38,7 +37,6 @@ mod test {
"0x1984f2e01184aef5cb974640898a5f5c25556554e2b06d99d4841badb8b198cd",
16
)
.unwrap()
);
} else if TEST_TREE_HEIGHT == 19 {
assert_eq!(
@@ -47,7 +45,6 @@ mod test {
"0x219ceb53f2b1b7a6cf74e80d50d44d68ecb4a53c6cc65b25593c8d56343fb1fe",
16
)
.unwrap()
);
} else if TEST_TREE_HEIGHT == 20 {
assert_eq!(
@@ -56,7 +53,6 @@ mod test {
"0x21947ffd0bce0c385f876e7c97d6a42eec5b1fe935aab2f01c1f8a8cbcc356d2",
16
)
.unwrap()
);
}
@@ -65,86 +61,71 @@ mod test {
rln.get_proof(leaf_index, &mut buffer).unwrap();
let buffer_inner = buffer.into_inner();
let (path_elements, read) = bytes_le_to_vec_fr(&buffer_inner).unwrap();
let (identity_path_index, _) = bytes_le_to_vec_u8(&buffer_inner[read..].to_vec()).unwrap();
let (path_elements, read) = bytes_le_to_vec_fr(&buffer_inner);
let (identity_path_index, _) = bytes_le_to_vec_u8(&buffer_inner[read..].to_vec());
// We check correct computation of the path and indexes
let mut expected_path_elements = vec![
str_to_fr(
"0x0000000000000000000000000000000000000000000000000000000000000000",
16,
)
.unwrap(),
),
str_to_fr(
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
16,
)
.unwrap(),
),
str_to_fr(
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
16,
)
.unwrap(),
),
str_to_fr(
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
16,
)
.unwrap(),
),
str_to_fr(
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
16,
)
.unwrap(),
),
str_to_fr(
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
16,
)
.unwrap(),
),
str_to_fr(
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
16,
)
.unwrap(),
),
str_to_fr(
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
16,
)
.unwrap(),
),
str_to_fr(
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
16,
)
.unwrap(),
),
str_to_fr(
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
16,
)
.unwrap(),
),
str_to_fr(
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
16,
)
.unwrap(),
),
str_to_fr(
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
16,
)
.unwrap(),
),
str_to_fr(
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
16,
)
.unwrap(),
),
str_to_fr(
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
16,
)
.unwrap(),
),
str_to_fr(
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
16,
)
.unwrap(),
),
];
let mut expected_identity_path_index: Vec<u8> =
@@ -156,23 +137,19 @@ mod test {
str_to_fr(
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
16,
)
.unwrap(),
),
str_to_fr(
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
16,
)
.unwrap(),
),
str_to_fr(
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
16,
)
.unwrap(),
),
str_to_fr(
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
16,
)
.unwrap(),
),
]);
expected_identity_path_index.append(&mut vec![0, 0, 0, 0]);
}
@@ -181,8 +158,7 @@ mod test {
expected_path_elements.append(&mut vec![str_to_fr(
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
16,
)
.unwrap()]);
)]);
expected_identity_path_index.append(&mut vec![0]);
}
@@ -216,13 +192,11 @@ mod test {
let expected_identity_secret_hash_seed_bytes = str_to_fr(
"0x766ce6c7e7a01bdf5b3f257616f603918c30946fa23480f2859c597817e6716",
16,
)
.unwrap();
);
let expected_id_commitment_seed_bytes = str_to_fr(
"0xbf16d2b5c0d6f9d9d561e05bfca16a81b4b873bb063508fae360d8c74cef51f",
16,
)
.unwrap();
);
assert_eq!(
identity_secret_hash,
@@ -251,23 +225,19 @@ mod test {
let expected_identity_trapdoor_seed_bytes = str_to_fr(
"0x766ce6c7e7a01bdf5b3f257616f603918c30946fa23480f2859c597817e6716",
16,
)
.unwrap();
);
let expected_identity_nullifier_seed_bytes = str_to_fr(
"0x1f18714c7bc83b5bca9e89d404cf6f2f585bc4c0f7ed8b53742b7e2b298f50b4",
16,
)
.unwrap();
);
let expected_identity_secret_hash_seed_bytes = str_to_fr(
"0x2aca62aaa7abaf3686fff2caf00f55ab9462dc12db5b5d4bcf3994e671f8e521",
16,
)
.unwrap();
);
let expected_id_commitment_seed_bytes = str_to_fr(
"0x68b66aa0a8320d2e56842581553285393188714c48f9b17acd198b4f1734c5c",
16,
)
.unwrap();
);
assert_eq!(identity_trapdoor, expected_identity_trapdoor_seed_bytes);
assert_eq!(identity_nullifier, expected_identity_nullifier_seed_bytes);
@@ -280,13 +250,15 @@ mod test {
#[test]
fn test_hash_to_field() {
let mut rng = thread_rng();
let rln = RLN::default();
let mut rng = rand::thread_rng();
let signal: [u8; 32] = rng.gen();
let mut input_buffer = Cursor::new(&signal);
let mut output_buffer = Cursor::new(Vec::<u8>::new());
public_hash(&mut input_buffer, &mut output_buffer).unwrap();
rln.hash(&mut input_buffer, &mut output_buffer).unwrap();
let serialized_hash = output_buffer.into_inner();
let (hash1, _) = bytes_le_to_fr(&serialized_hash);
@@ -294,24 +266,4 @@ mod test {
assert_eq!(hash1, hash2);
}
#[test]
fn test_poseidon_hash() {
let mut rng = thread_rng();
let number_of_inputs = rng.gen_range(1..ROUND_PARAMS.len());
let mut inputs = Vec::with_capacity(number_of_inputs);
for _ in 0..number_of_inputs {
inputs.push(Fr::rand(&mut rng));
}
let expected_hash = utils_poseidon_hash(&inputs);
let mut input_buffer = Cursor::new(vec_fr_to_bytes_le(&inputs).unwrap());
let mut output_buffer = Cursor::new(Vec::<u8>::new());
public_poseidon_hash(&mut input_buffer, &mut output_buffer).unwrap();
let serialized_hash = output_buffer.into_inner();
let (hash, _) = bytes_le_to_fr(&serialized_hash);
assert_eq!(hash, expected_hash);
}
}

View File

@@ -2,7 +2,6 @@
name = "semaphore-wrapper"
version = "0.1.0"
edition = "2021"
license = "MIT OR Apache-2.0"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@@ -12,12 +11,13 @@ dylib = [ "wasmer/dylib", "wasmer-engine-dylib", "wasmer-compiler-cranelift" ]
[dependencies]
ark-bn254 = { version = "0.3.0" }
ark-circom = { git = "https://github.com/gakonst/ark-circom", features=["circom-2"], rev = "35ce5a9" }
ark-circom = { git = "https://github.com/gakonst/ark-circom", features=["circom-2"] }
ark-ec = { version = "0.3.0", default-features = false, features = ["parallel"] }
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", features = ["parallel"] }
ark-relations = { version = "0.3.0", default-features = false }
ark-std = { version = "0.3.0", default-features = false, features = ["parallel"] }
color-eyre = "0.6.1"
color-eyre = "0.5"
num-bigint = { version = "0.4", default-features = false, features = ["rand"] }
once_cell = "1.8"
rand = "0.8.4"
semaphore = { git = "https://github.com/worldcoin/semaphore-rs", rev = "ee658c2"}
@@ -32,10 +32,10 @@ rand_chacha = "0.3.1"
serde_json = "1.0.79"
[build-dependencies]
color-eyre = "0.6.1"
color-eyre = "0.5"
wasmer = { version = "2.0" }
wasmer-engine-dylib = { version = "2.2.1", optional = true }
wasmer-compiler-cranelift = { version = "3.1.1", optional = true }
wasmer-compiler-cranelift = { version = "2.2.1", optional = true }
[profile.release]
codegen-units = 1
@@ -47,4 +47,4 @@ opt-level = 3
# Dependencies are optimized, even in a dev build. This improves dev performance
# while having negligible impact on incremental build times.
[profile.dev.package."*"]
opt-level = 3
opt-level = 3

View File

@@ -8,11 +8,3 @@ Goal is also to provide a basic FFI around protocol.rs, which is currently not
in scope for that project.
See that project for more information.
## Build and Test
To build and test, run the following commands within the module folder
```bash
cargo make build
cargo make test
```

View File

@@ -37,7 +37,7 @@ fn build_circuit() -> Result<()> {
.current_dir("./vendor/semaphore")
.status()?
.success()
.then_some(())
.then(|| ())
.ok_or(eyre!("procees returned failure"))?;
Ok(())
};

View File

@@ -50,7 +50,7 @@ fn from_dylib(path: &Path) -> Mutex<WitnessCalculator> {
#[must_use]
pub fn zkey() -> &'static (ProvingKey<Bn254>, ConstraintMatrices<Fr>) {
&ZKEY
&*ZKEY
}
#[cfg(feature = "dylib")]

View File

@@ -12,7 +12,7 @@ use ark_groth16::{
};
use ark_relations::r1cs::SynthesisError;
use ark_std::UniformRand;
use color_eyre::{Report, Result};
use color_eyre::Result;
use ethers_core::types::U256;
use rand::{thread_rng, Rng};
use semaphore::{
@@ -89,7 +89,7 @@ pub enum ProofError {
#[error("Error reading circuit key: {0}")]
CircuitKeyError(#[from] std::io::Error),
#[error("Error producing witness: {0}")]
WitnessError(Report),
WitnessError(color_eyre::Report),
#[error("Error producing proof: {0}")]
SynthesisError(#[from] SynthesisError),
#[error("Error converting public input: {0}")]

View File

@@ -2,15 +2,13 @@
name = "utils"
version = "0.1.0"
edition = "2021"
license = "MIT OR Apache-2.0"
[dependencies]
ark-ff = { version = "=0.3.0", default-features = false, features = ["asm"] }
num-bigint = { version = "=0.4.3", default-features = false, features = ["rand"] }
color-eyre = "=0.6.2"
ark-ff = { version = "0.3.0", default-features = false, features = ["asm"] }
num-bigint = { version = "0.4.3", default-features = false, features = ["rand"] }
[dev-dependencies]
ark-bn254 = "=0.3.0"
ark-bn254 = { version = "0.3.0" }
num-traits = "0.2.11"
hex-literal = "0.3.4"
tiny-keccak = { version = "2.0.2", features = ["keccak"] }

View File

@@ -16,14 +16,13 @@
#![allow(dead_code)]
use std::collections::HashMap;
use std::io;
use std::{
cmp::max,
fmt::Debug,
iter::{once, repeat, successors},
};
use color_eyre::{Report, Result};
/// The Hasher trait defines the node type, the default leaf,
/// and the hash function used by a Merkle tree implementation
pub trait Hasher {
@@ -87,7 +86,7 @@ impl<H: Hasher> OptimalMerkleTree<H> {
cached_nodes.reverse();
OptimalMerkleTree {
cached_nodes: cached_nodes.clone(),
depth,
depth: depth,
nodes: HashMap::new(),
next_index: 0,
}
@@ -115,12 +114,15 @@ impl<H: Hasher> OptimalMerkleTree<H> {
}
// Sets a leaf at the specified tree index
pub fn set(&mut self, index: usize, leaf: H::Fr) -> Result<()> {
pub fn set(&mut self, index: usize, leaf: H::Fr) -> io::Result<()> {
if index >= self.capacity() {
return Err(Report::msg("index exceeds set size"));
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"index exceeds set size",
));
}
self.nodes.insert((self.depth, index), leaf);
self.recalculate_from(index)?;
self.recalculate_from(index);
self.next_index = max(self.next_index, index + 1);
Ok(())
}
@@ -130,28 +132,31 @@ impl<H: Hasher> OptimalMerkleTree<H> {
&mut self,
start: usize,
leaves: I,
) -> Result<()> {
) -> io::Result<()> {
let leaves = leaves.into_iter().collect::<Vec<_>>();
// check if the range is valid
if start + leaves.len() > self.capacity() {
return Err(Report::msg("provided range exceeds set size"));
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"provided range exceeds set size",
));
}
for (i, leaf) in leaves.iter().enumerate() {
self.nodes.insert((self.depth, start + i), *leaf);
self.recalculate_from(start + i)?;
self.recalculate_from(start + i);
}
self.next_index = max(self.next_index, start + leaves.len());
Ok(())
}
// Sets a leaf at the next available index
pub fn update_next(&mut self, leaf: H::Fr) -> Result<()> {
pub fn update_next(&mut self, leaf: H::Fr) -> io::Result<()> {
self.set(self.next_index, leaf)?;
Ok(())
}
// Deletes a leaf at a certain index by setting it to its default value (next_index is not updated)
pub fn delete(&mut self, index: usize) -> Result<()> {
pub fn delete(&mut self, index: usize) -> io::Result<()> {
// We reset the leaf only if we previously set a leaf at that index
if index < self.next_index {
self.set(index, H::default_leaf())?;
@@ -160,9 +165,12 @@ impl<H: Hasher> OptimalMerkleTree<H> {
}
// Computes a Merkle proof for the leaf at the specified index
pub fn proof(&self, index: usize) -> Result<OptimalMerkleProof<H>> {
pub fn proof(&self, index: usize) -> io::Result<OptimalMerkleProof<H>> {
if index >= self.capacity() {
return Err(Report::msg("index exceeds set size"));
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"index exceeds set size",
));
}
let mut witness = Vec::<(H::Fr, u8)>::with_capacity(self.depth);
let mut i = index;
@@ -176,17 +184,17 @@ impl<H: Hasher> OptimalMerkleTree<H> {
break;
}
}
if i != 0 {
Err(Report::msg("i != 0"))
} else {
Ok(OptimalMerkleProof(witness))
}
assert_eq!(i, 0);
Ok(OptimalMerkleProof(witness))
}
// Verifies a Merkle proof with respect to the input leaf and the tree root
pub fn verify(&self, leaf: &H::Fr, witness: &OptimalMerkleProof<H>) -> Result<bool> {
pub fn verify(&self, leaf: &H::Fr, witness: &OptimalMerkleProof<H>) -> io::Result<bool> {
if witness.length() != self.depth {
return Err(Report::msg("witness length doesn't match tree depth"));
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"witness length doesn't match tree depth",
));
}
let expected_root = witness.compute_root_from(leaf);
Ok(expected_root.eq(&self.root()))
@@ -211,7 +219,7 @@ impl<H: Hasher> OptimalMerkleTree<H> {
H::hash(&[self.get_node(depth, b), self.get_node(depth, b + 1)])
}
fn recalculate_from(&mut self, index: usize) -> Result<()> {
fn recalculate_from(&mut self, index: usize) {
let mut i = index;
let mut depth = self.depth;
loop {
@@ -223,13 +231,8 @@ impl<H: Hasher> OptimalMerkleTree<H> {
break;
}
}
if depth != 0 {
return Err(Report::msg("did not reach the depth"));
}
if i != 0 {
return Err(Report::msg("did not go through all indexes"));
}
Ok(())
assert_eq!(depth, 0);
assert_eq!(i, 0);
}
}
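The pattern of this refactor: recoverable failures (bad indices, oversized ranges, mismatched witness lengths) now surface as `std::io::Error`, so callers keep using `?`, while internal invariants that can only fail on a logic bug become assertions. The error half in isolation, as a self-contained sketch:

```rust
use std::io;

// Mirrors the bounds checks used by set/set_range/proof above (sketch only).
fn check_index(index: usize, capacity: usize) -> io::Result<()> {
    if index >= capacity {
        return Err(io::Error::new(
            io::ErrorKind::InvalidInput,
            "index exceeds set size",
        ));
    }
    Ok(())
}
```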
@@ -384,7 +387,7 @@ impl<H: Hasher> FullMerkleTree<H> {
}
// Sets a leaf at the specified tree index
pub fn set(&mut self, leaf: usize, hash: H::Fr) -> Result<()> {
pub fn set(&mut self, leaf: usize, hash: H::Fr) -> io::Result<()> {
self.set_range(leaf, once(hash))?;
self.next_index = max(self.next_index, leaf + 1);
Ok(())
@@ -392,34 +395,41 @@ impl<H: Hasher> FullMerkleTree<H> {
// Sets tree nodes, starting from start index
// Function specific to the FullMerkleTree implementation
fn set_range<I: IntoIterator<Item = H::Fr>>(&mut self, start: usize, hashes: I) -> Result<()> {
fn set_range<I: IntoIterator<Item = H::Fr>>(
&mut self,
start: usize,
hashes: I,
) -> io::Result<()> {
let index = self.capacity() + start - 1;
let mut count = 0;
// first count the number of hashes and check that they fit in the tree
// then insert into the tree
let hashes = hashes.into_iter().collect::<Vec<_>>();
if hashes.len() + start > self.capacity() {
return Err(Report::msg("provided hashes do not fit in the tree"));
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"provided hashes do not fit in the tree",
));
}
hashes.into_iter().for_each(|hash| {
self.nodes[index + count] = hash;
count += 1;
});
if count != 0 {
self.update_nodes(index, index + (count - 1))?;
self.update_nodes(index, index + (count - 1));
self.next_index = max(self.next_index, start + count);
}
Ok(())
}
// Sets a leaf at the next available index
pub fn update_next(&mut self, leaf: H::Fr) -> Result<()> {
pub fn update_next(&mut self, leaf: H::Fr) -> io::Result<()> {
self.set(self.next_index, leaf)?;
Ok(())
}
// Deletes a leaf at a certain index by setting it to its default value (next_index is not updated)
pub fn delete(&mut self, index: usize) -> Result<()> {
pub fn delete(&mut self, index: usize) -> io::Result<()> {
// We reset the leaf only if we previously set a leaf at that index
if index < self.next_index {
self.set(index, H::default_leaf())?;
@@ -428,9 +438,12 @@ impl<H: Hasher> FullMerkleTree<H> {
}
// Computes a Merkle proof for the leaf at the specified index
pub fn proof(&self, leaf: usize) -> Result<FullMerkleProof<H>> {
pub fn proof(&self, leaf: usize) -> io::Result<FullMerkleProof<H>> {
if leaf >= self.capacity() {
return Err(Report::msg("index exceeds set size"));
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"index exceeds set size",
));
}
let mut index = self.capacity() + leaf - 1;
let mut path = Vec::with_capacity(self.depth + 1);
@@ -447,7 +460,7 @@ impl<H: Hasher> FullMerkleTree<H> {
}
// Verifies a Merkle proof with respect to the input leaf and the tree root
pub fn verify(&self, hash: &H::Fr, proof: &FullMerkleProof<H>) -> Result<bool> {
pub fn verify(&self, hash: &H::Fr, proof: &FullMerkleProof<H>) -> io::Result<bool> {
Ok(proof.compute_root_from(hash) == self.root())
}
@@ -474,18 +487,15 @@ impl<H: Hasher> FullMerkleTree<H> {
(index + 2).next_power_of_two().trailing_zeros() as usize - 1
}
fn update_nodes(&mut self, start: usize, end: usize) -> Result<()> {
if self.levels(start) != self.levels(end) {
return Err(Report::msg("self.levels(start) != self.levels(end)"));
}
fn update_nodes(&mut self, start: usize, end: usize) {
debug_assert_eq!(self.levels(start), self.levels(end));
if let (Some(start), Some(end)) = (self.parent(start), self.parent(end)) {
for parent in start..=end {
let child = self.first_child(parent);
self.nodes[parent] = H::hash(&[self.nodes[child], self.nodes[child + 1]]);
}
self.update_nodes(start, end)?;
self.update_nodes(start, end);
}
Ok(())
}
}
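For the flattened array layout used by `FullMerkleTree` (root at index 0, children of node i at 2i+1 and 2i+2, inferred from the `parent`/`first_child` helpers not shown in full here), the `levels` expression above recovers a node's depth with bit tricks. A worked check as a standalone sketch:

```rust
fn levels(index: usize) -> usize {
    (index + 2).next_power_of_two().trailing_zeros() as usize - 1
}

assert_eq!(levels(0), 0); // root
assert_eq!(levels(1), 1); // children of the root are at indices 1 and 2
assert_eq!(levels(2), 1);
assert_eq!(levels(3), 2); // grandchildren start at index 3
assert_eq!(levels(6), 2); // ...and end at index 6
```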

View File

@@ -1,3 +1,2 @@
#[allow(clippy::module_inception)]
pub mod merkle_tree;
pub use self::merkle_tree::*;

View File

@@ -88,8 +88,8 @@ impl PoseidonGrainLFSR {
}
// b50, ..., b79 are set to 1
for item in state.iter_mut().skip(50) {
*item = true;
for i in 50..=79 {
state[i] = true;
}
let head = 0;
@@ -111,7 +111,7 @@ impl PoseidonGrainLFSR {
let mut new_bit = self.update();
// Loop until the first bit is true
while !new_bit {
while new_bit == false {
// Discard the second bit
let _ = self.update();
// Obtain another first bit
@@ -263,8 +263,8 @@ pub fn find_poseidon_ark_and_mds<F: PrimeField>(
let ys = lfsr.get_field_elements_mod_p::<F>(rate);
for i in 0..(rate) {
for (j, ys_item) in ys.iter().enumerate().take(rate) {
mds[i][j] = (xs[i] + ys_item).inverse().unwrap();
for j in 0..(rate) {
mds[i][j] = (xs[i] + &ys[j]).inverse().unwrap();
}
}
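The nested loop above fills in a Cauchy matrix over the prime field, a standard construction for Poseidon MDS matrices:

    mds[i][j] = (x_i + y_j)^(-1),  0 <= i, j < rate

where the `x` and `y` vectors are sampled from the Grain LFSR and every pairwise sum must be invertible, hence the `unwrap()` on `inverse()`.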

View File

@@ -28,7 +28,8 @@ impl<F: PrimeField> Poseidon<F> {
pub fn from(poseidon_params: &[(usize, usize, usize, usize)]) -> Self {
let mut read_params = Vec::<RoundParamenters<F>>::new();
for &(t, n_rounds_f, n_rounds_p, skip_matrices) in poseidon_params {
for i in 0..poseidon_params.len() {
let (t, n_rounds_f, n_rounds_p, skip_matrices) = poseidon_params[i];
let (ark, mds) = find_poseidon_ark_and_mds::<F>(
1, // is_field = 1
0, // is_sbox_inverse = 0
@@ -39,10 +40,10 @@ impl<F: PrimeField> Poseidon<F> {
skip_matrices,
);
let rp = RoundParamenters {
t,
n_rounds_p,
n_rounds_f,
skip_matrices,
t: t,
n_rounds_p: n_rounds_p,
n_rounds_f: n_rounds_f,
skip_matrices: skip_matrices,
c: ark,
m: mds,
};
@@ -66,11 +67,11 @@ impl<F: PrimeField> Poseidon<F> {
pub fn sbox(&self, n_rounds_f: usize, n_rounds_p: usize, state: &mut [F], i: usize) {
if (i < n_rounds_f / 2) || (i >= n_rounds_f / 2 + n_rounds_p) {
for current_state in &mut state.iter_mut() {
let aux = *current_state;
*current_state *= *current_state;
*current_state *= *current_state;
*current_state *= aux;
for j in 0..state.len() {
let aux = state[j];
state[j] *= state[j];
state[j] *= state[j];
state[j] *= aux;
}
} else {
let aux = state[0];
@@ -84,9 +85,9 @@ impl<F: PrimeField> Poseidon<F> {
let mut new_state: Vec<F> = Vec::new();
for i in 0..state.len() {
new_state.push(F::zero());
for (j, state_item) in state.iter().enumerate() {
for j in 0..state.len() {
let mut mij = m[i][j];
mij *= state_item;
mij *= state[j];
new_state[i] += mij;
}
}
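Both hunks above are behavior-preserving rewrites of clippy-flagged loops: the S-box raises each state element to the fifth power via two squarings and one multiplication, and `mix` is a plain matrix-vector product. The S-box identity as a generic sketch over `ark_ff::Field` (the trait already underlying these crates):

```rust
use ark_ff::Field;

// x^5 = (x^2)^2 * x, the same aux/square/square/multiply sequence as sbox above.
fn pow5<F: Field>(x: F) -> F {
    let x2 = x.square();
    x2.square() * x
}
```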
@@ -115,7 +116,7 @@ impl<F: PrimeField> Poseidon<F> {
self.ark(
&mut state,
&self.round_params[param_index].c,
i * self.round_params[param_index].t,
(i as usize) * self.round_params[param_index].t,
);
self.sbox(
self.round_params[param_index].n_rounds_f,