Mirror of https://github.com/vacp2p/zerokit.git (synced 2026-01-09 13:47:58 -05:00)

Compare commits: expose-has ... zerokit_ut (52 commits)
| SHA1 |
|---|
| b51896c3a7 |
| 0c5ef6abcf |
| a1c292cb2e |
| c6c1bfde91 |
| bf3d1d3309 |
| 7110e00674 |
| 99966d1a6e |
| 7d63912ace |
| ef1da42d94 |
| ecb4d9307f |
| d1414a44c5 |
| 6d58320077 |
| be2dccfdd0 |
| 9d4ed68450 |
| 5cf2b2e05e |
| 36158e8d08 |
| c8cf033f32 |
| 23d2331b78 |
| c6b7a8c0a4 |
| 4ec93c5e1f |
| c83c9902d7 |
| 131cacab35 |
| 8a365f0c9e |
| c561741339 |
| 90fdfb9d78 |
| 56b9285fef |
| be88a432d7 |
| 8cfd83de54 |
| 2793fe0e24 |
| 0d35571215 |
| 9cc86e526e |
| ecd056884c |
| 96497db7c5 |
| ba8f011cc1 |
| 9dc92ec1ce |
| 75d760c179 |
| 72a3ce1770 |
| b841e725a0 |
| 3177e3ae74 |
| 2c4de0484a |
| fcd4854037 |
| d68dc1ad8e |
| 8c3d60ed01 |
| c2d386cb74 |
| 8f2c9e3586 |
| 584c2cf4c0 |
| 2c4b399126 |
| c4b699ddff |
| 33d3732922 |
| 654c77dcf6 |
| 783f875d3b |
| fd7d7d9318 |
140  .github/workflows/ci.yml  vendored

@@ -28,18 +28,18 @@ on:
name: Tests

jobs:

multiplier:
test:
strategy:
matrix:
platform: [ubuntu-latest, macos-latest]
crate: [multiplier, semaphore, rln, utils]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60

name: multiplier - ${{ matrix.platform }}
name: test - ${{ matrix.crate }} - ${{ matrix.platform }}
steps:
- name: Checkout sources
uses: actions/checkout@v2
uses: actions/checkout@v3
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
@@ -52,33 +52,8 @@ jobs:
- name: cargo-make test
run: |
cargo make test --release
working-directory: multiplier
working-directory: ${{ matrix.crate }}

semaphore:
strategy:
matrix:
platform: [ubuntu-latest, macos-latest]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60

name: semaphore - ${{ matrix.platform }}
steps:
- name: Checkout sources
uses: actions/checkout@v2
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- uses: Swatinem/rust-cache@v2
- name: Install Dependencies
run: make installdeps
- name: cargo-make test
run: |
cargo make test --release
working-directory: semaphore

rln-wasm:
strategy:
matrix:
@@ -86,9 +61,9 @@ jobs:
runs-on: ${{ matrix.platform }}
timeout-minutes: 60

name: rln-wasm - ${{ matrix.platform }}
name: test - rln-wasm - ${{ matrix.platform }}
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
@@ -102,71 +77,22 @@ jobs:
uses: jetli/wasm-pack-action@v0.3.0
- run: cargo make build
working-directory: rln-wasm
- run: cargo-make test
- run: cargo make test --release
working-directory: rln-wasm

rln:
strategy:
matrix:
platform: [ubuntu-latest, macos-latest]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60

name: rln - ${{ matrix.platform }}
steps:
- name: Checkout sources
uses: actions/checkout@v2
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- uses: Swatinem/rust-cache@v2
- name: Install Dependencies
run: make installdeps
- name: cargo-make test
run: |
cargo make test --release
working-directory: rln

utils:
strategy:
matrix:
platform: [ubuntu-latest, macos-latest]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60

name: utils - ${{ matrix.platform }}
steps:
- name: Checkout sources
uses: actions/checkout@v2
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- uses: Swatinem/rust-cache@v2
- name: Install Dependencies
run: make installdeps
- name: cargo-make test
run: |
cargo make test --release
working-directory: utils

lint:
strategy:
matrix:
# we run lint tests only on ubuntu
platform: [ubuntu-latest]
crate: [multiplier, semaphore, rln, utils]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60

name: lint - ${{ matrix.platform }}
name: lint - ${{ matrix.crate }} - ${{ matrix.platform }}
steps:
- name: Checkout sources
uses: actions/checkout@v2
uses: actions/checkout@v3
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
@@ -179,27 +105,33 @@ jobs:
run: make installdeps
- name: cargo fmt
if: success() || failure()
run: cargo fmt --all -- --check
- name: multiplier - cargo clippy
run: cargo fmt -- --check
working-directory: ${{ matrix.crate }}
- name: cargo clippy
if: success() || failure()
run: |
cargo clippy --release -- -D warnings
working-directory: multiplier
- name: semaphore - cargo clippy
if: success() || failure()
run: |
cargo clippy --release -- -D warnings
working-directory: semaphore
- name: rln - cargo clippy
if: success() || failure()
run: |
cargo clippy --release -- -D warnings
working-directory: rln
- name: utils - cargo clippy
if: success() || failure()
run: |
cargo clippy --release -- -D warnings
working-directory: utils
working-directory: ${{ matrix.crate }}
# We skip clippy on rln-wasm, since wasm target is managed by cargo make
# Currently not treating warnings as error, too noisy
# -- -D warnings
# -- -D warnings
benchmark:
# run only in pull requests
if: github.event_name == 'pull_request'
strategy:
matrix:
# we run benchmark tests only on ubuntu
platform: [ubuntu-latest]
crate: [rln, utils]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60

name: benchmark - ${{ matrix.platform }} - ${{ matrix.crate }}
steps:
- name: Checkout sources
uses: actions/checkout@v3
- uses: Swatinem/rust-cache@v2
- uses: boa-dev/criterion-compare-action@v3
with:
branchName: ${{ github.base_ref }}
cwd: ${{ matrix.crate }}
8  .github/workflows/nightly-release.yml  vendored

@@ -16,7 +16,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout sources
uses: actions/checkout@v2
uses: actions/checkout@v3
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
@@ -51,7 +51,7 @@ jobs:
- aarch64-apple-darwin
steps:
- name: Checkout sources
uses: actions/checkout@v2
uses: actions/checkout@v3
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
@@ -81,7 +81,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout sources
uses: actions/checkout@v2
uses: actions/checkout@v3
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
@@ -115,7 +115,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
uses: actions/checkout@v3
with:
ref: master
- name: Download artifacts
2  .github/workflows/sync-labels.yml  vendored

@@ -9,7 +9,7 @@ jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v3
- uses: micnncim/action-label-syncer@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
1081  Cargo.lock  generated

File diff suppressed because it is too large.
@@ -4,9 +4,11 @@ members = [
"private-settlement",
"semaphore",
"rln",
"rln-cli",
"rln-wasm",
"utils",
]
resolver = "2"

# Compilation profile for any non-workspace member.
# Dependencies are optimized, even in a dev build. This improves dev performance
@@ -19,4 +21,4 @@ opt-level = 3
opt-level = "s"

[profile.release.package."semaphore"]
codegen-units = 1
codegen-units = 1
5  Makefile

@@ -8,12 +8,13 @@ all: .pre-build build
.pre-build: .fetch-submodules
@cargo install cargo-make
ifdef CI
@cargo install cross --git https://github.com/cross-rs/cross --branch main
@cargo install cross --git https://github.com/cross-rs/cross.git --rev 1511a28
endif

installdeps: .pre-build
ifeq ($(shell uname),Darwin)
@brew update
# commented due to https://github.com/orgs/Homebrew/discussions/4612
# @brew update
@brew install cmake ninja
else ifeq ($(shell uname),Linux)
@sudo apt-get update
@@ -1,6 +1,6 @@
[package]
name = "multiplier"
version = "0.1.0"
version = "0.3.0"
edition = "2018"
license = "MIT OR Apache-2.0"
@@ -1,6 +1,6 @@
[package]
name = "private-settlement"
version = "0.1.0"
version = "0.3.0"
edition = "2021"
license = "MIT OR Apache-2.0"
13  rln-cli/Cargo.toml  Normal file

@@ -0,0 +1,13 @@
[package]
name = "rln-cli"
version = "0.3.0"
edition = "2021"

[dependencies]
rln = { path = "../rln" }
clap = { version = "4.2.7", features = ["cargo", "derive", "env"]}
clap_derive = { version = "=4.2.0" }
color-eyre = "=0.6.2"
# serialization
serde_json = "1.0.48"
serde = { version = "1.0.130", features = ["derive"] }
67  rln-cli/src/commands.rs  Normal file
@@ -0,0 +1,67 @@
|
||||
use std::path::PathBuf;
|
||||
|
||||
use clap::Subcommand;
|
||||
|
||||
#[derive(Subcommand)]
|
||||
pub(crate) enum Commands {
|
||||
New {
|
||||
tree_height: usize,
|
||||
/// Sets a custom config file
|
||||
#[arg(short, long)]
|
||||
config: PathBuf,
|
||||
},
|
||||
NewWithParams {
|
||||
tree_height: usize,
|
||||
/// Sets a custom config file
|
||||
#[arg(short, long)]
|
||||
config: PathBuf,
|
||||
#[arg(short, long)]
|
||||
tree_config_input: PathBuf,
|
||||
},
|
||||
SetTree {
|
||||
tree_height: usize,
|
||||
},
|
||||
SetLeaf {
|
||||
index: usize,
|
||||
#[arg(short, long)]
|
||||
file: PathBuf,
|
||||
},
|
||||
SetMultipleLeaves {
|
||||
index: usize,
|
||||
#[arg(short, long)]
|
||||
file: PathBuf,
|
||||
},
|
||||
ResetMultipleLeaves {
|
||||
#[arg(short, long)]
|
||||
file: PathBuf,
|
||||
},
|
||||
SetNextLeaf {
|
||||
#[arg(short, long)]
|
||||
file: PathBuf,
|
||||
},
|
||||
DeleteLeaf {
|
||||
index: usize,
|
||||
},
|
||||
GetRoot,
|
||||
GetProof {
|
||||
index: usize,
|
||||
},
|
||||
Prove {
|
||||
#[arg(short, long)]
|
||||
input: PathBuf,
|
||||
},
|
||||
Verify {
|
||||
#[arg(short, long)]
|
||||
file: PathBuf,
|
||||
},
|
||||
GenerateProof {
|
||||
#[arg(short, long)]
|
||||
input: PathBuf,
|
||||
},
|
||||
VerifyWithRoots {
|
||||
#[arg(short, long)]
|
||||
input: PathBuf,
|
||||
#[arg(short, long)]
|
||||
roots: PathBuf,
|
||||
},
|
||||
}
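The `Commands` enum above is the whole CLI surface; clap derives the argument parser from it. A minimal sketch of the same derive pattern, using illustrative `DemoCli`/`DemoCommand` types rather than the crate's own:

```rust
// Minimal sketch of clap's derive-based subcommand parsing (clap 4.x),
// mirroring the pattern used by rln-cli. `DemoCli` / `DemoCommand` are
// illustrative names, not part of the crate.
use std::path::PathBuf;

use clap::{Parser, Subcommand};

#[derive(Parser)]
struct DemoCli {
    #[command(subcommand)]
    command: Option<DemoCommand>,
}

#[derive(Subcommand)]
enum DemoCommand {
    /// Positional argument plus a named file option, like `SetLeaf`.
    SetLeaf {
        index: usize,
        #[arg(short, long)]
        file: PathBuf,
    },
    /// A bare subcommand with no arguments, like `GetRoot`.
    GetRoot,
}

fn main() {
    // Parse from an explicit argv for demonstration; a real binary
    // would call `DemoCli::parse()` on the process arguments.
    let cli = DemoCli::parse_from(["demo", "set-leaf", "5", "--file", "leaf.bin"]);
    match cli.command {
        Some(DemoCommand::SetLeaf { index, file }) => {
            println!("set-leaf index={index} file={}", file.display());
        }
        Some(DemoCommand::GetRoot) => println!("get-root"),
        None => println!("no subcommand given"),
    }
}
```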
|
||||
29  rln-cli/src/config.rs  Normal file
@@ -0,0 +1,29 @@
|
||||
use color_eyre::Result;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{fs::File, io::Read, path::PathBuf};
|
||||
|
||||
pub const RLN_STATE_PATH: &str = "RLN_STATE_PATH";
|
||||
|
||||
#[derive(Default, Serialize, Deserialize)]
|
||||
pub(crate) struct Config {
|
||||
pub inner: Option<InnerConfig>,
|
||||
}
|
||||
|
||||
#[derive(Default, Serialize, Deserialize)]
|
||||
pub(crate) struct InnerConfig {
|
||||
pub file: PathBuf,
|
||||
pub tree_height: usize,
|
||||
}
|
||||
|
||||
impl Config {
|
||||
pub(crate) fn load_config() -> Result<Config> {
|
||||
let path = PathBuf::from(std::env::var(RLN_STATE_PATH)?);
|
||||
|
||||
let mut file = File::open(path)?;
|
||||
|
||||
let mut contents = String::new();
|
||||
file.read_to_string(&mut contents)?;
|
||||
let state: Config = serde_json::from_str(&contents)?;
|
||||
Ok(state)
|
||||
}
|
||||
}
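`load_config` resolves the state file through the `RLN_STATE_PATH` environment variable and deserializes the JSON found there. A sketch of producing such a file with serde, using hypothetical `DemoConfig`/`DemoInner` stand-ins for the CLI's `Config`/`InnerConfig` types:

```rust
// Sketch: writing a state file that a loader like `Config::load_config`
// could pick up via the RLN_STATE_PATH environment variable.
use std::{fs, path::PathBuf};

use serde::{Deserialize, Serialize};

#[derive(Default, Serialize, Deserialize)]
struct DemoConfig {
    inner: Option<DemoInner>,
}

#[derive(Default, Serialize, Deserialize)]
struct DemoInner {
    file: PathBuf,
    tree_height: usize,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let config = DemoConfig {
        inner: Some(DemoInner {
            file: PathBuf::from("rln_config.json"),
            tree_height: 20,
        }),
    };
    // Serialize the state file, then point the env var at it.
    let path = std::env::temp_dir().join("rln_state.json");
    fs::write(&path, serde_json::to_string_pretty(&config)?)?;
    std::env::set_var("RLN_STATE_PATH", &path);
    // A loader reading RLN_STATE_PATH would now find and parse this file.
    let raw = fs::read_to_string(std::env::var("RLN_STATE_PATH")?)?;
    let loaded: DemoConfig = serde_json::from_str(&raw)?;
    assert_eq!(loaded.inner.unwrap().tree_height, 20);
    Ok(())
}
```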
|
||||
157  rln-cli/src/main.rs  Normal file
@@ -0,0 +1,157 @@
|
||||
use std::{fs::File, io::Read, path::Path};
|
||||
|
||||
use clap::Parser;
|
||||
use color_eyre::{Report, Result};
|
||||
use commands::Commands;
|
||||
use rln::public::RLN;
|
||||
use state::State;
|
||||
|
||||
mod commands;
|
||||
mod config;
|
||||
mod state;
|
||||
|
||||
#[derive(Parser)]
|
||||
#[command(author, version, about, long_about = None)]
|
||||
struct Cli {
|
||||
#[command(subcommand)]
|
||||
command: Option<Commands>,
|
||||
}
|
||||
|
||||
fn main() -> Result<()> {
|
||||
let cli = Cli::parse();
|
||||
|
||||
let mut state = State::load_state()?;
|
||||
|
||||
match &cli.command {
|
||||
Some(Commands::New {
|
||||
tree_height,
|
||||
config,
|
||||
}) => {
|
||||
let resources = File::open(&config)?;
|
||||
state.rln = Some(RLN::new(*tree_height, resources)?);
|
||||
Ok(())
|
||||
}
|
||||
Some(Commands::NewWithParams {
|
||||
tree_height,
|
||||
config,
|
||||
tree_config_input,
|
||||
}) => {
|
||||
let mut resources: Vec<Vec<u8>> = Vec::new();
|
||||
for filename in ["rln.wasm", "rln_final.zkey", "verification_key.json"] {
|
||||
let fullpath = config.join(Path::new(filename));
|
||||
let mut file = File::open(&fullpath)?;
|
||||
let metadata = std::fs::metadata(&fullpath)?;
|
||||
let mut buffer = vec![0; metadata.len() as usize];
|
||||
file.read_exact(&mut buffer)?;
|
||||
resources.push(buffer);
|
||||
}
|
||||
let tree_config_input_file = File::open(&tree_config_input)?;
|
||||
state.rln = Some(RLN::new_with_params(
|
||||
*tree_height,
|
||||
resources[0].clone(),
|
||||
resources[1].clone(),
|
||||
resources[2].clone(),
|
||||
tree_config_input_file,
|
||||
)?);
|
||||
Ok(())
|
||||
}
|
||||
Some(Commands::SetTree { tree_height }) => {
|
||||
state
|
||||
.rln
|
||||
.ok_or(Report::msg("no RLN instance initialized"))?
|
||||
.set_tree(*tree_height)?;
|
||||
Ok(())
|
||||
}
|
||||
Some(Commands::SetLeaf { index, file }) => {
|
||||
let input_data = File::open(&file)?;
|
||||
state
|
||||
.rln
|
||||
.ok_or(Report::msg("no RLN instance initialized"))?
|
||||
.set_leaf(*index, input_data)?;
|
||||
Ok(())
|
||||
}
|
||||
Some(Commands::SetMultipleLeaves { index, file }) => {
|
||||
let input_data = File::open(&file)?;
|
||||
state
|
||||
.rln
|
||||
.ok_or(Report::msg("no RLN instance initialized"))?
|
||||
.set_leaves_from(*index, input_data)?;
|
||||
Ok(())
|
||||
}
|
||||
Some(Commands::ResetMultipleLeaves { file }) => {
|
||||
let input_data = File::open(&file)?;
|
||||
state
|
||||
.rln
|
||||
.ok_or(Report::msg("no RLN instance initialized"))?
|
||||
.init_tree_with_leaves(input_data)?;
|
||||
Ok(())
|
||||
}
|
||||
Some(Commands::SetNextLeaf { file }) => {
|
||||
let input_data = File::open(&file)?;
|
||||
state
|
||||
.rln
|
||||
.ok_or(Report::msg("no RLN instance initialized"))?
|
||||
.set_next_leaf(input_data)?;
|
||||
Ok(())
|
||||
}
|
||||
Some(Commands::DeleteLeaf { index }) => {
|
||||
state
|
||||
.rln
|
||||
.ok_or(Report::msg("no RLN instance initialized"))?
|
||||
.delete_leaf(*index)?;
|
||||
Ok(())
|
||||
}
|
||||
Some(Commands::GetRoot) => {
|
||||
let writer = std::io::stdout();
|
||||
state
|
||||
.rln
|
||||
.ok_or(Report::msg("no RLN instance initialized"))?
|
||||
.get_root(writer)?;
|
||||
Ok(())
|
||||
}
|
||||
Some(Commands::GetProof { index }) => {
|
||||
let writer = std::io::stdout();
|
||||
state
|
||||
.rln
|
||||
.ok_or(Report::msg("no RLN instance initialized"))?
|
||||
.get_proof(*index, writer)?;
|
||||
Ok(())
|
||||
}
|
||||
Some(Commands::Prove { input }) => {
|
||||
let input_data = File::open(&input)?;
|
||||
let writer = std::io::stdout();
|
||||
state
|
||||
.rln
|
||||
.ok_or(Report::msg("no RLN instance initialized"))?
|
||||
.prove(input_data, writer)?;
|
||||
Ok(())
|
||||
}
|
||||
Some(Commands::Verify { file }) => {
|
||||
let input_data = File::open(&file)?;
|
||||
state
|
||||
.rln
|
||||
.ok_or(Report::msg("no RLN instance initialized"))?
|
||||
.verify(input_data)?;
|
||||
Ok(())
|
||||
}
|
||||
Some(Commands::GenerateProof { input }) => {
|
||||
let input_data = File::open(&input)?;
|
||||
let writer = std::io::stdout();
|
||||
state
|
||||
.rln
|
||||
.ok_or(Report::msg("no RLN instance initialized"))?
|
||||
.generate_rln_proof(input_data, writer)?;
|
||||
Ok(())
|
||||
}
|
||||
Some(Commands::VerifyWithRoots { input, roots }) => {
|
||||
let input_data = File::open(&input)?;
|
||||
let roots_data = File::open(&roots)?;
|
||||
state
|
||||
.rln
|
||||
.ok_or(Report::msg("no RLN instance initialized"))?
|
||||
.verify_with_roots(input_data, roots_data)?;
|
||||
Ok(())
|
||||
}
|
||||
None => Ok(()),
|
||||
}
|
||||
}
|
||||
23  rln-cli/src/state.rs  Normal file
@@ -0,0 +1,23 @@
|
||||
use color_eyre::Result;
|
||||
use rln::public::RLN;
|
||||
use std::fs::File;
|
||||
|
||||
use crate::config::{Config, InnerConfig};
|
||||
|
||||
#[derive(Default)]
|
||||
pub(crate) struct State<'a> {
|
||||
pub rln: Option<RLN<'a>>,
|
||||
}
|
||||
|
||||
impl<'a> State<'a> {
|
||||
pub(crate) fn load_state() -> Result<State<'a>> {
|
||||
let config = Config::load_config()?;
|
||||
let rln = if let Some(InnerConfig { file, tree_height }) = config.inner {
|
||||
let resources = File::open(&file)?;
|
||||
Some(RLN::new(tree_height, resources)?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
Ok(State { rln })
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,6 @@
[package]
name = "rln-wasm"
version = "0.0.7"
version = "0.0.9"
edition = "2021"
license = "MIT or Apache2"
@@ -7,7 +7,7 @@ use std::vec::Vec;
|
||||
|
||||
use js_sys::{BigInt as JsBigInt, Object, Uint8Array};
|
||||
use num_bigint::BigInt;
|
||||
use rln::public::RLN;
|
||||
use rln::public::{hash, poseidon_hash, RLN};
|
||||
use wasm_bindgen::prelude::*;
|
||||
|
||||
#[wasm_bindgen]
|
||||
@@ -97,6 +97,40 @@ macro_rules! call_bool_method_with_error_msg {
|
||||
}
|
||||
}
|
||||
|
||||
// Macro to execute a function with an arbitrary number of arguments.
// The first argument is the function to execute;
// the rest are the remaining arguments passed to it.
|
||||
macro_rules! fn_call_with_output_and_error_msg {
|
||||
// this variant is needed for the case when
|
||||
// there are zero other arguments
|
||||
($func:ident, $error_msg:expr) => {
|
||||
{
|
||||
let mut output_data: Vec<u8> = Vec::new();
|
||||
if let Err(err) = $func(&mut output_data) {
|
||||
std::mem::forget(output_data);
|
||||
Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
|
||||
} else {
|
||||
let result = Uint8Array::from(&output_data[..]);
|
||||
std::mem::forget(output_data);
|
||||
Ok(result)
|
||||
}
|
||||
}
|
||||
};
|
||||
($func:ident, $error_msg:expr, $( $arg:expr ),* ) => {
|
||||
{
|
||||
let mut output_data: Vec<u8> = Vec::new();
|
||||
if let Err(err) = $func($($arg.process()),*, &mut output_data) {
|
||||
std::mem::forget(output_data);
|
||||
Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
|
||||
} else {
|
||||
let result = Uint8Array::from(&output_data[..]);
|
||||
std::mem::forget(output_data);
|
||||
Ok(result)
|
||||
}
|
||||
}
|
||||
};
|
||||
}
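The macro above runs a free function that writes its result into a `Vec<u8>`, turning success into a `Uint8Array` and failure into a formatted error string. A plain-Rust (non-wasm) analogue of that output-buffer pattern, with a toy `demo_hash` standing in for functions like `hash`:

```rust
// Simplified, non-wasm analogue of the output-buffer pattern used by
// `fn_call_with_output_and_error_msg!`: call a function that appends its
// result to a Vec<u8>, and map the outcome to Ok(bytes) / Err(message).
fn demo_hash(input: &[u8], output: &mut Vec<u8>) -> Result<(), String> {
    if input.is_empty() {
        return Err("empty input".to_string());
    }
    // Toy "hash": xor-fold the input into a single byte.
    output.push(input.iter().fold(0u8, |acc, b| acc ^ *b));
    Ok(())
}

fn call_with_output(input: &[u8]) -> Result<Vec<u8>, String> {
    let mut output_data: Vec<u8> = Vec::new();
    match demo_hash(input, &mut output_data) {
        // On success the wasm macro converts the bytes to a Uint8Array
        // (and uses std::mem::forget because ownership crosses the JS
        // boundary); here we simply return the buffer.
        Ok(()) => Ok(output_data),
        Err(err) => Err(format!("Msg: {:#?}, Error: {:#?}", "could not hash", err)),
    }
}

fn main() {
    assert!(call_with_output(b"signal").is_ok());
    assert!(call_with_output(b"").is_err());
}
```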
|
||||
|
||||
trait ProcessArg {
|
||||
type ReturnType;
|
||||
fn process(self) -> Self::ReturnType;
|
||||
@@ -196,6 +230,29 @@ pub fn wasm_set_leaves_from(
|
||||
)
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[wasm_bindgen(js_name = deleteLeaf)]
|
||||
pub fn wasm_delete_leaf(ctx: *mut RLNWrapper, index: usize) -> Result<(), String> {
|
||||
call_with_error_msg!(ctx, delete_leaf, "could not delete leaf".to_string(), index)
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[wasm_bindgen(js_name = setMetadata)]
|
||||
pub fn wasm_set_metadata(ctx: *mut RLNWrapper, input: Uint8Array) -> Result<(), String> {
|
||||
call_with_error_msg!(
|
||||
ctx,
|
||||
set_metadata,
|
||||
"could not set metadata".to_string(),
|
||||
&*input.to_vec()
|
||||
)
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[wasm_bindgen(js_name = getMetadata)]
|
||||
pub fn wasm_get_metadata(ctx: *mut RLNWrapper) -> Result<Uint8Array, String> {
|
||||
call_with_output_and_error_msg!(ctx, get_metadata, "could not get metadata".to_string())
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[wasm_bindgen(js_name = initTreeWithLeaves)]
|
||||
pub fn wasm_init_tree_with_leaves(ctx: *mut RLNWrapper, input: Uint8Array) -> Result<(), String> {
|
||||
@@ -333,3 +390,17 @@ pub fn wasm_verify_with_roots(
|
||||
pub fn wasm_get_root(ctx: *const RLNWrapper) -> Result<Uint8Array, String> {
|
||||
call_with_output_and_error_msg!(ctx, get_root, "could not obtain root")
|
||||
}
|
||||
|
||||
#[wasm_bindgen(js_name = hash)]
|
||||
pub fn wasm_hash(input: Uint8Array) -> Result<Uint8Array, String> {
|
||||
fn_call_with_output_and_error_msg!(hash, "could not generate hash", &input.to_vec()[..])
|
||||
}
|
||||
|
||||
#[wasm_bindgen(js_name = poseidonHash)]
|
||||
pub fn wasm_poseidon_hash(input: Uint8Array) -> Result<Uint8Array, String> {
|
||||
fn_call_with_output_and_error_msg!(
|
||||
poseidon_hash,
|
||||
"could not generate poseidon hash",
|
||||
&input.to_vec()[..]
|
||||
)
|
||||
}
|
||||
|
||||
@@ -6,8 +6,7 @@ mod tests {
|
||||
use rln::circuit::TEST_TREE_HEIGHT;
|
||||
use rln::utils::normalize_usize;
|
||||
use rln_wasm::*;
|
||||
use wasm_bindgen::prelude::*;
|
||||
use wasm_bindgen::JsValue;
|
||||
use wasm_bindgen::{prelude::*, JsValue};
|
||||
use wasm_bindgen_test::wasm_bindgen_test;
|
||||
|
||||
#[wasm_bindgen(module = "src/utils.js")]
|
||||
@@ -30,7 +29,7 @@ mod tests {
|
||||
let vk = read_file(&vk_path).unwrap();
|
||||
|
||||
// Creating an instance of RLN
|
||||
let rln_instance = wasm_new(tree_height, zkey, vk);
|
||||
let rln_instance = wasm_new(tree_height, zkey, vk).unwrap();
|
||||
|
||||
// Creating membership key
|
||||
let mem_keys = wasm_key_gen(rln_instance).unwrap();
|
||||
@@ -59,10 +58,11 @@ mod tests {
|
||||
let serialized_message = Uint8Array::from(&serialized_vec[..]);
|
||||
|
||||
let serialized_rln_witness =
|
||||
wasm_get_serialized_rln_witness(rln_instance, serialized_message);
|
||||
wasm_get_serialized_rln_witness(rln_instance, serialized_message).unwrap();
|
||||
|
||||
// Obtaining inputs that should be sent to circom witness calculator
|
||||
let json_inputs = rln_witness_to_json(rln_instance, serialized_rln_witness.clone());
|
||||
let json_inputs =
|
||||
rln_witness_to_json(rln_instance, serialized_rln_witness.clone()).unwrap();
|
||||
|
||||
// Calculating witness with JS
|
||||
// (Using a JSON since wasm_bindgen does not like Result<Vec<JsBigInt>,JsValue>)
|
||||
@@ -108,4 +108,28 @@ mod tests {
|
||||
let is_proof_valid = wasm_verify_with_roots(rln_instance, proof_with_signal, roots);
|
||||
assert!(is_proof_valid.unwrap(), "verifying proof with roots failed");
|
||||
}
|
||||
|
||||
#[wasm_bindgen_test]
|
||||
fn test_metadata() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let circom_path = format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/rln.wasm");
|
||||
let zkey_path = format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/rln_final.zkey");
|
||||
let vk_path =
|
||||
format!("../rln/resources/tree_height_{TEST_TREE_HEIGHT}/verification_key.json");
|
||||
let zkey = read_file(&zkey_path).unwrap();
|
||||
let vk = read_file(&vk_path).unwrap();
|
||||
|
||||
// Creating an instance of RLN
|
||||
let rln_instance = wasm_new(tree_height, zkey, vk).unwrap();
|
||||
|
||||
|
||||
let test_metadata = Uint8Array::new(&JsValue::from_str("test"));
|
||||
// Inserting random metadata
|
||||
wasm_set_metadata(rln_instance, test_metadata.clone()).unwrap();
|
||||
|
||||
// Getting metadata
|
||||
let metadata = wasm_get_metadata(rln_instance).unwrap();
|
||||
|
||||
assert_eq!(metadata.to_vec(), test_metadata.to_vec());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,16 @@
|
||||
[package]
|
||||
name = "rln"
|
||||
version = "0.1.0"
|
||||
version = "0.3.2"
|
||||
edition = "2021"
|
||||
license = "MIT OR Apache-2.0"
|
||||
description = "APIs to manage, compute and verify zkSNARK proofs and RLN primitives"
|
||||
documentation = "https://github.com/vacp2p/zerokit"
|
||||
homepage = "https://vac.dev"
|
||||
repository = "https://github.com/vacp2p/zerokit"
|
||||
|
||||
[lib]
|
||||
crate-type = ["rlib", "staticlib"]
|
||||
bench = false
|
||||
|
||||
# This flag disables cargo doctests, i.e. testing example code-snippets in documentation
|
||||
doctest = false
|
||||
@@ -20,39 +25,44 @@ ark-bn254 = { version = "=0.4.0" }
|
||||
ark-groth16 = { version = "=0.4.0", features = ["parallel"], default-features = false }
|
||||
ark-relations = { version = "=0.4.0", default-features = false, features = [ "std" ] }
|
||||
ark-serialize = { version = "=0.4.1", default-features = false }
|
||||
ark-circom = { git = "https://github.com/gakonst/ark-circom", default-features = false, features = ["circom-2"] }
|
||||
ark-circom = { version = "=0.1.0", default-features = false, features = ["circom-2"] }
|
||||
|
||||
# WASM
|
||||
wasmer = { version = "2.3.0", default-features = false }
|
||||
wasmer = { version = "=2.3.0", default-features = false }
|
||||
|
||||
# error handling
|
||||
color-eyre = "=0.6.2"
|
||||
thiserror = "=1.0.38"
|
||||
thiserror = "=1.0.39"
|
||||
|
||||
# utilities
|
||||
cfg-if = "=1.0"
|
||||
num-bigint = { version = "=0.4.3", default-features = false, features = ["rand"] }
|
||||
num-traits = "0.2.11"
|
||||
once_cell = "1.14.0"
|
||||
num-traits = "=0.2.15"
|
||||
once_cell = "=1.17.1"
|
||||
rand = "=0.8.5"
|
||||
rand_chacha = "=0.3.1"
|
||||
tiny-keccak = { version = "=2.0.2", features = ["keccak"] }
|
||||
utils = { path = "../utils/", default-features = false }
|
||||
pmtree = { git = "https://github.com/Rate-Limiting-Nullifier/pmtree", rev = "f6d1a1fecad72cd39e6808e78085091d541dc882", optional = true}
|
||||
utils = { package = "zerokit_utils", version = "=0.3.2", path = "../utils/", default-features = false }
|
||||
|
||||
# serialization
|
||||
serde_json = "1.0.48"
|
||||
serde_json = "=1.0.96"
|
||||
serde = { version = "=1.0.163", features = ["derive"] }
|
||||
|
||||
include_dir = "=0.7.3"
|
||||
|
||||
[dev-dependencies]
|
||||
sled = "=0.34.7"
|
||||
criterion = { version = "=0.4.0", features = ["html_reports"] }
|
||||
|
||||
[features]
|
||||
default = ["parallel", "wasmer/sys-default"]
|
||||
default = ["parallel", "wasmer/sys-default", "pmtree-ft"]
|
||||
parallel = ["ark-ec/parallel", "ark-ff/parallel", "ark-std/parallel", "ark-groth16/parallel", "utils/parallel"]
|
||||
wasm = ["wasmer/js", "wasmer/std"]
|
||||
fullmerkletree = ["default"]
|
||||
|
||||
# Note: pmtree feature is still experimental
|
||||
pmtree-ft = ["default", "pmtree"]
|
||||
pmtree-ft = ["utils/pmtree-ft"]
|
||||
|
||||
[[bench]]
|
||||
name = "pmtree_benchmark"
|
||||
harness = false
|
||||
|
||||
@@ -5,3 +5,7 @@ args = ["build", "--release"]
[tasks.test]
command = "cargo"
args = ["test", "--release"]

[tasks.bench]
command = "cargo"
args = ["bench"]
44  rln/benches/pmtree_benchmark.rs  Normal file
@@ -0,0 +1,44 @@
|
||||
use criterion::{criterion_group, criterion_main, Criterion};
|
||||
use utils::ZerokitMerkleTree;
|
||||
|
||||
use rln::{circuit::Fr, pm_tree_adapter::PmTree};
|
||||
|
||||
pub fn pmtree_benchmark(c: &mut Criterion) {
|
||||
let mut tree = PmTree::default(2).unwrap();
|
||||
|
||||
let leaves: Vec<Fr> = (0..4).map(|s| Fr::from(s)).collect();
|
||||
|
||||
c.bench_function("Pmtree::set", |b| {
|
||||
b.iter(|| {
|
||||
tree.set(0, leaves[0]).unwrap();
|
||||
})
|
||||
});
|
||||
|
||||
c.bench_function("Pmtree:delete", |b| {
|
||||
b.iter(|| {
|
||||
tree.delete(0).unwrap();
|
||||
})
|
||||
});
|
||||
|
||||
c.bench_function("Pmtree::override_range", |b| {
|
||||
b.iter(|| {
|
||||
tree.override_range(0, leaves.clone(), [0, 1, 2, 3])
|
||||
.unwrap();
|
||||
})
|
||||
});
|
||||
|
||||
c.bench_function("Pmtree::compute_root", |b| {
|
||||
b.iter(|| {
|
||||
tree.compute_root().unwrap();
|
||||
})
|
||||
});
|
||||
|
||||
c.bench_function("Pmtree::get", |b| {
|
||||
b.iter(|| {
|
||||
tree.get(0).unwrap();
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
criterion_group!(benches, pmtree_benchmark);
|
||||
criterion_main!(benches);
|
||||
156  rln/src/ffi.rs
@@ -12,7 +12,15 @@ macro_rules! call {
|
||||
($instance:expr, $method:ident $(, $arg:expr)*) => {
|
||||
{
|
||||
let new_instance: &mut RLN = $instance.process();
|
||||
new_instance.$method($($arg.process()),*).is_ok()
|
||||
match new_instance.$method($($arg.process()),*) {
|
||||
Ok(()) => {
|
||||
true
|
||||
}
|
||||
Err(err) => {
|
||||
eprintln!("execution error: {err}");
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
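The reworked `call!` macro matches on the `Result` so the error is logged before the FFI layer collapses it to `false`. A hand-expanded sketch of the same pattern on a toy type (the real macro operates on the `RLN` instance behind the raw pointer):

```rust
// Hand-expanded sketch of the pattern behind the reworked `call!` macro:
// run a fallible method, log the error on failure, and return a bool for
// the C caller. `DemoRln::set_leaf` stands in for the real RLN methods.
struct DemoRln {
    leaves: Vec<u8>,
}

impl DemoRln {
    fn set_leaf(&mut self, index: usize, value: u8) -> Result<(), String> {
        self.leaves
            .get_mut(index)
            .map(|slot| *slot = value)
            .ok_or_else(|| format!("index {index} out of range"))
    }
}

fn ffi_style_set_leaf(instance: &mut DemoRln, index: usize, value: u8) -> bool {
    match instance.set_leaf(index, value) {
        Ok(()) => true,
        Err(err) => {
            // This logging is what the rework adds over the old `.is_ok()`.
            eprintln!("execution error: {err}");
            false
        }
    }
}

fn main() {
    let mut rln = DemoRln { leaves: vec![0; 4] };
    assert!(ffi_style_set_leaf(&mut rln, 1, 7));
    assert!(!ffi_style_set_leaf(&mut rln, 99, 7)); // logs and returns false
}
```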
|
||||
@@ -30,13 +38,17 @@ macro_rules! call_with_output_arg {
|
||||
{
|
||||
let mut output_data: Vec<u8> = Vec::new();
|
||||
let new_instance = $instance.process();
|
||||
if new_instance.$method(&mut output_data).is_ok() {
|
||||
unsafe { *$output_arg = Buffer::from(&output_data[..]) };
|
||||
std::mem::forget(output_data);
|
||||
true
|
||||
} else {
|
||||
std::mem::forget(output_data);
|
||||
false
|
||||
match new_instance.$method(&mut output_data) {
|
||||
Ok(()) => {
|
||||
unsafe { *$output_arg = Buffer::from(&output_data[..]) };
|
||||
std::mem::forget(output_data);
|
||||
true
|
||||
}
|
||||
Err(err) => {
|
||||
std::mem::forget(output_data);
|
||||
eprintln!("execution error: {err}");
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
@@ -44,13 +56,17 @@ macro_rules! call_with_output_arg {
|
||||
{
|
||||
let mut output_data: Vec<u8> = Vec::new();
|
||||
let new_instance = $instance.process();
|
||||
if new_instance.$method($($arg.process()),*, &mut output_data).is_ok() {
|
||||
unsafe { *$output_arg = Buffer::from(&output_data[..]) };
|
||||
std::mem::forget(output_data);
|
||||
true
|
||||
} else {
|
||||
std::mem::forget(output_data);
|
||||
false
|
||||
match new_instance.$method($($arg.process()),*, &mut output_data) {
|
||||
Ok(()) => {
|
||||
unsafe { *$output_arg = Buffer::from(&output_data[..]) };
|
||||
std::mem::forget(output_data);
|
||||
true
|
||||
}
|
||||
Err(err) => {
|
||||
std::mem::forget(output_data);
|
||||
eprintln!("execution error: {err}");
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
@@ -66,13 +82,17 @@ macro_rules! no_ctx_call_with_output_arg {
|
||||
($method:ident, $output_arg:expr, $( $arg:expr ),* ) => {
|
||||
{
|
||||
let mut output_data: Vec<u8> = Vec::new();
|
||||
if $method($($arg.process()),*, &mut output_data).is_ok() {
|
||||
unsafe { *$output_arg = Buffer::from(&output_data[..]) };
|
||||
std::mem::forget(output_data);
|
||||
true
|
||||
} else {
|
||||
std::mem::forget(output_data);
|
||||
false
|
||||
match $method($($arg.process()),*, &mut output_data) {
|
||||
Ok(()) => {
|
||||
unsafe { *$output_arg = Buffer::from(&output_data[..]) };
|
||||
std::mem::forget(output_data);
|
||||
true
|
||||
}
|
||||
Err(err) => {
|
||||
std::mem::forget(output_data);
|
||||
eprintln!("execution error: {err}");
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -89,8 +109,11 @@ macro_rules! call_with_bool_arg {
|
||||
{
|
||||
let new_instance = $instance.process();
|
||||
if match new_instance.$method($($arg.process()),*,) {
|
||||
Ok(verified) => verified,
|
||||
Err(_) => return false,
|
||||
Ok(result) => result,
|
||||
Err(err) => {
|
||||
eprintln!("execution error: {err}");
|
||||
return false
|
||||
},
|
||||
} {
|
||||
unsafe { *$bool_arg = true };
|
||||
} else {
|
||||
@@ -171,11 +194,15 @@ impl<'a> From<&Buffer> for &'a [u8] {
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[no_mangle]
|
||||
pub extern "C" fn new(tree_height: usize, input_buffer: *const Buffer, ctx: *mut *mut RLN) -> bool {
|
||||
if let Ok(rln) = RLN::new(tree_height, input_buffer.process()) {
|
||||
unsafe { *ctx = Box::into_raw(Box::new(rln)) };
|
||||
true
|
||||
} else {
|
||||
false
|
||||
match RLN::new(tree_height, input_buffer.process()) {
|
||||
Ok(rln) => {
|
||||
unsafe { *ctx = Box::into_raw(Box::new(rln)) };
|
||||
true
|
||||
}
|
||||
Err(err) => {
|
||||
eprintln!("could not instantiate rln: {err}");
|
||||
false
|
||||
}
|
||||
}
|
||||
}
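`new` follows the usual FFI out-parameter convention: on success the instance is boxed and its raw pointer written through `ctx`, and the `bool` return tells the caller whether that happened. A sketch of the same convention on an illustrative type, including the matching free function the pattern implies:

```rust
// Sketch of the out-parameter convention used by the FFI `new` function:
// box the instance and write its raw pointer through `ctx` on success.
// `DemoRln`, `demo_new` and `demo_free` are illustrative names only.
pub struct DemoRln {
    tree_height: usize,
}

#[no_mangle]
pub extern "C" fn demo_new(tree_height: usize, ctx: *mut *mut DemoRln) -> bool {
    if tree_height == 0 || ctx.is_null() {
        eprintln!("could not instantiate rln: invalid arguments");
        return false;
    }
    unsafe { *ctx = Box::into_raw(Box::new(DemoRln { tree_height })) };
    true
}

#[no_mangle]
pub extern "C" fn demo_free(ctx: *mut DemoRln) {
    if !ctx.is_null() {
        // Reconstruct the Box so the instance is dropped exactly once.
        unsafe { drop(Box::from_raw(ctx)) };
    }
}

fn main() {
    let mut ctx: *mut DemoRln = std::ptr::null_mut();
    assert!(demo_new(20, &mut ctx));
    assert_eq!(unsafe { (*ctx).tree_height }, 20);
    demo_free(ctx);
}
```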
|
||||
|
||||
@@ -186,18 +213,24 @@ pub extern "C" fn new_with_params(
|
||||
circom_buffer: *const Buffer,
|
||||
zkey_buffer: *const Buffer,
|
||||
vk_buffer: *const Buffer,
|
||||
tree_config: *const Buffer,
|
||||
ctx: *mut *mut RLN,
|
||||
) -> bool {
|
||||
if let Ok(rln) = RLN::new_with_params(
|
||||
match RLN::new_with_params(
|
||||
tree_height,
|
||||
circom_buffer.process().to_vec(),
|
||||
zkey_buffer.process().to_vec(),
|
||||
vk_buffer.process().to_vec(),
|
||||
tree_config.process(),
|
||||
) {
|
||||
unsafe { *ctx = Box::into_raw(Box::new(rln)) };
|
||||
true
|
||||
} else {
|
||||
false
|
||||
Ok(rln) => {
|
||||
unsafe { *ctx = Box::into_raw(Box::new(rln)) };
|
||||
true
|
||||
}
|
||||
Err(err) => {
|
||||
eprintln!("could not instantiate rln: {err}");
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -222,6 +255,12 @@ pub extern "C" fn set_leaf(ctx: *mut RLN, index: usize, input_buffer: *const Buf
|
||||
call!(ctx, set_leaf, index, input_buffer)
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[no_mangle]
|
||||
pub extern "C" fn get_leaf(ctx: *mut RLN, index: usize, output_buffer: *mut Buffer) -> bool {
|
||||
call_with_output_arg!(ctx, get_leaf, output_buffer, index)
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[no_mangle]
|
||||
pub extern "C" fn set_next_leaf(ctx: *mut RLN, input_buffer: *const Buffer) -> bool {
|
||||
@@ -244,6 +283,33 @@ pub extern "C" fn init_tree_with_leaves(ctx: *mut RLN, input_buffer: *const Buff
|
||||
call!(ctx, init_tree_with_leaves, input_buffer)
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[no_mangle]
|
||||
pub extern "C" fn atomic_operation(
|
||||
ctx: *mut RLN,
|
||||
index: usize,
|
||||
leaves_buffer: *const Buffer,
|
||||
indices_buffer: *const Buffer,
|
||||
) -> bool {
|
||||
call!(ctx, atomic_operation, index, leaves_buffer, indices_buffer)
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[no_mangle]
|
||||
pub extern "C" fn seq_atomic_operation(
|
||||
ctx: *mut RLN,
|
||||
leaves_buffer: *const Buffer,
|
||||
indices_buffer: *const Buffer,
|
||||
) -> bool {
|
||||
call!(
|
||||
ctx,
|
||||
atomic_operation,
|
||||
ctx.process().leaves_set(),
|
||||
leaves_buffer,
|
||||
indices_buffer
|
||||
)
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[no_mangle]
|
||||
pub extern "C" fn get_root(ctx: *const RLN, output_buffer: *mut Buffer) -> bool {
|
||||
@@ -368,6 +434,28 @@ pub extern "C" fn recover_id_secret(
|
||||
)
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////
|
||||
// Persistent metadata APIs
|
||||
////////////////////////////////////////////////////////
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[no_mangle]
|
||||
pub extern "C" fn set_metadata(ctx: *mut RLN, input_buffer: *const Buffer) -> bool {
|
||||
call!(ctx, set_metadata, input_buffer)
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[no_mangle]
|
||||
pub extern "C" fn get_metadata(ctx: *const RLN, output_buffer: *mut Buffer) -> bool {
|
||||
call_with_output_arg!(ctx, get_metadata, output_buffer)
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[no_mangle]
|
||||
pub extern "C" fn flush(ctx: *mut RLN) -> bool {
|
||||
call!(ctx, flush)
|
||||
}
|
||||
|
||||
#[allow(clippy::not_unsafe_ptr_arg_deref)]
|
||||
#[no_mangle]
|
||||
pub extern "C" fn hash(input_buffer: *const Buffer, output_buffer: *mut Buffer) -> bool {
|
||||
|
||||
@@ -1,13 +1,12 @@
|
||||
// This crate instantiate the Poseidon hash algorithm
|
||||
|
||||
/// This crate instantiates the Poseidon hash algorithm.
|
||||
use crate::{circuit::Fr, utils::bytes_le_to_fr};
|
||||
use once_cell::sync::Lazy;
|
||||
use tiny_keccak::{Hasher, Keccak};
|
||||
use utils::poseidon::Poseidon;
|
||||
|
||||
// These indexed constants hardcodes the supported round parameters tuples (t, RF, RN, SKIP_MATRICES) for the Bn254 scalar field
|
||||
// SKIP_MATRICES is the index of the randomly generated secure MDS matrix. See security note in the zerokit_utils::poseidon::poseidon_constants crate on this.
|
||||
// TODO: generate these parameters
|
||||
/// These indexed constants hardcode the supported round parameters tuples (t, RF, RN, SKIP_MATRICES) for the Bn254 scalar field.
|
||||
/// SKIP_MATRICES is the index of the randomly generated secure MDS matrix.
|
||||
/// TODO: generate these parameters
|
||||
pub const ROUND_PARAMS: [(usize, usize, usize, usize); 8] = [
|
||||
(2, 8, 56, 0),
|
||||
(3, 8, 57, 0),
|
||||
@@ -19,7 +18,7 @@ pub const ROUND_PARAMS: [(usize, usize, usize, usize); 8] = [
|
||||
(9, 8, 63, 0),
|
||||
];
|
||||
|
||||
// Poseidon Hash wrapper over above implementation. Adapted from semaphore-rs poseidon hash wrapper.
|
||||
/// Poseidon Hash wrapper over above implementation.
|
||||
static POSEIDON: Lazy<Poseidon<Fr>> = Lazy::new(|| Poseidon::<Fr>::from(&ROUND_PARAMS));
|
||||
|
||||
pub fn poseidon_hash(input: &[Fr]) -> Fr {
|
||||
@@ -28,10 +27,26 @@ pub fn poseidon_hash(input: &[Fr]) -> Fr {
|
||||
.expect("hash with fixed input size can't fail")
|
||||
}
|
||||
|
||||
// Hashes arbitrary signal to the underlying prime field
|
||||
/// The zerokit RLN Merkle tree Hasher.
|
||||
#[derive(Clone, Copy, PartialEq, Eq)]
|
||||
pub struct PoseidonHash;
|
||||
|
||||
/// The default Hasher trait used by Merkle tree implementation in utils.
|
||||
impl utils::merkle_tree::Hasher for PoseidonHash {
|
||||
type Fr = Fr;
|
||||
|
||||
fn default_leaf() -> Self::Fr {
|
||||
Self::Fr::from(0)
|
||||
}
|
||||
|
||||
fn hash(inputs: &[Self::Fr]) -> Self::Fr {
|
||||
poseidon_hash(inputs)
|
||||
}
|
||||
}
|
||||
|
||||
/// Hashes arbitrary signal to the underlying prime field.
|
||||
pub fn hash_to_field(signal: &[u8]) -> Fr {
|
||||
// We hash the input signal using Keccak256
|
||||
// (note that a bigger curve order might require a bigger hash blocksize)
|
||||
let mut hash = [0; 32];
|
||||
let mut hasher = Keccak::v256();
|
||||
hasher.update(signal);
|
||||
|
||||
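With the hasher moved into its own module, the two entry points are `poseidon_hash` over field elements and `hash_to_field` over raw bytes. A usage sketch, assuming the `rln` crate as a dependency and the module paths shown in the diff above:

```rust
// Usage sketch for the hashers above. `poseidon_hash` hashes field
// elements; `hash_to_field` maps arbitrary bytes (via Keccak256) into
// the Bn254 scalar field.
use rln::circuit::Fr;
use rln::hashers::{hash_to_field, poseidon_hash};

fn main() {
    let a = Fr::from(1u64);
    let b = Fr::from(2u64);
    // Poseidon over two field elements (arity 2 is one of the ROUND_PARAMS tuples).
    let h = poseidon_hash(&[a, b]);

    // Keccak-based mapping of an arbitrary signal into the prime field.
    let s = hash_to_field(b"rln signal");
    assert_ne!(h, s);
}
```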
@@ -2,6 +2,8 @@

pub mod circuit;
pub mod hashers;
#[cfg(feature = "pmtree-ft")]
pub mod pm_tree_adapter;
pub mod poseidon_tree;
pub mod protocol;
pub mod public;
339  rln/src/pm_tree_adapter.rs  Normal file
@@ -0,0 +1,339 @@
|
||||
use std::fmt::Debug;
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
|
||||
use color_eyre::{Report, Result};
|
||||
use serde_json::Value;
|
||||
|
||||
use utils::pmtree::{Database, Hasher};
|
||||
use utils::*;
|
||||
|
||||
use crate::circuit::Fr;
|
||||
use crate::hashers::{poseidon_hash, PoseidonHash};
|
||||
use crate::utils::{bytes_le_to_fr, fr_to_bytes_le};
|
||||
|
||||
const METADATA_KEY: [u8; 8] = *b"metadata";
|
||||
|
||||
pub struct PmTree {
|
||||
tree: pmtree::MerkleTree<SledDB, PoseidonHash>,
|
||||
// metadata that an application may use to store additional information
|
||||
metadata: Vec<u8>,
|
||||
}
|
||||
|
||||
pub struct PmTreeProof {
|
||||
proof: pmtree::tree::MerkleProof<PoseidonHash>,
|
||||
}
|
||||
|
||||
pub type FrOf<H> = <H as Hasher>::Fr;
|
||||
|
||||
// The pmtree Hasher trait used by pmtree Merkle tree
|
||||
impl Hasher for PoseidonHash {
|
||||
type Fr = Fr;
|
||||
|
||||
fn serialize(value: Self::Fr) -> pmtree::Value {
|
||||
fr_to_bytes_le(&value)
|
||||
}
|
||||
|
||||
fn deserialize(value: pmtree::Value) -> Self::Fr {
|
||||
let (fr, _) = bytes_le_to_fr(&value);
|
||||
fr
|
||||
}
|
||||
|
||||
fn default_leaf() -> Self::Fr {
|
||||
Fr::from(0)
|
||||
}
|
||||
|
||||
fn hash(inputs: &[Self::Fr]) -> Self::Fr {
|
||||
poseidon_hash(inputs)
|
||||
}
|
||||
}
|
||||
|
||||
fn get_tmp_path() -> PathBuf {
|
||||
std::env::temp_dir().join(format!("pmtree-{}", rand::random::<u64>()))
|
||||
}
|
||||
|
||||
fn get_tmp() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
pub struct PmtreeConfig(Config);
|
||||
|
||||
impl FromStr for PmtreeConfig {
|
||||
type Err = Report;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self> {
|
||||
let config: Value = serde_json::from_str(s)?;
|
||||
|
||||
let path = config["path"].as_str();
|
||||
let path = path.map(PathBuf::from);
|
||||
let temporary = config["temporary"].as_bool();
|
||||
let cache_capacity = config["cache_capacity"].as_u64();
|
||||
let flush_every_ms = config["flush_every_ms"].as_u64();
|
||||
let mode = match config["mode"].as_str() {
|
||||
Some("HighThroughput") => Mode::HighThroughput,
|
||||
Some("LowSpace") => Mode::LowSpace,
|
||||
_ => Mode::HighThroughput,
|
||||
};
|
||||
let use_compression = config["use_compression"].as_bool();
|
||||
|
||||
if temporary.is_some()
|
||||
&& path.is_some()
|
||||
&& temporary.unwrap()
|
||||
&& path.as_ref().unwrap().exists()
|
||||
{
|
||||
return Err(Report::msg(format!(
|
||||
"Path {:?} already exists, cannot use temporary",
|
||||
path.unwrap()
|
||||
)));
|
||||
}
|
||||
|
||||
let config = Config::new()
|
||||
.temporary(temporary.unwrap_or(get_tmp()))
|
||||
.path(path.unwrap_or(get_tmp_path()))
|
||||
.cache_capacity(cache_capacity.unwrap_or(1024 * 1024 * 1024))
|
||||
.flush_every_ms(flush_every_ms)
|
||||
.mode(mode)
|
||||
.use_compression(use_compression.unwrap_or(false));
|
||||
Ok(PmtreeConfig(config))
|
||||
}
|
||||
}
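`PmtreeConfig::from_str` accepts a JSON object whose recognized keys (`path`, `temporary`, `cache_capacity`, `flush_every_ms`, `mode`, `use_compression`) map onto the sled `Config` builder, with defaults for anything missing. A parsing sketch, assuming the `rln` crate (with the `pmtree-ft` feature) as a dependency:

```rust
// Sketch: building a PmtreeConfig from the JSON shape accepted by the
// FromStr impl above; keys not present fall back to defaults.
use std::str::FromStr;

use rln::pm_tree_adapter::PmtreeConfig;

fn main() -> color_eyre::Result<()> {
    let json = r#"{
        "path": "/tmp/zerokit-pmtree-demo",
        "temporary": false,
        "cache_capacity": 1048576,
        "mode": "HighThroughput",
        "use_compression": false
    }"#;
    let config = PmtreeConfig::from_str(json)?;
    println!("{config:?}");
    Ok(())
}
```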
|
||||
|
||||
impl Default for PmtreeConfig {
|
||||
fn default() -> Self {
|
||||
let tmp_path = get_tmp_path();
|
||||
PmtreeConfig(
|
||||
Config::new()
|
||||
.temporary(true)
|
||||
.path(tmp_path)
|
||||
.cache_capacity(150_000)
|
||||
.mode(Mode::HighThroughput)
|
||||
.use_compression(false)
|
||||
.flush_every_ms(Some(12_000)),
|
||||
)
|
||||
}
|
||||
}
|
||||
impl Debug for PmtreeConfig {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
self.0.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl Clone for PmtreeConfig {
|
||||
fn clone(&self) -> Self {
|
||||
PmtreeConfig(self.0.clone())
|
||||
}
|
||||
}
|
||||
|
||||
impl ZerokitMerkleTree for PmTree {
|
||||
type Proof = PmTreeProof;
|
||||
type Hasher = PoseidonHash;
|
||||
type Config = PmtreeConfig;
|
||||
|
||||
fn default(depth: usize) -> Result<Self> {
|
||||
let default_config = PmtreeConfig::default();
|
||||
PmTree::new(depth, Self::Hasher::default_leaf(), default_config)
|
||||
}
|
||||
|
||||
fn new(depth: usize, _default_leaf: FrOf<Self::Hasher>, config: Self::Config) -> Result<Self> {
|
||||
let tree_loaded = pmtree::MerkleTree::load(config.clone().0);
|
||||
let tree = match tree_loaded {
|
||||
Ok(tree) => tree,
|
||||
Err(_) => pmtree::MerkleTree::new(depth, config.0)?,
|
||||
};
|
||||
|
||||
Ok(PmTree {
|
||||
tree,
|
||||
metadata: Vec::new(),
|
||||
})
|
||||
}
|
||||
|
||||
fn depth(&self) -> usize {
|
||||
self.tree.depth()
|
||||
}
|
||||
|
||||
fn capacity(&self) -> usize {
|
||||
self.tree.capacity()
|
||||
}
|
||||
|
||||
fn leaves_set(&mut self) -> usize {
|
||||
self.tree.leaves_set()
|
||||
}
|
||||
|
||||
fn root(&self) -> FrOf<Self::Hasher> {
|
||||
self.tree.root()
|
||||
}
|
||||
|
||||
fn compute_root(&mut self) -> Result<FrOf<Self::Hasher>> {
|
||||
Ok(self.tree.root())
|
||||
}
|
||||
|
||||
fn set(&mut self, index: usize, leaf: FrOf<Self::Hasher>) -> Result<()> {
|
||||
self.tree
|
||||
.set(index, leaf)
|
||||
.map_err(|e| Report::msg(e.to_string()))
|
||||
}
|
||||
|
||||
fn set_range<I: IntoIterator<Item = FrOf<Self::Hasher>>>(
|
||||
&mut self,
|
||||
start: usize,
|
||||
values: I,
|
||||
) -> Result<()> {
|
||||
self.tree
|
||||
.set_range(start, values)
|
||||
.map_err(|e| Report::msg(e.to_string()))
|
||||
}
|
||||
|
||||
fn get(&self, index: usize) -> Result<FrOf<Self::Hasher>> {
|
||||
self.tree.get(index).map_err(|e| Report::msg(e.to_string()))
|
||||
}
|
||||
|
||||
fn override_range<I: IntoIterator<Item = FrOf<Self::Hasher>>, J: IntoIterator<Item = usize>>(
|
||||
&mut self,
|
||||
start: usize,
|
||||
leaves: I,
|
||||
indices: J,
|
||||
) -> Result<()> {
|
||||
let leaves = leaves.into_iter().collect::<Vec<_>>();
|
||||
let mut indices = indices.into_iter().collect::<Vec<_>>();
|
||||
indices.sort();
|
||||
|
||||
match (leaves.is_empty(), indices.is_empty()) {
|
||||
(true, true) => Err(Report::msg("no leaves or indices to be removed")),
|
||||
(false, true) => self.set_range_with_leaves(start, leaves),
|
||||
(true, false) => self.remove_indices(indices),
|
||||
(false, false) => self.remove_indices_and_set_leaves(start, leaves, indices),
|
||||
}
|
||||
}
|
||||
|
||||
fn update_next(&mut self, leaf: FrOf<Self::Hasher>) -> Result<()> {
|
||||
self.tree
|
||||
.update_next(leaf)
|
||||
.map_err(|e| Report::msg(e.to_string()))
|
||||
}
|
||||
|
||||
fn delete(&mut self, index: usize) -> Result<()> {
|
||||
self.tree
|
||||
.delete(index)
|
||||
.map_err(|e| Report::msg(e.to_string()))
|
||||
}
|
||||
|
||||
fn proof(&self, index: usize) -> Result<Self::Proof> {
|
||||
let proof = self.tree.proof(index)?;
|
||||
Ok(PmTreeProof { proof })
|
||||
}
|
||||
|
||||
fn verify(&self, leaf: &FrOf<Self::Hasher>, witness: &Self::Proof) -> Result<bool> {
|
||||
if self.tree.verify(leaf, &witness.proof) {
|
||||
Ok(true)
|
||||
} else {
|
||||
Err(Report::msg("verify failed"))
|
||||
}
|
||||
}
|
||||
|
||||
fn set_metadata(&mut self, metadata: &[u8]) -> Result<()> {
|
||||
self.tree.db.put(METADATA_KEY, metadata.to_vec())?;
|
||||
self.metadata = metadata.to_vec();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn metadata(&self) -> Result<Vec<u8>> {
|
||||
if !self.metadata.is_empty() {
|
||||
return Ok(self.metadata.clone());
|
||||
}
|
||||
// if empty, try searching the db
|
||||
let data = self.tree.db.get(METADATA_KEY)?;
|
||||
|
||||
if data.is_none() {
|
||||
return Err(Report::msg("metadata does not exist"));
|
||||
}
|
||||
Ok(data.unwrap())
|
||||
}
|
||||
|
||||
fn close_db_connection(&mut self) -> Result<()> {
|
||||
self.tree.db.close().map_err(|e| Report::msg(e.to_string()))
|
||||
}
|
||||
}
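Through this impl the persistent tree exposes the same `ZerokitMerkleTree` surface as the in-memory trees, so the insert/proof/verify flow is unchanged. A usage sketch, assuming `rln` (with `pmtree-ft`) and the `utils` crate as dependencies, with names as in the diff above:

```rust
// Sketch of the ZerokitMerkleTree flow on the pmtree-backed adapter.
use rln::circuit::Fr;
use rln::pm_tree_adapter::PmTree;
use utils::{ZerokitMerkleProof, ZerokitMerkleTree};

fn main() -> color_eyre::Result<()> {
    // A small tree; the default config uses a temporary sled database.
    let mut tree = PmTree::default(4)?;

    // Insert a couple of leaves.
    tree.set(0, Fr::from(10u64))?;
    tree.set(1, Fr::from(11u64))?;

    // Produce and check a Merkle proof for leaf 0.
    let proof = tree.proof(0)?;
    assert!(tree.verify(&Fr::from(10u64), &proof)?);
    assert_eq!(proof.leaf_index(), 0);

    // Application metadata can be persisted alongside the leaves.
    tree.set_metadata(b"application state")?;
    assert_eq!(tree.metadata()?, b"application state".to_vec());
    Ok(())
}
```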
|
||||
|
||||
type PmTreeHasher = <PmTree as ZerokitMerkleTree>::Hasher;
|
||||
type FrOfPmTreeHasher = FrOf<PmTreeHasher>;
|
||||
|
||||
impl PmTree {
|
||||
fn set_range_with_leaves(&mut self, start: usize, leaves: Vec<FrOfPmTreeHasher>) -> Result<()> {
|
||||
self.tree
|
||||
.set_range(start, leaves)
|
||||
.map_err(|e| Report::msg(e.to_string()))
|
||||
}
|
||||
|
||||
fn remove_indices(&mut self, indices: Vec<usize>) -> Result<()> {
|
||||
let start = indices[0];
|
||||
let end = indices.last().unwrap() + 1;
|
||||
|
||||
let mut new_leaves: Vec<_> = (start..end)
|
||||
.map(|i| self.tree.get(i))
|
||||
.collect::<Result<_, _>>()?;
|
||||
|
||||
new_leaves
|
||||
.iter_mut()
|
||||
.take(indices.len())
|
||||
.for_each(|leaf| *leaf = PmTreeHasher::default_leaf());
|
||||
|
||||
self.tree
|
||||
.set_range(start, new_leaves)
|
||||
.map_err(|e| Report::msg(e.to_string()))
|
||||
}
|
||||
|
||||
fn remove_indices_and_set_leaves(
|
||||
&mut self,
|
||||
start: usize,
|
||||
leaves: Vec<FrOfPmTreeHasher>,
|
||||
indices: Vec<usize>,
|
||||
) -> Result<()> {
|
||||
let min_index = *indices.first().unwrap();
|
||||
let max_index = start + leaves.len();
|
||||
|
||||
// Generate a placeholder with the exact size needed,
// initialized with default values to be overridden throughout the method
|
||||
let mut set_values = vec![PmTreeHasher::default_leaf(); max_index - min_index];
|
||||
|
||||
// If the index is not in the indices list, keep the original value
|
||||
for i in min_index..start {
|
||||
if !indices.contains(&i) {
|
||||
let value = self.tree.get(i)?;
|
||||
set_values[i - min_index] = value;
|
||||
}
|
||||
}
|
||||
|
||||
// Insert new leaves after 'start' position
|
||||
for (i, &leaf) in leaves.iter().enumerate() {
|
||||
set_values[start - min_index + i] = leaf;
|
||||
}
|
||||
|
||||
self.tree
|
||||
.set_range(min_index, set_values)
|
||||
.map_err(|e| Report::msg(e.to_string()))
|
||||
}
|
||||
}
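`remove_indices_and_set_leaves` rewrites a single contiguous window `[min_index, start + leaves.len())`: positions below `start` keep their current value unless listed in `indices` (those reset to the default leaf), and the new leaves are written from `start` onward. A small worked example of the index arithmetic on a plain vector:

```rust
// Worked example of the index arithmetic in remove_indices_and_set_leaves,
// on a plain Vec instead of the Merkle tree. Existing "leaves" 0..8 hold
// their own index as a value; we remove indices [2, 3] and set two new
// leaves starting at position 5.
fn main() {
    let existing: Vec<u64> = (0..8).collect();
    let default_leaf = 0u64;

    let indices = vec![2usize, 3];
    let start = 5usize;
    let new_leaves = vec![100u64, 101];

    let min_index = indices[0];
    let max_index = start + new_leaves.len(); // exclusive upper bound

    // Placeholder window covering [min_index, max_index), defaulted.
    let mut set_values = vec![default_leaf; max_index - min_index];

    // Keep original values for positions below `start` that are not removed.
    for i in min_index..start {
        if !indices.contains(&i) {
            set_values[i - min_index] = existing[i];
        }
    }

    // Write the new leaves beginning at `start`.
    for (i, &leaf) in new_leaves.iter().enumerate() {
        set_values[start - min_index + i] = leaf;
    }

    // Window spans indices 2..7: removed slots reset, index 4 kept, new leaves at 5..6.
    assert_eq!(set_values, vec![0, 0, 4, 100, 101]);
}
```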
|
||||
|
||||
impl ZerokitMerkleProof for PmTreeProof {
|
||||
type Index = u8;
|
||||
type Hasher = PoseidonHash;
|
||||
|
||||
fn length(&self) -> usize {
|
||||
self.proof.length()
|
||||
}
|
||||
|
||||
fn leaf_index(&self) -> usize {
|
||||
self.proof.leaf_index()
|
||||
}
|
||||
|
||||
fn get_path_elements(&self) -> Vec<FrOf<Self::Hasher>> {
|
||||
self.proof.get_path_elements()
|
||||
}
|
||||
|
||||
fn get_path_index(&self) -> Vec<Self::Index> {
|
||||
self.proof.get_path_index()
|
||||
}
|
||||
fn compute_root_from(&self, leaf: &FrOf<Self::Hasher>) -> FrOf<Self::Hasher> {
|
||||
self.proof.compute_root_from(leaf)
|
||||
}
|
||||
}
|
||||
@@ -2,15 +2,16 @@
|
||||
|
||||
// Implementation inspired by https://github.com/worldcoin/semaphore-rs/blob/d462a4372f1fd9c27610f2acfe4841fab1d396aa/src/poseidon_tree.rs (no differences)
|
||||
|
||||
use crate::circuit::Fr;
|
||||
use crate::hashers::poseidon_hash;
|
||||
use cfg_if::cfg_if;
|
||||
use utils::merkle_tree::*;
|
||||
|
||||
#[cfg(feature = "pmtree-ft")]
|
||||
use crate::utils::{bytes_le_to_fr, fr_to_bytes_le};
|
||||
#[cfg(feature = "pmtree-ft")]
|
||||
use pmtree::*;
|
||||
cfg_if! {
|
||||
if #[cfg(feature = "pmtree-ft")] {
|
||||
use crate::pm_tree_adapter::*;
|
||||
} else {
|
||||
use crate::hashers::{PoseidonHash};
|
||||
use utils::merkle_tree::*;
|
||||
}
|
||||
}
|
||||
|
||||
// The zerokit RLN default Merkle tree implementation is the OptimalMerkleTree.
|
||||
// To switch to FullMerkleTree implementation, it is enough to enable the fullmerkletree feature
|
||||
@@ -19,48 +20,11 @@ cfg_if! {
|
||||
if #[cfg(feature = "fullmerkletree")] {
|
||||
pub type PoseidonTree = FullMerkleTree<PoseidonHash>;
|
||||
pub type MerkleProof = FullMerkleProof<PoseidonHash>;
|
||||
} else if #[cfg(feature = "pmtree-ft")] {
|
||||
pub type PoseidonTree = PmTree;
|
||||
pub type MerkleProof = PmTreeProof;
|
||||
} else {
|
||||
pub type PoseidonTree = OptimalMerkleTree<PoseidonHash>;
|
||||
pub type MerkleProof = OptimalMerkleProof<PoseidonHash>;
|
||||
}
|
||||
}
|
||||
|
||||
// The zerokit RLN Merkle tree Hasher
|
||||
#[derive(Clone, Copy, PartialEq, Eq)]
|
||||
pub struct PoseidonHash;
|
||||
|
||||
// The default Hasher trait used by Merkle tree implementation in utils
|
||||
impl utils::merkle_tree::Hasher for PoseidonHash {
|
||||
type Fr = Fr;
|
||||
|
||||
fn default_leaf() -> Self::Fr {
|
||||
Self::Fr::from(0)
|
||||
}
|
||||
|
||||
fn hash(inputs: &[Self::Fr]) -> Self::Fr {
|
||||
poseidon_hash(inputs)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "pmtree-ft")]
|
||||
// The pmtree Hasher trait used by pmtree Merkle tree
|
||||
impl pmtree::Hasher for PoseidonHash {
|
||||
type Fr = Fr;
|
||||
|
||||
fn default_leaf() -> Self::Fr {
|
||||
Fr::from(0)
|
||||
}
|
||||
|
||||
fn serialize(value: Self::Fr) -> Value {
|
||||
fr_to_bytes_le(&value)
|
||||
}
|
||||
|
||||
fn deserialize(value: Value) -> Self::Fr {
|
||||
let (fr, _) = bytes_le_to_fr(&value);
|
||||
fr
|
||||
}
|
||||
|
||||
fn hash(inputs: &[Self::Fr]) -> Self::Fr {
|
||||
poseidon_hash(inputs)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -155,13 +155,17 @@ pub fn proof_inputs_to_rln_witness(
|
||||
let (identity_secret, read) = bytes_le_to_fr(&serialized[all_read..]);
|
||||
all_read += read;
|
||||
|
||||
let id_index = usize::from_le_bytes(serialized[all_read..all_read + 8].try_into()?);
|
||||
let id_index = usize::try_from(u64::from_le_bytes(
|
||||
serialized[all_read..all_read + 8].try_into()?,
|
||||
))?;
|
||||
all_read += 8;
|
||||
|
||||
let (epoch, read) = bytes_le_to_fr(&serialized[all_read..]);
|
||||
all_read += read;
|
||||
|
||||
let signal_len = usize::from_le_bytes(serialized[all_read..all_read + 8].try_into()?);
|
||||
let signal_len = usize::try_from(u64::from_le_bytes(
|
||||
serialized[all_read..all_read + 8].try_into()?,
|
||||
))?;
|
||||
all_read += 8;
|
||||
|
||||
let signal: Vec<u8> = serialized[all_read..all_read + signal_len].to_vec();
|
||||
|
||||
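The change reads the 8-byte length fields as `u64` first and converts with `usize::try_from`, so the wire layout stays 8 bytes on every target and 32-bit builds (including wasm32) fail loudly instead of misreading. A minimal sketch of that decoding pattern:

```rust
// Minimal sketch of the portable length decoding introduced above:
// always read 8 little-endian bytes as u64, then convert to usize with
// try_from so 32-bit targets error out instead of truncating.
fn read_len(serialized: &[u8], offset: &mut usize) -> Result<usize, Box<dyn std::error::Error>> {
    let bytes: [u8; 8] = serialized[*offset..*offset + 8].try_into()?;
    let value = usize::try_from(u64::from_le_bytes(bytes))?;
    *offset += 8;
    Ok(value)
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // A buffer holding the length 3 followed by 3 payload bytes.
    let mut buf = 3u64.to_le_bytes().to_vec();
    buf.extend_from_slice(&[0xaa, 0xbb, 0xcc]);

    let mut offset = 0;
    let len = read_len(&buf, &mut offset)?;
    let payload = &buf[offset..offset + len];
    assert_eq!(payload, &[0xaa, 0xbb, 0xcc]);
    Ok(())
}
```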
@@ -10,11 +10,10 @@ use ark_groth16::{ProvingKey, VerifyingKey};
|
||||
use ark_relations::r1cs::ConstraintMatrices;
|
||||
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Read, Write};
|
||||
use cfg_if::cfg_if;
|
||||
use color_eyre::Result;
|
||||
use color_eyre::{Report, Result};
|
||||
use num_bigint::BigInt;
|
||||
use std::io::Cursor;
|
||||
use utils::{ZerokitMerkleProof, ZerokitMerkleTree};
|
||||
// use rkyv::Deserialize;
|
||||
|
||||
cfg_if! {
|
||||
if #[cfg(not(target_arch = "wasm32"))] {
|
||||
@@ -22,6 +21,9 @@ cfg_if! {
|
||||
use std::sync::Mutex;
|
||||
use crate::circuit::{circom_from_folder, vk_from_folder, circom_from_raw, zkey_from_folder, TEST_RESOURCES_FOLDER, TEST_TREE_HEIGHT};
|
||||
use ark_circom::WitnessCalculator;
|
||||
use serde_json::{json, Value};
|
||||
use utils::{Hasher};
|
||||
use std::str::FromStr;
|
||||
} else {
|
||||
use std::marker::*;
|
||||
}
|
||||
@@ -63,7 +65,7 @@ impl RLN<'_> {
|
||||
/// use std::io::Cursor;
|
||||
///
|
||||
/// let tree_height = 20;
|
||||
/// let resources = Cursor::new("tree_height_20");
|
||||
/// let resources = Cursor::new(json!({"resources_folder": "tree_height_20"}).to_string());
|
||||
///
|
||||
/// // We create a new RLN instance
|
||||
/// let mut rln = RLN::new(tree_height, resources);
|
||||
@@ -74,15 +76,29 @@ impl RLN<'_> {
|
||||
let mut input: Vec<u8> = Vec::new();
|
||||
input_data.read_to_end(&mut input)?;
|
||||
|
||||
let resources_folder = String::from_utf8(input)?;
|
||||
let rln_config: Value = serde_json::from_str(&String::from_utf8(input)?)?;
|
||||
let resources_folder = rln_config["resources_folder"]
|
||||
.as_str()
|
||||
.unwrap_or(TEST_RESOURCES_FOLDER);
|
||||
let tree_config = rln_config["tree_config"].to_string();
|
||||
|
||||
let witness_calculator = circom_from_folder(&resources_folder)?;
|
||||
let witness_calculator = circom_from_folder(resources_folder)?;
|
||||
|
||||
let proving_key = zkey_from_folder(&resources_folder)?;
|
||||
let verification_key = vk_from_folder(&resources_folder)?;
|
||||
let proving_key = zkey_from_folder(resources_folder)?;
|
||||
let verification_key = vk_from_folder(resources_folder)?;
|
||||
|
||||
let tree_config: <PoseidonTree as ZerokitMerkleTree>::Config = if tree_config.is_empty() {
|
||||
<PoseidonTree as ZerokitMerkleTree>::Config::default()
|
||||
} else {
|
||||
<PoseidonTree as ZerokitMerkleTree>::Config::from_str(&tree_config)?
|
||||
};
|
||||
|
||||
// We compute a default empty tree
|
||||
let tree = PoseidonTree::default(tree_height);
|
||||
let tree = PoseidonTree::new(
|
||||
tree_height,
|
||||
<PoseidonTree as ZerokitMerkleTree>::Hasher::default_leaf(),
|
||||
tree_config,
|
||||
)?;
|
||||
|
||||
Ok(RLN {
|
||||
witness_calculator,
|
||||
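`RLN::new` now takes a JSON configuration instead of a bare folder name: `resources_folder` selects the circuit artifacts and `tree_config` is forwarded to the Merkle tree backend. A sketch of constructing an instance this way, assuming the `rln` crate as a dependency; the folder value is illustrative and the call only succeeds where those resources actually exist:

```rust
// Sketch of creating an RLN instance with the JSON-based configuration
// accepted by the reworked RLN::new.
use std::io::Cursor;

use rln::public::RLN;
use serde_json::json;

fn main() -> color_eyre::Result<()> {
    let tree_height = 20;

    // `resources_folder` picks the circuit artifacts; `tree_config` is
    // handed to the tree backend (an empty object means defaults).
    let config = json!({
        "resources_folder": "tree_height_20/",
        "tree_config": {}
    });
    let input = Cursor::new(config.to_string());

    let _rln = RLN::new(tree_height, input)?;
    Ok(())
}
```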
@@ -101,6 +117,7 @@ impl RLN<'_> {
|
||||
/// - `circom_vec`: a byte vector containing the ZK circuit (`rln.wasm`) as a binary file
/// - `zkey_vec`: a byte vector containing the proving key (`rln_final.zkey`) as a binary file
/// - `vk_vec`: a byte vector containing the verification key (`verification_key.json`) as a binary file
/// - `tree_config`: a reader for a string containing a JSON with the Merkle tree configuration
|
||||
///
|
||||
/// Example:
|
||||
/// ```
|
||||
@@ -118,6 +135,8 @@ impl RLN<'_> {
|
||||
/// let mut buffer = vec![0; metadata.len() as usize];
|
||||
/// file.read_exact(&mut buffer).expect("buffer overflow");
|
||||
/// resources.push(buffer);
|
||||
/// let tree_config = "{}".to_string();
|
||||
/// let tree_config_buffer = &Buffer::from(tree_config.as_bytes());
|
||||
/// }
|
||||
///
|
||||
/// let mut rln = RLN::new_with_params(
|
||||
@@ -125,11 +144,51 @@ impl RLN<'_> {
|
||||
/// resources[0].clone(),
|
||||
/// resources[1].clone(),
|
||||
/// resources[2].clone(),
|
||||
/// tree_config_buffer,
|
||||
/// );
|
||||
/// ```
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub fn new_with_params<R: Read>(
|
||||
tree_height: usize,
|
||||
circom_vec: Vec<u8>,
|
||||
zkey_vec: Vec<u8>,
|
||||
vk_vec: Vec<u8>,
|
||||
mut tree_config_input: R,
|
||||
) -> Result<RLN<'static>> {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
let witness_calculator = circom_from_raw(circom_vec)?;
|
||||
|
||||
let proving_key = zkey_from_raw(&zkey_vec)?;
|
||||
let verification_key = vk_from_raw(&vk_vec, &zkey_vec)?;
|
||||
|
||||
let mut tree_config_vec: Vec<u8> = Vec::new();
|
||||
tree_config_input.read_to_end(&mut tree_config_vec)?;
|
||||
let tree_config_str = String::from_utf8(tree_config_vec)?;
|
||||
let tree_config: <PoseidonTree as ZerokitMerkleTree>::Config = if tree_config_str.is_empty()
|
||||
{
|
||||
<PoseidonTree as ZerokitMerkleTree>::Config::default()
|
||||
} else {
|
||||
<PoseidonTree as ZerokitMerkleTree>::Config::from_str(&tree_config_str)?
|
||||
};
|
||||
|
||||
// We compute a default empty tree
|
||||
let tree = PoseidonTree::new(
|
||||
tree_height,
|
||||
<PoseidonTree as ZerokitMerkleTree>::Hasher::default_leaf(),
|
||||
tree_config,
|
||||
)?;
|
||||
|
||||
Ok(RLN {
|
||||
witness_calculator,
|
||||
proving_key,
|
||||
verification_key,
|
||||
tree,
|
||||
})
|
||||
}
|
||||
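A compact caller-side sketch for the non-wasm constructor above; the file names follow the parameter docs, while the folder path and helper name are only illustrative:

use std::fs;
use std::io::Cursor;
use color_eyre::Result;
use rln::public::RLN;

// Loads the circuit, proving key and verification key from disk and builds an
// RLN instance with the default Merkle tree configuration (empty tree_config).
fn rln_from_files(tree_height: usize, folder: &str) -> Result<RLN<'static>> {
    let circom = fs::read(format!("{folder}/rln.wasm"))?;
    let zkey = fs::read(format!("{folder}/rln_final.zkey"))?;
    let vk = fs::read(format!("{folder}/verification_key.json"))?;
    RLN::new_with_params(tree_height, circom, zkey, vk, Cursor::new(""))
}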
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
pub fn new_with_params(
|
||||
tree_height: usize,
|
||||
#[cfg(not(target_arch = "wasm32"))] circom_vec: Vec<u8>,
|
||||
zkey_vec: Vec<u8>,
|
||||
vk_vec: Vec<u8>,
|
||||
) -> Result<RLN<'static>> {
|
||||
@@ -140,15 +199,12 @@ impl RLN<'_> {
|
||||
let verification_key = vk_from_raw(&vk_vec, &zkey_vec)?;
|
||||
|
||||
// We compute a default empty tree
|
||||
let tree = PoseidonTree::default(tree_height);
|
||||
let tree = PoseidonTree::default(tree_height)?;
|
||||
|
||||
Ok(RLN {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
witness_calculator,
|
||||
proving_key,
|
||||
verification_key,
|
||||
tree,
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
_marker: PhantomData,
|
||||
})
|
||||
}
|
||||
@@ -164,7 +220,7 @@ impl RLN<'_> {
|
||||
/// - `tree_height`: the height of the Merkle tree.
|
||||
pub fn set_tree(&mut self, tree_height: usize) -> Result<()> {
|
||||
// We compute a default empty tree of desired height
|
||||
self.tree = PoseidonTree::default(tree_height);
|
||||
self.tree = PoseidonTree::default(tree_height)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -201,6 +257,31 @@ impl RLN<'_> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Gets a leaf value at position index in the internal Merkle tree.
|
||||
/// The leaf value is written to output_data.
|
||||
/// Input values are:
|
||||
/// - `index`: the index of the leaf
|
||||
///
|
||||
/// Example:
|
||||
/// ```
|
||||
/// use crate::protocol::*;
|
||||
/// use std::io::Cursor;
|
||||
///
|
||||
/// let id_index = 10;
|
||||
/// let mut buffer = Cursor::new(Vec::<u8>::new());
|
||||
/// rln.get_leaf(id_index, &mut buffer).unwrap();
|
||||
/// let id_commitment = deserialize_field_element(&buffer.into_inner()).unwrap();
/// ```
|
||||
pub fn get_leaf<W: Write>(&self, index: usize, mut output_data: W) -> Result<()> {
|
||||
// We get the leaf at input index
|
||||
let leaf = self.tree.get(index)?;
|
||||
|
||||
// We serialize the leaf and write it to output
|
||||
let leaf_byte = fr_to_bytes_le(&leaf);
|
||||
output_data.write_all(&leaf_byte)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Sets multiple leaves starting from position index in the internal Merkle tree.
|
||||
///
|
||||
/// If n leaves are passed as input, these will be set at positions `index`, `index+1`, ..., `index+n-1` respectively.
|
||||
@@ -239,7 +320,10 @@ impl RLN<'_> {
|
||||
let (leaves, _) = bytes_le_to_vec_fr(&leaves_byte)?;
|
||||
|
||||
// We set the leaves
|
||||
self.tree.set_range(index, leaves)
|
||||
self.tree
|
||||
.override_range(index, leaves, [])
|
||||
.map_err(|_| Report::msg("Could not set leaves"))?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Resets the tree state to default and sets multiple leaves starting from index 0.
|
||||
@@ -256,6 +340,76 @@ impl RLN<'_> {
|
||||
self.set_leaves_from(0, input_data)
|
||||
}
|
||||
|
||||
/// Sets multiple leaves starting from position index in the internal Merkle tree.
|
||||
/// Also accepts an array of indices to remove from the tree.
|
||||
///
|
||||
/// If n leaves are passed as input, these will be set at positions `index`, `index+1`, ..., `index+n-1` respectively.
|
||||
/// If m indices are passed as input, these will be removed from the tree.
|
||||
///
|
||||
/// This function updates the internal Merkle tree `next_index` value indicating the next available index corresponding to a never-set leaf as `next_index = max(next_index, index + n)`.
|
||||
///
|
||||
/// Input values are:
|
||||
/// - `index`: the index of the first leaf to be set
|
||||
/// - `input_leaves`: a reader for the serialization of multiple leaf values (serialization done with [`rln::utils::vec_fr_to_bytes_le`](crate::utils::vec_fr_to_bytes_le))
|
||||
/// - `input_indices`: a reader for the serialization of multiple indices to remove (serialization done with [`rln::utils::vec_u8_to_bytes_le`](crate::utils::vec_u8_to_bytes_le))
|
||||
///
|
||||
/// Example:
|
||||
/// ```
|
||||
/// use rln::circuit::Fr;
|
||||
/// use rln::utils::*;
|
||||
///
|
||||
/// let start_index = 10;
|
||||
/// let no_of_leaves = 256;
|
||||
///
|
||||
/// // We generate a vector of random leaves
|
||||
/// let mut leaves: Vec<Fr> = Vec::new();
|
||||
/// let mut rng = thread_rng();
|
||||
/// for _ in 0..no_of_leaves {
|
||||
/// let (_, id_commitment) = keygen();
|
||||
/// leaves.push(id_commitment);
|
||||
/// }
|
||||
///
|
||||
/// let mut indices: Vec<u8> = Vec::new();
|
||||
/// for i in 0..no_of_leaves {
|
||||
/// if i % 2 == 0 {
|
||||
/// indices.push(i as u8);
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// // We atomically add leaves and remove indices from the tree
|
||||
/// let leaves_buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
/// let indices_buffer = Cursor::new(vec_u8_to_bytes_le(&indices).unwrap());
/// rln.atomic_operation(start_index, leaves_buffer, indices_buffer).unwrap();
|
||||
/// ```
|
||||
pub fn atomic_operation<R: Read>(
|
||||
&mut self,
|
||||
index: usize,
|
||||
mut input_leaves: R,
|
||||
mut input_indices: R,
|
||||
) -> Result<()> {
|
||||
// We read input
|
||||
let mut leaves_byte: Vec<u8> = Vec::new();
|
||||
input_leaves.read_to_end(&mut leaves_byte)?;
|
||||
|
||||
let (leaves, _) = bytes_le_to_vec_fr(&leaves_byte)?;
|
||||
|
||||
let mut indices_byte: Vec<u8> = Vec::new();
|
||||
input_indices.read_to_end(&mut indices_byte)?;
|
||||
|
||||
let (indices, _) = bytes_le_to_vec_u8(&indices_byte)?;
|
||||
let indices: Vec<usize> = indices.iter().map(|x| *x as usize).collect();
|
||||
|
||||
// We set the leaves
|
||||
self.tree
|
||||
.override_range(index, leaves, indices)
|
||||
.map_err(|e| Report::msg(format!("Could not perform the batch operation: {e}")))?;
|
||||
Ok(())
|
||||
}
|
||||
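A caller-side sketch of the batched update above, mirroring the flow the tests in this diff use: leaves go through `vec_fr_to_bytes_le`, indices through `vec_u8_to_bytes_le`, and both readers are handed to `atomic_operation` (the helper name is illustrative):

use std::io::Cursor;
use color_eyre::Result;
use rln::circuit::Fr;
use rln::public::RLN;
use rln::utils::{vec_fr_to_bytes_le, vec_u8_to_bytes_le};

// Inserts `new_leaves` starting at `start_index` and, in the same batch,
// resets the leaves at `removed_indices` back to the default leaf.
fn batch_update(
    rln: &mut RLN<'_>,
    start_index: usize,
    new_leaves: &[Fr],
    removed_indices: &[u8],
) -> Result<()> {
    let leaves = Cursor::new(vec_fr_to_bytes_le(new_leaves)?);
    let indices = Cursor::new(vec_u8_to_bytes_le(removed_indices)?);
    rln.atomic_operation(start_index, leaves, indices)
}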
|
||||
pub fn leaves_set(&mut self) -> usize {
|
||||
self.tree.leaves_set()
|
||||
}
|
||||
|
||||
/// Sets a leaf value at the next available never-set leaf index.
|
||||
///
|
||||
/// This function updates the internal Merkle tree `next_index` value indicating the next available index corresponding to a never-set leaf as `next_index = next_index + 1`.
|
||||
@@ -327,6 +481,43 @@ impl RLN<'_> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Sets some metadata that a consuming application may want to store in the RLN object.
|
||||
/// This metadata is not used by the RLN module.
|
||||
///
|
||||
/// Input values are:
|
||||
/// - `metadata`: a byte vector containing the metadata
|
||||
///
|
||||
/// Example
|
||||
///
|
||||
/// ```
|
||||
/// let metadata = b"some metadata";
|
||||
/// rln.set_metadata(metadata).unwrap();
|
||||
/// ```
|
||||
pub fn set_metadata(&mut self, metadata: &[u8]) -> Result<()> {
|
||||
self.tree.set_metadata(metadata)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Returns the metadata stored in the RLN object.
|
||||
///
|
||||
/// Output values are:
|
||||
/// - `output_data`: a writer receiving the serialization of the metadata
|
||||
///
|
||||
/// Example
|
||||
///
|
||||
/// ```
|
||||
/// use std::io::Cursor;
|
||||
///
|
||||
/// let mut buffer = Cursor::new(Vec::<u8>::new());
|
||||
/// rln.get_metadata(&mut buffer).unwrap();
|
||||
/// let metadata = buffer.into_inner();
|
||||
/// ```
|
||||
pub fn get_metadata<W: Write>(&self, mut output_data: W) -> Result<()> {
|
||||
let metadata = self.tree.metadata()?;
|
||||
output_data.write_all(&metadata)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Returns the Merkle tree root
|
||||
///
|
||||
/// Output values are:
|
||||
@@ -605,7 +796,9 @@ impl RLN<'_> {
|
||||
let (proof_values, read) = deserialize_proof_values(&serialized[all_read..]);
|
||||
all_read += read;
|
||||
|
||||
let signal_len = usize::from_le_bytes(serialized[all_read..all_read + 8].try_into()?);
|
||||
let signal_len = usize::try_from(u64::from_le_bytes(
|
||||
serialized[all_read..all_read + 8].try_into()?,
|
||||
))?;
|
||||
all_read += 8;
|
||||
|
||||
let signal: Vec<u8> = serialized[all_read..all_read + signal_len].to_vec();
|
||||
@@ -681,7 +874,9 @@ impl RLN<'_> {
|
||||
let (proof_values, read) = deserialize_proof_values(&serialized[all_read..]);
|
||||
all_read += read;
|
||||
|
||||
let signal_len = usize::from_le_bytes(serialized[all_read..all_read + 8].try_into()?);
|
||||
let signal_len = usize::try_from(u64::from_le_bytes(
|
||||
serialized[all_read..all_read + 8].try_into()?,
|
||||
))?;
|
||||
all_read += 8;
|
||||
|
||||
let signal: Vec<u8> = serialized[all_read..all_read + signal_len].to_vec();
|
||||
@@ -978,13 +1173,21 @@ impl RLN<'_> {
|
||||
let (rln_witness, _) = deserialize_witness(serialized_witness)?;
|
||||
get_json_inputs(&rln_witness)
|
||||
}
|
||||
|
||||
/// Closes the connection to the Merkle tree database.
|
||||
/// This function should be called before the RLN object is dropped.
|
||||
/// If not called, the connection will be closed when the RLN object is dropped.
|
||||
/// This improves the robustness of the tree.
|
||||
pub fn flush(&mut self) -> Result<()> {
|
||||
self.tree.close_db_connection()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
impl Default for RLN<'_> {
|
||||
fn default() -> Self {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let buffer = Cursor::new(TEST_RESOURCES_FOLDER);
|
||||
let buffer = Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
|
||||
Self::new(tree_height, buffer).unwrap()
|
||||
}
|
||||
}
|
||||
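The Default impl above also illustrates the new constructor input: `new` now reads a JSON object instead of a bare folder path, taking the resources folder (and, per the constructor logic, an optional `tree_config`) from it. A sketch of building that input, with an illustrative folder value:

use std::io::Cursor;
use color_eyre::Result;
use rln::public::RLN;
use serde_json::json;

// Builds the JSON configuration string the constructor expects and creates the instance.
fn rln_with_folder(tree_height: usize, folder: &str) -> Result<RLN<'static>> {
    let config = json!({ "resources_folder": folder }).to_string();
    RLN::new(tree_height, Cursor::new(config))
}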
@@ -1060,8 +1263,8 @@ mod test {
|
||||
use super::*;
|
||||
use ark_std::{rand::thread_rng, UniformRand};
|
||||
use rand::Rng;
|
||||
use serde_json::json;
|
||||
use utils::ZerokitMerkleTree;
|
||||
// use rkyv::Deserialize;
|
||||
|
||||
#[test]
|
||||
// We test batch Merkle tree additions
|
||||
@@ -1077,7 +1280,8 @@ mod test {
|
||||
}
|
||||
|
||||
// We create a new tree
|
||||
let input_buffer = Cursor::new(TEST_RESOURCES_FOLDER);
|
||||
let input_buffer =
|
||||
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
|
||||
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
|
||||
|
||||
// We first add leaves one by one specifying the index
|
||||
@@ -1132,7 +1336,7 @@ mod test {
|
||||
|
||||
// We now delete all leaves set and check if the root corresponds to the empty tree root
|
||||
// delete calls over indexes higher than no_of_leaves are ignored and will not increase self.tree.next_index
|
||||
for i in 0..2 * no_of_leaves {
|
||||
for i in 0..no_of_leaves {
|
||||
rln.delete_leaf(i).unwrap();
|
||||
}
|
||||
|
||||
@@ -1172,7 +1376,8 @@ mod test {
|
||||
let set_index = rng.gen_range(0..no_of_leaves) as usize;
|
||||
|
||||
// We create a new tree
|
||||
let input_buffer = Cursor::new(TEST_RESOURCES_FOLDER);
|
||||
let input_buffer =
|
||||
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
|
||||
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
|
||||
|
||||
// We add leaves in a batch into the tree
|
||||
@@ -1225,6 +1430,158 @@ mod test {
|
||||
let (root_single_additions, _) = bytes_le_to_fr(&buffer.into_inner());
|
||||
|
||||
assert_eq!(root_batch_with_init, root_single_additions);
|
||||
|
||||
rln.flush().unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
// Tests the atomic_operation fn, which set_leaves_from uses internally
|
||||
fn test_atomic_operation() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let no_of_leaves = 256;
|
||||
|
||||
// We generate a vector of random leaves
|
||||
let mut leaves: Vec<Fr> = Vec::new();
|
||||
let mut rng = thread_rng();
|
||||
for _ in 0..no_of_leaves {
|
||||
leaves.push(Fr::rand(&mut rng));
|
||||
}
|
||||
|
||||
// We create a new tree
|
||||
let input_buffer =
|
||||
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
|
||||
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
|
||||
|
||||
// We add leaves in a batch into the tree
|
||||
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
|
||||
rln.init_tree_with_leaves(&mut buffer).unwrap();
|
||||
|
||||
// We check if number of leaves set is consistent
|
||||
assert_eq!(rln.tree.leaves_set(), no_of_leaves);
|
||||
|
||||
// We get the root of the tree obtained adding leaves in batch
|
||||
let mut buffer = Cursor::new(Vec::<u8>::new());
|
||||
rln.get_root(&mut buffer).unwrap();
|
||||
let (root_after_insertion, _) = bytes_le_to_fr(&buffer.into_inner());
|
||||
|
||||
// We check if number of leaves set is consistent
|
||||
assert_eq!(rln.tree.leaves_set(), no_of_leaves);
|
||||
|
||||
let last_leaf = leaves.last().unwrap();
|
||||
let last_leaf_index = no_of_leaves - 1;
|
||||
let indices = vec![last_leaf_index as u8];
|
||||
let last_leaf = vec![*last_leaf];
|
||||
let indices_buffer = Cursor::new(vec_u8_to_bytes_le(&indices).unwrap());
|
||||
let leaves_buffer = Cursor::new(vec_fr_to_bytes_le(&last_leaf).unwrap());
|
||||
|
||||
rln.atomic_operation(last_leaf_index, leaves_buffer, indices_buffer)
|
||||
.unwrap();
|
||||
|
||||
// We get the root of the tree obtained after a no-op
|
||||
let mut buffer = Cursor::new(Vec::<u8>::new());
|
||||
rln.get_root(&mut buffer).unwrap();
|
||||
let (root_after_noop, _) = bytes_le_to_fr(&buffer.into_inner());
|
||||
|
||||
assert_eq!(root_after_insertion, root_after_noop);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_atomic_operation_zero_indexed() {
|
||||
// Test duplicated from https://github.com/waku-org/go-zerokit-rln/pull/12/files
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let no_of_leaves = 256;
|
||||
|
||||
// We generate a vector of random leaves
|
||||
let mut leaves: Vec<Fr> = Vec::new();
|
||||
let mut rng = thread_rng();
|
||||
for _ in 0..no_of_leaves {
|
||||
leaves.push(Fr::rand(&mut rng));
|
||||
}
|
||||
|
||||
// We create a new tree
|
||||
let input_buffer =
|
||||
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
|
||||
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
|
||||
|
||||
// We add leaves in a batch into the tree
|
||||
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
|
||||
rln.init_tree_with_leaves(&mut buffer).unwrap();
|
||||
|
||||
// We check if number of leaves set is consistent
|
||||
assert_eq!(rln.tree.leaves_set(), no_of_leaves);
|
||||
|
||||
// We get the root of the tree obtained adding leaves in batch
|
||||
let mut buffer = Cursor::new(Vec::<u8>::new());
|
||||
rln.get_root(&mut buffer).unwrap();
|
||||
let (root_after_insertion, _) = bytes_le_to_fr(&buffer.into_inner());
|
||||
|
||||
let zero_index = 0;
|
||||
let indices = vec![zero_index as u8];
|
||||
let zero_leaf: Vec<Fr> = vec![];
|
||||
let indices_buffer = Cursor::new(vec_u8_to_bytes_le(&indices).unwrap());
|
||||
let leaves_buffer = Cursor::new(vec_fr_to_bytes_le(&zero_leaf).unwrap());
|
||||
rln.atomic_operation(0, leaves_buffer, indices_buffer)
|
||||
.unwrap();
|
||||
|
||||
// We get the root of the tree obtained after a deletion
|
||||
let mut buffer = Cursor::new(Vec::<u8>::new());
|
||||
rln.get_root(&mut buffer).unwrap();
|
||||
let (root_after_deletion, _) = bytes_le_to_fr(&buffer.into_inner());
|
||||
|
||||
assert_ne!(root_after_insertion, root_after_deletion);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_atomic_operation_consistency() {
|
||||
// Test duplicated from https://github.com/waku-org/go-zerokit-rln/pull/12/files
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let no_of_leaves = 256;
|
||||
|
||||
// We generate a vector of random leaves
|
||||
let mut leaves: Vec<Fr> = Vec::new();
|
||||
let mut rng = thread_rng();
|
||||
for _ in 0..no_of_leaves {
|
||||
leaves.push(Fr::rand(&mut rng));
|
||||
}
|
||||
|
||||
// We create a new tree
|
||||
let input_buffer =
|
||||
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
|
||||
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
|
||||
|
||||
// We add leaves in a batch into the tree
|
||||
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
|
||||
rln.init_tree_with_leaves(&mut buffer).unwrap();
|
||||
|
||||
// We check if number of leaves set is consistent
|
||||
assert_eq!(rln.tree.leaves_set(), no_of_leaves);
|
||||
|
||||
// We get the root of the tree obtained adding leaves in batch
|
||||
let mut buffer = Cursor::new(Vec::<u8>::new());
|
||||
rln.get_root(&mut buffer).unwrap();
|
||||
let (root_after_insertion, _) = bytes_le_to_fr(&buffer.into_inner());
|
||||
|
||||
let set_index = rng.gen_range(0..no_of_leaves) as usize;
|
||||
let indices = vec![set_index as u8];
|
||||
let zero_leaf: Vec<Fr> = vec![];
|
||||
let indices_buffer = Cursor::new(vec_u8_to_bytes_le(&indices).unwrap());
|
||||
let leaves_buffer = Cursor::new(vec_fr_to_bytes_le(&zero_leaf).unwrap());
|
||||
rln.atomic_operation(0, leaves_buffer, indices_buffer)
|
||||
.unwrap();
|
||||
|
||||
// We get the root of the tree obtained after a deletion
|
||||
let mut buffer = Cursor::new(Vec::<u8>::new());
|
||||
rln.get_root(&mut buffer).unwrap();
|
||||
let (root_after_deletion, _) = bytes_le_to_fr(&buffer.into_inner());
|
||||
|
||||
assert_ne!(root_after_insertion, root_after_deletion);
|
||||
|
||||
// We get the leaf
|
||||
let mut output_buffer = Cursor::new(Vec::<u8>::new());
|
||||
rln.get_leaf(set_index, &mut output_buffer).unwrap();
|
||||
let (received_leaf, _) = bytes_le_to_fr(output_buffer.into_inner().as_ref());
|
||||
|
||||
assert_eq!(received_leaf, Fr::from(0));
|
||||
}
|
||||
|
||||
#[allow(unused_must_use)]
|
||||
@@ -1243,7 +1600,8 @@ mod test {
|
||||
let bad_index = (1 << tree_height) - rng.gen_range(0..no_of_leaves) as usize;
|
||||
|
||||
// We create a new tree
|
||||
let input_buffer = Cursor::new(TEST_RESOURCES_FOLDER);
|
||||
let input_buffer =
|
||||
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
|
||||
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
|
||||
|
||||
// Get root of empty tree
|
||||
@@ -1272,7 +1630,8 @@ mod test {
|
||||
fn test_groth16_proof() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
|
||||
let input_buffer = Cursor::new(TEST_RESOURCES_FOLDER);
|
||||
let input_buffer =
|
||||
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
|
||||
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
|
||||
|
||||
// Note: we only test Groth16 proof generation, so we ignore setting the tree in the RLN object
|
||||
@@ -1316,7 +1675,8 @@ mod test {
|
||||
}
|
||||
|
||||
// We create a new RLN instance
|
||||
let input_buffer = Cursor::new(TEST_RESOURCES_FOLDER);
|
||||
let input_buffer =
|
||||
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
|
||||
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
|
||||
|
||||
// We add leaves in a batch into the tree
|
||||
@@ -1380,7 +1740,8 @@ mod test {
|
||||
}
|
||||
|
||||
// We create a new RLN instance
|
||||
let input_buffer = Cursor::new(TEST_RESOURCES_FOLDER);
|
||||
let input_buffer =
|
||||
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
|
||||
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
|
||||
|
||||
// We add leaves in a batch into the tree
|
||||
@@ -1476,7 +1837,8 @@ mod test {
|
||||
}
|
||||
|
||||
// We create a new RLN instance
|
||||
let input_buffer = Cursor::new(TEST_RESOURCES_FOLDER);
|
||||
let input_buffer =
|
||||
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
|
||||
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
|
||||
|
||||
// We add leaves in a batch into the tree
|
||||
@@ -1562,7 +1924,8 @@ mod test {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
|
||||
// We create a new RLN instance
|
||||
let input_buffer = Cursor::new(TEST_RESOURCES_FOLDER);
|
||||
let input_buffer =
|
||||
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
|
||||
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
|
||||
|
||||
// Generate identity pair
|
||||
@@ -1681,4 +2044,50 @@ mod test {
|
||||
// We ensure that an empty value was written to output_buffer, i.e. no secret is recovered
|
||||
assert!(serialized_identity_secret_hash.is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_leaf() {
|
||||
// We generate a random tree
|
||||
let tree_height = 10;
|
||||
let mut rng = thread_rng();
|
||||
let input_buffer =
|
||||
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
|
||||
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
|
||||
|
||||
// We generate a random leaf
|
||||
let leaf = Fr::rand(&mut rng);
|
||||
|
||||
// We generate a random index
|
||||
let index = rng.gen_range(0..rln.tree.capacity());
|
||||
|
||||
// We add the leaf to the tree
|
||||
let mut buffer = Cursor::new(fr_to_bytes_le(&leaf));
|
||||
rln.set_leaf(index, &mut buffer).unwrap();
|
||||
|
||||
// We get the leaf
|
||||
let mut output_buffer = Cursor::new(Vec::<u8>::new());
|
||||
rln.get_leaf(index, &mut output_buffer).unwrap();
|
||||
|
||||
// We ensure that the leaf is the same as the one we added
|
||||
let (received_leaf, _) = bytes_le_to_fr(output_buffer.into_inner().as_ref());
|
||||
assert_eq!(received_leaf, leaf);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_metadata() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
|
||||
let input_buffer =
|
||||
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
|
||||
let mut rln = RLN::new(tree_height, input_buffer).unwrap();
|
||||
|
||||
let arbitrary_metadata: &[u8] = b"block_number:200000";
|
||||
rln.set_metadata(arbitrary_metadata).unwrap();
|
||||
|
||||
let mut buffer = Cursor::new(Vec::<u8>::new());
|
||||
rln.get_metadata(&mut buffer).unwrap();
|
||||
let received_metadata = buffer.into_inner();
|
||||
|
||||
assert_eq!(arbitrary_metadata, received_metadata);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -75,7 +75,7 @@ pub fn fr_to_bytes_be(input: &Fr) -> Vec<u8> {
|
||||
pub fn vec_fr_to_bytes_le(input: &[Fr]) -> Result<Vec<u8>> {
|
||||
let mut bytes: Vec<u8> = Vec::new();
|
||||
//We store the vector length
|
||||
bytes.extend(input.len().to_le_bytes().to_vec());
|
||||
bytes.extend(u64::try_from(input.len())?.to_le_bytes().to_vec());
|
||||
|
||||
// We store each element
|
||||
input.iter().for_each(|el| bytes.extend(fr_to_bytes_le(el)));
|
||||
@@ -86,7 +86,7 @@ pub fn vec_fr_to_bytes_le(input: &[Fr]) -> Result<Vec<u8>> {
|
||||
pub fn vec_fr_to_bytes_be(input: &[Fr]) -> Result<Vec<u8>> {
|
||||
let mut bytes: Vec<u8> = Vec::new();
|
||||
//We store the vector length
|
||||
bytes.extend(input.len().to_be_bytes().to_vec());
|
||||
bytes.extend(u64::try_from(input.len())?.to_be_bytes().to_vec());
|
||||
|
||||
// We store each element
|
||||
input.iter().for_each(|el| bytes.extend(fr_to_bytes_be(el)));
|
||||
@@ -97,7 +97,7 @@ pub fn vec_fr_to_bytes_be(input: &[Fr]) -> Result<Vec<u8>> {
|
||||
pub fn vec_u8_to_bytes_le(input: &[u8]) -> Result<Vec<u8>> {
|
||||
let mut bytes: Vec<u8> = Vec::new();
|
||||
//We store the vector length
|
||||
bytes.extend(input.len().to_le_bytes().to_vec());
|
||||
bytes.extend(u64::try_from(input.len())?.to_le_bytes().to_vec());
|
||||
|
||||
bytes.extend(input);
|
||||
|
||||
@@ -107,7 +107,7 @@ pub fn vec_u8_to_bytes_le(input: &[u8]) -> Result<Vec<u8>> {
|
||||
pub fn vec_u8_to_bytes_be(input: Vec<u8>) -> Result<Vec<u8>> {
|
||||
let mut bytes: Vec<u8> = Vec::new();
|
||||
//We store the vector length
|
||||
bytes.extend(input.len().to_be_bytes().to_vec());
|
||||
bytes.extend(u64::try_from(input.len())?.to_be_bytes().to_vec());
|
||||
|
||||
bytes.extend(input);
|
||||
|
||||
@@ -117,7 +117,7 @@ pub fn vec_u8_to_bytes_be(input: Vec<u8>) -> Result<Vec<u8>> {
|
||||
pub fn bytes_le_to_vec_u8(input: &[u8]) -> Result<(Vec<u8>, usize)> {
|
||||
let mut read: usize = 0;
|
||||
|
||||
let len = usize::from_le_bytes(input[0..8].try_into()?);
|
||||
let len = usize::try_from(u64::from_le_bytes(input[0..8].try_into()?))?;
|
||||
read += 8;
|
||||
|
||||
let res = input[8..8 + len].to_vec();
|
||||
@@ -129,7 +129,7 @@ pub fn bytes_le_to_vec_u8(input: &[u8]) -> Result<(Vec<u8>, usize)> {
|
||||
pub fn bytes_be_to_vec_u8(input: &[u8]) -> Result<(Vec<u8>, usize)> {
|
||||
let mut read: usize = 0;
|
||||
|
||||
let len = usize::from_be_bytes(input[0..8].try_into()?);
|
||||
let len = usize::try_from(u64::from_be_bytes(input[0..8].try_into()?))?;
|
||||
read += 8;
|
||||
|
||||
let res = input[8..8 + len].to_vec();
|
||||
@@ -143,7 +143,7 @@ pub fn bytes_le_to_vec_fr(input: &[u8]) -> Result<(Vec<Fr>, usize)> {
|
||||
let mut read: usize = 0;
|
||||
let mut res: Vec<Fr> = Vec::new();
|
||||
|
||||
let len = usize::from_le_bytes(input[0..8].try_into()?);
|
||||
let len = usize::try_from(u64::from_le_bytes(input[0..8].try_into()?))?;
|
||||
read += 8;
|
||||
|
||||
let el_size = fr_byte_size();
|
||||
@@ -160,7 +160,7 @@ pub fn bytes_be_to_vec_fr(input: &[u8]) -> Result<(Vec<Fr>, usize)> {
|
||||
let mut read: usize = 0;
|
||||
let mut res: Vec<Fr> = Vec::new();
|
||||
|
||||
let len = usize::from_be_bytes(input[0..8].try_into()?);
|
||||
let len = usize::try_from(u64::from_be_bytes(input[0..8].try_into()?))?;
|
||||
read += 8;
|
||||
|
||||
let el_size = fr_byte_size();
|
||||
|
||||
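The utils changes above make the vector length prefix an explicit u64 on both the encode and decode paths, which keeps serialized data compatible between 64-bit hosts and 32-bit targets. A round-trip sketch of that layout over plain byte payloads (helper names are illustrative):

use color_eyre::Result;

// Encode: 8-byte little-endian u64 length prefix followed by the raw payload.
fn encode_len_prefixed(payload: &[u8]) -> Result<Vec<u8>> {
    let mut bytes = Vec::with_capacity(8 + payload.len());
    bytes.extend(u64::try_from(payload.len())?.to_le_bytes());
    bytes.extend(payload);
    Ok(bytes)
}

// Decode: read the prefix back as u64 and convert it with a checked cast.
fn decode_len_prefixed(input: &[u8]) -> Result<(Vec<u8>, usize)> {
    let len = usize::try_from(u64::from_le_bytes(input[0..8].try_into()?))?;
    Ok((input[8..8 + len].to_vec(), 8 + len))
}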
rln/tests/ffi.rs
@@ -8,6 +8,7 @@ mod test {
|
||||
use rln::protocol::*;
|
||||
use rln::public::RLN;
|
||||
use rln::utils::*;
|
||||
use serde_json::json;
|
||||
use std::fs::File;
|
||||
use std::io::Read;
|
||||
use std::mem::MaybeUninit;
|
||||
@@ -28,7 +29,8 @@ mod test {
|
||||
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
|
||||
let input_config = json!({ "resource_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
@@ -96,8 +98,7 @@ mod test {
|
||||
|
||||
// We now delete all leaves set and check if the root corresponds to the empty tree root
|
||||
// delete calls over indexes higher than no_of_leaves are ignored and will not increase self.tree.next_index
|
||||
let delete_range = 2 * no_of_leaves;
|
||||
for i in 0..delete_range {
|
||||
for i in 0..no_of_leaves {
|
||||
let success = delete_leaf(rln_pointer, i);
|
||||
assert!(success, "delete leaf call failed");
|
||||
}
|
||||
@@ -136,7 +137,8 @@ mod test {
|
||||
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
@@ -216,6 +218,71 @@ mod test {
|
||||
assert_eq!(root_batch_with_init, root_single_additions);
|
||||
}
|
||||
|
||||
#[test]
|
||||
// This test is similar to the one in public.rs but it uses the RLN object as a pointer
|
||||
fn test_atomic_operation_ffi() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let no_of_leaves = 256;
|
||||
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
|
||||
// We generate a vector of random leaves
|
||||
let mut leaves: Vec<Fr> = Vec::new();
|
||||
let mut rng = thread_rng();
|
||||
for _ in 0..no_of_leaves {
|
||||
leaves.push(Fr::rand(&mut rng));
|
||||
}
|
||||
|
||||
// We add leaves in a batch into the tree
|
||||
let leaves_ser = vec_fr_to_bytes_le(&leaves).unwrap();
|
||||
let input_buffer = &Buffer::from(leaves_ser.as_ref());
|
||||
let success = init_tree_with_leaves(rln_pointer, input_buffer);
|
||||
assert!(success, "init tree with leaves call failed");
|
||||
|
||||
// We get the root of the tree obtained adding leaves in batch
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "get root call failed");
|
||||
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
let result_data = <&[u8]>::from(&output_buffer).to_vec();
|
||||
let (root_after_insertion, _) = bytes_le_to_fr(&result_data);
|
||||
|
||||
let last_leaf = leaves.last().unwrap();
|
||||
let last_leaf_index = no_of_leaves - 1;
|
||||
let indices = vec![last_leaf_index as u8];
|
||||
let last_leaf = vec![*last_leaf];
|
||||
let indices = vec_u8_to_bytes_le(&indices).unwrap();
|
||||
let indices_buffer = &Buffer::from(indices.as_ref());
|
||||
let leaves = vec_fr_to_bytes_le(&last_leaf).unwrap();
|
||||
let leaves_buffer = &Buffer::from(leaves.as_ref());
|
||||
|
||||
let success = atomic_operation(
|
||||
rln_pointer,
|
||||
last_leaf_index as usize,
|
||||
leaves_buffer,
|
||||
indices_buffer,
|
||||
);
|
||||
assert!(success, "atomic operation call failed");
|
||||
|
||||
// We get the root of the tree obtained after a no-op
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "get root call failed");
|
||||
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
let result_data = <&[u8]>::from(&output_buffer).to_vec();
|
||||
let (root_after_noop, _) = bytes_le_to_fr(&result_data);
|
||||
|
||||
assert_eq!(root_after_insertion, root_after_noop);
|
||||
}
|
||||
|
||||
#[test]
|
||||
// This test is similar to the one in public.rs but it uses the RLN object as a pointer
|
||||
fn test_set_leaves_bad_index_ffi() {
|
||||
@@ -233,7 +300,8 @@ mod test {
|
||||
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
@@ -273,7 +341,8 @@ mod test {
|
||||
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
@@ -441,7 +510,8 @@ mod test {
|
||||
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
@@ -503,7 +573,8 @@ mod test {
|
||||
|
||||
// We create a RLN instance using a resource folder path
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
@@ -546,11 +617,14 @@ mod test {
|
||||
|
||||
// Creating a RLN instance passing the raw data
|
||||
let mut rln_pointer_raw_bytes = MaybeUninit::<*mut RLN>::uninit();
|
||||
let tree_config = "".to_string();
|
||||
let tree_config_buffer = &Buffer::from(tree_config.as_bytes());
|
||||
let success = new_with_params(
|
||||
tree_height,
|
||||
circom_data,
|
||||
zkey_data,
|
||||
vk_data,
|
||||
tree_config_buffer,
|
||||
rln_pointer_raw_bytes.as_mut_ptr(),
|
||||
);
|
||||
assert!(success, "RLN object creation failed");
|
||||
@@ -583,7 +657,8 @@ mod test {
|
||||
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
@@ -667,7 +742,8 @@ mod test {
|
||||
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
@@ -785,7 +861,8 @@ mod test {
|
||||
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
@@ -944,7 +1021,8 @@ mod test {
|
||||
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
@@ -983,7 +1061,8 @@ mod test {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
// We create a RLN instance
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
@@ -1080,4 +1159,77 @@ mod test {
|
||||
|
||||
assert_eq!(received_hash, expected_hash);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_leaf() {
|
||||
// We create a RLN instance
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let no_of_leaves = 1 << TEST_TREE_HEIGHT;
|
||||
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
|
||||
// We generate a new identity tuple from an input seed
|
||||
let seed_bytes: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
|
||||
let input_buffer = &Buffer::from(seed_bytes);
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success =
|
||||
seeded_extended_key_gen(rln_pointer, input_buffer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "seeded key gen call failed");
|
||||
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
let result_data = <&[u8]>::from(&output_buffer).to_vec();
|
||||
let (_, _, _, id_commitment) = deserialize_identity_tuple(result_data);
|
||||
|
||||
// We insert the id_commitment into the tree at a random index
|
||||
let mut rng = thread_rng();
|
||||
let index = rng.gen_range(0..no_of_leaves) as usize;
|
||||
let leaf = fr_to_bytes_le(&id_commitment);
|
||||
let input_buffer = &Buffer::from(leaf.as_ref());
|
||||
let success = set_leaf(rln_pointer, index, input_buffer);
|
||||
assert!(success, "set leaf call failed");
|
||||
|
||||
// We get the leaf at the same index
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = get_leaf(rln_pointer, index, output_buffer.as_mut_ptr());
|
||||
assert!(success, "get leaf call failed");
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
let result_data = <&[u8]>::from(&output_buffer).to_vec();
|
||||
let (received_id_commitment, _) = bytes_le_to_fr(&result_data);
|
||||
|
||||
// We check that the received id_commitment is the same as the one we inserted
|
||||
assert_eq!(received_id_commitment, id_commitment);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_metadata() {
|
||||
// We create a RLN instance
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
|
||||
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
|
||||
let input_config = json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string();
|
||||
let input_buffer = &Buffer::from(input_config.as_bytes());
|
||||
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
|
||||
assert!(success, "RLN object creation failed");
|
||||
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
|
||||
|
||||
let seed_bytes: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
|
||||
let input_buffer = &Buffer::from(seed_bytes);
|
||||
|
||||
let success = set_metadata(rln_pointer, input_buffer);
|
||||
assert!(success, "set_metadata call failed");
|
||||
|
||||
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
|
||||
let success = get_metadata(rln_pointer, output_buffer.as_mut_ptr());
|
||||
assert!(success, "get_metadata call failed");
|
||||
|
||||
let output_buffer = unsafe { output_buffer.assume_init() };
|
||||
let result_data = <&[u8]>::from(&output_buffer).to_vec();
|
||||
|
||||
assert_eq!(result_data, seed_bytes.to_vec());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use rln::circuit::*;
|
||||
use rln::poseidon_tree::*;
|
||||
use rln::hashers::PoseidonHash;
|
||||
use utils::{FullMerkleTree, OptimalMerkleTree, ZerokitMerkleProof, ZerokitMerkleTree};
|
||||
|
||||
#[test]
|
||||
@@ -25,16 +25,16 @@ mod test {
|
||||
|
||||
for _ in 0..sample_size.try_into().unwrap() {
|
||||
let now = Instant::now();
|
||||
FullMerkleTree::<PoseidonHash>::default(tree_height);
|
||||
FullMerkleTree::<PoseidonHash>::default(tree_height).unwrap();
|
||||
gen_time_full += now.elapsed().as_nanos();
|
||||
|
||||
let now = Instant::now();
|
||||
OptimalMerkleTree::<PoseidonHash>::default(tree_height);
|
||||
OptimalMerkleTree::<PoseidonHash>::default(tree_height).unwrap();
|
||||
gen_time_opt += now.elapsed().as_nanos();
|
||||
}
|
||||
|
||||
let mut tree_full = FullMerkleTree::<PoseidonHash>::default(tree_height);
|
||||
let mut tree_opt = OptimalMerkleTree::<PoseidonHash>::default(tree_height);
|
||||
let mut tree_full = FullMerkleTree::<PoseidonHash>::default(tree_height).unwrap();
|
||||
let mut tree_opt = OptimalMerkleTree::<PoseidonHash>::default(tree_height).unwrap();
|
||||
|
||||
for i in 0..sample_size.try_into().unwrap() {
|
||||
let now = Instant::now();
|
||||
@@ -78,540 +78,3 @@ mod test {
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Test module for testing pmtree integration and features in zerokit
|
||||
// enabled only if the pmtree feature is enabled
|
||||
|
||||
#[cfg(feature = "pmtree-ft")]
|
||||
#[cfg(test)]
|
||||
mod pmtree_test {
|
||||
|
||||
use pmtree::*;
|
||||
use rln::circuit::Fr;
|
||||
use rln::hashers::{hash_to_field, poseidon_hash};
|
||||
use rln::poseidon_tree::PoseidonHash;
|
||||
use rln::protocol::hash_to_field;
|
||||
use rln::utils::str_to_fr;
|
||||
use sled::Db as Sled;
|
||||
use std::fs;
|
||||
use std::{collections::HashMap, path::PathBuf};
|
||||
use utils::{FullMerkleTree, OptimalMerkleTree};
|
||||
|
||||
// pmtree supports in-memory and on-disk databases (Database trait) for storing the Merkle tree state
|
||||
|
||||
// We implement Database for hashmaps, an in-memory database
|
||||
struct MemoryDB(HashMap<DBKey, Value>);
|
||||
|
||||
#[derive(Default)]
|
||||
struct MemoryDBConfig {}
|
||||
|
||||
impl Database for MemoryDB {
|
||||
type Config = MemoryDBConfig;
|
||||
|
||||
fn new(_config: Self::Config) -> Result<Self> {
|
||||
Ok(MemoryDB(HashMap::new()))
|
||||
}
|
||||
|
||||
fn load(_config: Self::Config) -> Result<Self> {
|
||||
Err(Box::new(Error("Cannot load in-memory DB".to_string())))
|
||||
}
|
||||
|
||||
fn get(&self, key: DBKey) -> Result<Option<Value>> {
|
||||
Ok(self.0.get(&key).cloned())
|
||||
}
|
||||
|
||||
fn put(&mut self, key: DBKey, value: Value) -> Result<()> {
|
||||
self.0.insert(key, value);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn put_batch(&mut self, subtree: HashMap<DBKey, Value>) -> Result<()> {
|
||||
self.0.extend(subtree);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
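With the Database impl above, pmtree can back a Merkle tree with the in-memory map; the tests further below construct it exactly this way, so this sketch only restates the shape:

// Build a pmtree Merkle tree of height 20 over the in-memory database,
// set a single leaf and read back the root.
let mut tree = pmtree::MerkleTree::<MemoryDB, PoseidonHash>::default(20).unwrap();
tree.set(0, hash_to_field(b"example-leaf")).unwrap();
let _root = tree.root();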
|
||||
// We implement Database for sled DB, an on-disk database
|
||||
struct SledDB(Sled);
|
||||
|
||||
impl Database for SledDB {
|
||||
type Config = sled::Config;
|
||||
|
||||
fn new(config: Self::Config) -> Result<Self> {
|
||||
let dbpath = &config.path;
if dbpath.exists() {
match fs::remove_dir_all(dbpath) {
|
||||
Ok(x) => x,
|
||||
Err(e) => return Err(Box::new(Error(e.to_string()))),
|
||||
}
|
||||
}
|
||||
|
||||
let db: Sled = match config.open() {
|
||||
Ok(db) => db,
|
||||
Err(e) => return Err(Box::new(Error(e.to_string()))),
|
||||
};
|
||||
|
||||
Ok(SledDB(db))
|
||||
}
|
||||
|
||||
fn load(config: Self::Config) -> Result<Self> {
|
||||
let db: Sled = match sled::open(&config.path) {
|
||||
Ok(db) => db,
|
||||
Err(e) => return Err(Box::new(Error(e.to_string()))),
|
||||
};
|
||||
|
||||
if !db.was_recovered() {
|
||||
return Err(Box::new(Error(
|
||||
"Trying to load non-existing database!".to_string(),
|
||||
)));
|
||||
}
|
||||
|
||||
Ok(SledDB(db))
|
||||
}
|
||||
|
||||
fn get(&self, key: DBKey) -> Result<Option<Value>> {
|
||||
match self.0.get(key) {
|
||||
Ok(value) => Ok(value.map(|val| val.to_vec())),
|
||||
Err(e) => Err(Box::new(Error(e.to_string()))),
|
||||
}
|
||||
}
|
||||
|
||||
fn put(&mut self, key: DBKey, value: Value) -> Result<()> {
|
||||
match self.0.insert(key, value) {
|
||||
Ok(_) => Ok(()),
|
||||
Err(e) => Err(Box::new(Error(e.to_string()))),
|
||||
}
|
||||
}
|
||||
|
||||
fn put_batch(&mut self, subtree: HashMap<DBKey, Value>) -> Result<()> {
|
||||
let mut batch = sled::Batch::default();
|
||||
|
||||
for (key, value) in subtree {
|
||||
batch.insert(&key, value);
|
||||
}
|
||||
|
||||
self.0.apply_batch(batch)?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
/// A basic performance comparison between the two supported Merkle Tree implementations and in-memory/on-disk pmtree implementations
|
||||
fn test_zerokit_and_pmtree_merkle_implementations_performances() {
|
||||
use std::time::{Duration, Instant};
|
||||
|
||||
let tree_height = 20;
|
||||
let sample_size = 100;
|
||||
|
||||
let leaves: Vec<Fr> = (0..sample_size).map(|s| Fr::from(s)).collect();
|
||||
|
||||
let mut gen_time_full: u128 = 0;
|
||||
let mut upd_time_full: u128 = 0;
|
||||
let mut gen_time_opt: u128 = 0;
|
||||
let mut upd_time_opt: u128 = 0;
|
||||
let mut gen_time_pm_memory: u128 = 0;
|
||||
let mut upd_time_pm_memory: u128 = 0;
|
||||
let mut gen_time_pm_sled: u128 = 0;
|
||||
let mut upd_time_pm_sled: u128 = 0;
|
||||
|
||||
for _ in 0..sample_size.try_into().unwrap() {
|
||||
let now = Instant::now();
|
||||
FullMerkleTree::<PoseidonHash>::default(tree_height);
|
||||
gen_time_full += now.elapsed().as_nanos();
|
||||
|
||||
let now = Instant::now();
|
||||
OptimalMerkleTree::<PoseidonHash>::default(tree_height);
|
||||
gen_time_opt += now.elapsed().as_nanos();
|
||||
|
||||
let now = Instant::now();
|
||||
pmtree::MerkleTree::<MemoryDB, PoseidonHash>::default(tree_height).unwrap();
|
||||
gen_time_pm_memory += now.elapsed().as_nanos();
|
||||
|
||||
let now = Instant::now();
|
||||
pmtree::MerkleTree::<SledDB, PoseidonHash>::default(tree_height).unwrap();
|
||||
gen_time_pm_sled += now.elapsed().as_nanos();
|
||||
}
|
||||
|
||||
let mut tree_full = FullMerkleTree::<PoseidonHash>::default(tree_height);
|
||||
let mut tree_opt = OptimalMerkleTree::<PoseidonHash>::default(tree_height);
|
||||
let mut tree_pm_memory =
|
||||
pmtree::MerkleTree::<MemoryDB, PoseidonHash>::default(tree_height).unwrap();
|
||||
let mut tree_pm_sled =
|
||||
pmtree::MerkleTree::<SledDB, PoseidonHash>::default(tree_height).unwrap();
|
||||
|
||||
for i in 0..sample_size.try_into().unwrap() {
|
||||
let now = Instant::now();
|
||||
tree_full.set(i, leaves[i]).unwrap();
|
||||
upd_time_full += now.elapsed().as_nanos();
|
||||
let proof = tree_full.proof(i).expect("index should be set");
|
||||
assert_eq!(proof.leaf_index(), i);
|
||||
|
||||
let now = Instant::now();
|
||||
tree_opt.set(i, leaves[i]).unwrap();
|
||||
upd_time_opt += now.elapsed().as_nanos();
|
||||
let proof = tree_opt.proof(i).expect("index should be set");
|
||||
assert_eq!(proof.leaf_index(), i);
|
||||
|
||||
let now = Instant::now();
|
||||
tree_pm_memory.set(i, leaves[i]).unwrap();
|
||||
upd_time_pm_memory += now.elapsed().as_nanos();
|
||||
let proof = tree_pm_memory.proof(i).expect("index should be set");
|
||||
assert_eq!(proof.leaf_index(), i);
|
||||
|
||||
let now = Instant::now();
|
||||
tree_pm_sled.set(i, leaves[i]).unwrap();
|
||||
upd_time_pm_sled += now.elapsed().as_nanos();
|
||||
let proof = tree_pm_sled.proof(i).expect("index should be set");
|
||||
assert_eq!(proof.leaf_index(), i);
|
||||
}
|
||||
|
||||
// We check all roots are the same
|
||||
let tree_full_root = tree_full.root();
|
||||
let tree_opt_root = tree_opt.root();
|
||||
let tree_pm_memory_root = tree_pm_memory.root();
|
||||
let tree_pm_sled_root = tree_pm_sled.root();
|
||||
|
||||
assert_eq!(tree_full_root, tree_opt_root);
|
||||
assert_eq!(tree_opt_root, tree_pm_memory_root);
|
||||
assert_eq!(tree_pm_memory_root, tree_pm_sled_root);
|
||||
|
||||
println!(" Average tree generation time:");
|
||||
println!(
|
||||
" - Full Merkle Tree: {:?}",
|
||||
Duration::from_nanos((gen_time_full / sample_size).try_into().unwrap())
|
||||
);
|
||||
println!(
|
||||
" - Optimal Merkle Tree: {:?}",
|
||||
Duration::from_nanos((gen_time_opt / sample_size).try_into().unwrap())
|
||||
);
|
||||
|
||||
println!(
|
||||
" - Pmtree-HashMap Merkle Tree: {:?}",
|
||||
Duration::from_nanos((gen_time_pm_memory / sample_size).try_into().unwrap())
|
||||
);
|
||||
|
||||
println!(
|
||||
" - Pmtree-Sled Merkle Tree: {:?}",
|
||||
Duration::from_nanos((gen_time_pm_sled / sample_size).try_into().unwrap())
|
||||
);
|
||||
|
||||
println!(" Average update_next execution time:");
|
||||
println!(
|
||||
" - Full Merkle Tree: {:?}",
|
||||
Duration::from_nanos((upd_time_full / sample_size).try_into().unwrap())
|
||||
);
|
||||
|
||||
println!(
|
||||
" - Optimal Merkle Tree: {:?}",
|
||||
Duration::from_nanos((upd_time_opt / sample_size).try_into().unwrap())
|
||||
);
|
||||
|
||||
println!(
|
||||
" - Pmtree-HashMap Merkle Tree: {:?}",
|
||||
Duration::from_nanos((upd_time_pm_memory / sample_size).try_into().unwrap())
|
||||
);
|
||||
|
||||
println!(
|
||||
" - Pmtree-Sled Merkle Tree: {:?}",
|
||||
Duration::from_nanos((upd_time_pm_sled / sample_size).try_into().unwrap())
|
||||
);
|
||||
}
|
||||
|
||||
// The following two tests contain values that come from public::test_merkle_proof test
|
||||
// We check that pmtree and zerokit Merkle tree implementations match.
|
||||
|
||||
#[test]
|
||||
fn test_pmtree_hashmap() -> Result<()> {
|
||||
let tree_height = 20;
|
||||
|
||||
let mut tree = pmtree::MerkleTree::<MemoryDB, PoseidonHash>::default(tree_height).unwrap();
|
||||
|
||||
let leaf_index = 3;
|
||||
|
||||
let identity_secret = hash_to_field(b"test-merkle-proof");
|
||||
let id_commitment = poseidon_hash(&[identity_secret]);
|
||||
|
||||
// let default_leaf = Fr::from(0);
|
||||
tree.set(leaf_index, id_commitment).unwrap();
|
||||
|
||||
// We check correct computation of the root
|
||||
let root = tree.root();
|
||||
|
||||
assert_eq!(
|
||||
root,
|
||||
str_to_fr(
|
||||
"0x21947ffd0bce0c385f876e7c97d6a42eec5b1fe935aab2f01c1f8a8cbcc356d2",
|
||||
16
|
||||
)
|
||||
.unwrap()
|
||||
);
|
||||
|
||||
let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
|
||||
let path_elements = merkle_proof.get_path_elements();
|
||||
let identity_path_index = merkle_proof.get_path_index();
|
||||
|
||||
// We check correct computation of the path and indexes
|
||||
// These values refers to tree height = 20
|
||||
let expected_path_elements = vec![
|
||||
str_to_fr(
|
||||
"0x0000000000000000000000000000000000000000000000000000000000000000",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
str_to_fr(
|
||||
"0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca",
|
||||
16,
|
||||
)
|
||||
.unwrap(),
|
||||
];
|
||||
|
||||
let expected_identity_path_index: Vec<u8> =
|
||||
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
|
||||
|
||||
assert_eq!(path_elements, expected_path_elements);
|
||||
assert_eq!(identity_path_index, expected_identity_path_index);
|
||||
|
||||
// We check correct verification of the proof
|
||||
assert!(tree.verify(&id_commitment, &merkle_proof));
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
fn test_pmtree_sled() -> Result<()> {
let tree_height = 20;

let mut tree = pmtree::MerkleTree::<SledDB, PoseidonHash>::default(tree_height).unwrap();

let leaf_index = 3;

let identity_secret = hash_to_field(b"test-merkle-proof");
let id_commitment = poseidon_hash(&[identity_secret]);

// let default_leaf = Fr::from(0);
tree.set(leaf_index, id_commitment).unwrap();

// We check correct computation of the root
let root = tree.root();

assert_eq!(
root,
str_to_fr("0x21947ffd0bce0c385f876e7c97d6a42eec5b1fe935aab2f01c1f8a8cbcc356d2", 16).unwrap()
);

let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
let path_elements = merkle_proof.get_path_elements();
let identity_path_index = merkle_proof.get_path_index();

// We check correct computation of the path and indexes
// These values refer to a tree of height 20
let expected_path_elements = vec![
str_to_fr("0x0000000000000000000000000000000000000000000000000000000000000000", 16).unwrap(),
str_to_fr("0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864", 16).unwrap(),
str_to_fr("0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1", 16).unwrap(),
str_to_fr("0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238", 16).unwrap(),
str_to_fr("0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a", 16).unwrap(),
str_to_fr("0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55", 16).unwrap(),
str_to_fr("0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78", 16).unwrap(),
str_to_fr("0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d", 16).unwrap(),
str_to_fr("0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61", 16).unwrap(),
str_to_fr("0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747", 16).unwrap(),
str_to_fr("0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2", 16).unwrap(),
str_to_fr("0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636", 16).unwrap(),
str_to_fr("0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a", 16).unwrap(),
str_to_fr("0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0", 16).unwrap(),
str_to_fr("0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c", 16).unwrap(),
str_to_fr("0x22f98aa9ce704152ac17354914ad73ed1167ae6596af510aa5b3649325e06c92", 16).unwrap(),
str_to_fr("0x2a7c7c9b6ce5880b9f6f228d72bf6a575a526f29c66ecceef8b753d38bba7323", 16).unwrap(),
str_to_fr("0x2e8186e558698ec1c67af9c14d463ffc470043c9c2988b954d75dd643f36b992", 16).unwrap(),
str_to_fr("0x0f57c5571e9a4eab49e2c8cf050dae948aef6ead647392273546249d1c1ff10f", 16).unwrap(),
str_to_fr("0x1830ee67b5fb554ad5f63d4388800e1cfe78e310697d46e43c9ce36134f72cca", 16).unwrap(),
];

let expected_identity_path_index: Vec<u8> =
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];

assert_eq!(path_elements, expected_path_elements);
assert_eq!(identity_path_index, expected_identity_path_index);

// We check correct verification of the proof
assert!(tree.verify(&id_commitment, &merkle_proof));

Ok(())
}
}

@@ -10,6 +10,8 @@ mod test {
use rln::utils::str_to_fr;
use utils::{ZerokitMerkleProof, ZerokitMerkleTree};

type ConfigOf<T> = <T as ZerokitMerkleTree>::Config;

// Input generated with https://github.com/oskarth/zk-kit/commit/b6a872f7160c7c14e10a0ea40acab99cbb23c9a8
const WITNESS_JSON_15: &str = r#"
{
@@ -172,7 +174,12 @@ mod test {

// generate merkle tree
let default_leaf = Fr::from(0);
let mut tree = PoseidonTree::new(tree_height, default_leaf);
let mut tree = PoseidonTree::new(
tree_height,
default_leaf,
ConfigOf::<PoseidonTree>::default(),
)
.unwrap();
tree.set(leaf_index, id_commitment.into()).unwrap();

// We check correct computation of the root
@@ -382,7 +389,12 @@ mod test {

//// generate merkle tree
let default_leaf = Fr::from(0);
let mut tree = PoseidonTree::new(tree_height, default_leaf);
let mut tree = PoseidonTree::new(
tree_height,
default_leaf,
ConfigOf::<PoseidonTree>::default(),
)
.unwrap();
tree.set(leaf_index, id_commitment.into()).unwrap();

let merkle_proof = tree.proof(leaf_index).expect("proof should exist");

@@ -7,6 +7,7 @@ mod test {
use rln::protocol::{compute_tree_root, deserialize_identity_tuple};
use rln::public::{hash as public_hash, poseidon_hash as public_poseidon_hash, RLN};
use rln::utils::*;
use serde_json::json;
use std::io::Cursor;

#[test]
@@ -15,7 +16,8 @@ mod test {
let tree_height = TEST_TREE_HEIGHT;
let leaf_index = 3;

let input_buffer = Cursor::new(TEST_RESOURCES_FOLDER);
let input_buffer =
Cursor::new(json!({ "resources_folder": TEST_RESOURCES_FOLDER }).to_string());
let mut rln = RLN::new(tree_height, input_buffer).unwrap();

// generate identity
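The last hunk above changes the reader handed to RLN::new from a bare resources-folder string to a JSON configuration document. A minimal sketch of building that input, assuming the rln and serde_json crates shown in the imports above; the folder path and tree height are illustrative, and the corresponding circuit resources must exist on disk:

#[test]
fn rln_new_with_json_config() {
    use rln::public::RLN;
    use serde_json::json;
    use std::io::Cursor;

    // The configuration is a JSON object with a `resources_folder` key,
    // serialized to a string and wrapped in an in-memory reader.
    let config = json!({ "resources_folder": "tree_height_20/" });
    let input_buffer = Cursor::new(config.to_string());
    let _rln = RLN::new(20, input_buffer).unwrap();
}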
@@ -1,6 +1,6 @@
[package]
name = "semaphore-wrapper"
version = "0.1.0"
version = "0.3.0"
edition = "2021"
license = "MIT OR Apache-2.0"

@@ -21,7 +21,7 @@ color-eyre = "0.6.1"
once_cell = "1.8"
rand = "0.8.4"
semaphore = { git = "https://github.com/worldcoin/semaphore-rs", rev = "ee658c2"}
ethers-core = { git = "https://github.com/gakonst/ethers-rs", default-features = false }
ethers-core = { version = "2.0.8", default-features = false }
ruint = { version = "1.2.0", features = [ "serde", "num-bigint", "ark-ff" ] }
serde = "1.0"
thiserror = "1.0.0"
@@ -47,4 +47,4 @@ opt-level = 3
# Dependencies are optimized, even in a dev build. This improves dev performance
# while having negligible impact on incremental build times.
[profile.dev.package."*"]
opt-level = 3
opt-level = 3

@@ -1,20 +1,36 @@
[package]
name = "utils"
version = "0.1.0"
name = "zerokit_utils"
version = "0.3.2"
edition = "2021"
license = "MIT OR Apache-2.0"
description = "Various utilities for Zerokit"
documentation = "https://github.com/vacp2p/zerokit"
homepage = "https://vac.dev"
repository = "https://github.com/vacp2p/zerokit"

[lib]
bench = false

[dependencies]
ark-ff = { version = "=0.4.1", default-features = false, features = ["asm"] }
num-bigint = { version = "=0.4.3", default-features = false, features = ["rand"] }
color-eyre = "=0.6.2"
pmtree = { package = "pmtree", version = "=2.0.0", optional = true}
sled = "=0.34.7"
serde = "=1.0.163"

[dev-dependencies]
ark-bn254 = "=0.4.0"
num-traits = "0.2.11"
hex-literal = "0.3.4"
tiny-keccak = { version = "2.0.2", features = ["keccak"] }
num-traits = "=0.2.15"
hex-literal = "=0.3.4"
tiny-keccak = { version = "=2.0.2", features = ["keccak"] }
criterion = { version = "=0.4.0", features = ["html_reports"] }

[features]
default = ["parallel"]
parallel = ["ark-ff/parallel"]
pmtree-ft = ["pmtree"]

[[bench]]
name = "merkle_tree_benchmark"
harness = false

@@ -5,3 +5,7 @@ args = ["build", "--release"]
[tasks.test]
command = "cargo"
args = ["test", "--release"]

[tasks.bench]
command = "cargo"
args = ["bench"]
15
utils/README.md
Normal file
@@ -0,0 +1,15 @@
# Utils crate

## Building

1. `cargo build`

## Testing

1. `cargo test`

## Benchmarking

1. `cargo bench`

To view the results of the benchmark, open the `target/criterion/report/index.html` file generated after the benchmark run.
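The README covers the build, test, and bench commands; as a complement, here is a minimal usage sketch of the crate's Merkle tree API. The type names come from the benchmark and tests in this changeset; the Keccak-based hasher is purely illustrative (any Hasher implementation works), and tiny-keccak is assumed as a dependency of the example:

use tiny_keccak::{Hasher as _, Keccak};
use zerokit_utils::{FullMerkleConfig, FullMerkleTree, Hasher, ZerokitMerkleTree};

#[derive(Clone, Copy, Eq, PartialEq)]
struct Keccak256;

impl Hasher for Keccak256 {
    type Fr = [u8; 32];
    fn default_leaf() -> Self::Fr {
        [0; 32]
    }
    fn hash(inputs: &[Self::Fr]) -> Self::Fr {
        // Keccak-256 over the concatenation of the input nodes.
        let mut output = [0; 32];
        let mut hasher = Keccak::v256();
        for input in inputs {
            hasher.update(input);
        }
        hasher.finalize(&mut output);
        output
    }
}

fn main() -> color_eyre::Result<()> {
    // Build a depth-2 tree, insert a leaf, and verify its Merkle proof.
    let mut tree = FullMerkleTree::<Keccak256>::new(2, [0; 32], FullMerkleConfig::default())?;
    let leaf = [1u8; 32];
    tree.set(0, leaf)?;
    let proof = tree.proof(0)?;
    assert!(tree.verify(&leaf, &proof)?);
    Ok(())
}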
119
utils/benches/merkle_tree_benchmark.rs
Normal file
@@ -0,0 +1,119 @@
use criterion::{criterion_group, criterion_main, Criterion};
use hex_literal::hex;
use tiny_keccak::{Hasher as _, Keccak};
use zerokit_utils::{
FullMerkleConfig, FullMerkleTree, Hasher, OptimalMerkleConfig, OptimalMerkleTree,
ZerokitMerkleTree,
};

#[derive(Clone, Copy, Eq, PartialEq)]
struct Keccak256;

impl Hasher for Keccak256 {
type Fr = [u8; 32];

fn default_leaf() -> Self::Fr {
[0; 32]
}

fn hash(inputs: &[Self::Fr]) -> Self::Fr {
let mut output = [0; 32];
let mut hasher = Keccak::v256();
for element in inputs {
hasher.update(element);
}
hasher.finalize(&mut output);
output
}
}

pub fn optimal_merkle_tree_benchmark(c: &mut Criterion) {
let mut tree =
OptimalMerkleTree::<Keccak256>::new(2, [0; 32], OptimalMerkleConfig::default()).unwrap();

let leaves = [
hex!("0000000000000000000000000000000000000000000000000000000000000001"),
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
];

c.bench_function("OptimalMerkleTree::set", |b| {
b.iter(|| {
tree.set(0, leaves[0]).unwrap();
})
});

c.bench_function("OptimalMerkleTree::delete", |b| {
b.iter(|| {
tree.delete(0).unwrap();
})
});

c.bench_function("OptimalMerkleTree::override_range", |b| {
b.iter(|| {
tree.override_range(0, leaves, [0, 1, 2, 3]).unwrap();
})
});

c.bench_function("OptimalMerkleTree::compute_root", |b| {
b.iter(|| {
tree.compute_root().unwrap();
})
});

c.bench_function("OptimalMerkleTree::get", |b| {
b.iter(|| {
tree.get(0).unwrap();
})
});
}

pub fn full_merkle_tree_benchmark(c: &mut Criterion) {
let mut tree =
FullMerkleTree::<Keccak256>::new(2, [0; 32], FullMerkleConfig::default()).unwrap();

let leaves = [
hex!("0000000000000000000000000000000000000000000000000000000000000001"),
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
];

c.bench_function("FullMerkleTree::set", |b| {
b.iter(|| {
tree.set(0, leaves[0]).unwrap();
})
});

c.bench_function("FullMerkleTree::delete", |b| {
b.iter(|| {
tree.delete(0).unwrap();
})
});

c.bench_function("FullMerkleTree::override_range", |b| {
b.iter(|| {
tree.override_range(0, leaves, [0, 1, 2, 3]).unwrap();
})
});

c.bench_function("FullMerkleTree::compute_root", |b| {
b.iter(|| {
tree.compute_root().unwrap();
})
});

c.bench_function("FullMerkleTree::get", |b| {
b.iter(|| {
tree.get(0).unwrap();
})
});
}

criterion_group!(
benches,
optimal_merkle_tree_benchmark,
full_merkle_tree_benchmark
);
criterion_main!(benches);
@@ -3,3 +3,8 @@ pub use self::poseidon::*;

pub mod merkle_tree;
pub use self::merkle_tree::*;

#[cfg(feature = "pmtree-ft")]
pub mod pm_tree;
#[cfg(feature = "pmtree-ft")]
pub use self::pm_tree::*;

@@ -1,9 +1,10 @@
use crate::merkle_tree::{Hasher, ZerokitMerkleProof, ZerokitMerkleTree};
use crate::merkle_tree::{FrOf, Hasher, ZerokitMerkleProof, ZerokitMerkleTree};
use color_eyre::{Report, Result};
use std::{
cmp::max,
fmt::Debug,
iter::{once, repeat, successors},
str::FromStr,
};

////////////////////////////////////////////////////////////
@@ -28,6 +29,9 @@ pub struct FullMerkleTree<H: Hasher> {
// The next available (i.e., never used) tree index. Equivalently, the number of leaves added to the tree
// (deletions leave next_index unchanged)
next_index: usize,

// metadata that an application may use to store additional information
metadata: Vec<u8>,
}

/// Element of a Merkle proof
@@ -44,17 +48,33 @@ pub enum FullMerkleBranch<H: Hasher> {
#[derive(Clone, PartialEq, Eq)]
pub struct FullMerkleProof<H: Hasher>(pub Vec<FullMerkleBranch<H>>);

/// Implementations
#[derive(Default)]
pub struct FullMerkleConfig(());

impl<H: Hasher> ZerokitMerkleTree<H> for FullMerkleTree<H> {
impl FromStr for FullMerkleConfig {
type Err = Report;

fn from_str(_s: &str) -> Result<Self> {
Ok(FullMerkleConfig::default())
}
}

/// Implementations
impl<H: Hasher> ZerokitMerkleTree for FullMerkleTree<H>
where
H: Hasher,
{
type Proof = FullMerkleProof<H>;
fn default(depth: usize) -> Self {
FullMerkleTree::<H>::new(depth, H::default_leaf())
type Hasher = H;
type Config = FullMerkleConfig;

fn default(depth: usize) -> Result<Self> {
FullMerkleTree::<H>::new(depth, Self::Hasher::default_leaf(), Self::Config::default())
}

/// Creates a new `MerkleTree`
/// depth - the height of the tree made only of hash nodes. 2^depth is the maximum number of leaf hash nodes
fn new(depth: usize, initial_leaf: H::Fr) -> Self {
fn new(depth: usize, initial_leaf: FrOf<Self::Hasher>, _config: Self::Config) -> Result<Self> {
// Compute cache node values, leaf to root
let cached_nodes = successors(Some(initial_leaf), |prev| Some(H::hash(&[*prev, *prev])))
.take(depth + 1)
@@ -72,12 +92,17 @@ impl<H: Hasher> ZerokitMerkleTree<H> for FullMerkleTree<H> {

let next_index = 0;

Self {
Ok(Self {
depth,
cached_nodes,
nodes,
next_index,
}
metadata: Vec::new(),
})
}

fn close_db_connection(&mut self) -> Result<()> {
Ok(())
}

// Returns the depth of the tree
@@ -97,20 +122,32 @@ impl<H: Hasher> ZerokitMerkleTree<H> for FullMerkleTree<H> {

#[must_use]
// Returns the root of the tree
fn root(&self) -> H::Fr {
fn root(&self) -> FrOf<Self::Hasher> {
self.nodes[0]
}

// Sets a leaf at the specified tree index
fn set(&mut self, leaf: usize, hash: H::Fr) -> Result<()> {
fn set(&mut self, leaf: usize, hash: FrOf<Self::Hasher>) -> Result<()> {
self.set_range(leaf, once(hash))?;
self.next_index = max(self.next_index, leaf + 1);
Ok(())
}

// Get a leaf from the specified tree index
fn get(&self, leaf: usize) -> Result<FrOf<Self::Hasher>> {
if leaf >= self.capacity() {
return Err(Report::msg("leaf index out of bounds"));
}
Ok(self.nodes[self.capacity() + leaf - 1])
}

// Sets tree nodes, starting from start index
// Function proper of FullMerkleTree implementation
fn set_range<I: IntoIterator<Item = H::Fr>>(&mut self, start: usize, hashes: I) -> Result<()> {
fn set_range<I: IntoIterator<Item = FrOf<Self::Hasher>>>(
&mut self,
start: usize,
hashes: I,
) -> Result<()> {
let index = self.capacity() + start - 1;
let mut count = 0;
// first count number of hashes, and check that they fit in the tree
@@ -130,8 +167,41 @@ impl<H: Hasher> ZerokitMerkleTree<H> for FullMerkleTree<H> {
Ok(())
}

fn override_range<I, J>(&mut self, start: usize, leaves: I, to_remove_indices: J) -> Result<()>
where
I: IntoIterator<Item = FrOf<Self::Hasher>>,
J: IntoIterator<Item = usize>,
{
let index = self.capacity() + start - 1;
let mut count = 0;
let leaves = leaves.into_iter().collect::<Vec<_>>();
let to_remove_indices = to_remove_indices.into_iter().collect::<Vec<_>>();
// first count number of hashes, and check that they fit in the tree
// then insert into the tree
if leaves.len() + start - to_remove_indices.len() > self.capacity() {
return Err(Report::msg("provided hashes do not fit in the tree"));
}

// remove leaves
for i in &to_remove_indices {
self.delete(*i)?;
}

// insert new leaves
for hash in leaves {
self.nodes[index + count] = hash;
count += 1;
}

if count != 0 {
self.update_nodes(index, index + (count - 1))?;
self.next_index = max(self.next_index, start + count - to_remove_indices.len());
}
Ok(())
}

// Sets a leaf at the next available index
fn update_next(&mut self, leaf: H::Fr) -> Result<()> {
fn update_next(&mut self, leaf: FrOf<Self::Hasher>) -> Result<()> {
self.set(self.next_index, leaf)?;
Ok(())
}
@@ -165,9 +235,22 @@ impl<H: Hasher> ZerokitMerkleTree<H> for FullMerkleTree<H> {
}

// Verifies a Merkle proof with respect to the input leaf and the tree root
fn verify(&self, hash: &H::Fr, proof: &FullMerkleProof<H>) -> Result<bool> {
fn verify(&self, hash: &FrOf<Self::Hasher>, proof: &FullMerkleProof<H>) -> Result<bool> {
Ok(proof.compute_root_from(hash) == self.root())
}

fn compute_root(&mut self) -> Result<FrOf<Self::Hasher>> {
Ok(self.root())
}

fn set_metadata(&mut self, metadata: &[u8]) -> Result<()> {
self.metadata = metadata.to_vec();
Ok(())
}

fn metadata(&self) -> Result<Vec<u8>> {
Ok(self.metadata.to_vec())
}
}

impl<H: Hasher> FullMerkleTree<H>
@@ -212,8 +295,9 @@ where
}
}

impl<H: Hasher> ZerokitMerkleProof<H> for FullMerkleProof<H> {
impl<H: Hasher> ZerokitMerkleProof for FullMerkleProof<H> {
type Index = u8;
type Hasher = H;

#[must_use]
// Returns the length of a Merkle proof
@@ -232,7 +316,7 @@ impl<H: Hasher> ZerokitMerkleProof<H> for FullMerkleProof<H> {

#[must_use]
/// Returns the path elements forming a Merkle proof
fn get_path_elements(&self) -> Vec<H::Fr> {
fn get_path_elements(&self) -> Vec<FrOf<Self::Hasher>> {
self.0
.iter()
.map(|x| match x {
@@ -255,7 +339,7 @@ impl<H: Hasher> ZerokitMerkleProof<H> for FullMerkleProof<H> {

/// Computes the corresponding Merkle root by iteratively hashing a Merkle proof with a given input leaf
#[must_use]
fn compute_root_from(&self, hash: &H::Fr) -> H::Fr {
fn compute_root_from(&self, hash: &FrOf<Self::Hasher>) -> FrOf<Self::Hasher> {
self.0.iter().fold(*hash, |hash, branch| match branch {
FullMerkleBranch::Left(sibling) => H::hash(&[hash, *sibling]),
FullMerkleBranch::Right(sibling) => H::hash(&[*sibling, hash]),

@@ -13,6 +13,8 @@
//! * Disk based storage backend (using mmaped files should be easy)
//! * Implement serialization for tree and Merkle proof

use std::str::FromStr;

use color_eyre::Result;

/// In the Hasher trait we define the node type, the default leaf
@@ -28,39 +30,51 @@ pub trait Hasher {
fn hash(input: &[Self::Fr]) -> Self::Fr;
}

/// In the ZerokitMerkleTree trait we define the methods that are required to be implemented by a Merkle tree
/// Including, OptimalMerkleTree, FullMerkleTree, Pmtree
pub trait ZerokitMerkleTree<H: Hasher>
where
H: Hasher,
{
type Proof;
pub type FrOf<H> = <H as Hasher>::Fr;

fn default(depth: usize) -> Self;
fn new(depth: usize, default_leaf: H::Fr) -> Self;
/// In the ZerokitMerkleTree trait we define the methods that are required to be implemented by a Merkle tree
/// Including, OptimalMerkleTree, FullMerkleTree
pub trait ZerokitMerkleTree {
type Proof: ZerokitMerkleProof;
type Hasher: Hasher;
type Config: Default + FromStr;

fn default(depth: usize) -> Result<Self>
where
Self: Sized;
fn new(depth: usize, default_leaf: FrOf<Self::Hasher>, config: Self::Config) -> Result<Self>
where
Self: Sized;
fn depth(&self) -> usize;
fn capacity(&self) -> usize;
fn leaves_set(&mut self) -> usize;
fn root(&self) -> H::Fr;
fn set(&mut self, index: usize, leaf: H::Fr) -> Result<()>;
fn root(&self) -> FrOf<Self::Hasher>;
fn compute_root(&mut self) -> Result<FrOf<Self::Hasher>>;
fn set(&mut self, index: usize, leaf: FrOf<Self::Hasher>) -> Result<()>;
fn set_range<I>(&mut self, start: usize, leaves: I) -> Result<()>
where
I: IntoIterator<Item = H::Fr>;
fn update_next(&mut self, leaf: H::Fr) -> Result<()>;
I: IntoIterator<Item = FrOf<Self::Hasher>>;
fn get(&self, index: usize) -> Result<FrOf<Self::Hasher>>;
fn override_range<I, J>(&mut self, start: usize, leaves: I, to_remove_indices: J) -> Result<()>
where
I: IntoIterator<Item = FrOf<Self::Hasher>>,
J: IntoIterator<Item = usize>;
fn update_next(&mut self, leaf: FrOf<Self::Hasher>) -> Result<()>;
fn delete(&mut self, index: usize) -> Result<()>;
fn proof(&self, index: usize) -> Result<Self::Proof>;
fn verify(&self, leaf: &H::Fr, witness: &Self::Proof) -> Result<bool>;
fn verify(&self, leaf: &FrOf<Self::Hasher>, witness: &Self::Proof) -> Result<bool>;
fn set_metadata(&mut self, metadata: &[u8]) -> Result<()>;
fn metadata(&self) -> Result<Vec<u8>>;
fn close_db_connection(&mut self) -> Result<()>;
}

pub trait ZerokitMerkleProof<H: Hasher>
where
H: Hasher,
{
pub trait ZerokitMerkleProof {
type Index;
type Hasher: Hasher;

fn length(&self) -> usize;
fn leaf_index(&self) -> usize;
fn get_path_elements(&self) -> Vec<H::Fr>;
fn get_path_elements(&self) -> Vec<FrOf<Self::Hasher>>;
fn get_path_index(&self) -> Vec<Self::Index>;
fn compute_root_from(&self, leaf: &H::Fr) -> H::Fr;
fn compute_root_from(&self, leaf: &FrOf<Self::Hasher>) -> FrOf<Self::Hasher>;
}

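The refactor above replaces the hasher type parameter with associated types: a tree now names its Hasher, its Proof, and a Config that must implement Default and FromStr (so database-backed trees can parse settings from a string, while in-memory trees use an empty unit config). A hedged sketch of what generic client code looks like after this change; build_and_prove is an illustrative helper, not part of the crate:

use color_eyre::Result;
use zerokit_utils::{FrOf, Hasher, ZerokitMerkleTree};

// Works for FullMerkleTree, OptimalMerkleTree, or any other implementor, since the
// hasher, proof, and config are now associated types of ZerokitMerkleTree itself.
fn build_and_prove<T: ZerokitMerkleTree>(depth: usize, leaf: FrOf<T::Hasher>) -> Result<bool>
where
    FrOf<T::Hasher>: Clone,
{
    // Config: Default + FromStr, so an empty config can be constructed directly
    // (or parsed from a string for DB-backed trees).
    let mut tree = T::new(depth, T::Hasher::default_leaf(), T::Config::default())?;
    tree.set(0, leaf.clone())?;
    let proof = tree.proof(0)?;
    tree.verify(&leaf, &proof)
}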
@@ -1,6 +1,8 @@
use crate::merkle_tree::{Hasher, ZerokitMerkleProof, ZerokitMerkleTree};
use crate::FrOf;
use color_eyre::{Report, Result};
use std::collections::HashMap;
use std::str::FromStr;
use std::{cmp::max, fmt::Debug};

////////////////////////////////////////////////////////////
@@ -28,6 +30,9 @@ where
// The next available (i.e., never used) tree index. Equivalently, the number of leaves added to the tree
// (deletions leave next_index unchanged)
next_index: usize,

// metadata that an application may use to store additional information
metadata: Vec<u8>,
}

/// The Merkle proof
@@ -35,33 +40,51 @@ where
#[derive(Clone, PartialEq, Eq)]
pub struct OptimalMerkleProof<H: Hasher>(pub Vec<(H::Fr, u8)>);

#[derive(Default)]
pub struct OptimalMerkleConfig(());

impl FromStr for OptimalMerkleConfig {
type Err = Report;

fn from_str(_s: &str) -> Result<Self> {
Ok(OptimalMerkleConfig::default())
}
}

/// Implementations

impl<H: Hasher> ZerokitMerkleTree<H> for OptimalMerkleTree<H>
impl<H: Hasher> ZerokitMerkleTree for OptimalMerkleTree<H>
where
H: Hasher,
{
type Proof = OptimalMerkleProof<H>;
type Hasher = H;
type Config = OptimalMerkleConfig;

fn default(depth: usize) -> Self {
OptimalMerkleTree::<H>::new(depth, H::default_leaf())
fn default(depth: usize) -> Result<Self> {
OptimalMerkleTree::<H>::new(depth, H::default_leaf(), Self::Config::default())
}

/// Creates a new `MerkleTree`
/// depth - the height of the tree made only of hash nodes. 2^depth is the maximum number of leaf hash nodes
fn new(depth: usize, default_leaf: H::Fr) -> Self {
fn new(depth: usize, default_leaf: H::Fr, _config: Self::Config) -> Result<Self> {
let mut cached_nodes: Vec<H::Fr> = Vec::with_capacity(depth + 1);
cached_nodes.push(default_leaf);
for i in 0..depth {
cached_nodes.push(H::hash(&[cached_nodes[i]; 2]));
}
cached_nodes.reverse();
OptimalMerkleTree {
Ok(OptimalMerkleTree {
cached_nodes: cached_nodes.clone(),
depth,
nodes: HashMap::new(),
next_index: 0,
}
metadata: Vec::new(),
})
}

fn close_db_connection(&mut self) -> Result<()> {
Ok(())
}

// Returns the depth of the tree
@@ -96,6 +119,14 @@ where
Ok(())
}

// Get a leaf from the specified tree index
fn get(&self, index: usize) -> Result<H::Fr> {
if index >= self.capacity() {
return Err(Report::msg("index exceeds set size"));
}
Ok(self.get_node(self.depth, index))
}

// Sets multiple leaves from the specified tree index
fn set_range<I: IntoIterator<Item = H::Fr>>(&mut self, start: usize, leaves: I) -> Result<()> {
let leaves = leaves.into_iter().collect::<Vec<_>>();
@@ -111,6 +142,36 @@ where
Ok(())
}

fn override_range<I, J>(&mut self, start: usize, leaves: I, to_remove_indices: J) -> Result<()>
where
I: IntoIterator<Item = FrOf<Self::Hasher>>,
J: IntoIterator<Item = usize>,
{
let leaves = leaves.into_iter().collect::<Vec<_>>();
let to_remove_indices = to_remove_indices.into_iter().collect::<Vec<_>>();
// check if the range is valid
if leaves.len() + start - to_remove_indices.len() > self.capacity() {
return Err(Report::msg("provided range exceeds set size"));
}

// remove leaves
for i in &to_remove_indices {
self.delete(*i)?;
}

// add leaves
for (i, leaf) in leaves.iter().enumerate() {
self.nodes.insert((self.depth, start + i), *leaf);
self.recalculate_from(start + i)?;
}

self.next_index = max(
self.next_index,
start + leaves.len() - to_remove_indices.len(),
);
Ok(())
}

// Sets a leaf at the next available index
fn update_next(&mut self, leaf: H::Fr) -> Result<()> {
self.set(self.next_index, leaf)?;
@@ -158,6 +219,20 @@ where
let expected_root = witness.compute_root_from(leaf);
Ok(expected_root.eq(&self.root()))
}

fn compute_root(&mut self) -> Result<FrOf<Self::Hasher>> {
self.recalculate_from(0)?;
Ok(self.root())
}

fn set_metadata(&mut self, metadata: &[u8]) -> Result<()> {
self.metadata = metadata.to_vec();
Ok(())
}

fn metadata(&self) -> Result<Vec<u8>> {
Ok(self.metadata.to_vec())
}
}

impl<H: Hasher> OptimalMerkleTree<H>
@@ -205,11 +280,13 @@ where
}
}

impl<H: Hasher> ZerokitMerkleProof<H> for OptimalMerkleProof<H>
impl<H: Hasher> ZerokitMerkleProof for OptimalMerkleProof<H>
where
H: Hasher,
{
type Index = u8;
type Hasher = H;

#[must_use]
// Returns the length of a Merkle proof
fn length(&self) -> usize {
||||
4
utils/src/pm_tree/mod.rs
Normal file
4
utils/src/pm_tree/mod.rs
Normal file
@@ -0,0 +1,4 @@
|
pub mod sled_adapter;
pub use self::sled_adapter::*;
pub use pmtree;
pub use sled::*;
105
utils/src/pm_tree/sled_adapter.rs
Normal file
@@ -0,0 +1,105 @@
use pmtree::*;

use sled::Db as Sled;
use std::collections::HashMap;
use std::thread;
use std::time::Duration;

pub struct SledDB(Sled);

impl SledDB {
fn new_with_tries(config: <SledDB as Database>::Config, tries: u32) -> PmtreeResult<Self> {
// If we've tried more than 10 times, we give up and return an error.
if tries >= 10 {
return Err(PmtreeErrorKind::DatabaseError(
DatabaseErrorKind::CustomError(format!(
"Cannot create database: exceeded maximum retry attempts. {config:#?}"
)),
));
}
match config.open() {
Ok(db) => Ok(SledDB(db)),
Err(e) if e.to_string().contains("WouldBlock") => {
// try till the fd is freed
// sleep for 10^tries milliseconds, then recursively try again
thread::sleep(Duration::from_millis(10u64.pow(tries)));
Self::new_with_tries(config, tries + 1)
}
Err(e) => {
// On any other error, we return immediately.
Err(PmtreeErrorKind::DatabaseError(
DatabaseErrorKind::CustomError(format!(
"Cannot create database: {e} {config:#?}"
)),
))
}
}
}
}

impl Database for SledDB {
type Config = sled::Config;

fn new(config: Self::Config) -> PmtreeResult<Self> {
let db = Self::new_with_tries(config, 0)?;
Ok(db)
}

fn load(config: Self::Config) -> PmtreeResult<Self> {
let db = match config.open() {
Ok(db) => db,
Err(e) => {
return Err(PmtreeErrorKind::DatabaseError(
DatabaseErrorKind::CustomError(format!("Cannot load database: {e}")),
))
}
};

if !db.was_recovered() {
return Err(PmtreeErrorKind::DatabaseError(
DatabaseErrorKind::CustomError(format!(
"Database was not recovered: {}",
config.path.display()
)),
));
}

Ok(SledDB(db))
}

fn close(&mut self) -> PmtreeResult<()> {
let _ = self.0.flush().map_err(|_| {
PmtreeErrorKind::DatabaseError(DatabaseErrorKind::CustomError(
"Cannot flush database".to_string(),
))
})?;
Ok(())
}

fn get(&self, key: DBKey) -> PmtreeResult<Option<Value>> {
match self.0.get(key) {
Ok(value) => Ok(value.map(|val| val.to_vec())),
Err(_e) => Err(PmtreeErrorKind::TreeError(TreeErrorKind::InvalidKey)),
}
}

fn put(&mut self, key: DBKey, value: Value) -> PmtreeResult<()> {
match self.0.insert(key, value) {
Ok(_) => Ok(()),
Err(_e) => Err(PmtreeErrorKind::TreeError(TreeErrorKind::InvalidKey)),
}
}

fn put_batch(&mut self, subtree: HashMap<DBKey, Value>) -> PmtreeResult<()> {
let mut batch = sled::Batch::default();

for (key, value) in subtree {
batch.insert(&key, value);
}

self.0
.apply_batch(batch)
.map_err(|_| PmtreeErrorKind::TreeError(TreeErrorKind::InvalidKey))?;
Ok(())
}
}
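The adapter above opens the sled database with a retry loop: on a "WouldBlock" error (another process still holds the file lock) it sleeps 10^tries milliseconds and tries again, giving up after 10 attempts. A small usage sketch under the pmtree-ft feature; the temporary-database config is illustrative (it assumes a direct dependency on sled, which zerokit_utils also re-exports), and PoseidonHash stands for the pmtree hasher used by the rln tests earlier in this changeset:

use zerokit_utils::pmtree::{Database, PmtreeResult};
use zerokit_utils::SledDB;

fn open_temporary_adapter() -> PmtreeResult<SledDB> {
    // sled::Config is the adapter's Database::Config; `temporary(true)` creates a
    // throwaway database, while a real deployment would set `path(...)` instead.
    let config = sled::Config::new().temporary(true);
    SledDB::new(config)
}

// With a pmtree hasher in scope (e.g. the Poseidon-based one used in the rln tests),
// the adapter plugs directly into pmtree's MerkleTree, as in test_pmtree_sled above:
// let mut tree = pmtree::MerkleTree::<SledDB, PoseidonHash>::default(20).unwrap();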
@@ -36,20 +36,12 @@ impl PoseidonGrainLFSR {
assert!(is_field == 1);

// b0, b1 describes the field
if is_field == 1 {
state[1] = true;
} else {
state[1] = false;
}
state[1] = is_field == 1;

assert!(is_sbox_an_inverse == 0 || is_sbox_an_inverse == 1);

// b2, ..., b5 describes the S-BOX
if is_sbox_an_inverse == 1 {
state[5] = true;
} else {
state[5] = false;
}
state[5] = is_sbox_an_inverse == 1;

// b6, ..., b17 are the binary representation of n (prime_num_bits)
{

@@ -3,8 +3,11 @@
mod test {
use hex_literal::hex;
use tiny_keccak::{Hasher as _, Keccak};
use utils::{FullMerkleTree, Hasher, OptimalMerkleTree, ZerokitMerkleProof, ZerokitMerkleTree};

use zerokit_utils::{
FullMerkleConfig, FullMerkleTree, Hasher, OptimalMerkleConfig, OptimalMerkleTree,
ZerokitMerkleProof, ZerokitMerkleTree,
};
#[derive(Clone, Copy, Eq, PartialEq)]
struct Keccak256;

impl Hasher for Keccak256 {
@@ -44,14 +47,17 @@ mod test {
hex!("a9bb8c3f1f12e9aa903a50c47f314b57610a3ab32f2d463293f58836def38d36"),
];

let mut tree = FullMerkleTree::<Keccak256>::new(2, [0; 32]);
let mut tree =
FullMerkleTree::<Keccak256>::new(2, [0; 32], FullMerkleConfig::default()).unwrap();
assert_eq!(tree.root(), default_tree_root);
for i in 0..leaves.len() {
tree.set(i, leaves[i]).unwrap();
assert_eq!(tree.root(), roots[i]);
}

let mut tree = OptimalMerkleTree::<Keccak256>::new(2, [0; 32]);
let mut tree =
OptimalMerkleTree::<Keccak256>::new(2, [0; 32], OptimalMerkleConfig::default())
.unwrap();
assert_eq!(tree.root(), default_tree_root);
for i in 0..leaves.len() {
tree.set(i, leaves[i]).unwrap();
@@ -69,7 +75,8 @@ mod test {
];

// We test the FullMerkleTree implementation
let mut tree = FullMerkleTree::<Keccak256>::new(2, [0; 32]);
let mut tree =
FullMerkleTree::<Keccak256>::new(2, [0; 32], FullMerkleConfig::default()).unwrap();
for i in 0..leaves.len() {
// We set the leaves
tree.set(i, leaves[i]).unwrap();
@@ -93,7 +100,9 @@ mod test {
}

// We test the OptimalMerkleTree implementation
let mut tree = OptimalMerkleTree::<Keccak256>::new(2, [0; 32]);
let mut tree =
OptimalMerkleTree::<Keccak256>::new(2, [0; 32], OptimalMerkleConfig::default())
.unwrap();
for i in 0..leaves.len() {
// We set the leaves
tree.set(i, leaves[i]).unwrap();
@@ -116,4 +125,41 @@ mod test {
.unwrap());
}
}

#[test]
fn test_override_range() {
let initial_leaves = [
hex!("0000000000000000000000000000000000000000000000000000000000000001"),
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
];

let mut tree =
OptimalMerkleTree::<Keccak256>::new(2, [0; 32], OptimalMerkleConfig::default())
.unwrap();

// We set the leaves
tree.set_range(0, initial_leaves.iter().cloned()).unwrap();

let new_leaves = [
hex!("0000000000000000000000000000000000000000000000000000000000000005"),
hex!("0000000000000000000000000000000000000000000000000000000000000006"),
];

let to_delete_indices: [usize; 2] = [0, 1];

// We override the leaves
tree.override_range(
0, // start index of the range to override
new_leaves.iter().cloned(),
to_delete_indices.iter().cloned(),
)
.unwrap();

// ensure that the leaves are set correctly
for i in 0..new_leaves.len() {
assert_eq!(tree.get_leaf(i), new_leaves[i]);
}
}
}

@@ -3,7 +3,7 @@ mod test {
use ark_bn254::Fr;
use num_bigint::BigUint;
use num_traits::Num;
use utils::poseidon_hash::Poseidon;
use zerokit_utils::poseidon_hash::Poseidon;

const ROUND_PARAMS: [(usize, usize, usize, usize); 8] = [
(2, 8, 56, 0),