Compare commits


25 Commits

Author SHA1 Message Date
vinhtc27
5826c683fd feat(rln-wasm): add browser benchmark with simple HTML file and default data (will remove later) 2025-04-01 17:03:30 +07:00
vinhtc27
42ca07874f chore(rln-wasm): move install wasm-pack into make installdeps command, update CI and rln-wasm documentation 2025-03-31 15:16:16 +07:00
vinhtc27
33cb652837 chore(rln-wasm): add CI task name 2025-03-31 15:00:48 +07:00
vinhtc27
0564fa805e feat(rln-wasm): add ark-zkey feature to rln-wasm, update CI workflow and related documentation 2025-03-31 14:54:32 +07:00
vinhtc27
d9da659c30 chore(rln-wasm): remove redundant clone calls in benchmark file 2025-03-29 17:18:50 +07:00
vinhtc27
da11d40772 feat(rln-wasm): add simple benchmark for rln-wasm module 2025-03-29 15:37:28 +07:00
vinhtc27
778ef47875 chore(rln): refactor rln module by moving circuit-related files to src/circuit folder 2025-03-27 17:23:24 +07:00
vinhtc27
7018c2704f feat(rln-wasm): use curl to install wasm-pack instead of jetli/wasm-pack-action 2025-03-27 14:40:27 +07:00
vinhtc27
64ceed89b0 feat(rln): remove ark-circom and wasmer by cloning CircomReduction struct and read_zkey function 2025-03-27 14:36:37 +07:00
vinhtc27
3093cd9484 feat(rln-wasm): use wasm-bindgen v0.2.100 2025-03-27 03:15:11 +07:00
vinhtc27
ee6fc7f933 feat(rln-wasm): use wasm-bindgen v0.2.93 to fix wasm-opt error on CI pipeline 2025-03-27 03:09:06 +07:00
vinhtc27
8d2ec262d2 feat(rln-wasm): remove unuse dependencies 2025-03-27 02:49:39 +07:00
vinhtc27
dcc0e4c62e fix(rln): update RLN example to use prepare_prove_input 2025-03-27 01:57:11 +07:00
vinhtc27
3e1e3df7d1 feat(rln-wasm): use brew and apt-get to install wabt instead of cloning repo 2025-03-20 11:13:15 +07:00
vinhtc27
88bc49163d fix(rln): remove previously added bin folder for wasm-strip 2025-03-20 10:38:16 +07:00
vinhtc27
e914552bcc fix(rln): improve installdeps command for macOS CI pipeline 2025-03-20 03:10:14 +07:00
vinhtc27
ff5b9064c3 feat(rln-wasm): vendor wasmerv4.4.0 for ark-circom v0.5.0 to fix crate_type error 2025-03-20 01:49:31 +07:00
vinhtc27
093894f075 feat(rln-wasm): try to use wasmer@v0.4.4 in rln-wasm to fix the error: crate_type within an #![cfg_attr] attribute is forbidden in CI again 2025-03-19 18:58:16 +07:00
vinhtc27
51a7a56a48 feat(rln-wasm): replace unsupported --features flag with RUSTFLAGS env var in wasm-pack build 2025-03-19 18:48:35 +07:00
vinhtc27
062f859827 feat(rln-wasm): resolve WASM build failures (os error 2) caused by Node.js version mismatch 2025-03-19 18:27:52 +07:00
vinhtc27
03ac7b074d feat(rln-wasm): missing Cargo.lock change 2025-03-19 17:22:31 +07:00
vinhtc27
c8a25351a6 feat(rln-wasm): try to use wasmer@v0.4.4 in rln-wasm to fix the error: crate_type within an #![cfg_attr] attribute is forbidden in CI 2025-03-19 17:21:48 +07:00
vinhtc27
27ef21e5e8 feat(rln-wasm): use jetli/wasm-pack-action@v0.4.0 to install latest wasm-pack for CI 2025-03-19 14:01:57 +07:00
vinhtc27
9817a0cacc feat(rln-wasm): bring back wasm support for zerokit v0.7.0
- Use ark-circom wasm feature for WebAssembly target
- Integrate rln.wasm file for witness calculation
- Fix dependencies issues related to compatibility with ark-circom 0.5.0
- Bump dependencies versions across all modules
- Update documentation
2025-03-19 13:40:56 +07:00
vinhtc27
83fd6a7c0e feat(rln-wasm): porting the rln-wasm feature from v0.6.1 to v0.7.0 2025-03-17 16:29:56 +07:00
42 changed files with 2373 additions and 3793 deletions

View File

@@ -5,6 +5,7 @@ on:
paths-ignore:
- "**.md"
- "!.github/workflows/*.yml"
- "!rln-wasm/**"
- "!rln/src/**"
- "!rln/resources/**"
- "!utils/src/**"
@@ -12,6 +13,7 @@ on:
paths-ignore:
- "**.md"
- "!.github/workflows/*.yml"
- "!rln-wasm/**"
- "!rln/src/**"
- "!rln/resources/**"
- "!utils/src/**"
@@ -76,6 +78,43 @@ jobs:
fi
working-directory: ${{ matrix.crate }}
rln-wasm:
strategy:
matrix:
platform: [ ubuntu-latest, macos-latest ]
feature: [ "default", "arkzkey" ]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: test - rln-wasm - ${{ matrix.platform }} - ${{ matrix.feature }}
steps:
- uses: actions/checkout@v3
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- uses: Swatinem/rust-cache@v2
- name: Install Dependencies
run: make installdeps
- name: cargo-make build
run: |
if [ ${{ matrix.feature }} == default ]; then
cargo make build
else
cargo make build_${{ matrix.feature }}
fi
working-directory: rln-wasm
- name: cargo-make test
run: |
if [ ${{ matrix.feature }} == default ]; then
cargo make test --release
else
cargo make test_${{ matrix.feature }} --release
fi
working-directory: rln-wasm
lint:
strategy:
matrix:
@@ -108,6 +147,9 @@ jobs:
run: |
cargo clippy --release -- -D warnings
working-directory: ${{ matrix.crate }}
# We skip clippy on rln-wasm, since wasm target is managed by cargo make
# Currently not treating warnings as errors, too noisy
# -- -D warnings
benchmark-utils:
# run only in pull requests

View File

@@ -84,9 +84,39 @@ jobs:
path: ${{ matrix.target }}-${{ matrix.feature }}-rln.tar.gz
retention-days: 2
browser-rln-wasm:
name: Browser build (RLN WASM)
runs-on: ubuntu-latest
steps:
- name: Checkout sources
uses: actions/checkout@v3
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- uses: Swatinem/rust-cache@v2
- name: Install dependencies
run: make installdeps
- name: cross make build
run: |
cross make build
mkdir release
cp pkg/** release/
tar -czvf browser-rln-wasm.tar.gz release/
working-directory: rln-wasm
- name: Upload archive artifact
uses: actions/upload-artifact@v4
with:
name: browser-rln-wasm-archive
path: rln-wasm/browser-rln-wasm.tar.gz
retention-days: 2
prepare-prerelease:
name: Prepare pre-release
needs: [ linux, macos ]
needs: [ linux, macos, browser-rln-wasm ]
runs-on: ubuntu-latest
steps:
- name: Checkout code

View File

@@ -1,3 +1,5 @@
# CHANGE LOG
## 2023-02-28 v0.2
This release contains:
@@ -10,7 +12,6 @@ This release contains:
- Dual License under Apache 2.0 and MIT
- RLN compiles as a static library, which can be consumed through a C FFI
## 2022-09-19 v0.1
Initial beta release.

Cargo.lock (generated file, 3816 lines)

File diff suppressed because it is too large.

View File

@@ -1,6 +1,6 @@
[workspace]
members = ["rln", "rln-cli", "utils"]
default-members = ["rln", "rln-cli", "utils"]
members = ["rln", "rln-cli", "rln-wasm", "utils"]
default-members = ["rln", "rln-cli", "rln-wasm", "utils"]
resolver = "2"
# Compilation profile for any non-workspace member.

View File

@@ -15,18 +15,16 @@ installdeps: .pre-build
ifeq ($(shell uname),Darwin)
# commented due to https://github.com/orgs/Homebrew/discussions/4612
# @brew update
@brew install cmake ninja
@git -C "wabt" pull || git clone --recursive https://github.com/WebAssembly/wabt.git "wabt"
@cd wabt && mkdir -p build && make
@brew install cmake ninja wabt
else ifeq ($(shell uname),Linux)
@sudo apt-get update
@sudo apt-get install -y cmake ninja-build
@git -C "wabt" pull || git clone --recursive https://github.com/WebAssembly/wabt.git "wabt"
@cd wabt && mkdir -p build && cd build && cmake .. -GNinja && ninja && sudo ninja install
@sudo apt-get install -y cmake ninja-build wabt
endif
# nvm already checks if it's installed, and no-ops if it is
@curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.7/install.sh | bash
@. ${HOME}/.nvm/nvm.sh && nvm install 18.20.2 && nvm use 18.20.2;
@curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.7/install.sh | bash || true
@. "$$HOME/.nvm/nvm.sh" && nvm install 23.10.0 && nvm use 23.10.0 || true
# Install wasm-pack
@curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh || true
build: .pre-build
@cargo make build
@@ -35,4 +33,4 @@ test: .pre-build
@cargo make test
clean:
@cargo clean
@cargo clean
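
After `make installdeps` finishes, the toolchain can be sanity-checked before building; a minimal sketch, assuming the binaries ended up on PATH (version numbers in comments are illustrative only):

```bash
# Verify the tools installed by `make installdeps` are reachable.
wasm-pack --version    # Rust -> WebAssembly packager
wasm-strip --version   # from wabt, used to shrink the .wasm output
node --version         # expected v23.10.0 via nvm, per the Makefile above
cargo make --version   # cargo-make; install with `cargo install cargo-make` if missing
```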

View File

@@ -13,14 +13,15 @@ path = "src/examples/stateless.rs"
required-features = ["stateless"]
[dependencies]
rln = { path = "../rln", default-features = true, features = ["pmtree-ft"] }
rln = { path = "../rln", default-features = false }
zerokit_utils = { path = "../utils" }
clap = { version = "4.5.29", features = ["cargo", "derive", "env"] }
clap_derive = { version = "4.5.28" }
color-eyre = "0.6.2"
serde_json = "1.0.138"
serde = { version = "1.0.217", features = ["derive"] }
clap = { version = "4.5.34", features = ["cargo", "derive", "env"] }
clap_derive = { version = "4.5.32" }
color-eyre = "0.6.3"
serde_json = "1.0"
serde = { version = "1.0", features = ["derive"] }
[features]
default = []
arkzkey = ["rln/arkzkey"]
stateless = ["rln/stateless"]

View File

@@ -5,4 +5,3 @@ args = ["build", "--release"]
[tasks.test]
command = "cargo"
args = ["test", "--release"]

rln-wasm/.gitignore (vendored, new file, 6 lines)
View File

@@ -0,0 +1,6 @@
/target
**/*.rs.bk
Cargo.lock
bin/
pkg/
wasm-pack.log

rln-wasm/Cargo.toml (new file, 39 lines)
View File

@@ -0,0 +1,39 @@
[package]
name = "rln-wasm"
version = "0.1.0"
edition = "2021"
license = "MIT or Apache2"
[lib]
crate-type = ["cdylib", "rlib"]
required-features = ["stateless"]
[dependencies]
rln = { path = "../rln", default-features = false }
num-bigint = { version = "0.4.6", default-features = false, features = [
"rand",
"serde",
] }
wasm-bindgen = "0.2.100"
serde-wasm-bindgen = "0.6.5"
js-sys = "0.3.77"
serde_json = "1.0"
# The `console_error_panic_hook` crate provides better debugging of panics by
# logging them with `console.error`. This is great for development, but requires
# all the `std::fmt` and `std::panicking` infrastructure, so isn't great for
# code size when deploying.
console_error_panic_hook = { version = "0.1.7", optional = true }
zerokit_utils = { path = "../utils" }
[target.'cfg(target_arch = "wasm32")'.dependencies]
getrandom = { version = "0.2.15", features = ["js"] }
[dev-dependencies]
wasm-bindgen-test = "0.3.50"
wasm-bindgen-futures = "0.4.50"
[features]
default = ["console_error_panic_hook"]
stateless = ["rln/stateless"]
arkzkey = ["rln/arkzkey"]

rln-wasm/Makefile.toml (new file, 61 lines)
View File

@@ -0,0 +1,61 @@
[tasks.build]
clear = true
dependencies = ["pack_build", "pack_rename", "post_build"]
[tasks.build_arkzkey]
clear = true
dependencies = ["pack_build_arkzkey", "pack_rename", "post_build"]
[tasks.pack_build]
command = "wasm-pack"
args = ["build", "--release", "--target", "web", "--scope", "waku"]
env = { "RUSTFLAGS" = "--cfg feature=\"stateless\"" }
[tasks.pack_build_arkzkey]
command = "wasm-pack"
args = ["build", "--release", "--target", "web", "--scope", "waku"]
env = { "RUSTFLAGS" = "--cfg feature=\"stateless\" --cfg feature=\"arkzkey\"" }
[tasks.pack_rename]
script = "sed -i.bak 's/rln-wasm/zerokit-rln-wasm/g' pkg/package.json && rm pkg/package.json.bak"
[tasks.post_build]
command = "wasm-strip"
args = ["./pkg/rln_wasm_bg.wasm"]
[tasks.test]
command = "wasm-pack"
args = [
"test",
"--release",
"--node",
"--target",
"wasm32-unknown-unknown",
"--",
"--nocapture",
]
env = { "RUSTFLAGS" = "--cfg feature=\"stateless\"" }
dependencies = ["build"]
[tasks.test_arkzkey]
command = "wasm-pack"
args = [
"test",
"--release",
"--node",
"--target",
"wasm32-unknown-unknown",
"--",
"--nocapture",
]
env = { "RUSTFLAGS" = "--cfg feature=\"stateless\" --cfg feature=\"arkzkey\"" }
dependencies = ["build_arkzkey"]
[tasks.login]
command = "wasm-pack"
args = ["login"]
[tasks.publish]
command = "wasm-pack"
args = ["publish", "--access", "public", "--target", "web"]
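
Note that the tasks above inject crate features through the `RUSTFLAGS` environment variable (`--cfg feature="..."`) rather than a `--features` flag, which commit 51a7a56a48 found unsupported by `wasm-pack build`. A rough hand-run equivalent of `cargo make build_arkzkey` follows; a sketch only, since the cargo-make tasks remain the canonical path, and the `pack_rename` step that rewrites `pkg/package.json` is skipped here:

```bash
# Manual equivalent of `cargo make build_arkzkey` (sketch).
RUSTFLAGS='--cfg feature="stateless" --cfg feature="arkzkey"' \
  wasm-pack build --release --target web --scope waku
# Size-reduction step normally run by the `post_build` task:
wasm-strip ./pkg/rln_wasm_bg.wasm
```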

rln-wasm/README.md (new file, 75 lines)
View File

@@ -0,0 +1,75 @@
# RLN for WASM
This library is used in [waku-org/js-rln](https://github.com/waku-org/js-rln/)
> **Note**: This project requires `wasm-pack` for compiling Rust to WebAssembly, `cargo-make` for running the build commands, and `wasm-strip` to reduce the size of the generated WebAssembly binaries. Make sure they are installed before proceeding.
Install `wasm-pack`:
```bash
curl https://rustwasm.github.io/wasm-pack/installer/init.sh -sSf | sh
```
Install `cargo-make`:
```bash
cargo install cargo-make
```
Install `wasm-strip` via `wabt`:
```bash
brew install wabt # macOS
sudo apt-get install wabt # Ubuntu
```
Or install everything needed for `zerokit`, which includes `wasm-pack`, `cargo-make`, and `wabt`:
```bash
make installdeps
```
## Building the library
First, navigate to the rln-wasm directory:
```bash
cd rln-wasm
```
Compile zerokit for `wasm32-unknown-unknown`:
```bash
cargo make build
```
Or compile with the **arkzkey** feature enabled:
```bash
cargo make build_arkzkey
```
Strip the generated WebAssembly binary to reduce its size (this step already runs as part of `cargo make build`):
```bash
cargo make post_build
```
## Running tests and benchmarks
```bash
cargo make test
```
Or test with the **arkzkey** feature enabled:
```bash
cargo make test_arkzkey
```
## Publishing an npm package
```bash
cargo make login
cargo make publish
```
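
Once built, the `pkg/` output is a web-target ES module that can be consumed directly from a browser page. A minimal usage sketch, mirroring the import pattern in `benchs/index.html`; the zkey URL and file name here are placeholders, not part of the package:

```js
// Browser usage sketch (ES module). "rln_final.zkey" is a placeholder URL.
import init, * as RLN from "./pkg/rln_wasm.js";

const resp = await fetch("rln_final.zkey");
const zkeyBytes = new Uint8Array(await resp.arrayBuffer());

await init();                                  // load and instantiate the wasm module
const rln = RLN.newRLN(zkeyBytes);             // create an RLN instance from the zkey
const keys = RLN.generateMembershipKey(rln);   // 64 bytes: identity key | id commitment
```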

File diff suppressed because one or more lines are too long

rln-wasm/benchs/index.html (new file, 375 lines)
View File

@@ -0,0 +1,375 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>RLN WASM Benchmark</title>
<style>
body {
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
max-width: 700px;
margin: 0 auto;
padding: 20px;
color: #333;
}
h1 {
text-align: center;
margin-bottom: 30px;
}
.panel {
background: #f8f9fa;
border-radius: 8px;
padding: 20px;
margin-bottom: 20px;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.05);
}
.file-input {
margin-bottom: 15px;
}
label {
display: block;
margin-bottom: 5px;
font-weight: 500;
}
button {
background: #4361ee;
color: white;
border: none;
padding: 10px 20px;
border-radius: 4px;
font-size: 16px;
cursor: pointer;
margin-top: 10px;
width: 100%;
}
button:hover {
background: #3a56d4;
}
button:disabled {
background: #cccccc;
cursor: not-allowed;
}
.results-table {
width: 100%;
border-collapse: collapse;
margin-top: 10px;
}
.results-table th,
.results-table td {
padding: 12px;
text-align: left;
border-bottom: 1px solid #ddd;
}
.results-table th {
font-weight: 600;
background-color: #f1f3f5;
}
.operation {
font-weight: 500;
}
.time {
font-family: monospace;
text-align: right;
}
.status {
padding: 10px;
margin-top: 15px;
border-radius: 4px;
text-align: center;
}
.success {
background-color: #d4edda;
color: #155724;
}
.error {
background-color: #f8d7da;
color: #721c24;
}
.running {
background-color: #cce5ff;
color: #004085;
}
/* Download button style */
.download-btn {
background: #28a745;
margin-top: 15px;
}
.download-btn:hover {
background: #218838;
}
/* Summary section */
.summary {
margin-top: 15px;
padding-top: 15px;
border-top: 1px solid #ddd;
}
.summary h3 {
margin-top: 0;
}
</style>
</head>
<body>
<h1>RLN WASM Benchmark</h1>
<div class="panel">
<div class="file-input">
<label for="zkeyFile">zKey File:</label>
<input type="file" id="zkeyFile">
</div>
<div class="file-input">
<label for="rootFile">Root File:</label>
<input type="file" id="rootFile">
</div>
<div class="file-input">
<label for="witnessFile">Witness File:</label>
<input type="file" id="witnessFile">
</div>
<div class="file-input">
<label for="messageFile">Message File:</label>
<input type="file" id="messageFile">
</div>
<div class="file-input">
<label for="proofFile">Proof File:</label>
<input type="file" id="proofFile">
</div>
<button id="runBenchmark">Run Benchmark</button>
<div id="status" class="status"></div>
</div>
<div class="panel">
<h2>Results</h2>
<table class="results-table">
<thead>
<tr>
<th>Operation</th>
<th>Time (ms)</th>
</tr>
</thead>
<tbody id="results">
<!-- Results will be populated here -->
</tbody>
</table>
<div id="summarySection" class="summary" style="display: none;">
<h3>Summary</h3>
<div id="summaryContent"></div>
<button id="downloadResults" class="download-btn">Download Results</button>
</div>
</div>
<script type="module">
import init, * as RLN from '../pkg/rln_wasm.js';
const runBtn = document.getElementById('runBenchmark');
const results = document.getElementById('results');
const status = document.getElementById('status');
const summarySection = document.getElementById('summarySection');
const summaryContent = document.getElementById('summaryContent');
const downloadBtn = document.getElementById('downloadResults');
// Track benchmark operations
const benchmarks = [];
// Measure operation time
async function benchmark(name, fn) {
updateStatus(`Running: ${name}...`, 'running');
const start = performance.now();
try {
const result = await fn();
const duration = performance.now() - start;
// Record result
benchmarks.push({ name, duration, success: true });
updateResults();
return { result, duration };
} catch (error) {
const duration = performance.now() - start;
benchmarks.push({ name: `${name} (FAILED)`, duration, success: false, error: error.message });
updateResults();
throw error;
}
}
// Update results table
function updateResults() {
results.innerHTML = '';
benchmarks.forEach(b => {
const row = document.createElement('tr');
const nameCell = document.createElement('td');
nameCell.className = 'operation';
nameCell.textContent = b.name;
const timeCell = document.createElement('td');
timeCell.className = 'time';
timeCell.textContent = b.duration.toFixed(2);
row.appendChild(nameCell);
row.appendChild(timeCell);
if (!b.success) {
row.style.color = '#dc3545';
}
results.appendChild(row);
});
}
// Update status message
function updateStatus(message, type = '') {
status.textContent = message;
status.className = `status ${type}`;
}
// Show benchmark summary
function showSummary() {
if (benchmarks.length === 0) return;
const successfulOps = benchmarks.filter(b => b.success).length;
let summaryHTML = `
<p><strong>Operations:</strong> ${successfulOps}/${benchmarks.length} successful</p>
`;
summaryContent.innerHTML = summaryHTML;
summarySection.style.display = 'block';
}
// Download results as JSON
downloadBtn.addEventListener('click', () => {
const dataStr = JSON.stringify({
timestamp: new Date().toISOString(),
operations: benchmarks,
}, null, 2);
const blob = new Blob([dataStr], { type: 'application/json' });
const url = URL.createObjectURL(blob);
const a = document.createElement('a');
a.href = url;
a.download = `rln-benchmark-${new Date().toISOString().slice(0, 19)}.json`;
document.body.appendChild(a);
a.click();
document.body.removeChild(a);
URL.revokeObjectURL(url);
});
// Read file as Uint8Array
async function readFile(file) {
const buffer = await file.arrayBuffer();
return new Uint8Array(buffer);
}
// Parse witness JSON
async function parseWitness(file) {
const text = await file.text();
try {
const data = JSON.parse(text);
if (!Array.isArray(data)) {
throw new Error("Witness JSON must be an array");
}
return data.map(value => BigInt(value));
} catch (e) {
throw new Error(`Failed to parse witness JSON: ${e.message}`);
}
}
// Main benchmark runner
runBtn.addEventListener('click', async () => {
const files = {
zkey: document.getElementById('zkeyFile').files[0],
root: document.getElementById('rootFile').files[0],
witness: document.getElementById('witnessFile').files[0],
message: document.getElementById('messageFile').files[0],
proof: document.getElementById('proofFile').files[0]
};
// Validation
if (!files.zkey || !files.root || !files.proof) {
updateStatus('Please select zKey, Root, and Proof files', 'error');
return;
}
const canGenerate = files.witness && files.message;
try {
// Clear previous results
benchmarks.length = 0;
updateResults();
summarySection.style.display = 'none';
runBtn.disabled = true;
// Initialize
const { result: _ } = await benchmark('Initialize WASM Module', async () => {
return await init();
});
// Load files
const zkeyData = await readFile(files.zkey);
const rootData = await readFile(files.root);
// Create RLN instance
const { result: instance } = await benchmark('Create RLN Instance', async () => {
return RLN.newRLN(zkeyData);
});
// Handle proof generation (if witness and message files provided)
if (canGenerate) {
const witnessData = await parseWitness(files.witness);
const messageData = await readFile(files.message);
await benchmark('Generate RLN Proof', async () => {
return RLN.generateRLNProofWithWitness(instance, witnessData, messageData);
});
}
// Verify uploaded proof (required)
const proofData = await readFile(files.proof);
await benchmark('Verify Proof', async () => {
return RLN.verifyWithRoots(instance, proofData, rootData);
});
updateStatus('Benchmark completed successfully!', 'success');
showSummary();
} catch (error) {
console.error(error);
updateStatus(`Error: ${error.message}`, 'error');
showSummary();
} finally {
runBtn.disabled = false;
}
});
</script>
</body>
</html>
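
Since the page above imports `../pkg/rln_wasm.js` as an ES module, browsers will not load it from a `file://` URL; it has to be served over HTTP. A minimal sketch using any static file server (Python's built-in one shown here as an example):

```bash
# Serve the crate root so both benchs/ and pkg/ are reachable,
# then open http://localhost:8080/benchs/index.html in a browser.
cd rln-wasm && python3 -m http.server 8080
```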

Binary file not shown.

rln-wasm/benchs/root (binary, new file): binary file not shown.

Binary file not shown.

View File

@@ -0,0 +1,324 @@
module.exports = async function builder(code, options) {
options = options || {};
let wasmModule;
try {
wasmModule = await WebAssembly.compile(code);
} catch (err) {
console.log(err);
console.log(
"\nTry to run circom --c in order to generate c++ code instead\n"
);
throw new Error(err);
}
let wc;
let errStr = "";
let msgStr = "";
const instance = await WebAssembly.instantiate(wasmModule, {
runtime: {
exceptionHandler: function (code) {
let err;
if (code == 1) {
err = "Signal not found.\n";
} else if (code == 2) {
err = "Too many signals set.\n";
} else if (code == 3) {
err = "Signal already set.\n";
} else if (code == 4) {
err = "Assert Failed.\n";
} else if (code == 5) {
err = "Not enough memory.\n";
} else if (code == 6) {
err = "Input signal array access exceeds the size.\n";
} else {
err = "Unknown error.\n";
}
throw new Error(err + errStr);
},
printErrorMessage: function () {
errStr += getMessage() + "\n";
// console.error(getMessage());
},
writeBufferMessage: function () {
const msg = getMessage();
// Any calls to `log()` will always end with a `\n`, so that's when we print and reset
if (msg === "\n") {
console.log(msgStr);
msgStr = "";
} else {
// If we've buffered other content, put a space in between the items
if (msgStr !== "") {
msgStr += " ";
}
// Then append the message to the message we are creating
msgStr += msg;
}
},
showSharedRWMemory: function () {
printSharedRWMemory();
},
},
});
const sanityCheck = options;
// options &&
// (
// options.sanityCheck ||
// options.logGetSignal ||
// options.logSetSignal ||
// options.logStartComponent ||
// options.logFinishComponent
// );
wc = new WitnessCalculator(instance, sanityCheck);
return wc;
function getMessage() {
var message = "";
var c = instance.exports.getMessageChar();
while (c != 0) {
message += String.fromCharCode(c);
c = instance.exports.getMessageChar();
}
return message;
}
function printSharedRWMemory() {
const shared_rw_memory_size = instance.exports.getFieldNumLen32();
const arr = new Uint32Array(shared_rw_memory_size);
for (let j = 0; j < shared_rw_memory_size; j++) {
arr[shared_rw_memory_size - 1 - j] =
instance.exports.readSharedRWMemory(j);
}
// If we've buffered other content, put a space in between the items
if (msgStr !== "") {
msgStr += " ";
}
// Then append the value to the message we are creating
msgStr += fromArray32(arr).toString();
}
};
class WitnessCalculator {
constructor(instance, sanityCheck) {
this.instance = instance;
this.version = this.instance.exports.getVersion();
this.n32 = this.instance.exports.getFieldNumLen32();
this.instance.exports.getRawPrime();
const arr = new Uint32Array(this.n32);
for (let i = 0; i < this.n32; i++) {
arr[this.n32 - 1 - i] = this.instance.exports.readSharedRWMemory(i);
}
this.prime = fromArray32(arr);
this.witnessSize = this.instance.exports.getWitnessSize();
this.sanityCheck = sanityCheck;
}
circom_version() {
return this.instance.exports.getVersion();
}
async _doCalculateWitness(input, sanityCheck) {
//input is assumed to be a map from signals to arrays of bigints
this.instance.exports.init(this.sanityCheck || sanityCheck ? 1 : 0);
const keys = Object.keys(input);
var input_counter = 0;
keys.forEach((k) => {
const h = fnvHash(k);
const hMSB = parseInt(h.slice(0, 8), 16);
const hLSB = parseInt(h.slice(8, 16), 16);
const fArr = flatArray(input[k]);
let signalSize = this.instance.exports.getInputSignalSize(hMSB, hLSB);
if (signalSize < 0) {
throw new Error(`Signal ${k} not found\n`);
}
if (fArr.length < signalSize) {
throw new Error(`Not enough values for input signal ${k}\n`);
}
if (fArr.length > signalSize) {
throw new Error(`Too many values for input signal ${k}\n`);
}
for (let i = 0; i < fArr.length; i++) {
const arrFr = toArray32(BigInt(fArr[i]) % this.prime, this.n32);
for (let j = 0; j < this.n32; j++) {
this.instance.exports.writeSharedRWMemory(j, arrFr[this.n32 - 1 - j]);
}
try {
this.instance.exports.setInputSignal(hMSB, hLSB, i);
input_counter++;
} catch (err) {
// console.log(`After adding signal ${i} of ${k}`)
throw new Error(err);
}
}
});
if (input_counter < this.instance.exports.getInputSize()) {
throw new Error(
`Not all inputs have been set. Only ${input_counter} out of ${this.instance.exports.getInputSize()}`
);
}
}
async calculateWitness(input, sanityCheck) {
const w = [];
await this._doCalculateWitness(input, sanityCheck);
for (let i = 0; i < this.witnessSize; i++) {
this.instance.exports.getWitness(i);
const arr = new Uint32Array(this.n32);
for (let j = 0; j < this.n32; j++) {
arr[this.n32 - 1 - j] = this.instance.exports.readSharedRWMemory(j);
}
w.push(fromArray32(arr));
}
return w;
}
async calculateBinWitness(input, sanityCheck) {
const buff32 = new Uint32Array(this.witnessSize * this.n32);
const buff = new Uint8Array(buff32.buffer);
await this._doCalculateWitness(input, sanityCheck);
for (let i = 0; i < this.witnessSize; i++) {
this.instance.exports.getWitness(i);
const pos = i * this.n32;
for (let j = 0; j < this.n32; j++) {
buff32[pos + j] = this.instance.exports.readSharedRWMemory(j);
}
}
return buff;
}
async calculateWTNSBin(input, sanityCheck) {
const buff32 = new Uint32Array(this.witnessSize * this.n32 + this.n32 + 11);
const buff = new Uint8Array(buff32.buffer);
await this._doCalculateWitness(input, sanityCheck);
//"wtns"
buff[0] = "w".charCodeAt(0);
buff[1] = "t".charCodeAt(0);
buff[2] = "n".charCodeAt(0);
buff[3] = "s".charCodeAt(0);
//version 2
buff32[1] = 2;
//number of sections: 2
buff32[2] = 2;
//id section 1
buff32[3] = 1;
const n8 = this.n32 * 4;
//id section 1 length in 64bytes
const idSection1length = 8 + n8;
const idSection1lengthHex = idSection1length.toString(16);
buff32[4] = parseInt(idSection1lengthHex.slice(0, 8), 16);
buff32[5] = parseInt(idSection1lengthHex.slice(8, 16), 16);
//this.n32
buff32[6] = n8;
//prime number
this.instance.exports.getRawPrime();
var pos = 7;
for (let j = 0; j < this.n32; j++) {
buff32[pos + j] = this.instance.exports.readSharedRWMemory(j);
}
pos += this.n32;
// witness size
buff32[pos] = this.witnessSize;
pos++;
//id section 2
buff32[pos] = 2;
pos++;
// section 2 length
const idSection2length = n8 * this.witnessSize;
const idSection2lengthHex = idSection2length.toString(16);
buff32[pos] = parseInt(idSection2lengthHex.slice(0, 8), 16);
buff32[pos + 1] = parseInt(idSection2lengthHex.slice(8, 16), 16);
pos += 2;
for (let i = 0; i < this.witnessSize; i++) {
this.instance.exports.getWitness(i);
for (let j = 0; j < this.n32; j++) {
buff32[pos + j] = this.instance.exports.readSharedRWMemory(j);
}
pos += this.n32;
}
return buff;
}
}
function toArray32(rem, size) {
const res = []; //new Uint32Array(size); //has no unshift
const radix = BigInt(0x100000000);
while (rem) {
res.unshift(Number(rem % radix));
rem = rem / radix;
}
if (size) {
var i = size - res.length;
while (i > 0) {
res.unshift(0);
i--;
}
}
return res;
}
function fromArray32(arr) {
//returns a BigInt
var res = BigInt(0);
const radix = BigInt(0x100000000);
for (let i = 0; i < arr.length; i++) {
res = res * radix + BigInt(arr[i]);
}
return res;
}
function flatArray(a) {
var res = [];
fillArray(res, a);
return res;
function fillArray(res, a) {
if (Array.isArray(a)) {
for (let i = 0; i < a.length; i++) {
fillArray(res, a[i]);
}
} else {
res.push(a);
}
}
}
function fnvHash(str) {
const uint64_max = BigInt(2) ** BigInt(64);
let hash = BigInt("0xCBF29CE484222325");
for (var i = 0; i < str.length; i++) {
hash ^= BigInt(str[i].charCodeAt());
hash *= BigInt(0x100000001b3);
hash %= uint64_max;
}
let shash = hash.toString(16);
let n = 16 - shash.length;
shash = "0".repeat(n).concat(shash);
return shash;
}

rln-wasm/src/lib.rs (new file, 294 lines)
View File

@@ -0,0 +1,294 @@
#![cfg(target_arch = "wasm32")]
use js_sys::{BigInt as JsBigInt, Object, Uint8Array};
use num_bigint::BigInt;
use rln::public::{hash, poseidon_hash, RLN};
use std::vec::Vec;
use wasm_bindgen::prelude::*;
#[wasm_bindgen(js_name = initPanicHook)]
pub fn init_panic_hook() {
console_error_panic_hook::set_once();
}
#[wasm_bindgen(js_name = RLN)]
pub struct RLNWrapper {
// The purpose of this wrapper is to hold an RLN instance with the 'static lifetime
// because wasm_bindgen does not allow returning elements with lifetimes
instance: RLN,
}
// Macro to call `RLN` methods that take an arbitrary number of arguments
// and write their result into an output buffer passed as the last argument.
// The first argument to the macro is the context (a pointer to `RLNWrapper`),
// the second is the method to call on `RLN`,
// the third is the error message to report on failure;
// the rest are forwarded to the method, with the output buffer appended automatically.
macro_rules! call_with_output_and_error_msg {
// this variant is needed for the case when
// there are zero other arguments
($instance:expr, $method:ident, $error_msg:expr) => {
{
let mut output_data: Vec<u8> = Vec::new();
let new_instance = $instance.process();
if let Err(err) = new_instance.instance.$method(&mut output_data) {
std::mem::forget(output_data);
Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
} else {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
}
}
};
($instance:expr, $method:ident, $error_msg:expr, $( $arg:expr ),* ) => {
{
let mut output_data: Vec<u8> = Vec::new();
let new_instance = $instance.process();
if let Err(err) = new_instance.instance.$method($($arg.process()),*, &mut output_data) {
std::mem::forget(output_data);
Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
} else {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
}
}
};
}
macro_rules! call {
($instance:expr, $method:ident $(, $arg:expr)*) => {
{
let new_instance: &mut RLNWrapper = $instance.process();
new_instance.instance.$method($($arg.process()),*)
}
}
}
macro_rules! call_bool_method_with_error_msg {
($instance:expr, $method:ident, $error_msg:expr $(, $arg:expr)*) => {
{
let new_instance: &RLNWrapper = $instance.process();
new_instance.instance.$method($($arg.process()),*).map_err(|err| format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
}
}
}
// Macro to execute a free function with an arbitrary number of arguments.
// The first argument is the function to execute, the second is the error
// message to report on failure; the rest are forwarded to the function,
// with the output buffer appended automatically.
macro_rules! fn_call_with_output_and_error_msg {
// this variant is needed for the case when
// there are zero other arguments
($func:ident, $error_msg:expr) => {
{
let mut output_data: Vec<u8> = Vec::new();
if let Err(err) = $func(&mut output_data) {
std::mem::forget(output_data);
Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
} else {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
}
}
};
($func:ident, $error_msg:expr, $( $arg:expr ),* ) => {
{
let mut output_data: Vec<u8> = Vec::new();
if let Err(err) = $func($($arg.process()),*, &mut output_data) {
std::mem::forget(output_data);
Err(format!("Msg: {:#?}, Error: {:#?}", $error_msg, err))
} else {
let result = Uint8Array::from(&output_data[..]);
std::mem::forget(output_data);
Ok(result)
}
}
};
}
trait ProcessArg {
type ReturnType;
fn process(self) -> Self::ReturnType;
}
impl ProcessArg for usize {
type ReturnType = usize;
fn process(self) -> Self::ReturnType {
self
}
}
impl<T> ProcessArg for Vec<T> {
type ReturnType = Vec<T>;
fn process(self) -> Self::ReturnType {
self
}
}
impl ProcessArg for *const RLN {
type ReturnType = &'static RLN;
fn process(self) -> Self::ReturnType {
unsafe { &*self }
}
}
impl ProcessArg for *const RLNWrapper {
type ReturnType = &'static RLNWrapper;
fn process(self) -> Self::ReturnType {
unsafe { &*self }
}
}
impl ProcessArg for *mut RLNWrapper {
type ReturnType = &'static mut RLNWrapper;
fn process(self) -> Self::ReturnType {
unsafe { &mut *self }
}
}
impl<'a> ProcessArg for &'a [u8] {
type ReturnType = &'a [u8];
fn process(self) -> Self::ReturnType {
self
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = newRLN)]
pub fn wasm_new(zkey: Uint8Array) -> Result<*mut RLNWrapper, String> {
let instance = RLN::new_with_params(zkey.to_vec()).map_err(|err| format!("{:#?}", err))?;
let wrapper = RLNWrapper { instance };
Ok(Box::into_raw(Box::new(wrapper)))
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = rlnWitnessToJson)]
pub fn wasm_rln_witness_to_json(
ctx: *mut RLNWrapper,
serialized_witness: Uint8Array,
) -> Result<Object, String> {
let inputs = call!(
ctx,
get_rln_witness_bigint_json,
&serialized_witness.to_vec()[..]
)
.map_err(|err| err.to_string())?;
let js_value = serde_wasm_bindgen::to_value(&inputs).map_err(|err| err.to_string())?;
Object::from_entries(&js_value).map_err(|err| format!("{:#?}", err))
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateRLNProofWithWitness)]
pub fn wasm_generate_rln_proof_with_witness(
ctx: *mut RLNWrapper,
calculated_witness: Vec<JsBigInt>,
serialized_witness: Uint8Array,
) -> Result<Uint8Array, String> {
let mut witness_vec: Vec<BigInt> = vec![];
for v in calculated_witness {
witness_vec.push(
v.to_string(10)
.map_err(|err| format!("{:#?}", err))?
.as_string()
.ok_or("not a string error")?
.parse::<BigInt>()
.map_err(|err| format!("{:#?}", err))?,
);
}
call_with_output_and_error_msg!(
ctx,
generate_rln_proof_with_witness,
"could not generate proof",
witness_vec,
serialized_witness.to_vec()
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateMembershipKey)]
pub fn wasm_key_gen(ctx: *const RLNWrapper) -> Result<Uint8Array, String> {
call_with_output_and_error_msg!(ctx, key_gen, "could not generate membership keys")
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateExtendedMembershipKey)]
pub fn wasm_extended_key_gen(ctx: *const RLNWrapper) -> Result<Uint8Array, String> {
call_with_output_and_error_msg!(ctx, extended_key_gen, "could not generate membership keys")
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateSeededMembershipKey)]
pub fn wasm_seeded_key_gen(ctx: *const RLNWrapper, seed: Uint8Array) -> Result<Uint8Array, String> {
call_with_output_and_error_msg!(
ctx,
seeded_key_gen,
"could not generate membership key",
&seed.to_vec()[..]
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = generateSeededExtendedMembershipKey)]
pub fn wasm_seeded_extended_key_gen(
ctx: *const RLNWrapper,
seed: Uint8Array,
) -> Result<Uint8Array, String> {
call_with_output_and_error_msg!(
ctx,
seeded_extended_key_gen,
"could not generate membership key",
&seed.to_vec()[..]
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = recovedIDSecret)]
pub fn wasm_recover_id_secret(
ctx: *const RLNWrapper,
input_proof_data_1: Uint8Array,
input_proof_data_2: Uint8Array,
) -> Result<Uint8Array, String> {
call_with_output_and_error_msg!(
ctx,
recover_id_secret,
"could not recover id secret",
&input_proof_data_1.to_vec()[..],
&input_proof_data_2.to_vec()[..]
)
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[wasm_bindgen(js_name = verifyWithRoots)]
pub fn wasm_verify_with_roots(
ctx: *const RLNWrapper,
proof: Uint8Array,
roots: Uint8Array,
) -> Result<bool, String> {
call_bool_method_with_error_msg!(
ctx,
verify_with_roots,
"error while verifying proof with roots".to_string(),
&proof.to_vec()[..],
&roots.to_vec()[..]
)
}
#[wasm_bindgen(js_name = hash)]
pub fn wasm_hash(input: Uint8Array) -> Result<Uint8Array, String> {
fn_call_with_output_and_error_msg!(hash, "could not generate hash", &input.to_vec()[..])
}
#[wasm_bindgen(js_name = poseidonHash)]
pub fn wasm_poseidon_hash(input: Uint8Array) -> Result<Uint8Array, String> {
fn_call_with_output_and_error_msg!(
poseidon_hash,
"could not generate poseidon hash",
&input.to_vec()[..]
)
}
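
For reference, here is what `call_with_output_and_error_msg!` expands to for `wasm_key_gen` above; a hand-written sketch for illustration (the macro remains the canonical form), reproducing the `mem::forget` calls exactly as the macro emits them. The function name `wasm_key_gen_expanded` is hypothetical:

```rust
// Hand-expanded sketch of call_with_output_and_error_msg!(ctx, key_gen, "..."),
// as if written inside this module. Illustration only.
pub fn wasm_key_gen_expanded(ctx: *const RLNWrapper) -> Result<Uint8Array, String> {
    let mut output_data: Vec<u8> = Vec::new();
    // ProcessArg for *const RLNWrapper turns the raw pointer into a reference.
    let wrapper: &RLNWrapper = ctx.process();
    if let Err(err) = wrapper.instance.key_gen(&mut output_data) {
        std::mem::forget(output_data); // kept as-is from the macro
        Err(format!(
            "Msg: {:#?}, Error: {:#?}",
            "could not generate membership keys", err
        ))
    } else {
        // Uint8Array::from copies the bytes into JS-owned memory.
        let result = Uint8Array::from(&output_data[..]);
        std::mem::forget(output_data); // kept as-is from the macro
        Ok(result)
    }
}
```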

rln-wasm/src/utils.js (new file, 26 lines)
View File

@@ -0,0 +1,26 @@
const fs = require("fs");
// Utility functions for loading the circom witness calculator and reading files from tests
module.exports = {
read_file: function (path) {
return fs.readFileSync(path);
},
calculateWitness: async function (circom_path, inputs) {
const wc = require("../resources/witness_calculator.js");
const wasmFile = fs.readFileSync(circom_path);
const wasmFileBuffer = wasmFile.slice(
wasmFile.byteOffset,
wasmFile.byteOffset + wasmFile.byteLength
);
const witnessCalculator = await wc(wasmFileBuffer);
const calculatedWitness = await witnessCalculator.calculateWitness(
inputs,
false
);
return JSON.stringify(calculatedWitness, (key, value) =>
typeof value === "bigint" ? value.toString() : value
);
},
};

rln-wasm/tests/rln-wasm.rs (new file, 274 lines)
View File

@@ -0,0 +1,274 @@
#![cfg(target_arch = "wasm32")]
#[cfg(test)]
mod tests {
use js_sys::{BigInt as JsBigInt, Date, Object, Uint8Array};
use rln::circuit::{Fr, TEST_TREE_HEIGHT};
use rln::hashers::{hash_to_field, poseidon_hash};
use rln::poseidon_tree::PoseidonTree;
use rln::utils::{
bytes_le_to_fr, fr_to_bytes_le, normalize_usize, vec_fr_to_bytes_le, vec_u8_to_bytes_le,
};
use rln_wasm::*;
use wasm_bindgen::{prelude::*, JsValue};
use wasm_bindgen_test::wasm_bindgen_test;
use zerokit_utils::merkle_tree::merkle_tree::ZerokitMerkleTree;
use zerokit_utils::ZerokitMerkleProof;
#[wasm_bindgen(module = "src/utils.js")]
extern "C" {
#[wasm_bindgen(catch)]
fn read_file(path: &str) -> Result<Uint8Array, JsValue>;
#[wasm_bindgen(catch)]
async fn calculateWitness(circom_path: &str, input: Object) -> Result<JsValue, JsValue>;
}
#[cfg(feature = "arkzkey")]
const ZKEY_PATH: &str = "../rln/resources/tree_height_20/rln_final.arkzkey";
#[cfg(not(feature = "arkzkey"))]
const ZKEY_PATH: &str = "../rln/resources/tree_height_20/rln_final.zkey";
const CIRCOM_PATH: &str = "../rln/resources/tree_height_20/rln.wasm";
#[wasm_bindgen_test]
pub async fn rln_wasm_benchmark() {
let mut results = String::from("\nbenchmarks:\n");
let iterations = 10;
// Benchmark wasm_new
let zkey = read_file(&ZKEY_PATH).unwrap();
let start_wasm_new = Date::now();
for _ in 0..iterations {
let _ = wasm_new(zkey.clone()).unwrap();
}
let wasm_new_result = Date::now() - start_wasm_new;
// Initialize instance for other benchmarks
let rln_instance = wasm_new(zkey).unwrap();
// Benchmark wasm_key_gen
let start_wasm_key_gen = Date::now();
for _ in 0..iterations {
let _ = wasm_key_gen(rln_instance);
}
let wasm_key_gen_result = Date::now() - start_wasm_key_gen;
// Setup for proof generation and verification
let mut tree = PoseidonTree::default(TEST_TREE_HEIGHT).unwrap();
let mem_keys = wasm_key_gen(rln_instance).unwrap();
let id_key = mem_keys.subarray(0, 32);
let id_commitment = mem_keys.subarray(32, 64);
let epoch = hash_to_field(b"test-epoch");
let rln_identifier = hash_to_field(b"test-rln-identifier");
let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);
// Prepare inputs for other benchmarks
let signal = b"Hello World";
let identity_index = tree.leaves_set();
let user_message_limit = Fr::from(100);
let message_id = fr_to_bytes_le(&Fr::from(0));
let external_nullifier_bytes = fr_to_bytes_le(&external_nullifier);
let (id_commitment_fr, _) = bytes_le_to_fr(&id_commitment.to_vec()[..]);
let rate_commitment = poseidon_hash(&[id_commitment_fr, user_message_limit]);
tree.update_next(rate_commitment).unwrap();
let x = hash_to_field(signal);
let merkle_proof = tree.proof(identity_index).expect("proof should exist");
let path_elements = merkle_proof.get_path_elements();
let identity_path_index = merkle_proof.get_path_index();
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut id_key.to_vec());
serialized.append(&mut fr_to_bytes_le(&user_message_limit).to_vec());
serialized.append(&mut message_id.to_vec());
serialized.append(&mut vec_fr_to_bytes_le(&path_elements).unwrap());
serialized.append(&mut vec_u8_to_bytes_le(&identity_path_index).unwrap());
serialized.append(&mut fr_to_bytes_le(&x));
serialized.append(&mut external_nullifier_bytes.to_vec());
let serialized_message = Uint8Array::from(&serialized[..]);
let json_inputs =
wasm_rln_witness_to_json(rln_instance, serialized_message.clone()).unwrap();
// Benchmark calculateWitness
let start_calculate_witness = Date::now();
for _ in 0..iterations {
let _ = calculateWitness(&CIRCOM_PATH, json_inputs.clone()).await;
}
let calculate_witness_result = Date::now() - start_calculate_witness;
// Calculate the witness for the remaining benchmarks
let calculated_witness_json = calculateWitness(&CIRCOM_PATH, json_inputs)
.await
.unwrap()
.as_string()
.unwrap();
let calculated_witness_vec_str: Vec<String> =
serde_json::from_str(&calculated_witness_json).unwrap();
let calculated_witness: Vec<JsBigInt> = calculated_witness_vec_str
.iter()
.map(|x| JsBigInt::new(&x.into()).unwrap())
.collect();
// Benchmark wasm_generate_rln_proof_with_witness
let start_wasm_generate_rln_proof_with_witness = Date::now();
for _ in 0..iterations {
let _ = wasm_generate_rln_proof_with_witness(
rln_instance,
calculated_witness.clone(),
serialized_message.clone(),
);
}
let wasm_generate_rln_proof_with_witness_result =
Date::now() - start_wasm_generate_rln_proof_with_witness;
// Generate a proof for other benchmarks
let proof = wasm_generate_rln_proof_with_witness(
rln_instance,
calculated_witness,
serialized_message,
)
.unwrap();
let mut proof_bytes = proof.to_vec();
proof_bytes.append(&mut normalize_usize(signal.len()));
proof_bytes.append(&mut signal.to_vec());
let root = tree.root();
let root_le = fr_to_bytes_le(&root);
let roots = Uint8Array::from(&root_le[..]);
let proof_with_signal = Uint8Array::from(&proof_bytes[..]);
// Benchmark wasm_verify_with_roots
let start_wasm_verify_with_roots = Date::now();
for _ in 0..iterations {
let _ = wasm_verify_with_roots(rln_instance, proof_with_signal.clone(), roots.clone());
}
let wasm_verify_with_roots_result = Date::now() - start_wasm_verify_with_roots;
let is_proof_valid = wasm_verify_with_roots(rln_instance, proof_with_signal, roots);
assert!(is_proof_valid.unwrap(), "verifying proof with roots failed");
// Format and display results
let format_duration = |duration_ms: f64| -> String {
let avg_ms = duration_ms / (iterations as f64);
if avg_ms >= 1000.0 {
format!("{:.3} s", avg_ms / 1000.0)
} else {
format!("{:.3} ms", avg_ms)
}
};
results.push_str(&format!("wasm_new: {}\n", format_duration(wasm_new_result)));
results.push_str(&format!(
"wasm_key_gen: {}\n",
format_duration(wasm_key_gen_result)
));
results.push_str(&format!(
"calculateWitness: {}\n",
format_duration(calculate_witness_result)
));
results.push_str(&format!(
"wasm_generate_rln_proof_with_witness: {}\n",
format_duration(wasm_generate_rln_proof_with_witness_result)
));
results.push_str(&format!(
"wasm_verify_with_roots: {}\n",
format_duration(wasm_verify_with_roots_result)
));
// Log the results
wasm_bindgen_test::console_log!("{results}");
}
#[wasm_bindgen_test]
pub async fn rln_wasm_test() {
let zkey = read_file(&ZKEY_PATH).unwrap();
// Creating an instance of RLN
let rln_instance = wasm_new(zkey).unwrap();
let mut tree = PoseidonTree::default(TEST_TREE_HEIGHT).unwrap();
// Creating membership key
let mem_keys = wasm_key_gen(rln_instance).unwrap();
let id_key = mem_keys.subarray(0, 32);
let id_commitment = mem_keys.subarray(32, 64);
// Prepare the message
let signal = b"Hello World";
let identity_index = tree.leaves_set();
// Setting up the epoch and rln_identifier
let epoch = hash_to_field(b"test-epoch");
let rln_identifier = hash_to_field(b"test-rln-identifier");
let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);
let external_nullifier = fr_to_bytes_le(&external_nullifier);
let user_message_limit = Fr::from(100);
let message_id = fr_to_bytes_le(&Fr::from(0));
let (id_commitment_fr, _) = bytes_le_to_fr(&id_commitment.to_vec()[..]);
let rate_commitment = poseidon_hash(&[id_commitment_fr, user_message_limit]);
tree.update_next(rate_commitment).unwrap();
let x = hash_to_field(signal);
let merkle_proof = tree.proof(identity_index).expect("proof should exist");
let path_elements = merkle_proof.get_path_elements();
let identity_path_index = merkle_proof.get_path_index();
// Serializing the message
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut id_key.to_vec());
serialized.append(&mut fr_to_bytes_le(&user_message_limit).to_vec());
serialized.append(&mut message_id.to_vec());
serialized.append(&mut vec_fr_to_bytes_le(&path_elements).unwrap());
serialized.append(&mut vec_u8_to_bytes_le(&identity_path_index).unwrap());
serialized.append(&mut fr_to_bytes_le(&x));
serialized.append(&mut external_nullifier.to_vec());
let serialized_message = Uint8Array::from(&serialized[..]);
// Obtaining inputs that should be sent to circom witness calculator
let json_inputs =
wasm_rln_witness_to_json(rln_instance, serialized_message.clone()).unwrap();
// Calculating witness with JS
// (Using a JSON since wasm_bindgen does not like Result<Vec<JsBigInt>,JsValue>)
let calculated_witness_json = calculateWitness(&CIRCOM_PATH, json_inputs)
.await
.unwrap()
.as_string()
.unwrap();
let calculated_witness_vec_str: Vec<String> =
serde_json::from_str(&calculated_witness_json).unwrap();
let calculated_witness: Vec<JsBigInt> = calculated_witness_vec_str
.iter()
.map(|x| JsBigInt::new(&x.into()).unwrap())
.collect();
// Generating proof
let proof = wasm_generate_rln_proof_with_witness(
rln_instance,
calculated_witness,
serialized_message,
)
.unwrap();
// Add signal_len | signal
let mut proof_bytes = proof.to_vec();
proof_bytes.append(&mut normalize_usize(signal.len()));
proof_bytes.append(&mut signal.to_vec());
// Validating Proof with Roots
let root = tree.root();
let root_le = fr_to_bytes_le(&root);
let roots = Uint8Array::from(&root_le[..]);
let proof_with_signal = Uint8Array::from(&proof_bytes[..]);
let is_proof_valid = wasm_verify_with_roots(rln_instance, proof_with_signal, roots);
assert!(is_proof_valid.unwrap(), "verifying proof with roots failed");
}
}

View File

@@ -18,78 +18,81 @@ doctest = false
[dependencies]
# ZKP Generation
ark-bn254 = { version = "0.5.0", features = ["std"] }
ark-ff = { version = "0.5.0", features = ["std", "asm"] }
ark-serialize = { version = "0.5.0", features = ["derive"] }
ark-ec = { version = "0.5.0", default-features = false }
ark-std = { version = "0.5.0", default-features = false }
ark-groth16 = { version = "0.5.0", features = [
ark-relations = { version = "0.5.0", features = ["std"] }
ark-ff = { version = "0.5.0", default-features = false, features = [
"parallel",
] }
ark-ec = { version = "0.5.0", default-features = false, features = [
"parallel",
] }
ark-std = { version = "0.5.0", default-features = false, features = [
"parallel",
] }
ark-poly = { version = "0.5.0", default-features = false, features = [
"parallel",
] }
ark-groth16 = { version = "0.5.0", default-features = false, features = [
"parallel",
] }
ark-serialize = { version = "0.5.0", default-features = false, features = [
"parallel",
], default-features = false }
ark-relations = { version = "0.5.0", default-features = false, features = [
"std",
] }
ark-circom = { version = "0.5.0" }
ark-r1cs-std = { version = "0.5.0" }
# error handling
color-eyre = "0.6.2"
thiserror = "2.0.11"
color-eyre = "0.6.3"
thiserror = "2.0.12"
# utilities
byteorder = "1.4.3"
byteorder = "1.5.0"
cfg-if = "1.0"
num-bigint = { version = "0.4.6", default-features = false, features = [
"rand",
"std",
] }
num-traits = "0.2.19"
once_cell = "1.19.0"
lazy_static = "1.4.0"
once_cell = "1.21.1"
lazy_static = "1.5.0"
rand = "0.8.5"
rand_chacha = "0.3.1"
ruint = { version = "1.12.4", features = ["rand", "serde", "ark-ff-04"] }
ruint = { version = "1.14.0", features = ["rand", "serde", "ark-ff-04"] }
tiny-keccak = { version = "2.0.2", features = ["keccak"] }
utils = { package = "zerokit_utils", version = "0.5.2", path = "../utils/", default-features = false }
utils = { package = "zerokit_utils", version = "0.5.2", path = "../utils", default-features = false }
# serialization
prost = "0.13.1"
prost = "0.13.5"
serde_json = "1.0"
serde = { version = "1.0", features = ["derive"] }
document-features = { version = "=0.2.10", optional = true }
document-features = { version = "0.2.11", optional = true }
[dev-dependencies]
sled = "0.34.7"
criterion = { version = "0.4.0", features = ["html_reports"] }
[features]
default = ["parallel", "pmtree-ft"]
parallel = [
"ark-ec/parallel",
"ark-ff/parallel",
"ark-std/parallel",
"ark-groth16/parallel",
"utils/parallel",
]
default = ["pmtree-ft"]
fullmerkletree = ["default"]
arkzkey = []
stateless = []
arkzkey = []
# Note: pmtree feature is still experimental
pmtree-ft = ["utils/pmtree-ft"]
[[bench]]
name = "pmtree_benchmark"
harness = false
[[bench]]
name = "circuit_loading_benchmark"
harness = false
[[bench]]
name = "circuit_loading_arkzkey_benchmark"
harness = false
required-features = ["arkzkey"]
[[bench]]
name = "circuit_loading_benchmark"
harness = false
[[bench]]
name = "pmtree_benchmark"
harness = false
[[bench]]
name = "poseidon_tree_benchmark"
harness = false

View File

@@ -34,9 +34,9 @@ use std::io::Cursor;
use rln::{
circuit::Fr,
hashers::{hash_to_field, poseidon_hash},
protocol::{keygen, prepare_verify_input},
protocol::{keygen, prepare_prove_input, prepare_verify_input},
public::RLN,
utils::{fr_to_bytes_le, normalize_usize},
utils::fr_to_bytes_le,
};
use serde_json::json;
@@ -53,8 +53,8 @@ fn main() {
// 3. Add a rate commitment to the Merkle tree
let id_index = 10;
let user_message_limit = 10;
let rate_commitment = poseidon_hash(&[id_commitment, Fr::from(user_message_limit)]);
let user_message_limit = Fr::from(10);
let rate_commitment = poseidon_hash(&[id_commitment, user_message_limit]);
let mut buffer = Cursor::new(fr_to_bytes_le(&rate_commitment));
rln.set_leaf(id_index, &mut buffer).unwrap();
@@ -65,25 +65,28 @@ fn main() {
// We generate rln_identifier from a seed and ensure it is
// mapped to a field element by hashing-to-field its content
let rln_identifier = hash_to_field(b"test-rln-identifier");
// We generate an external nullifier
let external_nullifier = poseidon_hash(&[epoch, rln_identifier]);
// We choose a message_id satisfying 0 <= message_id < user_message_limit
let message_id = Fr::from(1);
// 5. Generate and verify a proof for a message
let signal = b"RLN is awesome";
// 6. Prepare input for generate_rln_proof API
// input_data is [ identity_secret<32> | id_index<8> | external_nullifier<32> | user_message_limit<32> | message_id<32> | signal_len<8> | signal<var> ]
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut fr_to_bytes_le(&identity_secret_hash));
serialized.append(&mut normalize_usize(id_index));
serialized.append(&mut fr_to_bytes_le(&Fr::from(user_message_limit)));
serialized.append(&mut fr_to_bytes_le(&Fr::from(1)));
serialized.append(&mut fr_to_bytes_le(&external_nullifier));
serialized.append(&mut normalize_usize(signal.len()));
serialized.append(&mut signal.to_vec());
let prove_input = prepare_prove_input(
identity_secret_hash,
id_index,
user_message_limit,
message_id,
external_nullifier,
signal,
);
// 7. Generate a RLN proof
// We generate a RLN proof for proof_input
let mut input_buffer = Cursor::new(serialized);
let mut input_buffer = Cursor::new(prove_input);
let mut output_buffer = Cursor::new(Vec::<u8>::new());
rln.generate_rln_proof(&mut input_buffer, &mut output_buffer)
.unwrap();

View File

@@ -1,5 +1,5 @@
use ark_circom::read_zkey;
use criterion::{criterion_group, criterion_main, Criterion};
use rln::circuit::zkey::read_zkey;
use std::io::Cursor;
pub fn zkey_load_benchmark(c: &mut Criterion) {

Binary file not shown.

View File

@@ -13,8 +13,8 @@ use std::{
ops::{BitAnd, BitOr, BitXor, Deref, Shl, Shr},
};
use crate::circuit::iden3calc::proto;
use crate::circuit::Fr;
use crate::iden3calc::proto;
pub const M: U256 =
uint!(21888242871839275222246405745257275088548364400416034343698204186575808495617_U256);

View File

@@ -7,7 +7,7 @@ use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use prost::Message;
use std::io::{Read, Write};
use crate::iden3calc::{
use crate::circuit::iden3calc::{
graph,
graph::{Operation, TresOperation, UnoOperation},
proto, InputSignalsInfo,

View File

@@ -1,5 +1,9 @@
// This crate provides interfaces for the zero-knowledge circuit and keys
pub mod iden3calc;
pub mod qap;
pub mod zkey;
use ::lazy_static::lazy_static;
use ark_bn254::{
Bn254, Fq as ArkFq, Fq2 as ArkFq2, Fr as ArkFr, G1Affine as ArkG1Affine,
@@ -10,7 +14,7 @@ use ark_relations::r1cs::ConstraintMatrices;
use cfg_if::cfg_if;
use color_eyre::{Report, Result};
use crate::iden3calc::calc_witness;
use crate::circuit::iden3calc::calc_witness;
#[cfg(feature = "arkzkey")]
use {
@@ -19,13 +23,15 @@ use {
};
#[cfg(not(feature = "arkzkey"))]
use {ark_circom::read_zkey, std::io::Cursor};
use {crate::circuit::zkey::read_zkey, std::io::Cursor};
#[cfg(feature = "arkzkey")]
pub const ARKZKEY_BYTES: &[u8] = include_bytes!("../resources/tree_height_20/rln_final.arkzkey");
pub const ARKZKEY_BYTES: &[u8] = include_bytes!("../../resources/tree_height_20/rln_final.arkzkey");
pub const ZKEY_BYTES: &[u8] = include_bytes!("../resources/tree_height_20/rln_final.zkey");
const GRAPH_BYTES: &[u8] = include_bytes!("../resources/tree_height_20/graph.bin");
pub const ZKEY_BYTES: &[u8] = include_bytes!("../../resources/tree_height_20/rln_final.zkey");
#[cfg(not(target_arch = "wasm32"))]
const GRAPH_BYTES: &[u8] = include_bytes!("../../resources/tree_height_20/graph.bin");
lazy_static! {
static ref ZKEY: (ProvingKey<Curve>, ConstraintMatrices<Fr>) = {
@@ -73,6 +79,7 @@ pub fn zkey_from_raw(zkey_data: &[u8]) -> Result<(ProvingKey<Curve>, ConstraintM
}
// Loads the proving key
#[cfg(not(target_arch = "wasm32"))]
pub fn zkey_from_folder() -> &'static (ProvingKey<Curve>, ConstraintMatrices<Fr>) {
&ZKEY
}
@@ -87,16 +94,6 @@ pub fn vk_from_raw(zkey_data: &[u8]) -> Result<VerifyingKey<Curve>> {
Err(Report::msg("No proving/verification key found!"))
}
// Checks verification key to be correct with respect to proving key
pub fn check_vk_from_zkey(verifying_key: VerifyingKey<Curve>) -> Result<()> {
let (proving_key, _matrices) = zkey_from_folder();
if proving_key.vk == verifying_key {
Ok(())
} else {
Err(Report::msg("verifying_keys are not equal"))
}
}
pub fn calculate_rln_witness<I: IntoIterator<Item = (String, Vec<Fr>)>>(
inputs: I,
graph_data: &[u8],
@@ -104,6 +101,7 @@ pub fn calculate_rln_witness<I: IntoIterator<Item = (String, Vec<Fr>)>>(
calc_witness(inputs, graph_data)
}
#[cfg(not(target_arch = "wasm32"))]
pub fn graph_from_folder() -> &'static [u8] {
GRAPH_BYTES
}

rln/src/circuit/qap.rs (new file, 114 lines)
View File

@@ -0,0 +1,114 @@
// This file is based on the code by arkworks. Its preimage can be found here:
// https://github.com/arkworks-rs/circom-compat/blob/3c95ed98e23a408b4d99a53e483a9bba39685a4e/src/circom/qap.rs
use ark_ff::PrimeField;
use ark_groth16::r1cs_to_qap::{evaluate_constraint, LibsnarkReduction, R1CSToQAP};
use ark_poly::EvaluationDomain;
use ark_relations::r1cs::{ConstraintMatrices, ConstraintSystemRef, SynthesisError};
use ark_std::{cfg_into_iter, cfg_iter, cfg_iter_mut, vec};
/// Implements the witness map used by snarkjs. The arkworks witness map calculates the
/// coefficients of H through computing (AB-C)/Z in the evaluation domain and going back to the
/// coefficients domain. snarkjs instead precomputes the Lagrange form of the powers of tau bases
/// in a domain twice as large and the witness map is computed as the odd coefficients of (AB-C)
/// in that domain. This serves as HZ when computing the C proof element.
pub struct CircomReduction;
impl R1CSToQAP for CircomReduction {
#[allow(clippy::type_complexity)]
fn instance_map_with_evaluation<F: PrimeField, D: EvaluationDomain<F>>(
cs: ConstraintSystemRef<F>,
t: &F,
) -> Result<(Vec<F>, Vec<F>, Vec<F>, F, usize, usize), SynthesisError> {
LibsnarkReduction::instance_map_with_evaluation::<F, D>(cs, t)
}
fn witness_map_from_matrices<F: PrimeField, D: EvaluationDomain<F>>(
matrices: &ConstraintMatrices<F>,
num_inputs: usize,
num_constraints: usize,
full_assignment: &[F],
) -> Result<Vec<F>, SynthesisError> {
let zero = F::zero();
let domain =
D::new(num_constraints + num_inputs).ok_or(SynthesisError::PolynomialDegreeTooLarge)?;
let domain_size = domain.size();
let mut a = vec![zero; domain_size];
let mut b = vec![zero; domain_size];
#[allow(unexpected_cfgs)]
cfg_iter_mut!(a[..num_constraints])
.zip(cfg_iter_mut!(b[..num_constraints]))
.zip(cfg_iter!(&matrices.a))
.zip(cfg_iter!(&matrices.b))
.for_each(|(((a, b), at_i), bt_i)| {
*a = evaluate_constraint(at_i, full_assignment);
*b = evaluate_constraint(bt_i, full_assignment);
});
{
let start = num_constraints;
let end = start + num_inputs;
a[start..end].clone_from_slice(&full_assignment[..num_inputs]);
}
let mut c = vec![zero; domain_size];
#[allow(unexpected_cfgs)]
cfg_iter_mut!(c[..num_constraints])
.zip(&a)
.zip(&b)
.for_each(|((c_i, &a), &b)| {
*c_i = a * b;
});
domain.ifft_in_place(&mut a);
domain.ifft_in_place(&mut b);
let root_of_unity = {
let domain_size_double = 2 * domain_size;
let domain_double =
D::new(domain_size_double).ok_or(SynthesisError::PolynomialDegreeTooLarge)?;
domain_double.element(1)
};
D::distribute_powers_and_mul_by_const(&mut a, root_of_unity, F::one());
D::distribute_powers_and_mul_by_const(&mut b, root_of_unity, F::one());
domain.fft_in_place(&mut a);
domain.fft_in_place(&mut b);
let mut ab = domain.mul_polynomials_in_evaluation_domain(&a, &b);
drop(a);
drop(b);
domain.ifft_in_place(&mut c);
D::distribute_powers_and_mul_by_const(&mut c, root_of_unity, F::one());
domain.fft_in_place(&mut c);
#[allow(unexpected_cfgs)]
cfg_iter_mut!(ab)
.zip(c)
.for_each(|(ab_i, c_i)| *ab_i -= &c_i);
Ok(ab)
}
fn h_query_scalars<F: PrimeField, D: EvaluationDomain<F>>(
max_power: usize,
t: F,
_: F,
delta_inverse: F,
) -> Result<Vec<F>, SynthesisError> {
// the usual H query has domain-1 powers. Z has domain powers. So HZ has 2*domain-1 powers.
#[allow(unexpected_cfgs)]
let mut scalars = cfg_into_iter!(0..2 * max_power + 1)
.map(|i| delta_inverse * t.pow([i as u64]))
.collect::<Vec<_>>();
let domain_size = scalars.len();
let domain = D::new(domain_size).ok_or(SynthesisError::PolynomialDegreeTooLarge)?;
// generate the lagrange coefficients
domain.ifft_in_place(&mut scalars);
#[allow(unexpected_cfgs)]
Ok(cfg_into_iter!(scalars).skip(1).step_by(2).collect())
}
}
rln/src/circuit/zkey.rs (new file, 371 lines)
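// A toy invocation of the reduction above (test module and values are
// illustrative): one constraint w * w = w_sq over the assignment
// [1, w, w_sq]. Only the A and B matrices matter here, since the witness
// map recomputes C as A*B in the evaluation domain.
#[cfg(test)]
mod circom_reduction_sketch {
    use super::CircomReduction;
    use ark_bn254::Fr;
    use ark_ff::One;
    use ark_groth16::r1cs_to_qap::R1CSToQAP;
    use ark_poly::Radix2EvaluationDomain;
    use ark_relations::r1cs::ConstraintMatrices;

    #[test]
    fn toy_witness_map() {
        let w = Fr::from(3u64);
        let full_assignment = [Fr::one(), w, w * w];
        let matrices = ConstraintMatrices::<Fr> {
            num_instance_variables: 1,
            num_witness_variables: 2,
            num_constraints: 1,
            a_num_non_zero: 1,
            b_num_non_zero: 1,
            c_num_non_zero: 0,
            a: vec![vec![(Fr::one(), 1)]], // row 0 selects w
            b: vec![vec![(Fr::one(), 1)]], // row 0 selects w
            c: vec![],
        };
        // Returns the odd coefficients of (AB-C) in the doubled domain,
        // i.e. the scalars that multiply the precomputed H bases.
        let h = CircomReduction::witness_map_from_matrices::<Fr, Radix2EvaluationDomain<Fr>>(
            &matrices, 1, 1, &full_assignment,
        )
        .expect("witness map");
        assert_eq!(h.len(), 2); // domain of size 2: one constraint + one input
    }
}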
@@ -0,0 +1,371 @@
// This file is based on the code by arkworks. Its preimage can be found here:
// https://github.com/arkworks-rs/circom-compat/blob/3c95ed98e23a408b4d99a53e483a9bba39685a4e/src/zkey.rs
//! ZKey Parsing
//!
//! Each ZKey file is broken into sections:
//! Header(1)
//! Prover Type 1 Groth
//! HeaderGroth(2)
//! n8q
//! q
//! n8r
//! r
//! NVars
//! NPub
//! DomainSize (multiple of 2)
//! alpha1
//! beta1
//! delta1
//! beta2
//! gamma2
//! delta2
//! IC(3)
//! Coefs(4)
//! PointsA(5)
//! PointsB1(6)
//! PointsB2(7)
//! PointsC(8)
//! PointsH(9)
//! Contributions(10)
use ark_ff::{BigInteger256, PrimeField};
use ark_relations::r1cs::ConstraintMatrices;
use ark_serialize::{CanonicalDeserialize, SerializationError};
use ark_std::log2;
use byteorder::{LittleEndian, ReadBytesExt};
use std::{
collections::HashMap,
io::{Read, Seek, SeekFrom},
};
use ark_bn254::{Bn254, Fq, Fq2, Fr, G1Affine, G2Affine};
use ark_groth16::{ProvingKey, VerifyingKey};
use num_traits::Zero;
type IoResult<T> = Result<T, SerializationError>;
#[derive(Clone, Debug)]
struct Section {
position: u64,
#[allow(dead_code)]
size: usize,
}
/// Reads a SnarkJS ZKey file into an Arkworks ProvingKey.
pub fn read_zkey<R: Read + Seek>(
reader: &mut R,
) -> IoResult<(ProvingKey<Bn254>, ConstraintMatrices<Fr>)> {
let mut binfile = BinFile::new(reader)?;
let proving_key = binfile.proving_key()?;
let matrices = binfile.matrices()?;
Ok((proving_key, matrices))
}
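// A hedged usage sketch for read_zkey; the path mirrors the repository's
// resources layout and, like the function name, is illustrative only.
fn load_zkey_from_disk() -> IoResult<()> {
    let file = std::fs::File::open("rln/resources/tree_height_20/rln_final.zkey")
        .map_err(SerializationError::IoError)?;
    let mut reader = std::io::BufReader::new(file);
    let (proving_key, matrices) = read_zkey(&mut reader)?;
    // IC holds one point per instance variable (n_public + 1).
    assert_eq!(proving_key.vk.gamma_abc_g1.len(), matrices.num_instance_variables);
    Ok(())
}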
#[derive(Debug)]
struct BinFile<'a, R> {
#[allow(dead_code)]
ftype: String,
#[allow(dead_code)]
version: u32,
sections: HashMap<u32, Vec<Section>>,
reader: &'a mut R,
}
impl<'a, R: Read + Seek> BinFile<'a, R> {
fn new(reader: &'a mut R) -> IoResult<Self> {
let mut magic = [0u8; 4];
reader.read_exact(&mut magic)?;
let version = reader.read_u32::<LittleEndian>()?;
let num_sections = reader.read_u32::<LittleEndian>()?;
let mut sections = HashMap::new();
for _ in 0..num_sections {
let section_id = reader.read_u32::<LittleEndian>()?;
let section_length = reader.read_u64::<LittleEndian>()?;
let section = sections.entry(section_id).or_insert_with(Vec::new);
section.push(Section {
position: reader.stream_position()?,
size: section_length as usize,
});
reader.seek(SeekFrom::Current(section_length as i64))?;
}
Ok(Self {
ftype: std::str::from_utf8(&magic[..]).unwrap().to_string(),
version,
sections,
reader,
})
}
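// A standalone sketch of the preamble this constructor consumes: a 4-byte
// magic ("zkey"), a little-endian u32 version, and a little-endian u32
// section count. The function name is illustrative.
fn peek_zkey_preamble(bytes: &[u8]) -> std::io::Result<(String, u32, u32)> {
    let mut r = std::io::Cursor::new(bytes);
    let mut magic = [0u8; 4];
    r.read_exact(&mut magic)?;
    let version = r.read_u32::<LittleEndian>()?;
    let num_sections = r.read_u32::<LittleEndian>()?;
    Ok((String::from_utf8_lossy(&magic).into_owned(), version, num_sections))
}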
fn proving_key(&mut self) -> IoResult<ProvingKey<Bn254>> {
let header = self.groth_header()?;
let ic = self.ic(header.n_public)?;
let a_query = self.a_query(header.n_vars)?;
let b_g1_query = self.b_g1_query(header.n_vars)?;
let b_g2_query = self.b_g2_query(header.n_vars)?;
let l_query = self.l_query(header.n_vars - header.n_public - 1)?;
let h_query = self.h_query(header.domain_size as usize)?;
let vk = VerifyingKey::<Bn254> {
alpha_g1: header.verifying_key.alpha_g1,
beta_g2: header.verifying_key.beta_g2,
gamma_g2: header.verifying_key.gamma_g2,
delta_g2: header.verifying_key.delta_g2,
gamma_abc_g1: ic,
};
let pk = ProvingKey::<Bn254> {
vk,
beta_g1: header.verifying_key.beta_g1,
delta_g1: header.verifying_key.delta_g1,
a_query,
b_g1_query,
b_g2_query,
h_query,
l_query,
};
Ok(pk)
}
fn get_section(&self, id: u32) -> Section {
self.sections.get(&id).unwrap()[0].clone()
}
fn groth_header(&mut self) -> IoResult<HeaderGroth> {
let section = self.get_section(2);
let header = HeaderGroth::new(&mut self.reader, &section)?;
Ok(header)
}
fn ic(&mut self, n_public: usize) -> IoResult<Vec<G1Affine>> {
// the range is non-inclusive so we do +1 to get all inputs
self.g1_section(n_public + 1, 3)
}
/// Returns the [`ConstraintMatrices`] corresponding to the zkey
pub fn matrices(&mut self) -> IoResult<ConstraintMatrices<Fr>> {
let header = self.groth_header()?;
let section = self.get_section(4);
self.reader.seek(SeekFrom::Start(section.position))?;
let num_coeffs: u32 = self.reader.read_u32::<LittleEndian>()?;
// instantiate AB
let mut matrices = vec![vec![vec![]; header.domain_size as usize]; 2];
let mut max_constraint_index = 0;
for _ in 0..num_coeffs {
let matrix: u32 = self.reader.read_u32::<LittleEndian>()?;
let constraint: u32 = self.reader.read_u32::<LittleEndian>()?;
let signal: u32 = self.reader.read_u32::<LittleEndian>()?;
let value: Fr = deserialize_field_fr(&mut self.reader)?;
max_constraint_index = std::cmp::max(max_constraint_index, constraint);
matrices[matrix as usize][constraint as usize].push((value, signal as usize));
}
let num_constraints = max_constraint_index as usize - header.n_public;
// Remove the public input constraints, Arkworks adds them later
matrices.iter_mut().for_each(|m| {
m.truncate(num_constraints);
});
// This is taken from Arkworks' to_matrices() function
let a = matrices[0].clone();
let b = matrices[1].clone();
let a_num_non_zero: usize = a.iter().map(|lc| lc.len()).sum();
let b_num_non_zero: usize = b.iter().map(|lc| lc.len()).sum();
let matrices = ConstraintMatrices {
num_instance_variables: header.n_public + 1,
num_witness_variables: header.n_vars - header.n_public,
num_constraints,
a_num_non_zero,
b_num_non_zero,
c_num_non_zero: 0,
a,
b,
c: vec![],
};
Ok(matrices)
}
fn a_query(&mut self, n_vars: usize) -> IoResult<Vec<G1Affine>> {
self.g1_section(n_vars, 5)
}
fn b_g1_query(&mut self, n_vars: usize) -> IoResult<Vec<G1Affine>> {
self.g1_section(n_vars, 6)
}
fn b_g2_query(&mut self, n_vars: usize) -> IoResult<Vec<G2Affine>> {
self.g2_section(n_vars, 7)
}
fn l_query(&mut self, n_vars: usize) -> IoResult<Vec<G1Affine>> {
self.g1_section(n_vars, 8)
}
fn h_query(&mut self, n_vars: usize) -> IoResult<Vec<G1Affine>> {
self.g1_section(n_vars, 9)
}
fn g1_section(&mut self, num: usize, section_id: usize) -> IoResult<Vec<G1Affine>> {
let section = self.get_section(section_id as u32);
self.reader.seek(SeekFrom::Start(section.position))?;
deserialize_g1_vec(self.reader, num as u32)
}
fn g2_section(&mut self, num: usize, section_id: usize) -> IoResult<Vec<G2Affine>> {
let section = self.get_section(section_id as u32);
self.reader.seek(SeekFrom::Start(section.position))?;
deserialize_g2_vec(self.reader, num as u32)
}
}
#[derive(Default, Clone, Debug, CanonicalDeserialize)]
pub struct ZVerifyingKey {
alpha_g1: G1Affine,
beta_g1: G1Affine,
beta_g2: G2Affine,
gamma_g2: G2Affine,
delta_g1: G1Affine,
delta_g2: G2Affine,
}
impl ZVerifyingKey {
fn new<R: Read>(reader: &mut R) -> IoResult<Self> {
let alpha_g1 = deserialize_g1(reader)?;
let beta_g1 = deserialize_g1(reader)?;
let beta_g2 = deserialize_g2(reader)?;
let gamma_g2 = deserialize_g2(reader)?;
let delta_g1 = deserialize_g1(reader)?;
let delta_g2 = deserialize_g2(reader)?;
Ok(Self {
alpha_g1,
beta_g1,
beta_g2,
gamma_g2,
delta_g1,
delta_g2,
})
}
}
#[derive(Clone, Debug)]
struct HeaderGroth {
#[allow(dead_code)]
n8q: u32,
#[allow(dead_code)]
q: BigInteger256,
#[allow(dead_code)]
n8r: u32,
#[allow(dead_code)]
r: BigInteger256,
n_vars: usize,
n_public: usize,
domain_size: u32,
#[allow(dead_code)]
power: u32,
verifying_key: ZVerifyingKey,
}
impl HeaderGroth {
fn new<R: Read + Seek>(reader: &mut R, section: &Section) -> IoResult<Self> {
reader.seek(SeekFrom::Start(section.position))?;
Self::read(reader)
}
fn read<R: Read>(mut reader: &mut R) -> IoResult<Self> {
// TODO: Impl From<u32> in Arkworks
let n8q: u32 = u32::deserialize_uncompressed(&mut reader)?;
// base field modulus q of Bn254
let q = BigInteger256::deserialize_uncompressed(&mut reader)?;
let n8r: u32 = u32::deserialize_uncompressed(&mut reader)?;
// scalar field modulus r (the group order) of Bn254
let r = BigInteger256::deserialize_uncompressed(&mut reader)?;
let n_vars = u32::deserialize_uncompressed(&mut reader)? as usize;
let n_public = u32::deserialize_uncompressed(&mut reader)? as usize;
let domain_size: u32 = u32::deserialize_uncompressed(&mut reader)?;
let power = log2(domain_size as usize);
let verifying_key = ZVerifyingKey::new(&mut reader)?;
Ok(Self {
n8q,
q,
n8r,
r,
n_vars,
n_public,
domain_size,
power,
verifying_key,
})
}
}
// need to divide by R, since snarkjs outputs the zkey with coefficients
// multiplied by R^2
fn deserialize_field_fr<R: Read>(reader: &mut R) -> IoResult<Fr> {
let bigint = BigInteger256::deserialize_uncompressed(reader)?;
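// new_unchecked tags the raw limbs as Montgomery form; into_bigint then
// converts out of that form (one division by R), and the outer
// new_unchecked re-tags the result, netting a single division by R.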
Ok(Fr::new_unchecked(Fr::new_unchecked(bigint).into_bigint()))
}
// skips the multiplication by R because Circom points are already in Montgomery form
fn deserialize_field<R: Read>(reader: &mut R) -> IoResult<Fq> {
let bigint = BigInteger256::deserialize_uncompressed(reader)?;
// if you use Fq::new it multiplies by R
Ok(Fq::new_unchecked(bigint))
}
pub fn deserialize_field2<R: Read>(reader: &mut R) -> IoResult<Fq2> {
let c0 = deserialize_field(reader)?;
let c1 = deserialize_field(reader)?;
Ok(Fq2::new(c0, c1))
}
fn deserialize_g1<R: Read>(reader: &mut R) -> IoResult<G1Affine> {
let x = deserialize_field(reader)?;
let y = deserialize_field(reader)?;
let infinity = x.is_zero() && y.is_zero();
if infinity {
Ok(G1Affine::identity())
} else {
Ok(G1Affine::new(x, y))
}
}
fn deserialize_g2<R: Read>(reader: &mut R) -> IoResult<G2Affine> {
let f1 = deserialize_field2(reader)?;
let f2 = deserialize_field2(reader)?;
let infinity = f1.is_zero() && f2.is_zero();
if infinity {
Ok(G2Affine::identity())
} else {
Ok(G2Affine::new(f1, f2))
}
}
fn deserialize_g1_vec<R: Read>(reader: &mut R, n_vars: u32) -> IoResult<Vec<G1Affine>> {
(0..n_vars).map(|_| deserialize_g1(reader)).collect()
}
fn deserialize_g2_vec<R: Read>(reader: &mut R, n_vars: u32) -> IoResult<Vec<G2Affine>> {
(0..n_vars).map(|_| deserialize_g2(reader)).collect()
}

@@ -1,8 +1,7 @@
#![allow(dead_code)]
pub mod circuit;
#[cfg(not(target_arch = "wasm32"))]
pub mod ffi;
pub mod hashers;
pub mod iden3calc;
#[cfg(feature = "pmtree-ft")]
pub mod pm_tree_adapter;
pub mod poseidon_tree;
@@ -11,5 +10,3 @@ pub mod public;
#[cfg(test)]
pub mod public_api_tests;
pub mod utils;
pub mod ffi;

@@ -1,7 +1,6 @@
// This crate collects all the underlying primitives used to implement RLN
use ark_bn254::Fr;
use ark_circom::CircomReduction;
use ark_groth16::{prepare_verifying_key, Groth16, Proof as ArkProof, ProvingKey, VerifyingKey};
use ark_relations::r1cs::{ConstraintMatrices, SynthesisError};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
@@ -16,7 +15,7 @@ use std::time::Instant;
use thiserror::Error;
use tiny_keccak::{Hasher as _, Keccak};
use crate::circuit::{calculate_rln_witness, Curve};
use crate::circuit::{calculate_rln_witness, qap::CircomReduction, Curve};
use crate::hashers::{hash_to_field, poseidon_hash};
use crate::poseidon_tree::*;
use crate::public::RLN_IDENTIFIER;

@@ -8,16 +8,24 @@ use {
utils::{Hasher, ZerokitMerkleProof, ZerokitMerkleTree},
};
use crate::circuit::{graph_from_folder, vk_from_raw, zkey_from_folder, zkey_from_raw, Curve, Fr};
use crate::circuit::{vk_from_raw, zkey_from_raw, Curve, Fr};
use crate::hashers::{hash_to_field, poseidon_hash as utils_poseidon_hash};
use crate::protocol::*;
use crate::utils::*;
#[cfg(not(target_arch = "wasm32"))]
use {
crate::circuit::{graph_from_folder, zkey_from_folder},
std::default::Default,
};
use ark_groth16::{Proof as ArkProof, ProvingKey, VerifyingKey};
use ark_relations::r1cs::ConstraintMatrices;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Read, Write};
use color_eyre::{Report, Result};
use std::{default::Default, io::Cursor};
use std::io::Cursor;
#[cfg(target_arch = "wasm32")]
use num_bigint::BigInt;
/// The application-specific RLN identifier.
///
@@ -32,6 +40,7 @@ pub const RLN_IDENTIFIER: &[u8] = b"zerokit/rln/010203040506070809";
pub struct RLN {
proving_key: (ProvingKey<Curve>, ConstraintMatrices<Fr>),
pub(crate) verification_key: VerifyingKey<Curve>,
#[cfg(not(target_arch = "wasm32"))]
pub(crate) graph_data: Vec<u8>,
#[cfg(not(feature = "stateless"))]
pub(crate) tree: PoseidonTree,
@@ -54,7 +63,7 @@ impl RLN {
/// // We create a new RLN instance
/// let mut rln = RLN::new(tree_height, input);
/// ```
#[cfg(not(feature = "stateless"))]
#[cfg(all(not(target_arch = "wasm32"), not(feature = "stateless")))]
pub fn new<R: Read>(tree_height: usize, mut input_data: R) -> Result<RLN> {
// We read input
let mut input: Vec<u8> = Vec::new();
@@ -98,7 +107,7 @@ impl RLN {
/// let mut rln = RLN::new();
/// ```
#[cfg_attr(docsrs, doc(cfg(feature = "stateless")))]
#[cfg(feature = "stateless")]
#[cfg(all(not(target_arch = "wasm32"), feature = "stateless"))]
pub fn new() -> Result<RLN> {
let proving_key = zkey_from_folder();
let verification_key = &proving_key.0.vk;
@@ -116,6 +125,7 @@ impl RLN {
/// Input parameters are
/// - `tree_height`: the height of the internal Merkle tree
/// - `zkey_vec`: a byte vector containing the proving key (`rln_final.zkey` or `rln_final.arkzkey`) as a binary file
/// - `graph_data`: a byte vector containing the graph data (`graph.bin`) as a binary file
/// - `tree_config_input`: a reader for a string containing a json with the merkle tree configuration
///
/// Example:
@@ -143,11 +153,10 @@ impl RLN {
/// tree_height,
/// resources[0].clone(),
/// resources[1].clone(),
/// resources[2].clone(),
/// tree_config_buffer,
/// );
/// ```
#[cfg(not(feature = "stateless"))]
#[cfg(all(not(target_arch = "wasm32"), not(feature = "stateless")))]
pub fn new_with_params<R: Read>(
tree_height: usize,
zkey_vec: Vec<u8>,
@@ -187,6 +196,7 @@ impl RLN {
///
/// Input parameters are
/// - `zkey_vec`: a byte vector containing the proving key (`rln_final.zkey` or `rln_final.arkzkey`) as a binary file
/// - `graph_data`: a byte vector containing the graph data (`graph.bin`) as a binary file
///
/// Example:
/// ```
@@ -208,10 +218,9 @@ impl RLN {
/// let mut rln = RLN::new_with_params(
/// resources[0].clone(),
/// resources[1].clone(),
/// resources[2].clone(),
/// );
/// ```
#[cfg(feature = "stateless")]
#[cfg(all(not(target_arch = "wasm32"), feature = "stateless"))]
pub fn new_with_params(zkey_vec: Vec<u8>, graph_data: Vec<u8>) -> Result<RLN> {
let proving_key = zkey_from_raw(&zkey_vec)?;
let verification_key = vk_from_raw(&zkey_vec)?;
@@ -223,6 +232,44 @@ impl RLN {
})
}
/// Creates a new stateless RLN object by passing the circuit's proving key as a byte vector.
///
/// Input parameters are
/// - `zkey_vec`: a byte vector containing the proving key (`rln_final.zkey` or `rln_final.arkzkey`) as a binary file
///
/// Example:
/// ```
/// use std::fs::File;
/// use std::io::Read;
///
/// let resources_folder = "./resources/tree_height_20/";
///
/// let mut resources: Vec<Vec<u8>> = Vec::new();
/// for filename in ["rln_final.zkey", "graph.bin"] {
/// let fullpath = format!("{resources_folder}{filename}");
/// let mut file = File::open(&fullpath).expect("no file found");
/// let metadata = std::fs::metadata(&fullpath).expect("unable to read metadata");
/// let mut buffer = vec![0; metadata.len() as usize];
/// file.read_exact(&mut buffer).expect("buffer overflow");
/// resources.push(buffer);
/// }
///
/// let mut rln = RLN::new_with_params(resources[0].clone());
/// ```
#[cfg(all(target_arch = "wasm32", feature = "stateless"))]
pub fn new_with_params(zkey_vec: Vec<u8>) -> Result<RLN> {
let proving_key = zkey_from_raw(&zkey_vec)?;
let verification_key = vk_from_raw(&zkey_vec)?;
Ok(RLN {
proving_key,
verification_key,
})
}
////////////////////////////////////////////////////////
// Merkle-tree APIs
////////////////////////////////////////////////////////
@@ -698,6 +745,7 @@ impl RLN {
/// rln.prove(&mut input_buffer, &mut output_buffer).unwrap();
/// let zk_proof = output_buffer.into_inner();
/// ```
#[cfg(not(target_arch = "wasm32"))]
pub fn prove<R: Read, W: Write>(
&mut self,
mut input_data: R,
@@ -818,7 +866,7 @@ impl RLN {
/// // proof_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32>]
/// let mut proof_data = output_buffer.into_inner();
/// ```
#[cfg(not(feature = "stateless"))]
#[cfg(all(not(target_arch = "wasm32"), not(feature = "stateless")))]
pub fn generate_rln_proof<R: Read, W: Write>(
&mut self,
mut input_data: R,
@@ -844,6 +892,7 @@ impl RLN {
//
// output_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32>]
// we skip it from documentation for now
#[cfg(not(target_arch = "wasm32"))]
pub fn generate_rln_proof_with_witness<R: Read, W: Write>(
&mut self,
mut input_data: R,
@@ -863,6 +912,28 @@ impl RLN {
Ok(())
}
// Generate RLN Proof using a witness calculated from outside zerokit
//
// output_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32>]
#[cfg(target_arch = "wasm32")]
pub fn generate_rln_proof_with_witness<W: Write>(
&mut self,
calculated_witness: Vec<BigInt>,
rln_witness_vec: Vec<u8>,
mut output_data: W,
) -> Result<()> {
let (rln_witness, _) = deserialize_witness(&rln_witness_vec[..])?;
let proof_values = proof_values_from_witness(&rln_witness)?;
let proof = generate_proof_with_witness(calculated_witness, &self.proving_key).unwrap();
// Note: we export a serialization of ark-groth16::Proof not semaphore::Proof
// This proof is compressed, i.e. 128 bytes long
proof.serialize_compressed(&mut output_data)?;
output_data.write_all(&serialize_proof_values(&proof_values))?;
Ok(())
}
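// A free-standing sketch of the wasm32 proving flow (names illustrative),
// assuming `calculated_witness` comes from an external JS-side witness
// calculator and `rln_witness_vec` from zerokit's witness serialization.
#[cfg(target_arch = "wasm32")]
fn prove_with_external_witness(
    rln: &mut RLN,
    calculated_witness: Vec<BigInt>,
    rln_witness_vec: Vec<u8>,
) -> Result<Vec<u8>> {
    let mut output = Cursor::new(Vec::<u8>::new());
    rln.generate_rln_proof_with_witness(calculated_witness, rln_witness_vec, &mut output)?;
    // [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]
    Ok(output.into_inner())
}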
/// Verifies a zkSNARK RLN proof against the provided proof values and the state of the internal Merkle tree.
///
/// Input values are:
@@ -1301,6 +1372,7 @@ impl RLN {
}
}
#[cfg(not(target_arch = "wasm32"))]
impl Default for RLN {
fn default() -> Self {
#[cfg(not(feature = "stateless"))]

@@ -486,7 +486,6 @@ mod tree_test {
assert_eq!(received_leaf, Fr::from(0));
}
#[allow(unused_must_use)]
#[test]
// This test checks if `set_leaves_from` throws an error when the index is out of bounds
fn test_set_leaves_bad_index() {
@@ -511,6 +510,8 @@ mod tree_test {
// We add leaves in a batch into the tree
let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves).unwrap());
#[allow(unused_must_use)]
rln.set_leaves_from(bad_index, &mut buffer)
.expect_err("Should throw an error");

@@ -137,8 +137,10 @@ pub fn bytes_le_to_vec_usize(input: &[u8]) -> Result<Vec<usize>> {
}
#[inline(always)]
pub fn normalize_usize(input: usize) -> [u8; 8] {
input.to_le_bytes()
pub fn normalize_usize(input: usize) -> Vec<u8> {
let mut normalized_usize = input.to_le_bytes().to_vec();
normalized_usize.resize(8, 0);
normalized_usize
}
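// The resize is what keeps the serialized layout stable across targets: on
// 64-bit hosts to_le_bytes() already yields 8 bytes and the resize is a
// no-op, while on wasm32 (32-bit usize) it pads the 4 little-endian bytes
// with zeros up to 8.
#[cfg(test)]
#[test]
fn normalize_usize_pads_to_eight_bytes() {
    assert_eq!(normalize_usize(1), vec![1, 0, 0, 0, 0, 0, 0, 0]);
    assert_eq!(normalize_usize(257), vec![1, 1, 0, 0, 0, 0, 0, 0]);
}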
#[inline(always)] // used in tests

@@ -12,27 +12,28 @@ repository = "https://github.com/vacp2p/zerokit"
bench = false
[dependencies]
ark-ff = { version = "0.5.0", features = ["asm"] }
ark-ff = { version = "0.5.0", default-features = false, features = [
"parallel",
] }
num-bigint = { version = "0.4.6", default-features = false, features = [
"rand",
] }
color-eyre = "0.6.2"
pmtree = { package = "vacp2p_pmtree", version = "=2.0.2", optional = true }
color-eyre = "0.6.3"
pmtree = { package = "vacp2p_pmtree", version = "2.0.2", optional = true }
sled = "0.34.7"
serde = "1.0"
lazy_static = "1.4.0"
lazy_static = "1.5.0"
hex = "0.4"
[dev-dependencies]
ark-bn254 = { version = "0.5.0", features = ["std"] }
num-traits = "0.2.19"
hex-literal = "0.3.4"
hex-literal = "1.0.0"
tiny-keccak = { version = "2.0.2", features = ["keccak"] }
criterion = { version = "0.4.0", features = ["html_reports"] }
[features]
default = ["parallel"]
parallel = ["ark-ff/parallel"]
default = []
pmtree-ft = ["pmtree"]
[[bench]]

@@ -8,4 +8,4 @@ args = ["test", "--release"]
[tasks.bench]
command = "cargo"
args = ["bench"]
args = ["bench"]

@@ -125,7 +125,6 @@ where
self.next_index
}
#[must_use]
// Returns the root of the tree
fn root(&self) -> FrOf<Self::Hasher> {
self.nodes[0]
@@ -341,14 +340,12 @@ impl<H: Hasher> ZerokitMerkleProof for FullMerkleProof<H> {
type Index = u8;
type Hasher = H;
#[must_use]
// Returns the length of a Merkle proof
fn length(&self) -> usize {
self.0.len()
}
/// Computes the leaf index corresponding to a Merkle proof
#[must_use]
fn leaf_index(&self) -> usize {
self.0.iter().rev().fold(0, |index, branch| match branch {
FullMerkleBranch::Left(_) => index << 1,
@@ -356,7 +353,6 @@ impl<H: Hasher> ZerokitMerkleProof for FullMerkleProof<H> {
})
}
#[must_use]
/// Returns the path elements forming a Merkle proof
fn get_path_elements(&self) -> Vec<FrOf<Self::Hasher>> {
self.0
@@ -368,7 +364,6 @@ impl<H: Hasher> ZerokitMerkleProof for FullMerkleProof<H> {
}
/// Returns the path indexes forming a Merkle proof
#[must_use]
fn get_path_index(&self) -> Vec<Self::Index> {
self.0
.iter()
@@ -380,7 +375,6 @@ impl<H: Hasher> ZerokitMerkleProof for FullMerkleProof<H> {
}
/// Computes the Merkle root by iteratively hashing the Merkle proof elements together with a given input leaf
#[must_use]
fn compute_root_from(&self, hash: &FrOf<Self::Hasher>) -> FrOf<Self::Hasher> {
self.0.iter().fold(*hash, |hash, branch| match branch {
FullMerkleBranch::Left(sibling) => H::hash(&[hash, *sibling]),
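// A standalone sketch of the index reconstruction used above (Branch stands
// in for FullMerkleBranch with the sibling payload dropped); the rev()
// assumes the proof vector is ordered leaf-to-root, so bits accumulate
// root-first.
enum Branch { Left, Right }

fn toy_leaf_index(path: &[Branch]) -> usize {
    path.iter().rev().fold(0, |index, branch| match branch {
        Branch::Left => index << 1,
        Branch::Right => (index << 1) + 1,
    })
}

#[cfg(test)]
#[test]
fn toy_leaf_index_example() {
    // Right at the leaf level, Left above it: read from the root this is
    // binary 01, i.e. leaf index 1.
    assert_eq!(toy_leaf_index(&[Branch::Right, Branch::Left]), 1);
}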

@@ -109,7 +109,6 @@ where
self.next_index
}
#[must_use]
// Returns the root of the tree
fn root(&self) -> H::Fr {
self.get_node(0, 0)
@@ -326,14 +325,12 @@ where
type Index = u8;
type Hasher = H;
#[must_use]
// Returns the length of a Merkle proof
fn length(&self) -> usize {
self.0.len()
}
/// Computes the leaf index corresponding to a Merkle proof
#[must_use]
fn leaf_index(&self) -> usize {
// In the current implementation the path indexes in a proof correspond to the binary representation of the leaf index
let mut binary_repr = self.get_path_index();
@@ -343,19 +340,16 @@ where
.fold(0, |acc, digit| (acc << 1) + usize::from(digit))
}
#[must_use]
/// Returns the path elements forming a Merkle proof
fn get_path_elements(&self) -> Vec<H::Fr> {
self.0.iter().map(|x| x.0).collect()
}
/// Returns the path indexes forming a Merkle proof
#[must_use]
fn get_path_index(&self) -> Vec<u8> {
self.0.iter().map(|x| x.1).collect()
}
#[must_use]
/// Computes the Merkle root by iteratively hashing the Merkle proof elements together with a given input leaf
fn compute_root_from(&self, leaf: &H::Fr) -> H::Fr {
let mut acc: H::Fr = *leaf;

@@ -9,8 +9,6 @@
// The following implementation was adapted from https://github.com/arkworks-rs/sponge/blob/7d9b3a474c9ddb62890014aeaefcb142ac2b3776/src/poseidon/grain_lfsr.rs
#![allow(dead_code)]
use ark_ff::PrimeField;
use num_bigint::BigUint;
@@ -20,7 +18,6 @@ pub struct PoseidonGrainLFSR {
pub head: usize,
}
#[allow(unused_variables)]
impl PoseidonGrainLFSR {
pub fn new(
is_field: u64,