mirror of
https://github.com/zkonduit/ezkl.git
synced 2026-01-13 08:17:57 -05:00
Compare commits
63 Commits
v12.0.2
...
verifier-r
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
41d3233223 | ||
|
|
cda4b2dacd | ||
|
|
1ddbce537f | ||
|
|
5faa56f359 | ||
|
|
97c1397fe6 | ||
|
|
e5aa48fbd6 | ||
|
|
e3051b79cd | ||
|
|
fe960767df | ||
|
|
64fbc8a1c9 | ||
|
|
d4ebf8f120 | ||
|
|
6788a9c726 | ||
|
|
9fd3799ed1 | ||
|
|
c9351c0607 | ||
|
|
a2a8488d67 | ||
|
|
c9f9d17f16 | ||
|
|
9908f5c214 | ||
|
|
66ae2f7bac | ||
|
|
4d18a5a903 | ||
|
|
b49b0487c4 | ||
|
|
cd1860246c | ||
|
|
ad59186eb6 | ||
|
|
61b7a8e9b5 | ||
|
|
5dbc7d5176 | ||
|
|
472a505063 | ||
|
|
1588c9735e | ||
|
|
9d876d5bf9 | ||
|
|
fd29794cdd | ||
|
|
a616fbb759 | ||
|
|
f1fe01952f | ||
|
|
23f71873ce | ||
|
|
31168b0a99 | ||
|
|
35bb286d40 | ||
|
|
d5944d36fe | ||
|
|
3df63d540d | ||
|
|
779f82e0dc | ||
|
|
889db3a6fe | ||
|
|
fab08bbe9d | ||
|
|
72f1892d54 | ||
|
|
f3e531c3e7 | ||
|
|
00f8dd42f6 | ||
|
|
099726b245 | ||
|
|
d5f18495de | ||
|
|
add04f6090 | ||
|
|
6b71bdc920 | ||
|
|
3e5153db9f | ||
|
|
a1dd82a3c1 | ||
|
|
f6acf241c9 | ||
|
|
dbe812b88d | ||
|
|
36188ab542 | ||
|
|
cd9d7c3d50 | ||
|
|
f5ae49e1c5 | ||
|
|
f25b420429 | ||
|
|
f59aaf80c5 | ||
|
|
257e275773 | ||
|
|
2fe0eb4b27 | ||
|
|
bdad19b83c | ||
|
|
a17aad064b | ||
|
|
985205ae40 | ||
|
|
b08dc28ed5 | ||
|
|
b3997cd325 | ||
|
|
83cb957299 | ||
|
|
c92be15b81 | ||
|
|
6924797e48 |
3
.github/workflows/engine.yml
vendored
3
.github/workflows/engine.yml
vendored
@@ -26,6 +26,9 @@ jobs:
|
||||
override: true
|
||||
components: rustfmt, clippy
|
||||
- uses: jetli/wasm-pack-action@v0.4.0
|
||||
with:
|
||||
# Pin to version 0.12.1
|
||||
version: 'v0.12.1'
|
||||
- name: Add wasm32-unknown-unknown target
|
||||
run: rustup target add wasm32-unknown-unknown
|
||||
|
||||
|
||||
7
.github/workflows/release.yml
vendored
7
.github/workflows/release.yml
vendored
@@ -181,9 +181,14 @@ jobs:
|
||||
echo "target flag is: ${{ env.TARGET_FLAGS }}"
|
||||
echo "target dir is: ${{ env.TARGET_DIR }}"
|
||||
|
||||
- name: Build release binary
|
||||
- name: Build release binary (no asm)
|
||||
if: matrix.build != 'linux-gnu'
|
||||
run: ${{ env.CARGO }} build --release ${{ env.TARGET_FLAGS }} -Z sparse-registry
|
||||
|
||||
- name: Build release binary (asm)
|
||||
if: matrix.build == 'linux-gnu'
|
||||
run: ${{ env.CARGO }} build --release ${{ env.TARGET_FLAGS }} -Z sparse-registry --features asm
|
||||
|
||||
- name: Strip release binary
|
||||
if: matrix.build != 'windows-msvc' && matrix.build != 'linux-aarch64'
|
||||
run: strip "target/${{ matrix.target }}/release/ezkl"
|
||||
|
||||
232
.github/workflows/rust.yml
vendored
232
.github/workflows/rust.yml
vendored
@@ -65,40 +65,40 @@ jobs:
|
||||
- name: Library tests (original lookup)
|
||||
run: cargo nextest run --lib --verbose --no-default-features --features ezkl
|
||||
|
||||
ultra-overflow-tests-gpu:
|
||||
runs-on: GPU
|
||||
env:
|
||||
ENABLE_ICICLE_GPU: true
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: nightly-2024-07-18
|
||||
override: true
|
||||
components: rustfmt, clippy
|
||||
- uses: baptiste0928/cargo-install@v1
|
||||
with:
|
||||
crate: cargo-nextest
|
||||
locked: true
|
||||
- uses: mwilliamson/setup-wasmtime-action@v2
|
||||
with:
|
||||
wasmtime-version: "3.0.1"
|
||||
- name: Install wasm32-wasi
|
||||
run: rustup target add wasm32-wasi
|
||||
- name: Install cargo-wasi
|
||||
run: cargo install cargo-wasi
|
||||
# - name: Matmul overflow (wasi)
|
||||
# run: cargo wasi test matmul_col_ultra_overflow -- --include-ignored --nocapture
|
||||
# - name: Conv overflow (wasi)
|
||||
# run: cargo wasi test conv_col_ultra_overflow -- --include-ignored --nocapture
|
||||
- name: lookup overflow
|
||||
run: cargo nextest run --release lookup_ultra_overflow --no-capture --features icicle -- --include-ignored
|
||||
- name: Matmul overflow
|
||||
run: RUST_LOG=debug cargo nextest run matmul_col_ultra_overflow --no-capture --features icicle -- --include-ignored
|
||||
- name: Conv overflow
|
||||
run: RUST_LOG=debug cargo nextest run conv_col_ultra_overflow --no-capture --features icicle -- --include-ignored
|
||||
- name: Conv + relu overflow
|
||||
run: cargo nextest run --release conv_relu_col_ultra_overflow --no-capture --features icicle -- --include-ignored
|
||||
# ultra-overflow-tests-gpu:
|
||||
# runs-on: GPU
|
||||
# env:
|
||||
# ENABLE_ICICLE_GPU: true
|
||||
# steps:
|
||||
# - uses: actions/checkout@v4
|
||||
# - uses: actions-rs/toolchain@v1
|
||||
# with:
|
||||
# toolchain: nightly-2024-07-18
|
||||
# override: true
|
||||
# components: rustfmt, clippy
|
||||
# - uses: baptiste0928/cargo-install@v1
|
||||
# with:
|
||||
# crate: cargo-nextest
|
||||
# locked: true
|
||||
# - uses: mwilliamson/setup-wasmtime-action@v2
|
||||
# with:
|
||||
# wasmtime-version: "3.0.1"
|
||||
# - name: Install wasm32-wasi
|
||||
# run: rustup target add wasm32-wasi
|
||||
# - name: Install cargo-wasi
|
||||
# run: cargo install cargo-wasi
|
||||
# # - name: Matmul overflow (wasi)
|
||||
# # run: cargo wasi test matmul_col_ultra_overflow -- --include-ignored --nocapture
|
||||
# # - name: Conv overflow (wasi)
|
||||
# # run: cargo wasi test conv_col_ultra_overflow -- --include-ignored --nocapture
|
||||
# - name: lookup overflow
|
||||
# run: cargo nextest run lookup_ultra_overflow --no-capture --features icicle -- --include-ignored
|
||||
# - name: Matmul overflow
|
||||
# run: RUST_LOG=debug cargo nextest run matmul_col_ultra_overflow --no-capture --features icicle -- --include-ignored
|
||||
# - name: Conv overflow
|
||||
# run: RUST_LOG=debug cargo nextest run conv_col_ultra_overflow --no-capture --features icicle -- --include-ignored
|
||||
# - name: Conv + relu overflow
|
||||
# run: cargo nextest run conv_relu_col_ultra_overflow --no-capture --features icicle -- --include-ignored
|
||||
|
||||
ultra-overflow-tests_og-lookup:
|
||||
runs-on: non-gpu
|
||||
@@ -184,7 +184,6 @@ jobs:
|
||||
|
||||
wasm32-tests:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [build, library-tests, docs, python-tests, python-integration-tests]
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions-rs/toolchain@v1
|
||||
@@ -193,6 +192,9 @@ jobs:
|
||||
override: true
|
||||
components: rustfmt, clippy
|
||||
- uses: jetli/wasm-pack-action@v0.4.0
|
||||
with:
|
||||
# Pin to version 0.12.1
|
||||
version: 'v0.12.1'
|
||||
- uses: nanasess/setup-chromedriver@v2
|
||||
# with:
|
||||
# chromedriver-version: "115.0.5790.102"
|
||||
@@ -241,7 +243,7 @@ jobs:
|
||||
- name: public outputs and tolerance > 0
|
||||
run: cargo nextest run --release --verbose tests::mock_tolerance_public_outputs_ --test-threads 32
|
||||
- name: public outputs + batch size == 10
|
||||
run: cargo nextest run --release --verbose tests::mock_large_batch_public_outputs_ --test-threads 32
|
||||
run: cargo nextest run --release --verbose tests::mock_large_batch_public_outputs_ --test-threads 16
|
||||
- name: kzg inputs
|
||||
run: cargo nextest run --release --verbose tests::mock_kzg_input_::t --test-threads 32
|
||||
- name: kzg params
|
||||
@@ -327,12 +329,12 @@ jobs:
|
||||
cd in-browser-evm-verifier
|
||||
pnpm build:commonjs
|
||||
cd ..
|
||||
- name: Install solc
|
||||
run: (hash svm 2>/dev/null || cargo install svm-rs) && svm install 0.8.20 && solc --version
|
||||
# - name: Install solc
|
||||
# run: (hash svm 2>/dev/null || cargo install svm-rs) && svm install 0.8.20 && solc --version
|
||||
- name: Install Anvil
|
||||
run: cargo install --git https://github.com/foundry-rs/foundry --rev 62cdea8ff9e6efef011f77e295823b5f2dbeb3a1 --locked anvil --force
|
||||
- name: KZG prove and verify tests (EVM + VK rendered seperately)
|
||||
run: cargo nextest run --release --verbose tests_evm::kzg_evm_prove_and_verify_render_seperately_ --test-threads 1
|
||||
- name: KZG prove and verify tests (EVM + reusable verifier + col-overflow)
|
||||
run: cargo nextest run --release --verbose tests_evm::kzg_evm_prove_and_verify_reusable_verifier --test-threads 1
|
||||
- name: KZG prove and verify tests (EVM + kzg all)
|
||||
run: cargo nextest run --release --verbose tests_evm::kzg_evm_kzg_all_prove_and_verify --test-threads 1
|
||||
- name: KZG prove and verify tests (EVM + kzg inputs)
|
||||
@@ -373,6 +375,9 @@ jobs:
|
||||
override: true
|
||||
components: rustfmt, clippy
|
||||
- uses: jetli/wasm-pack-action@v0.4.0
|
||||
with:
|
||||
# Pin to version 0.12.1
|
||||
version: 'v0.12.1'
|
||||
- name: Add wasm32-unknown-unknown target
|
||||
run: rustup target add wasm32-unknown-unknown
|
||||
|
||||
@@ -435,40 +440,40 @@ jobs:
|
||||
- name: KZG prove and verify tests (hashed outputs)
|
||||
run: cargo nextest run --release --verbose tests::kzg_prove_and_verify_hashed
|
||||
|
||||
prove-and-verify-tests-gpu:
|
||||
runs-on: GPU
|
||||
env:
|
||||
ENABLE_ICICLE_GPU: true
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: nightly-2024-07-18
|
||||
override: true
|
||||
components: rustfmt, clippy
|
||||
- name: Add rust-src
|
||||
run: rustup component add rust-src --toolchain nightly-2024-07-18-x86_64-unknown-linux-gnu
|
||||
- uses: actions/checkout@v3
|
||||
- uses: baptiste0928/cargo-install@v1
|
||||
with:
|
||||
crate: cargo-nextest
|
||||
locked: true
|
||||
- name: KZG prove and verify tests (kzg outputs)
|
||||
run: cargo nextest run --release --verbose tests::kzg_prove_and_verify_kzg_output --features icicle --test-threads 1
|
||||
- name: KZG prove and verify tests (public outputs + column overflow)
|
||||
run: cargo nextest run --release --verbose tests::kzg_prove_and_verify_with_overflow_::w --features icicle --test-threads 1
|
||||
- name: KZG prove and verify tests (public outputs + fixed params + column overflow)
|
||||
run: cargo nextest run --release --verbose tests::kzg_prove_and_verify_with_overflow_fixed_params_ --features icicle --test-threads 1
|
||||
- name: KZG prove and verify tests (public outputs)
|
||||
run: cargo nextest run --release --verbose tests::kzg_prove_and_verify_::t --features icicle --test-threads 1
|
||||
- name: KZG prove and verify tests (public outputs + column overflow)
|
||||
run: cargo nextest run --release --verbose tests::kzg_prove_and_verify_::t --features icicle --test-threads 1
|
||||
- name: KZG prove and verify tests (public inputs)
|
||||
run: cargo nextest run --release --verbose tests::kzg_prove_and_verify_public_input --features icicle --test-threads 1
|
||||
- name: KZG prove and verify tests (fixed params)
|
||||
run: cargo nextest run --release --verbose tests::kzg_prove_and_verify_fixed_params --features icicle --test-threads 1
|
||||
- name: KZG prove and verify tests (hashed outputs)
|
||||
run: cargo nextest run --release --verbose tests::kzg_prove_and_verify_hashed --features icicle --test-threads 1
|
||||
# prove-and-verify-tests-gpu:
|
||||
# runs-on: GPU
|
||||
# env:
|
||||
# ENABLE_ICICLE_GPU: true
|
||||
# steps:
|
||||
# - uses: actions/checkout@v4
|
||||
# - uses: actions-rs/toolchain@v1
|
||||
# with:
|
||||
# toolchain: nightly-2024-07-18
|
||||
# override: true
|
||||
# components: rustfmt, clippy
|
||||
# - name: Add rust-src
|
||||
# run: rustup component add rust-src --toolchain nightly-2024-07-18-x86_64-unknown-linux-gnu
|
||||
# - uses: actions/checkout@v3
|
||||
# - uses: baptiste0928/cargo-install@v1
|
||||
# with:
|
||||
# crate: cargo-nextest
|
||||
# locked: true
|
||||
# - name: KZG prove and verify tests (kzg outputs)
|
||||
# run: cargo nextest run --release --verbose tests::kzg_prove_and_verify_kzg_output --features icicle --test-threads 1
|
||||
# - name: KZG prove and verify tests (public outputs + column overflow)
|
||||
# run: cargo nextest run --release --verbose tests::kzg_prove_and_verify_with_overflow_::w --features icicle --test-threads 1
|
||||
# - name: KZG prove and verify tests (public outputs + fixed params + column overflow)
|
||||
# run: cargo nextest run --release --verbose tests::kzg_prove_and_verify_with_overflow_fixed_params_ --features icicle --test-threads 1
|
||||
# - name: KZG prove and verify tests (public outputs)
|
||||
# run: cargo nextest run --release --verbose tests::kzg_prove_and_verify_::t --features icicle --test-threads 1
|
||||
# - name: KZG prove and verify tests (public outputs + column overflow)
|
||||
# run: cargo nextest run --release --verbose tests::kzg_prove_and_verify_::t --features icicle --test-threads 1
|
||||
# - name: KZG prove and verify tests (public inputs)
|
||||
# run: cargo nextest run --release --verbose tests::kzg_prove_and_verify_public_input --features icicle --test-threads 1
|
||||
# - name: KZG prove and verify tests (fixed params)
|
||||
# run: cargo nextest run --release --verbose tests::kzg_prove_and_verify_fixed_params --features icicle --test-threads 1
|
||||
# - name: KZG prove and verify tests (hashed outputs)
|
||||
# run: cargo nextest run --release --verbose tests::kzg_prove_and_verify_hashed --features icicle --test-threads 1
|
||||
|
||||
prove-and-verify-mock-aggr-tests:
|
||||
runs-on: self-hosted
|
||||
@@ -503,7 +508,7 @@ jobs:
|
||||
crate: cargo-nextest
|
||||
locked: true
|
||||
- name: KZG )tests
|
||||
run: cargo nextest run --release --verbose tests_aggr::kzg_aggr_prove_and_verify_ --features icicle --test-threads 1 -- --include-ignored
|
||||
run: cargo nextest run --verbose tests_aggr::kzg_aggr_prove_and_verify_ --features icicle --test-threads 1 -- --include-ignored
|
||||
|
||||
prove-and-verify-aggr-tests:
|
||||
runs-on: large-self-hosted
|
||||
@@ -536,8 +541,8 @@ jobs:
|
||||
with:
|
||||
crate: cargo-nextest
|
||||
locked: true
|
||||
- name: Install solc
|
||||
run: (hash svm 2>/dev/null || cargo install svm-rs) && svm install 0.8.20 && solc --version
|
||||
# - name: Install solc
|
||||
# run: (hash svm 2>/dev/null || cargo install svm-rs) && svm install 0.8.20 && solc --version
|
||||
- name: Install Anvil
|
||||
run: cargo install --git https://github.com/foundry-rs/foundry --rev 62cdea8ff9e6efef011f77e295823b5f2dbeb3a1 --locked anvil --force
|
||||
- name: KZG prove and verify aggr tests
|
||||
@@ -575,8 +580,8 @@ jobs:
|
||||
components: rustfmt, clippy
|
||||
- name: Install cmake
|
||||
run: sudo apt-get install -y cmake
|
||||
- name: Install solc
|
||||
run: (hash svm 2>/dev/null || cargo install svm-rs) && svm install 0.8.20 && solc --version
|
||||
# - name: Install solc
|
||||
# run: (hash svm 2>/dev/null || cargo install svm-rs) && svm install 0.8.20 && solc --version
|
||||
- name: Setup Virtual Env and Install python dependencies
|
||||
run: python -m venv .env --clear; source .env/bin/activate; pip install -r requirements.txt;
|
||||
- name: Install Anvil
|
||||
@@ -652,8 +657,8 @@ jobs:
|
||||
with:
|
||||
crate: cargo-nextest
|
||||
locked: true
|
||||
- name: Install solc
|
||||
run: (hash svm 2>/dev/null || cargo install svm-rs) && svm install 0.8.20 && solc --version
|
||||
# - name: Install solc
|
||||
# run: (hash svm 2>/dev/null || cargo install svm-rs) && svm install 0.8.20 && solc --version
|
||||
- name: Install Anvil
|
||||
run: cargo install --git https://github.com/foundry-rs/foundry --rev 62cdea8ff9e6efef011f77e295823b5f2dbeb3a1 --locked anvil --force
|
||||
- name: Install pip
|
||||
@@ -681,3 +686,68 @@ jobs:
|
||||
run: source .env/bin/activate; cargo nextest run py_tests::tests::voice_
|
||||
- name: NBEATS tutorial
|
||||
run: source .env/bin/activate; cargo nextest run py_tests::tests::nbeats_
|
||||
# - name: Reusable verifier tutorial
|
||||
# run: source .env/bin/activate; cargo nextest run py_tests::tests::reusable_
|
||||
|
||||
ios-integration-tests:
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: nightly-2024-07-18
|
||||
override: true
|
||||
components: rustfmt, clippy
|
||||
- uses: baptiste0928/cargo-install@v1
|
||||
with:
|
||||
crate: cargo-nextest
|
||||
locked: true
|
||||
- name: Run ios tests
|
||||
run: CARGO_BUILD_TARGET=aarch64-apple-darwin RUSTUP_TOOLCHAIN=nightly-2024-07-18-aarch64-apple-darwin cargo test --test ios_integration_tests --features ios-bindings-test --no-default-features
|
||||
|
||||
swift-package-tests:
|
||||
runs-on: macos-latest
|
||||
needs: [ios-integration-tests]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: nightly-2024-07-18
|
||||
override: true
|
||||
components: rustfmt, clippy
|
||||
- name: Build EzklCoreBindings
|
||||
run: CONFIGURATION=debug cargo run --bin ios_gen_bindings --features "ios-bindings uuid camino uniffi_bindgen" --no-default-features
|
||||
|
||||
- name: Clone ezkl-swift- repository
|
||||
run: |
|
||||
git clone https://github.com/zkonduit/ezkl-swift-package.git
|
||||
|
||||
- name: Copy EzklCoreBindings
|
||||
run: |
|
||||
rm -rf ezkl-swift-package/Sources/EzklCoreBindings
|
||||
cp -r build/EzklCoreBindings ezkl-swift-package/Sources/
|
||||
|
||||
- name: Set up Xcode environment
|
||||
run: |
|
||||
sudo xcode-select -s /Applications/Xcode.app/Contents/Developer
|
||||
sudo xcodebuild -license accept
|
||||
|
||||
- name: Run Package Tests
|
||||
run: |
|
||||
cd ezkl-swift-package
|
||||
xcodebuild test \
|
||||
-scheme EzklPackage \
|
||||
-destination 'platform=iOS Simulator,name=iPhone 15 Pro,OS=17.5' \
|
||||
-resultBundlePath ../testResults
|
||||
|
||||
- name: Run Example App Tests
|
||||
run: |
|
||||
cd ezkl-swift-package/Example
|
||||
xcodebuild test \
|
||||
-project Example.xcodeproj \
|
||||
-scheme EzklApp \
|
||||
-destination 'platform=iOS Simulator,name=iPhone 15 Pro,OS=17.5' \
|
||||
-parallel-testing-enabled NO \
|
||||
-resultBundlePath ../../exampleTestResults \
|
||||
-skip-testing:EzklAppUITests/EzklAppUITests/testButtonClicksInOrder
|
||||
75
.github/workflows/update-ios-package.yml
vendored
Normal file
75
.github/workflows/update-ios-package.yml
vendored
Normal file
@@ -0,0 +1,75 @@
|
||||
name: Build and Publish EZKL iOS SPM package
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
tag:
|
||||
description: "The tag to release"
|
||||
required: true
|
||||
push:
|
||||
tags:
|
||||
- "*"
|
||||
|
||||
jobs:
|
||||
build-and-update:
|
||||
runs-on: macos-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout EZKL
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Install Rust
|
||||
uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: nightly
|
||||
override: true
|
||||
|
||||
- name: Build EzklCoreBindings
|
||||
run: CONFIGURATION=release cargo run --bin ios_gen_bindings --features "ios-bindings uuid camino uniffi_bindgen" --no-default-features
|
||||
|
||||
- name: Clone ezkl-swift-package repository
|
||||
run: |
|
||||
git clone https://github.com/zkonduit/ezkl-swift-package.git
|
||||
|
||||
- name: Copy EzklCoreBindings
|
||||
run: |
|
||||
rm -rf ezkl-swift-package/Sources/EzklCoreBindings
|
||||
cp -r build/EzklCoreBindings ezkl-swift-package/Sources/
|
||||
|
||||
- name: Set up Xcode environment
|
||||
run: |
|
||||
sudo xcode-select -s /Applications/Xcode.app/Contents/Developer
|
||||
sudo xcodebuild -license accept
|
||||
|
||||
- name: Run Package Tests
|
||||
run: |
|
||||
cd ezkl-swift-package
|
||||
xcodebuild test \
|
||||
-scheme EzklPackage \
|
||||
-destination 'platform=iOS Simulator,name=iPhone 15 Pro,OS=17.5' \
|
||||
-resultBundlePath ../testResults
|
||||
|
||||
- name: Run Example App Tests
|
||||
run: |
|
||||
cd ezkl-swift-package/Example
|
||||
xcodebuild test \
|
||||
-project Example.xcodeproj \
|
||||
-scheme EzklApp \
|
||||
-destination 'platform=iOS Simulator,name=iPhone 15 Pro,OS=17.5' \
|
||||
-parallel-testing-enabled NO \
|
||||
-resultBundlePath ../../exampleTestResults \
|
||||
-skip-testing:EzklAppUITests/EzklAppUITests/testButtonClicksInOrder
|
||||
|
||||
- name: Commit and Push Changes to feat/ezkl-direct-integration
|
||||
run: |
|
||||
cd ezkl-swift-package
|
||||
git config user.name "GitHub Action"
|
||||
git config user.email "action@github.com"
|
||||
git add Sources/EzklCoreBindings
|
||||
git commit -m "Automatically updated EzklCoreBindings for EZKL"
|
||||
git tag ${{ github.event.inputs.tag }}
|
||||
git remote set-url origin https://zkonduit:${EZKL_PORTER_TOKEN}@github.com/zkonduit/ezkl-swift-package.git
|
||||
git push origin
|
||||
git push origin --tags
|
||||
env:
|
||||
EZKL_PORTER_TOKEN: ${{ secrets.EZKL_PORTER_TOKEN }}
|
||||
7
.gitignore
vendored
7
.gitignore
vendored
@@ -9,6 +9,7 @@ pkg
|
||||
!AttestData.sol
|
||||
!VerifierBase.sol
|
||||
!LoadInstances.sol
|
||||
!VerifierManager.sol
|
||||
*.pf
|
||||
*.vk
|
||||
*.pk
|
||||
@@ -46,7 +47,7 @@ var/
|
||||
node_modules
|
||||
/dist
|
||||
timingData.json
|
||||
!tests/wasm/pk.key
|
||||
!tests/wasm/vk.key
|
||||
!tests/assets/pk.key
|
||||
!tests/assets/vk.key
|
||||
docs/python/build
|
||||
!tests/wasm/vk_aggr.key
|
||||
!tests/assets/vk_aggr.key
|
||||
567
Cargo.lock
generated
567
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
181
Cargo.toml
181
Cargo.toml
@@ -4,6 +4,7 @@ cargo-features = ["profile-rustflags"]
|
||||
name = "ezkl"
|
||||
version = "0.0.0"
|
||||
edition = "2021"
|
||||
default-run = "ezkl"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
@@ -11,86 +12,109 @@ edition = "2021"
|
||||
# Name to be imported within python
|
||||
# Example: import ezkl
|
||||
name = "ezkl"
|
||||
crate-type = ["cdylib", "rlib"]
|
||||
crate-type = ["cdylib", "rlib", "staticlib"]
|
||||
|
||||
|
||||
[dependencies]
|
||||
halo2_gadgets = { git = "https://github.com/zkonduit/halo2", branch = "ac/optional-selector-poly" }
|
||||
halo2_proofs = { git = "https://github.com/zkonduit/halo2", branch = "ac/optional-selector-poly" }
|
||||
halo2curves = { git = "https://github.com/privacy-scaling-explorations/halo2curves", rev = "9fff22c", features = [
|
||||
halo2curves = { git = "https://github.com/privacy-scaling-explorations/halo2curves", rev = "b753a832e92d5c86c5c997327a9cf9de86a18851", features = [
|
||||
"derive_serde",
|
||||
"derive_serde",
|
||||
] }
|
||||
rand = { version = "0.8", default_features = false }
|
||||
itertools = { version = "0.10.3", default_features = false }
|
||||
clap = { version = "4.5.3", features = ["derive"] }
|
||||
clap_complete = "4.5.2"
|
||||
serde = { version = "1.0.126", features = ["derive"], optional = true }
|
||||
serde_json = { version = "1.0.97", default_features = false, features = [
|
||||
"float_roundtrip",
|
||||
"raw_value",
|
||||
], optional = true }
|
||||
log = { version = "0.4.17", default_features = false, optional = true }
|
||||
thiserror = { version = "1.0.38", default_features = false }
|
||||
hex = { version = "0.4.3", default_features = false }
|
||||
halo2_proofs = { git = "https://github.com/zkonduit/halo2", package = "halo2_proofs", branch = "ac/cache-lookup-commitments", features = [
|
||||
"circuit-params",
|
||||
] }
|
||||
rand = { version = "0.8", default-features = false }
|
||||
itertools = { version = "0.10.3", default-features = false }
|
||||
clap = { version = "4.5.3", features = ["derive"], optional = true }
|
||||
serde = { version = "1.0.126", features = ["derive"] }
|
||||
clap_complete = { version = "4.5.2", optional = true }
|
||||
log = { version = "0.4.17", default-features = false }
|
||||
thiserror = { version = "1.0.38", default-features = false }
|
||||
hex = { version = "0.4.3", default-features = false }
|
||||
halo2_wrong_ecc = { git = "https://github.com/zkonduit/halo2wrong", branch = "ac/chunked-mv-lookup", package = "ecc" }
|
||||
snark-verifier = { git = "https://github.com/zkonduit/snark-verifier", branch = "ac/chunked-mv-lookup", features = [
|
||||
"derive_serde",
|
||||
] }
|
||||
halo2_solidity_verifier = { git = "https://github.com/alexander-camuto/halo2-solidity-verifier", branch = "main" }
|
||||
halo2_solidity_verifier = { git = "https://github.com/alexander-camuto/halo2-solidity-verifier", branch = "vka-log", optional = true }
|
||||
maybe-rayon = { version = "0.1.1", default_features = false }
|
||||
bincode = { version = "1.3.3", default_features = false }
|
||||
unzip-n = "0.1.2"
|
||||
num = "0.4.1"
|
||||
portable-atomic = "1.6.0"
|
||||
tosubcommand = { git = "https://github.com/zkonduit/enum_to_subcommand", package = "tosubcommand" }
|
||||
metal = { git = "https://github.com/gfx-rs/metal-rs", optional = true }
|
||||
semver = "1.0.22"
|
||||
portable-atomic = { version = "1.6.0", optional = true }
|
||||
tosubcommand = { git = "https://github.com/zkonduit/enum_to_subcommand", package = "tosubcommand", optional = true }
|
||||
semver = { version = "1.0.22", optional = true }
|
||||
|
||||
[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
|
||||
serde_json = { version = "1.0.97", features = ["float_roundtrip", "raw_value"] }
|
||||
|
||||
# evm related deps
|
||||
[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
|
||||
alloy = { git = "https://github.com/alloy-rs/alloy", version = "0.1.0", rev="5fbf57bac99edef9d8475190109a7ea9fb7e5e83", features = ["provider-http", "signers", "contract", "rpc-types-eth", "signer-wallet", "node-bindings"] }
|
||||
foundry-compilers = {version = "0.4.1", features = ["svm-solc"]}
|
||||
ethabi = "18"
|
||||
indicatif = { version = "0.17.5", features = ["rayon"] }
|
||||
gag = { version = "1.0.0", default_features = false }
|
||||
alloy = { git = "https://github.com/alloy-rs/alloy", version = "0.1.0", rev = "5fbf57bac99edef9d8475190109a7ea9fb7e5e83", features = [
|
||||
"provider-http",
|
||||
"signers",
|
||||
"contract",
|
||||
"rpc-types-eth",
|
||||
"signer-wallet",
|
||||
"node-bindings",
|
||||
|
||||
], optional = true }
|
||||
foundry-compilers = { version = "0.4.1", features = [
|
||||
"svm-solc",
|
||||
], optional = true }
|
||||
ethabi = { version = "18", optional = true }
|
||||
indicatif = { version = "0.17.5", features = ["rayon"], optional = true }
|
||||
gag = { version = "1.0.0", default-features = false, optional = true }
|
||||
instant = { version = "0.1" }
|
||||
reqwest = { version = "0.12.4", default-features = false, features = [
|
||||
"default-tls",
|
||||
"multipart",
|
||||
"stream",
|
||||
] }
|
||||
openssl = { version = "0.10.55", features = ["vendored"] }
|
||||
tokio-postgres = "0.7.10"
|
||||
pg_bigdecimal = "0.1.5"
|
||||
lazy_static = "1.4.0"
|
||||
colored_json = { version = "3.0.1", default_features = false, optional = true }
|
||||
regex = { version = "1", default_features = false }
|
||||
tokio = { version = "1.35", default_features = false, features = [
|
||||
], optional = true }
|
||||
openssl = { version = "0.10.55", features = ["vendored"], optional = true }
|
||||
tokio-postgres = { version = "0.7.10", optional = true }
|
||||
pg_bigdecimal = { version = "0.1.5", optional = true }
|
||||
lazy_static = { version = "1.4.0", optional = true }
|
||||
colored_json = { version = "3.0.1", default-features = false, optional = true }
|
||||
regex = { version = "1", default-features = false, optional = true }
|
||||
tokio = { version = "1.35.0", default-features = false, features = [
|
||||
"macros",
|
||||
"rt-multi-thread"
|
||||
] }
|
||||
"rt-multi-thread",
|
||||
], optional = true }
|
||||
pyo3 = { version = "0.21.2", features = [
|
||||
"extension-module",
|
||||
"abi3-py37",
|
||||
"macros",
|
||||
], default_features = false, optional = true }
|
||||
pyo3-asyncio = { git = "https://github.com/jopemachine/pyo3-asyncio/", branch="migration-pyo3-0.21", features = [
|
||||
], default-features = false, optional = true }
|
||||
pyo3-asyncio = { git = "https://github.com/jopemachine/pyo3-asyncio/", branch = "migration-pyo3-0.21", features = [
|
||||
"attributes",
|
||||
"tokio-runtime",
|
||||
], default_features = false, optional = true }
|
||||
pyo3-log = { version = "0.10.0", default_features = false, optional = true }
|
||||
tract-onnx = { git = "https://github.com/sonos/tract/", rev = "40c64319291184814d9fea5fdf4fa16f5a4f7116", default_features = false, optional = true }
|
||||
], default-features = false, optional = true }
|
||||
pyo3-log = { version = "0.10.0", default-features = false, optional = true }
|
||||
tract-onnx = { git = "https://github.com/sonos/tract/", rev = "40c64319291184814d9fea5fdf4fa16f5a4f7116", default-features = false, optional = true }
|
||||
tabled = { version = "0.12.0", optional = true }
|
||||
metal = { git = "https://github.com/gfx-rs/metal-rs", optional = true }
|
||||
objc = { version = "0.2.4", optional = true }
|
||||
mimalloc = { version = "0.1", optional = true }
|
||||
|
||||
# universal bindings
|
||||
uniffi = { version = "=0.28.0", optional = true }
|
||||
getrandom = { version = "0.2.8", optional = true }
|
||||
uniffi_bindgen = { version = "=0.28.0", optional = true }
|
||||
camino = { version = "^1.1", optional = true }
|
||||
uuid = { version = "1.10.0", features = ["v4"], optional = true }
|
||||
|
||||
[target.'cfg(not(all(target_arch = "wasm32", target_os = "unknown")))'.dependencies]
|
||||
colored = { version = "2.0.0", default_features = false, optional = true }
|
||||
env_logger = { version = "0.10.0", default_features = false, optional = true }
|
||||
chrono = "0.4.31"
|
||||
sha256 = "1.4.0"
|
||||
colored = { version = "2.0.0", default-features = false, optional = true }
|
||||
env_logger = { version = "0.10.0", default-features = false, optional = true }
|
||||
chrono = { version = "0.4.31", optional = true }
|
||||
sha256 = { version = "1.4.0", optional = true }
|
||||
|
||||
|
||||
[target.'cfg(target_arch = "wasm32")'.dependencies]
|
||||
serde_json = { version = "1.0.97", default-features = false, features = [
|
||||
"float_roundtrip",
|
||||
"raw_value",
|
||||
] }
|
||||
getrandom = { version = "0.2.8", features = ["js"] }
|
||||
instant = { version = "0.1", features = ["wasm-bindgen", "inaccurate"] }
|
||||
|
||||
@@ -106,6 +130,10 @@ wasm-bindgen-console-logger = "0.1.1"
|
||||
[target.'cfg(not(all(target_arch = "wasm32", target_os = "unknown")))'.dev-dependencies]
|
||||
criterion = { version = "0.5.1", features = ["html_reports"] }
|
||||
|
||||
|
||||
[build-dependencies]
|
||||
uniffi = { version = "0.28", features = ["build"], optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
tempfile = "3.3.0"
|
||||
lazy_static = "1.4.0"
|
||||
@@ -160,6 +188,10 @@ harness = false
|
||||
name = "relu"
|
||||
harness = false
|
||||
|
||||
[[bench]]
|
||||
name = "relu_lookupless"
|
||||
harness = false
|
||||
|
||||
[[bench]]
|
||||
name = "accum_matmul_relu"
|
||||
harness = false
|
||||
@@ -175,42 +207,81 @@ test = false
|
||||
bench = false
|
||||
required-features = ["ezkl"]
|
||||
|
||||
[[bin]]
|
||||
name = "ios_gen_bindings"
|
||||
required-features = ["ios-bindings", "uuid", "camino", "uniffi_bindgen"]
|
||||
|
||||
[features]
|
||||
web = ["wasm-bindgen-rayon"]
|
||||
default = ["ezkl", "mv-lookup", "no-banner", "parallel-poly-read"]
|
||||
default = [
|
||||
"ezkl",
|
||||
"mv-lookup",
|
||||
"precompute-coset",
|
||||
"no-banner",
|
||||
"parallel-poly-read",
|
||||
]
|
||||
onnx = ["dep:tract-onnx"]
|
||||
python-bindings = ["pyo3", "pyo3-log", "pyo3-asyncio"]
|
||||
ios-bindings = ["mv-lookup", "precompute-coset", "parallel-poly-read", "uniffi"]
|
||||
ios-bindings-test = ["ios-bindings", "uniffi/bindgen-tests"]
|
||||
ezkl = [
|
||||
"onnx",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"log",
|
||||
"colored",
|
||||
"env_logger",
|
||||
"dep:colored",
|
||||
"dep:env_logger",
|
||||
"tabled/color",
|
||||
"serde_json/std",
|
||||
"colored_json",
|
||||
"halo2_proofs/circuit-params",
|
||||
"dep:alloy",
|
||||
"dep:foundry-compilers",
|
||||
"dep:ethabi",
|
||||
"dep:indicatif",
|
||||
"dep:gag",
|
||||
"dep:reqwest",
|
||||
"dep:openssl",
|
||||
"dep:tokio-postgres",
|
||||
"dep:pg_bigdecimal",
|
||||
"dep:lazy_static",
|
||||
"dep:regex",
|
||||
"dep:tokio",
|
||||
"dep:mimalloc",
|
||||
"dep:chrono",
|
||||
"dep:sha256",
|
||||
"dep:portable-atomic",
|
||||
"dep:clap_complete",
|
||||
"dep:halo2_solidity_verifier",
|
||||
"dep:semver",
|
||||
"dep:clap",
|
||||
"dep:tosubcommand",
|
||||
]
|
||||
parallel-poly-read = [
|
||||
"halo2_proofs/circuit-params",
|
||||
"halo2_proofs/parallel-poly-read",
|
||||
]
|
||||
parallel-poly-read = ["halo2_proofs/parallel-poly-read"]
|
||||
mv-lookup = [
|
||||
"halo2_proofs/mv-lookup",
|
||||
"snark-verifier/mv-lookup",
|
||||
"halo2_solidity_verifier/mv-lookup",
|
||||
]
|
||||
asm = ["halo2curves/asm", "halo2_proofs/asm"]
|
||||
precompute-coset = ["halo2_proofs/precompute-coset"]
|
||||
det-prove = []
|
||||
icicle = ["halo2_proofs/icicle_gpu"]
|
||||
empty-cmd = []
|
||||
no-banner = []
|
||||
no-update = []
|
||||
metal = ["dep:metal", "dep:objc"]
|
||||
|
||||
# icicle patch to 0.1.0 if feature icicle is enabled
|
||||
[patch.'https://github.com/ingonyama-zk/icicle']
|
||||
icicle = { git = "https://github.com/ingonyama-zk/icicle?rev=45b00fb", package = "icicle", branch = "fix/vhnat/ezkl-build-fix" }
|
||||
|
||||
[patch.'https://github.com/zkonduit/halo2']
|
||||
halo2_proofs = { git = "https://github.com/zkonduit/halo2#098ac0ef3b29255e0e2524ecbb4e7e325ae6e7fd", package = "halo2_proofs" }
|
||||
halo2_proofs = { git = "https://github.com/zkonduit/halo2?branch=ac/cache-lookup-commitments#8b13a0d2a7a34d8daab010dadb2c47dfa47d37d0", package = "halo2_proofs", branch = "ac/cache-lookup-commitments" }
|
||||
|
||||
[patch.crates-io]
|
||||
uniffi_testing = { git = "https://github.com/ElusAegis/uniffi-rs", branch = "feat/testing-feature-build-fix" }
|
||||
|
||||
[profile.release]
|
||||
rustflags = ["-C", "relocation-model=pic"]
|
||||
lto = "fat"
|
||||
codegen-units = 1
|
||||
# panic = "abort"
|
||||
|
||||
146
abis/EZKLVerifierManager.json
Normal file
146
abis/EZKLVerifierManager.json
Normal file
@@ -0,0 +1,146 @@
|
||||
[
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "address",
|
||||
"name": "owner",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"name": "OwnableInvalidOwner",
|
||||
"type": "error"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "address",
|
||||
"name": "account",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"name": "OwnableUnauthorizedAccount",
|
||||
"type": "error"
|
||||
},
|
||||
{
|
||||
"anonymous": false,
|
||||
"inputs": [
|
||||
{
|
||||
"indexed": false,
|
||||
"internalType": "address",
|
||||
"name": "addr",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"name": "DeployedVerifier",
|
||||
"type": "event"
|
||||
},
|
||||
{
|
||||
"anonymous": false,
|
||||
"inputs": [
|
||||
{
|
||||
"indexed": true,
|
||||
"internalType": "address",
|
||||
"name": "previousOwner",
|
||||
"type": "address"
|
||||
},
|
||||
{
|
||||
"indexed": true,
|
||||
"internalType": "address",
|
||||
"name": "newOwner",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"name": "OwnershipTransferred",
|
||||
"type": "event"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "bytes",
|
||||
"name": "bytecode",
|
||||
"type": "bytes"
|
||||
}
|
||||
],
|
||||
"name": "deployVerifier",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "address",
|
||||
"name": "addr",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"stateMutability": "nonpayable",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [],
|
||||
"name": "owner",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "address",
|
||||
"name": "",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "bytes",
|
||||
"name": "bytecode",
|
||||
"type": "bytes"
|
||||
}
|
||||
],
|
||||
"name": "precomputeAddress",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "address",
|
||||
"name": "",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [],
|
||||
"name": "renounceOwnership",
|
||||
"outputs": [],
|
||||
"stateMutability": "nonpayable",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "address",
|
||||
"name": "newOwner",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"name": "transferOwnership",
|
||||
"outputs": [],
|
||||
"stateMutability": "nonpayable",
|
||||
"type": "function"
|
||||
},
|
||||
{
|
||||
"inputs": [
|
||||
{
|
||||
"internalType": "address",
|
||||
"name": "",
|
||||
"type": "address"
|
||||
}
|
||||
],
|
||||
"name": "verifierAddresses",
|
||||
"outputs": [
|
||||
{
|
||||
"internalType": "bool",
|
||||
"name": "",
|
||||
"type": "bool"
|
||||
}
|
||||
],
|
||||
"stateMutability": "view",
|
||||
"type": "function"
|
||||
}
|
||||
]
|
||||
@@ -64,7 +64,7 @@ impl Circuit<Fr> for MyCircuit {
|
||||
layouter.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = region::RegionCtx::new(region, 0, 1);
|
||||
let mut region = region::RegionCtx::new(region, 0, 1, 1024, 2);
|
||||
config
|
||||
.layout(
|
||||
&mut region,
|
||||
|
||||
@@ -55,7 +55,7 @@ impl Circuit<Fr> for MyCircuit {
|
||||
layouter.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = region::RegionCtx::new(region, 0, 1);
|
||||
let mut region = region::RegionCtx::new(region, 0, 1, 1024, 2);
|
||||
config
|
||||
.layout(
|
||||
&mut region,
|
||||
|
||||
@@ -57,7 +57,7 @@ impl Circuit<Fr> for MyCircuit {
|
||||
layouter.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = region::RegionCtx::new(region, 0, 1);
|
||||
let mut region = region::RegionCtx::new(region, 0, 1, 1024, 2);
|
||||
config
|
||||
.layout(
|
||||
&mut region,
|
||||
|
||||
@@ -57,7 +57,15 @@ impl Circuit<Fr> for MyCircuit {
|
||||
|
||||
// sets up a new relu table
|
||||
base_config
|
||||
.configure_lookup(cs, &b, &output, &a, BITS, K, &LookupOp::ReLU)
|
||||
.configure_lookup(
|
||||
cs,
|
||||
&b,
|
||||
&output,
|
||||
&a,
|
||||
BITS,
|
||||
K,
|
||||
&LookupOp::LeakyReLU { slope: 0.0.into() },
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
MyConfig { base_config }
|
||||
@@ -75,14 +83,18 @@ impl Circuit<Fr> for MyCircuit {
|
||||
let op = PolyOp::Einsum {
|
||||
equation: "ij,jk->ik".to_string(),
|
||||
};
|
||||
let mut region = region::RegionCtx::new(region, 0, 1);
|
||||
let mut region = region::RegionCtx::new(region, 0, 1, 1024, 2);
|
||||
let output = config
|
||||
.base_config
|
||||
.layout(&mut region, &self.inputs, Box::new(op))
|
||||
.unwrap();
|
||||
let _output = config
|
||||
.base_config
|
||||
.layout(&mut region, &[output.unwrap()], Box::new(LookupOp::ReLU))
|
||||
.layout(
|
||||
&mut region,
|
||||
&[output.unwrap()],
|
||||
Box::new(LookupOp::LeakyReLU { slope: 0.0.into() }),
|
||||
)
|
||||
.unwrap();
|
||||
Ok(())
|
||||
},
|
||||
|
||||
@@ -58,7 +58,15 @@ impl Circuit<Fr> for MyCircuit {
|
||||
|
||||
// sets up a new relu table
|
||||
base_config
|
||||
.configure_lookup(cs, &b, &output, &a, BITS, k, &LookupOp::ReLU)
|
||||
.configure_lookup(
|
||||
cs,
|
||||
&b,
|
||||
&output,
|
||||
&a,
|
||||
BITS,
|
||||
k,
|
||||
&LookupOp::LeakyReLU { slope: 0.0.into() },
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
MyConfig { base_config }
|
||||
@@ -76,14 +84,18 @@ impl Circuit<Fr> for MyCircuit {
|
||||
let op = PolyOp::Einsum {
|
||||
equation: "ij,jk->ik".to_string(),
|
||||
};
|
||||
let mut region = region::RegionCtx::new(region, 0, 1);
|
||||
let mut region = region::RegionCtx::new(region, 0, 1, 1024, 2);
|
||||
let output = config
|
||||
.base_config
|
||||
.layout(&mut region, &self.inputs, Box::new(op))
|
||||
.unwrap();
|
||||
let _output = config
|
||||
.base_config
|
||||
.layout(&mut region, &[output.unwrap()], Box::new(LookupOp::ReLU))
|
||||
.layout(
|
||||
&mut region,
|
||||
&[output.unwrap()],
|
||||
Box::new(LookupOp::LeakyReLU { slope: 0.0.into() }),
|
||||
)
|
||||
.unwrap();
|
||||
Ok(())
|
||||
},
|
||||
|
||||
@@ -55,7 +55,7 @@ impl Circuit<Fr> for MyCircuit {
|
||||
layouter.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = region::RegionCtx::new(region, 0, 1);
|
||||
let mut region = region::RegionCtx::new(region, 0, 1, 1024, 2);
|
||||
config
|
||||
.layout(
|
||||
&mut region,
|
||||
|
||||
@@ -59,7 +59,7 @@ impl Circuit<Fr> for MyCircuit {
|
||||
layouter.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = region::RegionCtx::new(region, 0, 1);
|
||||
let mut region = region::RegionCtx::new(region, 0, 1, 1024, 2);
|
||||
config
|
||||
.layout(
|
||||
&mut region,
|
||||
|
||||
@@ -55,7 +55,7 @@ impl Circuit<Fr> for MyCircuit {
|
||||
layouter.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = region::RegionCtx::new(region, 0, 1);
|
||||
let mut region = region::RegionCtx::new(region, 0, 1, 1024, 2);
|
||||
config
|
||||
.layout(&mut region, &self.inputs, Box::new(PolyOp::Add))
|
||||
.unwrap();
|
||||
|
||||
@@ -56,7 +56,7 @@ impl Circuit<Fr> for MyCircuit {
|
||||
layouter.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, 1);
|
||||
let mut region = RegionCtx::new(region, 0, 1, 1024, 2);
|
||||
config
|
||||
.layout(&mut region, &self.inputs, Box::new(PolyOp::Pow(4)))
|
||||
.unwrap();
|
||||
|
||||
@@ -42,7 +42,7 @@ impl Circuit<Fr> for NLCircuit {
|
||||
.map(|_| VarTensor::new_advice(cs, K, 1, LEN))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let nl = LookupOp::ReLU;
|
||||
let nl = LookupOp::LeakyReLU { slope: 0.0.into() };
|
||||
|
||||
let mut config = Config::default();
|
||||
|
||||
@@ -63,9 +63,13 @@ impl Circuit<Fr> for NLCircuit {
|
||||
layouter.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, 1);
|
||||
let mut region = RegionCtx::new(region, 0, 1, 1024, 2);
|
||||
config
|
||||
.layout(&mut region, &[self.input.clone()], Box::new(LookupOp::ReLU))
|
||||
.layout(
|
||||
&mut region,
|
||||
&[self.input.clone()],
|
||||
Box::new(LookupOp::LeakyReLU { slope: 0.0.into() }),
|
||||
)
|
||||
.unwrap();
|
||||
Ok(())
|
||||
},
|
||||
|
||||
143
benches/relu_lookupless.rs
Normal file
143
benches/relu_lookupless.rs
Normal file
@@ -0,0 +1,143 @@
|
||||
use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Throughput};
|
||||
use ezkl::circuit::poly::PolyOp;
|
||||
use ezkl::circuit::region::RegionCtx;
|
||||
use ezkl::circuit::{BaseConfig as Config, CheckMode};
|
||||
use ezkl::fieldutils::IntegerRep;
|
||||
use ezkl::pfsys::create_proof_circuit;
|
||||
use ezkl::pfsys::TranscriptType;
|
||||
use ezkl::pfsys::{create_keys, srs::gen_srs};
|
||||
use ezkl::tensor::*;
|
||||
use halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme;
|
||||
use halo2_proofs::poly::kzg::multiopen::{ProverSHPLONK, VerifierSHPLONK};
|
||||
use halo2_proofs::poly::kzg::strategy::SingleStrategy;
|
||||
use halo2_proofs::{
|
||||
circuit::{Layouter, SimpleFloorPlanner, Value},
|
||||
plonk::{Circuit, ConstraintSystem, Error},
|
||||
};
|
||||
use halo2curves::bn256::{Bn256, Fr};
|
||||
use rand::Rng;
|
||||
use snark_verifier::system::halo2::transcript::evm::EvmTranscript;
|
||||
|
||||
static mut LEN: usize = 4;
|
||||
const K: usize = 16;
|
||||
|
||||
#[derive(Clone)]
|
||||
struct NLCircuit {
|
||||
pub input: ValTensor<Fr>,
|
||||
}
|
||||
|
||||
impl Circuit<Fr> for NLCircuit {
|
||||
type Config = Config<Fr>;
|
||||
type FloorPlanner = SimpleFloorPlanner;
|
||||
type Params = ();
|
||||
|
||||
fn without_witnesses(&self) -> Self {
|
||||
self.clone()
|
||||
}
|
||||
|
||||
fn configure(cs: &mut ConstraintSystem<Fr>) -> Self::Config {
|
||||
unsafe {
|
||||
let advices = (0..3)
|
||||
.map(|_| VarTensor::new_advice(cs, K, 1, LEN))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let mut config = Config::default();
|
||||
|
||||
config
|
||||
.configure_range_check(cs, &advices[0], &advices[1], (-1, 1), K)
|
||||
.unwrap();
|
||||
|
||||
config
|
||||
.configure_range_check(cs, &advices[0], &advices[1], (0, 1023), K)
|
||||
.unwrap();
|
||||
|
||||
let _constant = VarTensor::constant_cols(cs, K, LEN, false);
|
||||
|
||||
config
|
||||
}
|
||||
}
|
||||
|
||||
fn synthesize(
|
||||
&self,
|
||||
mut config: Self::Config,
|
||||
mut layouter: impl Layouter<Fr>, // layouter is our 'write buffer' for the circuit
|
||||
) -> Result<(), Error> {
|
||||
config.layout_range_checks(&mut layouter).unwrap();
|
||||
layouter.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, 1, 1024, 2);
|
||||
config
|
||||
.layout(&mut region, &[self.input.clone()], Box::new(PolyOp::ReLU))
|
||||
.unwrap();
|
||||
Ok(())
|
||||
},
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn runrelu(c: &mut Criterion) {
|
||||
let mut group = c.benchmark_group("relu");
|
||||
|
||||
let mut rng = rand::thread_rng();
|
||||
let params = gen_srs::<KZGCommitmentScheme<_>>(17);
|
||||
for &len in [4, 8].iter() {
|
||||
unsafe {
|
||||
LEN = len;
|
||||
};
|
||||
|
||||
let input: Tensor<Value<Fr>> =
|
||||
Tensor::<IntegerRep>::from((0..len).map(|_| rng.gen_range(0..10))).into();
|
||||
|
||||
let circuit = NLCircuit {
|
||||
input: ValTensor::from(input.clone()),
|
||||
};
|
||||
|
||||
group.throughput(Throughput::Elements(len as u64));
|
||||
group.bench_with_input(BenchmarkId::new("pk", len), &len, |b, &_| {
|
||||
b.iter(|| {
|
||||
create_keys::<KZGCommitmentScheme<Bn256>, NLCircuit>(&circuit, ¶ms, true)
|
||||
.unwrap();
|
||||
});
|
||||
});
|
||||
|
||||
let pk =
|
||||
create_keys::<KZGCommitmentScheme<Bn256>, NLCircuit>(&circuit, ¶ms, true).unwrap();
|
||||
|
||||
group.throughput(Throughput::Elements(len as u64));
|
||||
group.bench_with_input(BenchmarkId::new("prove", len), &len, |b, &_| {
|
||||
b.iter(|| {
|
||||
let prover = create_proof_circuit::<
|
||||
KZGCommitmentScheme<_>,
|
||||
NLCircuit,
|
||||
ProverSHPLONK<_>,
|
||||
VerifierSHPLONK<_>,
|
||||
SingleStrategy<_>,
|
||||
_,
|
||||
EvmTranscript<_, _, _, _>,
|
||||
EvmTranscript<_, _, _, _>,
|
||||
>(
|
||||
circuit.clone(),
|
||||
vec![],
|
||||
¶ms,
|
||||
&pk,
|
||||
CheckMode::UNSAFE,
|
||||
ezkl::Commitments::KZG,
|
||||
TranscriptType::EVM,
|
||||
None,
|
||||
None,
|
||||
);
|
||||
prover.unwrap();
|
||||
});
|
||||
});
|
||||
}
|
||||
group.finish();
|
||||
}
|
||||
|
||||
criterion_group! {
|
||||
name = benches;
|
||||
config = Criterion::default().with_plots();
|
||||
targets = runrelu
|
||||
}
|
||||
criterion_main!(benches);
|
||||
7
build.rs
Normal file
7
build.rs
Normal file
@@ -0,0 +1,7 @@
|
||||
fn main() {
|
||||
if cfg!(feature = "ios-bindings-test") {
|
||||
println!("cargo::rustc-env=UNIFFI_CARGO_BUILD_EXTRA_ARGS=--features=ios-bindings --no-default-features");
|
||||
}
|
||||
|
||||
println!("cargo::rerun-if-changed=build.rs");
|
||||
}
|
||||
184
contracts/VerifierManager.sol
Normal file
184
contracts/VerifierManager.sol
Normal file
@@ -0,0 +1,184 @@
|
||||
// SPDX-License-Identifier: MIT
|
||||
pragma solidity 0.8.20;
|
||||
|
||||
// lib/openzeppelin-contracts/contracts/utils/Context.sol
|
||||
|
||||
// OpenZeppelin Contracts (last updated v5.0.1) (utils/Context.sol)
|
||||
|
||||
/**
|
||||
* @dev Provides information about the current execution context, including the
|
||||
* sender of the transaction and its data. While these are generally available
|
||||
* via msg.sender and msg.data, they should not be accessed in such a direct
|
||||
* manner, since when dealing with meta-transactions the account sending and
|
||||
* paying for execution may not be the actual sender (as far as an application
|
||||
* is concerned).
|
||||
*
|
||||
* This contract is only required for intermediate, library-like contracts.
|
||||
*/
|
||||
abstract contract Context {
|
||||
function _msgSender() internal view virtual returns (address) {
|
||||
return msg.sender;
|
||||
}
|
||||
|
||||
function _msgData() internal view virtual returns (bytes calldata) {
|
||||
return msg.data;
|
||||
}
|
||||
|
||||
function _contextSuffixLength() internal view virtual returns (uint256) {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
// lib/openzeppelin-contracts/contracts/access/Ownable.sol
|
||||
|
||||
// OpenZeppelin Contracts (last updated v5.0.0) (access/Ownable.sol)
|
||||
|
||||
/**
|
||||
* @dev Contract module which provides a basic access control mechanism, where
|
||||
* there is an account (an owner) that can be granted exclusive access to
|
||||
* specific functions.
|
||||
*
|
||||
* The initial owner is set to the address provided by the deployer. This can
|
||||
* later be changed with {transferOwnership}.
|
||||
*
|
||||
* This module is used through inheritance. It will make available the modifier
|
||||
* `onlyOwner`, which can be applied to your functions to restrict their use to
|
||||
* the owner.
|
||||
*/
|
||||
abstract contract Ownable is Context {
|
||||
/// set the owener initialy to be the anvil test account
|
||||
address private _owner = 0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266;
|
||||
|
||||
/**
|
||||
* @dev The caller account is not authorized to perform an operation.
|
||||
*/
|
||||
error OwnableUnauthorizedAccount(address account);
|
||||
|
||||
/**
|
||||
* @dev The owner is not a valid owner account. (eg. `address(0)`)
|
||||
*/
|
||||
error OwnableInvalidOwner(address owner);
|
||||
|
||||
event OwnershipTransferred(
|
||||
address indexed previousOwner,
|
||||
address indexed newOwner
|
||||
);
|
||||
|
||||
/**
|
||||
* @dev Initializes the contract setting the address provided by the deployer as the initial owner.
|
||||
*/
|
||||
constructor() {
|
||||
_transferOwnership(msg.sender);
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Throws if called by any account other than the owner.
|
||||
*/
|
||||
modifier onlyOwner() {
|
||||
_checkOwner();
|
||||
_;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Returns the address of the current owner.
|
||||
*/
|
||||
function owner() public view virtual returns (address) {
|
||||
return _owner;
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Throws if the sender is not the owner.
|
||||
*/
|
||||
function _checkOwner() internal view virtual {
|
||||
if (owner() != _msgSender()) {
|
||||
revert OwnableUnauthorizedAccount(_msgSender());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Leaves the contract without owner. It will not be possible to call
|
||||
* `onlyOwner` functions. Can only be called by the current owner.
|
||||
*
|
||||
* NOTE: Renouncing ownership will leave the contract without an owner,
|
||||
* thereby disabling any functionality that is only available to the owner.
|
||||
*/
|
||||
function renounceOwnership() public virtual onlyOwner {
|
||||
_transferOwnership(address(0));
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Transfers ownership of the contract to a new account (`newOwner`).
|
||||
* Can only be called by the current owner.
|
||||
*/
|
||||
function transferOwnership(address newOwner) public virtual onlyOwner {
|
||||
if (newOwner == address(0)) {
|
||||
revert OwnableInvalidOwner(address(0));
|
||||
}
|
||||
_transferOwnership(newOwner);
|
||||
}
|
||||
|
||||
/**
|
||||
* @dev Transfers ownership of the contract to a new account (`newOwner`).
|
||||
* Internal function without access restriction.
|
||||
*/
|
||||
function _transferOwnership(address newOwner) internal virtual {
|
||||
address oldOwner = _owner;
|
||||
_owner = newOwner;
|
||||
emit OwnershipTransferred(oldOwner, newOwner);
|
||||
}
|
||||
}
|
||||
|
||||
// interface for the reusable verifier.
|
||||
interface Halo2VerifierReusable {
|
||||
function verifyProof(
|
||||
address vkArtifact,
|
||||
bytes calldata proof,
|
||||
uint256[] calldata instances
|
||||
) external returns (bool);
|
||||
}
|
||||
|
||||
// Manages the deployment of all EZKL reusbale verifiers (ezkl version specific), verifiying key artifacts (circuit specific) and
|
||||
// routing proof verifications to the correct VKA and associate reusable verifier.
|
||||
// Helps to prevent the deployment of duplicate verifiers.
|
||||
contract EZKLVerifierManager is Ownable {
|
||||
/// @dev Mapping that checks if a given reusable verifier has been deployed
|
||||
mapping(address => bool) public verifierAddresses;
|
||||
|
||||
event DeployedVerifier(address addr);
|
||||
|
||||
// 1. Compute the address of the verifier to be deployed
|
||||
function precomputeAddress(
|
||||
bytes memory bytecode
|
||||
) public view returns (address) {
|
||||
bytes32 hash = keccak256(
|
||||
abi.encodePacked(
|
||||
bytes1(0xff),
|
||||
address(this),
|
||||
uint(0),
|
||||
keccak256(bytecode)
|
||||
)
|
||||
);
|
||||
|
||||
return address(uint160(uint(hash)));
|
||||
}
|
||||
|
||||
// 2. Deploy the reusable verifier using create2
|
||||
/// @param bytecode The bytecode of the reusable verifier to deploy
|
||||
function deployVerifier(
|
||||
bytes memory bytecode
|
||||
) public returns (address addr) {
|
||||
assembly {
|
||||
addr := create2(
|
||||
0x0, // value, hardcode to 0
|
||||
add(bytecode, 0x20),
|
||||
mload(bytecode),
|
||||
0x0 // salt, hardcode to 0
|
||||
)
|
||||
if iszero(extcodesize(addr)) {
|
||||
revert(0, 0)
|
||||
}
|
||||
}
|
||||
verifierAddresses[addr] = true;
|
||||
emit DeployedVerifier(addr);
|
||||
}
|
||||
}
|
||||
@@ -163,7 +163,7 @@ where
|
||||
¶ms,
|
||||
(LOOKUP_MIN, LOOKUP_MAX),
|
||||
K,
|
||||
&LookupOp::ReLU,
|
||||
&LookupOp::LeakyReLU { slope: 0.0.into() },
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
@@ -199,7 +199,7 @@ where
|
||||
.assign_region(
|
||||
|| "mlp_4d",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, NUM_INNER_COLS);
|
||||
let mut region = RegionCtx::new(region, 0, NUM_INNER_COLS, 1024, 2);
|
||||
|
||||
let op = PolyOp::Conv {
|
||||
padding: vec![(PADDING, PADDING); 2],
|
||||
@@ -221,7 +221,11 @@ where
|
||||
|
||||
let x = config
|
||||
.layer_config
|
||||
.layout(&mut region, &[x.unwrap()], Box::new(LookupOp::ReLU))
|
||||
.layout(
|
||||
&mut region,
|
||||
&[x.unwrap()],
|
||||
Box::new(LookupOp::LeakyReLU { slope: 0.0.into() }),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let mut x = config
|
||||
@@ -281,7 +285,7 @@ where
|
||||
}
|
||||
|
||||
pub fn runconv() {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
env_logger::init();
|
||||
|
||||
const KERNEL_HEIGHT: usize = 5;
|
||||
|
||||
@@ -69,7 +69,7 @@ impl<const LEN: usize, const LOOKUP_MIN: IntegerRep, const LOOKUP_MAX: IntegerRe
|
||||
¶ms,
|
||||
(LOOKUP_MIN, LOOKUP_MAX),
|
||||
K,
|
||||
&LookupOp::ReLU,
|
||||
&LookupOp::LeakyReLU { slope: 0.0.into() },
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
@@ -108,7 +108,7 @@ impl<const LEN: usize, const LOOKUP_MIN: IntegerRep, const LOOKUP_MAX: IntegerRe
|
||||
.assign_region(
|
||||
|| "mlp_4d",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, 1);
|
||||
let mut region = RegionCtx::new(region, 0, 1, 1024, 2);
|
||||
let x = config
|
||||
.layer_config
|
||||
.layout(
|
||||
@@ -141,7 +141,11 @@ impl<const LEN: usize, const LOOKUP_MIN: IntegerRep, const LOOKUP_MAX: IntegerRe
|
||||
println!("x shape: {:?}", x.dims());
|
||||
let mut x = config
|
||||
.layer_config
|
||||
.layout(&mut region, &[x], Box::new(LookupOp::ReLU))
|
||||
.layout(
|
||||
&mut region,
|
||||
&[x],
|
||||
Box::new(LookupOp::LeakyReLU { slope: 0.0.into() }),
|
||||
)
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
println!("3");
|
||||
@@ -177,7 +181,11 @@ impl<const LEN: usize, const LOOKUP_MIN: IntegerRep, const LOOKUP_MAX: IntegerRe
|
||||
println!("x shape: {:?}", x.dims());
|
||||
let x = config
|
||||
.layer_config
|
||||
.layout(&mut region, &[x], Box::new(LookupOp::ReLU))
|
||||
.layout(
|
||||
&mut region,
|
||||
&[x],
|
||||
Box::new(LookupOp::LeakyReLU { slope: 0.0.into() }),
|
||||
)
|
||||
.unwrap();
|
||||
println!("6");
|
||||
println!("offset: {}", region.row());
|
||||
@@ -212,7 +220,7 @@ impl<const LEN: usize, const LOOKUP_MIN: IntegerRep, const LOOKUP_MAX: IntegerRe
|
||||
}
|
||||
|
||||
pub fn runmlp() {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
env_logger::init();
|
||||
// parameters
|
||||
let mut l0_kernel: Tensor<F> = Tensor::<IntegerRep>::new(
|
||||
|
||||
771
examples/notebooks/ezkl_demo_batch.ipynb
Normal file
771
examples/notebooks/ezkl_demo_batch.ipynb
Normal file
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -232,7 +232,7 @@
|
||||
"run_args.param_visibility = \"fixed\"\n",
|
||||
"run_args.output_visibility = \"public\"\n",
|
||||
"run_args.input_scale = 2\n",
|
||||
"run_args.logrows = 8\n",
|
||||
"run_args.logrows = 15\n",
|
||||
"\n",
|
||||
"ezkl.get_srs(logrows=run_args.logrows, commitment=ezkl.PyCommitments.KZG)"
|
||||
]
|
||||
@@ -404,7 +404,7 @@
|
||||
"run_args.output_visibility = \"polycommit\"\n",
|
||||
"run_args.variables = [(\"batch_size\", 1)]\n",
|
||||
"run_args.input_scale = 2\n",
|
||||
"run_args.logrows = 8\n"
|
||||
"run_args.logrows = 15\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -466,7 +466,7 @@
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.12.2"
|
||||
"version": "3.12.5"
|
||||
},
|
||||
"orig_nbformat": 4
|
||||
},
|
||||
|
||||
339
examples/notebooks/reusable_verifier.ipynb
Normal file
339
examples/notebooks/reusable_verifier.ipynb
Normal file
@@ -0,0 +1,339 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Reusable Verifiers \n",
|
||||
"\n",
|
||||
"This notebook demonstrates how to create and reuse the same set of separated verifiers for different models. Specifically, we will use the same verifier for the following four models:\n",
|
||||
"\n",
|
||||
"- `1l_mlp sigmoid`\n",
|
||||
"- `1l_mlp relu`\n",
|
||||
"- `1l_conv sigmoid`\n",
|
||||
"- `1l_conv relu`\n",
|
||||
"\n",
|
||||
"When deploying EZKL verifiers on the blockchain, each associated model typically requires its own unique verifier, leading to increased on-chain state usage. \n",
|
||||
"However, with the reusable verifier, we can deploy a single verifier that can be used to verify proofs for any valid H2 circuit. This notebook shows how to do so. \n",
|
||||
"\n",
|
||||
"By reusing the same verifier across multiple models, we significantly reduce the amount of state bloat on the blockchain. Instead of deploying a unique verifier for each model, we deploy a unique and much smaller verifying key artifact (VKA) contract for each model while sharing a common separated verifier. The VKA contains the VK for the model as well circuit specific metadata that was otherwise hardcoded into the stack of the original non-reusable verifier."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import torch\n",
|
||||
"import torch.nn as nn\n",
|
||||
"import torch.onnx\n",
|
||||
"\n",
|
||||
"# Define the models\n",
|
||||
"class MLP_Sigmoid(nn.Module):\n",
|
||||
" def __init__(self):\n",
|
||||
" super(MLP_Sigmoid, self).__init__()\n",
|
||||
" self.fc = nn.Linear(3, 3)\n",
|
||||
" self.sigmoid = nn.Sigmoid()\n",
|
||||
"\n",
|
||||
" def forward(self, x):\n",
|
||||
" x = self.fc(x)\n",
|
||||
" x = self.sigmoid(x)\n",
|
||||
" return x\n",
|
||||
"\n",
|
||||
"class MLP_Relu(nn.Module):\n",
|
||||
" def __init__(self):\n",
|
||||
" super(MLP_Relu, self).__init__()\n",
|
||||
" self.fc = nn.Linear(3, 3)\n",
|
||||
" self.relu = nn.ReLU()\n",
|
||||
"\n",
|
||||
" def forward(self, x):\n",
|
||||
" x = self.fc(x)\n",
|
||||
" x = self.relu(x)\n",
|
||||
" return x\n",
|
||||
"\n",
|
||||
"class Conv_Sigmoid(nn.Module):\n",
|
||||
" def __init__(self):\n",
|
||||
" super(Conv_Sigmoid, self).__init__()\n",
|
||||
" self.conv = nn.Conv1d(1, 1, kernel_size=3, stride=1)\n",
|
||||
" self.sigmoid = nn.Sigmoid()\n",
|
||||
"\n",
|
||||
" def forward(self, x):\n",
|
||||
" x = self.conv(x)\n",
|
||||
" x = self.sigmoid(x)\n",
|
||||
" return x\n",
|
||||
"\n",
|
||||
"class Conv_Relu(nn.Module):\n",
|
||||
" def __init__(self):\n",
|
||||
" super(Conv_Relu, self).__init__()\n",
|
||||
" self.conv = nn.Conv1d(1, 1, kernel_size=3, stride=1)\n",
|
||||
" self.relu = nn.ReLU()\n",
|
||||
"\n",
|
||||
" def forward(self, x):\n",
|
||||
" x = self.conv(x)\n",
|
||||
" x = self.relu(x)\n",
|
||||
" return x\n",
|
||||
"\n",
|
||||
"# Instantiate the models\n",
|
||||
"mlp_sigmoid = MLP_Sigmoid()\n",
|
||||
"mlp_relu = MLP_Relu()\n",
|
||||
"conv_sigmoid = Conv_Sigmoid()\n",
|
||||
"conv_relu = Conv_Relu()\n",
|
||||
"\n",
|
||||
"# Dummy input tensor for mlp\n",
|
||||
"dummy_input_mlp = torch.tensor([[-1.5737053155899048, -1.708398461341858, 0.19544155895709991]])\n",
|
||||
"input_mlp_path = 'mlp_input.json'\n",
|
||||
"\n",
|
||||
"# Dummy input tensor for conv\n",
|
||||
"dummy_input_conv = torch.tensor([[[1.4124163389205933, 0.6938204169273376, 1.0664031505584717]]])\n",
|
||||
"input_conv_path = 'conv_input.json'"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"names = ['mlp_sigmoid', 'mlp_relu', 'conv_sigmoid', 'conv_relu']\n",
|
||||
"models = [mlp_sigmoid, mlp_relu, conv_sigmoid, conv_relu]\n",
|
||||
"inputs = [dummy_input_mlp, dummy_input_mlp, dummy_input_conv, dummy_input_conv]\n",
|
||||
"input_paths = [input_mlp_path, input_mlp_path, input_conv_path, input_conv_path]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import os\n",
|
||||
"import json\n",
|
||||
"import torch\n",
|
||||
"import ezkl\n",
|
||||
"\n",
|
||||
"for name, model, x, input_path in zip(names, models, inputs, input_paths):\n",
|
||||
" # Create a new directory for the model if it doesn't exist\n",
|
||||
" if not os.path.exists(name):\n",
|
||||
" os.mkdir(name)\n",
|
||||
" # Store the paths in each of their respective directories\n",
|
||||
" model_path = os.path.join(name, \"network.onnx\")\n",
|
||||
" compiled_model_path = os.path.join(name, \"network.compiled\")\n",
|
||||
" pk_path = os.path.join(name, \"test.pk\")\n",
|
||||
" vk_path = os.path.join(name, \"test.vk\")\n",
|
||||
" settings_path = os.path.join(name, \"settings.json\")\n",
|
||||
"\n",
|
||||
" witness_path = os.path.join(name, \"witness.json\")\n",
|
||||
" sol_code_path = os.path.join(name, 'test.sol')\n",
|
||||
" sol_key_code_path = os.path.join(name, 'test_key.sol')\n",
|
||||
" abi_path = os.path.join(name, 'test.abi')\n",
|
||||
" proof_path = os.path.join(name, \"proof.json\")\n",
|
||||
"\n",
|
||||
" # Flips the neural net into inference mode\n",
|
||||
" model.eval()\n",
|
||||
"\n",
|
||||
" # Export the model\n",
|
||||
" torch.onnx.export(model, x, model_path, export_params=True, opset_version=10,\n",
|
||||
" do_constant_folding=True, input_names=['input'],\n",
|
||||
" output_names=['output'], dynamic_axes={'input': {0: 'batch_size'},\n",
|
||||
" 'output': {0: 'batch_size'}})\n",
|
||||
"\n",
|
||||
" data_array = ((x).detach().numpy()).reshape([-1]).tolist()\n",
|
||||
" data = dict(input_data=[data_array])\n",
|
||||
" json.dump(data, open(input_path, 'w'))\n",
|
||||
"\n",
|
||||
" py_run_args = ezkl.PyRunArgs()\n",
|
||||
" py_run_args.input_visibility = \"private\"\n",
|
||||
" py_run_args.output_visibility = \"public\"\n",
|
||||
" py_run_args.param_visibility = \"fixed\" # private by default\n",
|
||||
"\n",
|
||||
" res = ezkl.gen_settings(model_path, settings_path, py_run_args=py_run_args)\n",
|
||||
" assert res == True\n",
|
||||
"\n",
|
||||
" await ezkl.calibrate_settings(input_path, model_path, settings_path, \"resources\")\n",
|
||||
"\n",
|
||||
" res = ezkl.compile_circuit(model_path, compiled_model_path, settings_path)\n",
|
||||
" assert res == True\n",
|
||||
"\n",
|
||||
" res = await ezkl.get_srs(settings_path)\n",
|
||||
" assert res == True\n",
|
||||
"\n",
|
||||
" # now generate the witness file\n",
|
||||
" res = await ezkl.gen_witness(input_path, compiled_model_path, witness_path)\n",
|
||||
" assert os.path.isfile(witness_path) == True\n",
|
||||
"\n",
|
||||
" # SETUP \n",
|
||||
" # We recommend disabling selector compression for the setup as it decreases the size of the VK artifact\n",
|
||||
" res = ezkl.setup(compiled_model_path, vk_path, pk_path, disable_selector_compression=True)\n",
|
||||
" assert res == True\n",
|
||||
" assert os.path.isfile(vk_path)\n",
|
||||
" assert os.path.isfile(pk_path)\n",
|
||||
" assert os.path.isfile(settings_path)\n",
|
||||
"\n",
|
||||
" # GENERATE A PROOF\n",
|
||||
" res = ezkl.prove(witness_path, compiled_model_path, pk_path, proof_path, \"single\")\n",
|
||||
" assert os.path.isfile(proof_path)\n",
|
||||
"\n",
|
||||
" res = await ezkl.create_evm_verifier(vk_path, settings_path, sol_code_path, abi_path, reusable=True)\n",
|
||||
" assert res == True\n",
|
||||
"\n",
|
||||
" res = await ezkl.create_evm_vka(vk_path, settings_path, sol_key_code_path, abi_path)\n",
|
||||
" assert res == True\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import subprocess\n",
|
||||
"import time\n",
|
||||
"\n",
|
||||
"# make sure anvil is running locally\n",
|
||||
"# $ anvil -p 3030\n",
|
||||
"\n",
|
||||
"RPC_URL = \"http://localhost:3030\"\n",
|
||||
"\n",
|
||||
"# Save process globally\n",
|
||||
"anvil_process = None\n",
|
||||
"\n",
|
||||
"def start_anvil():\n",
|
||||
" global anvil_process\n",
|
||||
" if anvil_process is None:\n",
|
||||
" anvil_process = subprocess.Popen([\"anvil\", \"-p\", \"3030\", \"--code-size-limit=41943040\"])\n",
|
||||
" if anvil_process.returncode is not None:\n",
|
||||
" raise Exception(\"failed to start anvil process\")\n",
|
||||
" time.sleep(3)\n",
|
||||
"\n",
|
||||
"def stop_anvil():\n",
|
||||
" global anvil_process\n",
|
||||
" if anvil_process is not None:\n",
|
||||
" anvil_process.terminate()\n",
|
||||
" anvil_process = None\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Check that the generated verifiers are identical for all models."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"start_anvil()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import filecmp\n",
|
||||
"\n",
|
||||
"def compare_files(file1, file2):\n",
|
||||
" return filecmp.cmp(file1, file2, shallow=False)\n",
|
||||
"\n",
|
||||
"sol_code_path_0 = os.path.join(\"mlp_sigmoid\", 'test.sol')\n",
|
||||
"sol_code_path_1 = os.path.join(\"mlp_relu\", 'test.sol')\n",
|
||||
"\n",
|
||||
"sol_code_path_2 = os.path.join(\"conv_sigmoid\", 'test.sol')\n",
|
||||
"sol_code_path_3 = os.path.join(\"conv_relu\", 'test.sol')\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"assert compare_files(sol_code_path_0, sol_code_path_1) == True\n",
|
||||
"assert compare_files(sol_code_path_2, sol_code_path_3) == True"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Here we deploy separate verifier that will be shared by the four models. We picked the `1l_mlp sigmoid` model as an example but you could have used any of the generated verifiers since they are all identical. "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import os \n",
|
||||
"addr_path_verifier = \"addr_verifier.txt\"\n",
|
||||
"sol_code_path = os.path.join(\"mlp_sigmoid\", 'test.sol')\n",
|
||||
"\n",
|
||||
"res = await ezkl.deploy_evm(\n",
|
||||
" addr_path_verifier,\n",
|
||||
" sol_code_path,\n",
|
||||
" 'http://127.0.0.1:3030',\n",
|
||||
" \"verifier/reusable\"\n",
|
||||
")\n",
|
||||
"\n",
|
||||
"assert res == True\n",
|
||||
"\n",
|
||||
"with open(addr_path_verifier, 'r') as file:\n",
|
||||
" addr = file.read().rstrip()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Finally we deploy each of the unique VK-artifacts and verify them using the shared verifier deployed in the previous step."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"for name in names:\n",
|
||||
" addr_path_vk = \"addr_vk.txt\"\n",
|
||||
" sol_key_code_path = os.path.join(name, 'test_key.sol')\n",
|
||||
" res = await ezkl.deploy_evm(addr_path_vk, sol_key_code_path, 'http://127.0.0.1:3030', \"vka\")\n",
|
||||
" assert res == True\n",
|
||||
"\n",
|
||||
" with open(addr_path_vk, 'r') as file:\n",
|
||||
" addr_vk = file.read().rstrip()\n",
|
||||
" \n",
|
||||
" proof_path = os.path.join(name, \"proof.json\")\n",
|
||||
" sol_code_path = os.path.join(name, 'vk.sol')\n",
|
||||
" res = await ezkl.verify_evm(\n",
|
||||
" addr,\n",
|
||||
" proof_path,\n",
|
||||
" \"http://127.0.0.1:3030\",\n",
|
||||
" addr_vk = addr_vk\n",
|
||||
" )\n",
|
||||
" assert res == True"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": ".env",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.11.5"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
@@ -9,7 +9,9 @@ class MyModel(nn.Module):
|
||||
super(MyModel, self).__init__()
|
||||
|
||||
def forward(self, w, x, y, z):
|
||||
return [((x & y)) == (x & (y | (z ^ w)))]
|
||||
a = (x & y)
|
||||
b = (y & (z ^ w))
|
||||
return [a & b]
|
||||
|
||||
|
||||
circuit = MyModel()
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"input_data": [[false, true, false], [true, false, false], [true, false, false], [false, false, false]]}
|
||||
{"input_data": [[false, true, true], [false, true, true], [true, false, false], [false, true, true]]}
|
||||
@@ -1,21 +1,17 @@
|
||||
pytorch1.12.1:«
|
||||
+
|
||||
pytorch2.2.2:„
|
||||
*
|
||||
input1
|
||||
input2
|
||||
onnx::Equal_4And_0"And
|
||||
input2
|
||||
/And_output_0/And"And
|
||||
)
|
||||
input3
|
||||
input
|
||||
onnx::Or_5Xor_1"Xor
|
||||
input3
|
||||
input
|
||||
/Xor_output_0/Xor"Xor
|
||||
input2
|
||||
|
||||
onnx::Or_5onnx::And_6Or_2"Or
|
||||
0
|
||||
input1
|
||||
onnx::And_6
|
||||
onnx::Equal_7And_3"And
|
||||
6
|
||||
5
|
||||
input2
|
||||
|
||||
/Xor_output_0/And_1_output_0/And_1"And
|
||||
5
|
||||
|
||||
/And_output_0
|
||||
/And_1_output_0output/And_2"And
|
||||
|
||||
@@ -9,7 +9,6 @@ import { EVM } from '@ethereumjs/evm'
|
||||
import { buildTransaction, encodeDeployment } from './utils/tx-builder'
|
||||
import { getAccountNonce, insertAccount } from './utils/account-utils'
|
||||
import { encodeVerifierCalldata } from '../nodejs/ezkl';
|
||||
import { error } from 'console'
|
||||
|
||||
async function deployContract(
|
||||
vm: VM,
|
||||
@@ -66,7 +65,7 @@ async function verify(
|
||||
vkAddress = new Uint8Array(uint8Array.buffer);
|
||||
|
||||
// convert uitn8array of length
|
||||
error('vkAddress', vkAddress)
|
||||
console.error('vkAddress', vkAddress)
|
||||
}
|
||||
const data = encodeVerifierCalldata(proof, vkAddress)
|
||||
|
||||
|
||||
@@ -1,25 +1,28 @@
|
||||
// ignore file if compiling for wasm
|
||||
#[global_allocator]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use clap::{CommandFactory, Parser};
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use colored_json::ToColoredJson;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use ezkl::commands::Cli;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use ezkl::execute::run;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use ezkl::logger::init_logger;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use log::{error, info};
|
||||
#[cfg(not(any(target_arch = "wasm32", feature = "no-banner")))]
|
||||
use rand::prelude::SliceRandom;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
#[cfg(feature = "icicle")]
|
||||
use std::env;
|
||||
|
||||
#[tokio::main(flavor = "current_thread")]
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
pub async fn main() {
|
||||
let args = Cli::parse();
|
||||
|
||||
@@ -56,7 +59,7 @@ pub async fn main() {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
#[cfg(any(not(feature = "ezkl"), target_arch = "wasm32"))]
|
||||
pub fn main() {}
|
||||
|
||||
#[cfg(not(any(target_arch = "wasm32", feature = "no-banner")))]
|
||||
|
||||
269
src/bin/ios_gen_bindings.rs
Normal file
269
src/bin/ios_gen_bindings.rs
Normal file
@@ -0,0 +1,269 @@
|
||||
use camino::Utf8Path;
|
||||
use std::fs;
|
||||
use std::fs::remove_dir_all;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::Command;
|
||||
use uniffi_bindgen::bindings::SwiftBindingGenerator;
|
||||
use uniffi_bindgen::library_mode::generate_bindings;
|
||||
use uuid::Uuid;
|
||||
|
||||
fn main() {
|
||||
let library_name = std::env::var("CARGO_PKG_NAME").expect("CARGO_PKG_NAME is not set");
|
||||
let mode = determine_build_mode();
|
||||
build_bindings(&library_name, mode);
|
||||
}
|
||||
|
||||
/// Determines the build mode based on the CONFIGURATION environment variable.
|
||||
/// Defaults to "release" if not set or unrecognized.
|
||||
/// "release" mode takes longer to build but produces optimized code, which has smaller size and is faster.
|
||||
fn determine_build_mode() -> &'static str {
|
||||
match std::env::var("CONFIGURATION").map(|s| s.to_lowercase()) {
|
||||
Ok(ref config) if config == "debug" => "debug",
|
||||
_ => "release",
|
||||
}
|
||||
}
|
||||
|
||||
/// Builds the Swift bindings and XCFramework for the specified library and build mode.
|
||||
fn build_bindings(library_name: &str, mode: &str) {
|
||||
// Get the root directory of this Cargo project
|
||||
let manifest_dir = std::env::var_os("CARGO_MANIFEST_DIR")
|
||||
.map(PathBuf::from)
|
||||
.unwrap_or_else(|| std::env::current_dir().unwrap());
|
||||
|
||||
// Define the build directory inside the manifest directory
|
||||
let build_dir = manifest_dir.join("build");
|
||||
|
||||
// Create a temporary directory to store the bindings and combined library
|
||||
let tmp_dir = mktemp_local(&build_dir);
|
||||
|
||||
// Define directories for Swift bindings and output bindings
|
||||
let swift_bindings_dir = tmp_dir.join("SwiftBindings");
|
||||
let bindings_out = create_bindings_out_dir(&tmp_dir);
|
||||
let framework_out = bindings_out.join("EzklCore.xcframework");
|
||||
|
||||
// Define target architectures for building
|
||||
// We currently only support iOS devices and simulators running on ARM Macs
|
||||
// This is due to limiting the library size to under 100MB for GitHub Commit Size Limit
|
||||
// To support older Macs (Intel), follow the instructions in the comments below
|
||||
#[allow(clippy::useless_vec)]
|
||||
let target_archs = vec![
|
||||
vec!["aarch64-apple-ios"], // iOS device
|
||||
vec!["aarch64-apple-ios-sim"], // iOS simulator ARM Mac
|
||||
// vec!["aarch64-apple-ios-sim", "x86_64-apple-ios"], // TODO - replace the above line with this line to allow running on older Macs (Intel)
|
||||
];
|
||||
|
||||
// Build the library for each architecture and combine them
|
||||
let out_lib_paths: Vec<PathBuf> = target_archs
|
||||
.iter()
|
||||
.map(|archs| build_combined_archs(library_name, archs, &build_dir, mode))
|
||||
.collect();
|
||||
|
||||
// Generate the path to the built dynamic library (.dylib)
|
||||
let out_dylib_path = build_dir.join(format!(
|
||||
"{}/{}/lib{}.dylib",
|
||||
target_archs[0][0], mode, library_name
|
||||
));
|
||||
|
||||
// Generate Swift bindings using uniffi_bindgen
|
||||
generate_ios_bindings(&out_dylib_path, &swift_bindings_dir)
|
||||
.expect("Failed to generate iOS bindings");
|
||||
|
||||
// Move the generated Swift file to the bindings output directory
|
||||
fs::rename(
|
||||
swift_bindings_dir.join(format!("{}.swift", library_name)),
|
||||
bindings_out.join("EzklCore.swift"),
|
||||
)
|
||||
.expect("Failed to copy swift bindings file");
|
||||
|
||||
// Rename the `ios_ezklFFI.modulemap` file to `module.modulemap`
|
||||
fs::rename(
|
||||
swift_bindings_dir.join(format!("{}FFI.modulemap", library_name)),
|
||||
swift_bindings_dir.join("module.modulemap"),
|
||||
)
|
||||
.expect("Failed to rename modulemap file");
|
||||
|
||||
// Create the XCFramework from the combined libraries and Swift bindings
|
||||
create_xcframework(&out_lib_paths, &swift_bindings_dir, &framework_out);
|
||||
|
||||
// Define the destination directory for the bindings
|
||||
let bindings_dest = build_dir.join("EzklCoreBindings");
|
||||
if bindings_dest.exists() {
|
||||
fs::remove_dir_all(&bindings_dest).expect("Failed to remove existing bindings directory");
|
||||
}
|
||||
|
||||
// Move the bindings output to the destination directory
|
||||
fs::rename(&bindings_out, &bindings_dest).expect("Failed to move framework into place");
|
||||
|
||||
// Clean up temporary directories
|
||||
cleanup_temp_dirs(&build_dir);
|
||||
}
|
||||
|
||||
/// Creates the output directory for the bindings.
|
||||
/// Returns the path to the bindings output directory.
|
||||
fn create_bindings_out_dir(base_dir: &Path) -> PathBuf {
|
||||
let bindings_out = base_dir.join("EzklCoreBindings");
|
||||
fs::create_dir_all(&bindings_out).expect("Failed to create bindings output directory");
|
||||
bindings_out
|
||||
}
|
||||
|
||||
/// Builds the library for each architecture and combines them into a single library using lipo.
|
||||
/// Returns the path to the combined library.
|
||||
fn build_combined_archs(
|
||||
library_name: &str,
|
||||
archs: &[&str],
|
||||
build_dir: &Path,
|
||||
mode: &str,
|
||||
) -> PathBuf {
|
||||
// Build the library for each architecture
|
||||
let out_lib_paths: Vec<PathBuf> = archs
|
||||
.iter()
|
||||
.map(|&arch| {
|
||||
build_for_arch(arch, build_dir, mode);
|
||||
build_dir
|
||||
.join(arch)
|
||||
.join(mode)
|
||||
.join(format!("lib{}.a", library_name))
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Create a unique temporary directory for the combined library
|
||||
let lib_out = mktemp_local(build_dir).join(format!("lib{}.a", library_name));
|
||||
|
||||
// Combine the libraries using lipo
|
||||
let mut lipo_cmd = Command::new("lipo");
|
||||
lipo_cmd
|
||||
.arg("-create")
|
||||
.arg("-output")
|
||||
.arg(lib_out.to_str().unwrap());
|
||||
for lib_path in &out_lib_paths {
|
||||
lipo_cmd.arg(lib_path.to_str().unwrap());
|
||||
}
|
||||
|
||||
let status = lipo_cmd.status().expect("Failed to run lipo command");
|
||||
if !status.success() {
|
||||
panic!("lipo command failed with status: {}", status);
|
||||
}
|
||||
|
||||
lib_out
|
||||
}
|
||||
|
||||
/// Builds the library for a specific architecture.
|
||||
fn build_for_arch(arch: &str, build_dir: &Path, mode: &str) {
|
||||
// Ensure the target architecture is installed
|
||||
install_arch(arch);
|
||||
|
||||
// Run cargo build for the specified architecture and mode
|
||||
let mut build_cmd = Command::new("cargo");
|
||||
build_cmd
|
||||
.arg("build")
|
||||
.arg("--no-default-features")
|
||||
.arg("--features")
|
||||
.arg("ios-bindings");
|
||||
|
||||
if mode == "release" {
|
||||
build_cmd.arg("--release");
|
||||
}
|
||||
build_cmd
|
||||
.arg("--lib")
|
||||
.env("CARGO_BUILD_TARGET_DIR", build_dir)
|
||||
.env("CARGO_BUILD_TARGET", arch);
|
||||
|
||||
let status = build_cmd.status().expect("Failed to run cargo build");
|
||||
if !status.success() {
|
||||
panic!("cargo build failed for architecture: {}", arch);
|
||||
}
|
||||
}
|
||||
|
||||
/// Installs the specified target architecture using rustup.
|
||||
fn install_arch(arch: &str) {
|
||||
let status = Command::new("rustup")
|
||||
.arg("target")
|
||||
.arg("add")
|
||||
.arg(arch)
|
||||
.status()
|
||||
.expect("Failed to run rustup command");
|
||||
|
||||
if !status.success() {
|
||||
panic!("Failed to install target architecture: {}", arch);
|
||||
}
|
||||
}
|
||||
|
||||
/// Generates Swift bindings for the iOS library using uniffi_bindgen.
|
||||
fn generate_ios_bindings(dylib_path: &Path, binding_dir: &Path) -> Result<(), std::io::Error> {
|
||||
// Remove existing binding directory if it exists
|
||||
if binding_dir.exists() {
|
||||
remove_dir_all(binding_dir)?;
|
||||
}
|
||||
|
||||
// Generate the Swift bindings using uniffi_bindgen
|
||||
generate_bindings(
|
||||
Utf8Path::from_path(dylib_path).ok_or_else(|| {
|
||||
std::io::Error::new(std::io::ErrorKind::InvalidInput, "Invalid dylib path")
|
||||
})?,
|
||||
None,
|
||||
&SwiftBindingGenerator,
|
||||
None,
|
||||
Utf8Path::from_path(binding_dir).ok_or_else(|| {
|
||||
std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidInput,
|
||||
"Invalid Swift bindings directory",
|
||||
)
|
||||
})?,
|
||||
true,
|
||||
)
|
||||
.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e.to_string()))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Creates an XCFramework from the combined libraries and Swift bindings.
|
||||
fn create_xcframework(lib_paths: &[PathBuf], swift_bindings_dir: &Path, framework_out: &Path) {
|
||||
let mut xcbuild_cmd = Command::new("xcodebuild");
|
||||
xcbuild_cmd.arg("-create-xcframework");
|
||||
|
||||
// Add each library and its corresponding headers to the xcodebuild command
|
||||
for lib_path in lib_paths {
|
||||
println!("Including library: {:?}", lib_path);
|
||||
xcbuild_cmd.arg("-library");
|
||||
xcbuild_cmd.arg(lib_path.to_str().unwrap());
|
||||
xcbuild_cmd.arg("-headers");
|
||||
xcbuild_cmd.arg(swift_bindings_dir.to_str().unwrap());
|
||||
}
|
||||
|
||||
xcbuild_cmd.arg("-output");
|
||||
xcbuild_cmd.arg(framework_out.to_str().unwrap());
|
||||
|
||||
let status = xcbuild_cmd.status().expect("Failed to run xcodebuild");
|
||||
if !status.success() {
|
||||
panic!("xcodebuild failed with status: {}", status);
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a temporary directory inside the build path with a unique UUID.
|
||||
/// This ensures unique build artifacts for concurrent builds.
|
||||
fn mktemp_local(build_path: &Path) -> PathBuf {
|
||||
let dir = tmp_local(build_path).join(Uuid::new_v4().to_string());
|
||||
fs::create_dir(&dir).expect("Failed to create temporary directory");
|
||||
dir
|
||||
}
|
||||
|
||||
/// Gets the path to the local temporary directory inside the build path.
|
||||
fn tmp_local(build_path: &Path) -> PathBuf {
|
||||
let tmp_path = build_path.join("tmp");
|
||||
if let Ok(metadata) = fs::metadata(&tmp_path) {
|
||||
if !metadata.is_dir() {
|
||||
panic!("Expected 'tmp' to be a directory");
|
||||
}
|
||||
} else {
|
||||
fs::create_dir_all(&tmp_path).expect("Failed to create local temporary directory");
|
||||
}
|
||||
tmp_path
|
||||
}
|
||||
|
||||
/// Cleans up temporary directories inside the build path.
|
||||
fn cleanup_temp_dirs(build_dir: &Path) {
|
||||
let tmp_dir = build_dir.join("tmp");
|
||||
if tmp_dir.exists() {
|
||||
fs::remove_dir_all(tmp_dir).expect("Failed to remove temporary directories");
|
||||
}
|
||||
}
|
||||
12
src/bindings/mod.rs
Normal file
12
src/bindings/mod.rs
Normal file
@@ -0,0 +1,12 @@
|
||||
/// Python bindings
|
||||
#[cfg(feature = "python-bindings")]
|
||||
pub mod python;
|
||||
/// Universal bindings for all platforms
|
||||
#[cfg(any(
|
||||
feature = "ios-bindings",
|
||||
all(target_arch = "wasm32", target_os = "unknown")
|
||||
))]
|
||||
pub mod universal;
|
||||
/// wasm prover and verifier
|
||||
#[cfg(all(target_arch = "wasm32", target_os = "unknown"))]
|
||||
pub mod wasm;
|
||||
@@ -191,6 +191,12 @@ struct PyRunArgs {
|
||||
#[pyo3(get, set)]
|
||||
/// str: commitment type, accepts `kzg`, `ipa`
|
||||
pub commitment: PyCommitments,
|
||||
/// int: The base used for decomposition
|
||||
#[pyo3(get, set)]
|
||||
pub decomp_base: usize,
|
||||
/// int: The number of legs used for decomposition
|
||||
#[pyo3(get, set)]
|
||||
pub decomp_legs: usize,
|
||||
}
|
||||
|
||||
/// default instantiation of PyRunArgs
|
||||
@@ -221,6 +227,8 @@ impl From<PyRunArgs> for RunArgs {
|
||||
rebase_frac_zero_constants: py_run_args.rebase_frac_zero_constants,
|
||||
check_mode: py_run_args.check_mode,
|
||||
commitment: Some(py_run_args.commitment.into()),
|
||||
decomp_base: py_run_args.decomp_base,
|
||||
decomp_legs: py_run_args.decomp_legs,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -243,6 +251,8 @@ impl Into<PyRunArgs> for RunArgs {
|
||||
rebase_frac_zero_constants: self.rebase_frac_zero_constants,
|
||||
check_mode: self.check_mode,
|
||||
commitment: self.commitment.into(),
|
||||
decomp_base: self.decomp_base,
|
||||
decomp_legs: self.decomp_legs,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1490,8 +1500,8 @@ fn encode_evm_calldata<'a>(
|
||||
/// srs_path: str
|
||||
/// The path to the SRS file
|
||||
///
|
||||
/// render_vk_separately: bool
|
||||
/// Whether the verifier key should be rendered as a separate contract. We recommend disabling selector compression if this is enabled. To save the verifier key as a separate contract, set this to true and then call the create_evm_vk command
|
||||
/// reusable: bool
|
||||
/// Whether the verifier should be rendered as a reusable contract. If so, then you will need to deploy the VK artifact separately which you can generate using the create_evm_vka command
|
||||
///
|
||||
/// Returns
|
||||
/// -------
|
||||
@@ -1503,7 +1513,7 @@ fn encode_evm_calldata<'a>(
|
||||
sol_code_path=PathBuf::from(DEFAULT_SOL_CODE),
|
||||
abi_path=PathBuf::from(DEFAULT_VERIFIER_ABI),
|
||||
srs_path=None,
|
||||
render_vk_seperately = DEFAULT_RENDER_VK_SEPERATELY.parse().unwrap(),
|
||||
reusable = DEFAULT_RENDER_REUSABLE.parse().unwrap(),
|
||||
))]
|
||||
fn create_evm_verifier(
|
||||
py: Python,
|
||||
@@ -1512,7 +1522,7 @@ fn create_evm_verifier(
|
||||
sol_code_path: PathBuf,
|
||||
abi_path: PathBuf,
|
||||
srs_path: Option<PathBuf>,
|
||||
render_vk_seperately: bool,
|
||||
reusable: bool,
|
||||
) -> PyResult<Bound<'_, PyAny>> {
|
||||
pyo3_asyncio::tokio::future_into_py(py, async move {
|
||||
crate::execute::create_evm_verifier(
|
||||
@@ -1521,7 +1531,7 @@ fn create_evm_verifier(
|
||||
settings_path,
|
||||
sol_code_path,
|
||||
abi_path,
|
||||
render_vk_seperately,
|
||||
reusable,
|
||||
)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
@@ -1533,7 +1543,8 @@ fn create_evm_verifier(
|
||||
})
|
||||
}
|
||||
|
||||
/// Creates an Evm verifer key. This command should be called after create_evm_verifier with the render_vk_separately arg set to true. By rendering a verification key separately you can reuse the same verifier for similar circuit setups with different verifying keys, helping to reduce the amount of state our verifiers store on the blockchain.
|
||||
/// Creates an Evm VK artifact. This command generated a VK with circuit specific meta data encoding in memory for use by the reusable H2 verifier.
|
||||
/// This is useful for deploying verifier that were otherwise too big to fit on chain and required aggregation.
|
||||
///
|
||||
/// Arguments
|
||||
/// ---------
|
||||
@@ -1563,7 +1574,7 @@ fn create_evm_verifier(
|
||||
abi_path=PathBuf::from(DEFAULT_VERIFIER_ABI),
|
||||
srs_path=None
|
||||
))]
|
||||
fn create_evm_vk(
|
||||
fn create_evm_vka(
|
||||
py: Python,
|
||||
vk_path: PathBuf,
|
||||
settings_path: PathBuf,
|
||||
@@ -1572,7 +1583,7 @@ fn create_evm_vk(
|
||||
srs_path: Option<PathBuf>,
|
||||
) -> PyResult<Bound<'_, PyAny>> {
|
||||
pyo3_asyncio::tokio::future_into_py(py, async move {
|
||||
crate::execute::create_evm_vk(vk_path, srs_path, settings_path, sol_code_path, abi_path)
|
||||
crate::execute::create_evm_vka(vk_path, srs_path, settings_path, sol_code_path, abi_path)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
let err_str = format!("Failed to run create_evm_verifier: {}", e);
|
||||
@@ -1703,6 +1714,7 @@ fn setup_test_evm_witness(
|
||||
addr_path,
|
||||
sol_code_path=PathBuf::from(DEFAULT_SOL_CODE),
|
||||
rpc_url=None,
|
||||
contract_type=ContractType::default(),
|
||||
optimizer_runs=DEFAULT_OPTIMIZER_RUNS.parse().unwrap(),
|
||||
private_key=None,
|
||||
))]
|
||||
@@ -1711,6 +1723,7 @@ fn deploy_evm(
|
||||
addr_path: PathBuf,
|
||||
sol_code_path: PathBuf,
|
||||
rpc_url: Option<String>,
|
||||
contract_type: ContractType,
|
||||
optimizer_runs: usize,
|
||||
private_key: Option<String>,
|
||||
) -> PyResult<Bound<'_, PyAny>> {
|
||||
@@ -1721,42 +1734,7 @@ fn deploy_evm(
|
||||
addr_path,
|
||||
optimizer_runs,
|
||||
private_key,
|
||||
"Halo2Verifier",
|
||||
)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
let err_str = format!("Failed to run deploy_evm: {}", e);
|
||||
PyRuntimeError::new_err(err_str)
|
||||
})?;
|
||||
|
||||
Ok(true)
|
||||
})
|
||||
}
|
||||
|
||||
/// deploys the solidity vk verifier
|
||||
#[pyfunction(signature = (
|
||||
addr_path,
|
||||
sol_code_path=PathBuf::from(DEFAULT_VK_SOL),
|
||||
rpc_url=None,
|
||||
optimizer_runs=DEFAULT_OPTIMIZER_RUNS.parse().unwrap(),
|
||||
private_key=None,
|
||||
))]
|
||||
fn deploy_vk_evm(
|
||||
py: Python,
|
||||
addr_path: PathBuf,
|
||||
sol_code_path: PathBuf,
|
||||
rpc_url: Option<String>,
|
||||
optimizer_runs: usize,
|
||||
private_key: Option<String>,
|
||||
) -> PyResult<Bound<'_, PyAny>> {
|
||||
pyo3_asyncio::tokio::future_into_py(py, async move {
|
||||
crate::execute::deploy_evm(
|
||||
sol_code_path,
|
||||
rpc_url,
|
||||
addr_path,
|
||||
optimizer_runs,
|
||||
private_key,
|
||||
"Halo2VerifyingKey",
|
||||
contract_type,
|
||||
)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
@@ -1892,8 +1870,8 @@ fn verify_evm<'a>(
|
||||
/// srs_path: str
|
||||
/// The path to the SRS file
|
||||
///
|
||||
/// render_vk_separately: bool
|
||||
/// Whether the verifier key should be rendered as a separate contract. We recommend disabling selector compression if this is enabled. To save the verifier key as a separate contract, set this to true and then call the create-evm-vk command
|
||||
/// reusable: bool
|
||||
/// Whether the verifier should be rendered as a reusable contract. If so, then you will need to deploy the VK artifact separately which you can generate using the create_evm_vka command
|
||||
///
|
||||
/// Returns
|
||||
/// -------
|
||||
@@ -1906,7 +1884,7 @@ fn verify_evm<'a>(
|
||||
abi_path=PathBuf::from(DEFAULT_VERIFIER_ABI),
|
||||
logrows=DEFAULT_AGGREGATED_LOGROWS.parse().unwrap(),
|
||||
srs_path=None,
|
||||
render_vk_seperately = DEFAULT_RENDER_VK_SEPERATELY.parse().unwrap(),
|
||||
reusable = DEFAULT_RENDER_REUSABLE.parse().unwrap(),
|
||||
))]
|
||||
fn create_evm_verifier_aggr(
|
||||
py: Python,
|
||||
@@ -1916,7 +1894,7 @@ fn create_evm_verifier_aggr(
|
||||
abi_path: PathBuf,
|
||||
logrows: u32,
|
||||
srs_path: Option<PathBuf>,
|
||||
render_vk_seperately: bool,
|
||||
reusable: bool,
|
||||
) -> PyResult<Bound<'_, PyAny>> {
|
||||
pyo3_asyncio::tokio::future_into_py(py, async move {
|
||||
crate::execute::create_evm_aggregate_verifier(
|
||||
@@ -1926,7 +1904,7 @@ fn create_evm_verifier_aggr(
|
||||
abi_path,
|
||||
aggregation_settings,
|
||||
logrows,
|
||||
render_vk_seperately,
|
||||
reusable,
|
||||
)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
@@ -1975,9 +1953,8 @@ fn ezkl(_py: Python<'_>, m: &PyModule) -> PyResult<()> {
|
||||
m.add_function(wrap_pyfunction!(compile_circuit, m)?)?;
|
||||
m.add_function(wrap_pyfunction!(verify_aggr, m)?)?;
|
||||
m.add_function(wrap_pyfunction!(create_evm_verifier, m)?)?;
|
||||
m.add_function(wrap_pyfunction!(create_evm_vk, m)?)?;
|
||||
m.add_function(wrap_pyfunction!(create_evm_vka, m)?)?;
|
||||
m.add_function(wrap_pyfunction!(deploy_evm, m)?)?;
|
||||
m.add_function(wrap_pyfunction!(deploy_vk_evm, m)?)?;
|
||||
m.add_function(wrap_pyfunction!(deploy_da_evm, m)?)?;
|
||||
m.add_function(wrap_pyfunction!(verify_evm, m)?)?;
|
||||
m.add_function(wrap_pyfunction!(setup_test_evm_witness, m)?)?;
|
||||
579
src/bindings/universal.rs
Normal file
579
src/bindings/universal.rs
Normal file
@@ -0,0 +1,579 @@
|
||||
use halo2_proofs::{
|
||||
plonk::*,
|
||||
poly::{
|
||||
commitment::{CommitmentScheme, ParamsProver},
|
||||
ipa::{
|
||||
commitment::{IPACommitmentScheme, ParamsIPA},
|
||||
multiopen::{ProverIPA, VerifierIPA},
|
||||
strategy::SingleStrategy as IPASingleStrategy,
|
||||
},
|
||||
kzg::{
|
||||
commitment::{KZGCommitmentScheme, ParamsKZG},
|
||||
multiopen::{ProverSHPLONK, VerifierSHPLONK},
|
||||
strategy::SingleStrategy as KZGSingleStrategy,
|
||||
},
|
||||
VerificationStrategy,
|
||||
},
|
||||
};
|
||||
use std::fmt::Display;
|
||||
use std::io::BufReader;
|
||||
use std::str::FromStr;
|
||||
|
||||
use crate::{
|
||||
circuit::region::RegionSettings,
|
||||
graph::GraphSettings,
|
||||
pfsys::{
|
||||
create_proof_circuit,
|
||||
evm::aggregation_kzg::{AggregationCircuit, PoseidonTranscript},
|
||||
verify_proof_circuit, TranscriptType,
|
||||
},
|
||||
tensor::TensorType,
|
||||
CheckMode, Commitments, EZKLError as InnerEZKLError,
|
||||
};
|
||||
|
||||
use crate::graph::{GraphCircuit, GraphWitness};
|
||||
use halo2_solidity_verifier::encode_calldata;
|
||||
use halo2curves::{
|
||||
bn256::{Bn256, Fr, G1Affine},
|
||||
ff::{FromUniformBytes, PrimeField},
|
||||
};
|
||||
use snark_verifier::{loader::native::NativeLoader, system::halo2::transcript::evm::EvmTranscript};
|
||||
|
||||
/// Wrapper around the Error Message
|
||||
#[cfg_attr(feature = "ios-bindings", derive(uniffi::Error))]
|
||||
#[derive(Debug)]
|
||||
pub enum EZKLError {
|
||||
/// Some Comment
|
||||
InternalError(String),
|
||||
}
|
||||
|
||||
impl Display for EZKLError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
EZKLError::InternalError(e) => write!(f, "Internal error: {}", e),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<InnerEZKLError> for EZKLError {
|
||||
fn from(e: InnerEZKLError) -> Self {
|
||||
EZKLError::InternalError(e.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
/// Encode verifier calldata from proof and ethereum vk_address
|
||||
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
|
||||
pub(crate) fn encode_verifier_calldata(
|
||||
// TODO - shuold it be pub(crate) or pub or pub(super)?
|
||||
proof: Vec<u8>,
|
||||
vk_address: Option<Vec<u8>>,
|
||||
) -> Result<Vec<u8>, EZKLError> {
|
||||
let snark: crate::pfsys::Snark<Fr, G1Affine> =
|
||||
serde_json::from_slice(&proof[..]).map_err(InnerEZKLError::from)?;
|
||||
|
||||
let vk_address: Option<[u8; 20]> = if let Some(vk_address) = vk_address {
|
||||
let array: [u8; 20] =
|
||||
serde_json::from_slice(&vk_address[..]).map_err(InnerEZKLError::from)?;
|
||||
Some(array)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let flattened_instances = snark.instances.into_iter().flatten();
|
||||
|
||||
let encoded = encode_calldata(
|
||||
vk_address,
|
||||
&snark.proof,
|
||||
&flattened_instances.collect::<Vec<_>>(),
|
||||
);
|
||||
|
||||
Ok(encoded)
|
||||
}
|
||||
|
||||
/// Generate witness from compiled circuit and input json
|
||||
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
|
||||
pub(crate) fn gen_witness(compiled_circuit: Vec<u8>, input: Vec<u8>) -> Result<Vec<u8>, EZKLError> {
|
||||
let mut circuit: crate::graph::GraphCircuit = bincode::deserialize(&compiled_circuit[..])
|
||||
.map_err(|e| {
|
||||
EZKLError::InternalError(format!("Failed to deserialize compiled model: {}", e))
|
||||
})?;
|
||||
let input: crate::graph::input::GraphData = serde_json::from_slice(&input[..])
|
||||
.map_err(|e| EZKLError::InternalError(format!("Failed to deserialize input: {}", e)))?;
|
||||
|
||||
let mut input = circuit
|
||||
.load_graph_input(&input)
|
||||
.map_err(|e| EZKLError::InternalError(format!("{}", e)))?;
|
||||
|
||||
let witness = circuit
|
||||
.forward::<KZGCommitmentScheme<Bn256>>(
|
||||
&mut input,
|
||||
None,
|
||||
None,
|
||||
RegionSettings::all_true(
|
||||
circuit.settings().run_args.decomp_base,
|
||||
circuit.settings().run_args.decomp_legs,
|
||||
),
|
||||
)
|
||||
.map_err(|e| EZKLError::InternalError(format!("{}", e)))?;
|
||||
|
||||
serde_json::to_vec(&witness)
|
||||
.map_err(|e| EZKLError::InternalError(format!("Failed to serialize witness: {}", e)))
|
||||
}
|
||||
|
||||
/// Generate verifying key from compiled circuit, and parameters srs
|
||||
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
|
||||
pub(crate) fn gen_vk(
|
||||
compiled_circuit: Vec<u8>,
|
||||
srs: Vec<u8>,
|
||||
compress_selectors: bool,
|
||||
) -> Result<Vec<u8>, EZKLError> {
|
||||
let mut reader = BufReader::new(&srs[..]);
|
||||
let params: ParamsKZG<Bn256> = get_params(&mut reader)?;
|
||||
|
||||
let circuit: GraphCircuit = bincode::deserialize(&compiled_circuit[..])
|
||||
.map_err(|e| EZKLError::InternalError(format!("Failed to deserialize circuit: {}", e)))?;
|
||||
|
||||
let vk = create_vk_lean::<KZGCommitmentScheme<Bn256>, Fr, GraphCircuit>(
|
||||
&circuit,
|
||||
¶ms,
|
||||
compress_selectors,
|
||||
)
|
||||
.map_err(|e| EZKLError::InternalError(format!("Failed to create verifying key: {}", e)))?;
|
||||
|
||||
let mut serialized_vk = Vec::new();
|
||||
vk.write(&mut serialized_vk, halo2_proofs::SerdeFormat::RawBytes)
|
||||
.map_err(|e| {
|
||||
EZKLError::InternalError(format!("Failed to serialize verifying key: {}", e))
|
||||
})?;
|
||||
|
||||
Ok(serialized_vk)
|
||||
}
|
||||
|
||||
/// Generate proving key from vk, compiled circuit and parameters srs
|
||||
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
|
||||
pub(crate) fn gen_pk(
|
||||
vk: Vec<u8>,
|
||||
compiled_circuit: Vec<u8>,
|
||||
srs: Vec<u8>,
|
||||
) -> Result<Vec<u8>, EZKLError> {
|
||||
let mut reader = BufReader::new(&srs[..]);
|
||||
let params: ParamsKZG<Bn256> = get_params(&mut reader)?;
|
||||
|
||||
let circuit: GraphCircuit = bincode::deserialize(&compiled_circuit[..])
|
||||
.map_err(|e| EZKLError::InternalError(format!("Failed to deserialize circuit: {}", e)))?;
|
||||
|
||||
let mut reader = BufReader::new(&vk[..]);
|
||||
let vk = VerifyingKey::<G1Affine>::read::<_, GraphCircuit>(
|
||||
&mut reader,
|
||||
halo2_proofs::SerdeFormat::RawBytes,
|
||||
circuit.settings().clone(),
|
||||
)
|
||||
.map_err(|e| EZKLError::InternalError(format!("Failed to deserialize verifying key: {}", e)))?;
|
||||
|
||||
let pk = create_pk_lean::<KZGCommitmentScheme<Bn256>, Fr, GraphCircuit>(vk, &circuit, ¶ms)
|
||||
.map_err(|e| EZKLError::InternalError(format!("Failed to create proving key: {}", e)))?;
|
||||
|
||||
let mut serialized_pk = Vec::new();
|
||||
pk.write(&mut serialized_pk, halo2_proofs::SerdeFormat::RawBytes)
|
||||
.map_err(|e| EZKLError::InternalError(format!("Failed to serialize proving key: {}", e)))?;
|
||||
|
||||
Ok(serialized_pk)
|
||||
}
|
||||
|
||||
/// Verify proof with vk, proof json, circuit settings json and srs
|
||||
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
|
||||
pub(crate) fn verify(
|
||||
proof: Vec<u8>,
|
||||
vk: Vec<u8>,
|
||||
settings: Vec<u8>,
|
||||
srs: Vec<u8>,
|
||||
) -> Result<bool, EZKLError> {
|
||||
let circuit_settings: GraphSettings = serde_json::from_slice(&settings[..])
|
||||
.map_err(|e| EZKLError::InternalError(format!("Failed to deserialize settings: {}", e)))?;
|
||||
|
||||
let proof: crate::pfsys::Snark<Fr, G1Affine> = serde_json::from_slice(&proof[..])
|
||||
.map_err(|e| EZKLError::InternalError(format!("Failed to deserialize proof: {}", e)))?;
|
||||
|
||||
let mut reader = BufReader::new(&vk[..]);
|
||||
let vk = VerifyingKey::<G1Affine>::read::<_, GraphCircuit>(
|
||||
&mut reader,
|
||||
halo2_proofs::SerdeFormat::RawBytes,
|
||||
circuit_settings.clone(),
|
||||
)
|
||||
.map_err(|e| EZKLError::InternalError(format!("Failed to deserialize vk: {}", e)))?;
|
||||
|
||||
let orig_n = 1 << circuit_settings.run_args.logrows;
|
||||
let commitment = circuit_settings.run_args.commitment.into();
|
||||
|
||||
let mut reader = BufReader::new(&srs[..]);
|
||||
let result = match commitment {
|
||||
Commitments::KZG => {
|
||||
let params: ParamsKZG<Bn256> = get_params(&mut reader)?;
|
||||
let strategy = KZGSingleStrategy::new(params.verifier_params());
|
||||
match proof.transcript_type {
|
||||
TranscriptType::EVM => verify_proof_circuit::<
|
||||
VerifierSHPLONK<'_, Bn256>,
|
||||
KZGCommitmentScheme<Bn256>,
|
||||
KZGSingleStrategy<_>,
|
||||
_,
|
||||
EvmTranscript<G1Affine, _, _, _>,
|
||||
>(&proof, ¶ms, &vk, strategy, orig_n),
|
||||
TranscriptType::Poseidon => {
|
||||
verify_proof_circuit::<
|
||||
VerifierSHPLONK<'_, Bn256>,
|
||||
KZGCommitmentScheme<Bn256>,
|
||||
KZGSingleStrategy<_>,
|
||||
_,
|
||||
PoseidonTranscript<NativeLoader, _>,
|
||||
>(&proof, ¶ms, &vk, strategy, orig_n)
|
||||
}
|
||||
}
|
||||
}
|
||||
Commitments::IPA => {
|
||||
let params: ParamsIPA<_> = get_params(&mut reader)?;
|
||||
let strategy = IPASingleStrategy::new(params.verifier_params());
|
||||
match proof.transcript_type {
|
||||
TranscriptType::EVM => verify_proof_circuit::<
|
||||
VerifierIPA<_>,
|
||||
IPACommitmentScheme<G1Affine>,
|
||||
IPASingleStrategy<_>,
|
||||
_,
|
||||
EvmTranscript<G1Affine, _, _, _>,
|
||||
>(&proof, ¶ms, &vk, strategy, orig_n),
|
||||
TranscriptType::Poseidon => {
|
||||
verify_proof_circuit::<
|
||||
VerifierIPA<_>,
|
||||
IPACommitmentScheme<G1Affine>,
|
||||
IPASingleStrategy<_>,
|
||||
_,
|
||||
PoseidonTranscript<NativeLoader, _>,
|
||||
>(&proof, ¶ms, &vk, strategy, orig_n)
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
match result {
|
||||
Ok(_) => Ok(true),
|
||||
Err(e) => Err(EZKLError::InternalError(format!(
|
||||
"Verification failed: {}",
|
||||
e
|
||||
))),
|
||||
}
|
||||
}
|
||||
|
||||
/// Verify aggregate proof with vk, proof, circuit settings and srs
|
||||
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
|
||||
pub(crate) fn verify_aggr(
|
||||
proof: Vec<u8>,
|
||||
vk: Vec<u8>,
|
||||
logrows: u64,
|
||||
srs: Vec<u8>,
|
||||
commitment: &str,
|
||||
) -> Result<bool, EZKLError> {
|
||||
let proof: crate::pfsys::Snark<Fr, G1Affine> = serde_json::from_slice(&proof[..])
|
||||
.map_err(|e| EZKLError::InternalError(format!("Failed to deserialize proof: {}", e)))?;
|
||||
|
||||
let mut reader = BufReader::new(&vk[..]);
|
||||
let vk = VerifyingKey::<G1Affine>::read::<_, AggregationCircuit>(
|
||||
&mut reader,
|
||||
halo2_proofs::SerdeFormat::RawBytes,
|
||||
(),
|
||||
)
|
||||
.map_err(|e| EZKLError::InternalError(format!("Failed to deserialize vk: {}", e)))?;
|
||||
|
||||
let commit = Commitments::from_str(commitment)
|
||||
.map_err(|e| EZKLError::InternalError(format!("Invalid commitment: {}", e)))?;
|
||||
|
||||
let orig_n = 1 << logrows;
|
||||
|
||||
let mut reader = BufReader::new(&srs[..]);
|
||||
let result = match commit {
|
||||
Commitments::KZG => {
|
||||
let params: ParamsKZG<Bn256> = get_params(&mut reader)?;
|
||||
let strategy = KZGSingleStrategy::new(params.verifier_params());
|
||||
match proof.transcript_type {
|
||||
TranscriptType::EVM => verify_proof_circuit::<
|
||||
VerifierSHPLONK<'_, Bn256>,
|
||||
KZGCommitmentScheme<Bn256>,
|
||||
KZGSingleStrategy<_>,
|
||||
_,
|
||||
EvmTranscript<G1Affine, _, _, _>,
|
||||
>(&proof, ¶ms, &vk, strategy, orig_n),
|
||||
|
||||
TranscriptType::Poseidon => {
|
||||
verify_proof_circuit::<
|
||||
VerifierSHPLONK<'_, Bn256>,
|
||||
KZGCommitmentScheme<Bn256>,
|
||||
KZGSingleStrategy<_>,
|
||||
_,
|
||||
PoseidonTranscript<NativeLoader, _>,
|
||||
>(&proof, ¶ms, &vk, strategy, orig_n)
|
||||
}
|
||||
}
|
||||
}
|
||||
Commitments::IPA => {
|
||||
let params: ParamsIPA<_> =
|
||||
halo2_proofs::poly::commitment::Params::<'_, G1Affine>::read(&mut reader).map_err(
|
||||
|e| EZKLError::InternalError(format!("Failed to deserialize params: {}", e)),
|
||||
)?;
|
||||
let strategy = IPASingleStrategy::new(params.verifier_params());
|
||||
match proof.transcript_type {
|
||||
TranscriptType::EVM => verify_proof_circuit::<
|
||||
VerifierIPA<_>,
|
||||
IPACommitmentScheme<G1Affine>,
|
||||
IPASingleStrategy<_>,
|
||||
_,
|
||||
EvmTranscript<G1Affine, _, _, _>,
|
||||
>(&proof, ¶ms, &vk, strategy, orig_n),
|
||||
TranscriptType::Poseidon => {
|
||||
verify_proof_circuit::<
|
||||
VerifierIPA<_>,
|
||||
IPACommitmentScheme<G1Affine>,
|
||||
IPASingleStrategy<_>,
|
||||
_,
|
||||
PoseidonTranscript<NativeLoader, _>,
|
||||
>(&proof, ¶ms, &vk, strategy, orig_n)
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
result
|
||||
.map(|_| true)
|
||||
.map_err(|e| EZKLError::InternalError(format!("{}", e)))
|
||||
}
|
||||
|
||||
/// Prove in browser with compiled circuit, witness json, proving key, and srs
|
||||
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
|
||||
pub(crate) fn prove(
|
||||
witness: Vec<u8>,
|
||||
pk: Vec<u8>,
|
||||
compiled_circuit: Vec<u8>,
|
||||
srs: Vec<u8>,
|
||||
) -> Result<Vec<u8>, EZKLError> {
|
||||
#[cfg(feature = "det-prove")]
|
||||
log::set_max_level(log::LevelFilter::Debug);
|
||||
#[cfg(not(feature = "det-prove"))]
|
||||
log::set_max_level(log::LevelFilter::Info);
|
||||
|
||||
let mut circuit: GraphCircuit = bincode::deserialize(&compiled_circuit[..])
|
||||
.map_err(|e| EZKLError::InternalError(format!("Failed to deserialize circuit: {}", e)))?;
|
||||
|
||||
let data: GraphWitness = serde_json::from_slice(&witness[..]).map_err(InnerEZKLError::from)?;
|
||||
|
||||
let mut reader = BufReader::new(&pk[..]);
|
||||
let pk = ProvingKey::<G1Affine>::read::<_, GraphCircuit>(
|
||||
&mut reader,
|
||||
halo2_proofs::SerdeFormat::RawBytes,
|
||||
circuit.settings().clone(),
|
||||
)
|
||||
.map_err(|e| EZKLError::InternalError(format!("Failed to deserialize proving key: {}", e)))?;
|
||||
|
||||
circuit
|
||||
.load_graph_witness(&data)
|
||||
.map_err(InnerEZKLError::from)?;
|
||||
let public_inputs = circuit
|
||||
.prepare_public_inputs(&data)
|
||||
.map_err(InnerEZKLError::from)?;
|
||||
let proof_split_commits: Option<crate::pfsys::ProofSplitCommit> = data.into();
|
||||
|
||||
let mut reader = BufReader::new(&srs[..]);
|
||||
let commitment = circuit.settings().run_args.commitment.into();
|
||||
|
||||
let proof = match commitment {
|
||||
Commitments::KZG => {
|
||||
let params: ParamsKZG<Bn256> =
|
||||
halo2_proofs::poly::commitment::Params::<'_, G1Affine>::read(&mut reader).map_err(
|
||||
|e| EZKLError::InternalError(format!("Failed to deserialize srs: {}", e)),
|
||||
)?;
|
||||
|
||||
create_proof_circuit::<
|
||||
KZGCommitmentScheme<Bn256>,
|
||||
_,
|
||||
ProverSHPLONK<_>,
|
||||
VerifierSHPLONK<_>,
|
||||
KZGSingleStrategy<_>,
|
||||
_,
|
||||
EvmTranscript<_, _, _, _>,
|
||||
EvmTranscript<_, _, _, _>,
|
||||
>(
|
||||
circuit,
|
||||
vec![public_inputs],
|
||||
¶ms,
|
||||
&pk,
|
||||
CheckMode::UNSAFE,
|
||||
Commitments::KZG,
|
||||
TranscriptType::EVM,
|
||||
proof_split_commits,
|
||||
None,
|
||||
)
|
||||
}
|
||||
Commitments::IPA => {
|
||||
let params: ParamsIPA<_> =
|
||||
halo2_proofs::poly::commitment::Params::<'_, G1Affine>::read(&mut reader).map_err(
|
||||
|e| EZKLError::InternalError(format!("Failed to deserialize srs: {}", e)),
|
||||
)?;
|
||||
|
||||
create_proof_circuit::<
|
||||
IPACommitmentScheme<G1Affine>,
|
||||
_,
|
||||
ProverIPA<_>,
|
||||
VerifierIPA<_>,
|
||||
IPASingleStrategy<_>,
|
||||
_,
|
||||
EvmTranscript<_, _, _, _>,
|
||||
EvmTranscript<_, _, _, _>,
|
||||
>(
|
||||
circuit,
|
||||
vec![public_inputs],
|
||||
¶ms,
|
||||
&pk,
|
||||
CheckMode::UNSAFE,
|
||||
Commitments::IPA,
|
||||
TranscriptType::EVM,
|
||||
proof_split_commits,
|
||||
None,
|
||||
)
|
||||
}
|
||||
}
|
||||
.map_err(InnerEZKLError::from)?;
|
||||
|
||||
Ok(serde_json::to_vec(&proof).map_err(InnerEZKLError::from)?)
|
||||
}
|
||||
|
||||
/// Validate the witness json
|
||||
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
|
||||
pub(crate) fn witness_validation(witness: Vec<u8>) -> Result<bool, EZKLError> {
|
||||
let _: GraphWitness = serde_json::from_slice(&witness[..]).map_err(InnerEZKLError::from)?;
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
/// Validate the compiled circuit
|
||||
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
|
||||
pub(crate) fn compiled_circuit_validation(compiled_circuit: Vec<u8>) -> Result<bool, EZKLError> {
|
||||
let _: GraphCircuit = bincode::deserialize(&compiled_circuit[..]).map_err(|e| {
|
||||
EZKLError::InternalError(format!("Failed to deserialize compiled circuit: {}", e))
|
||||
})?;
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
/// Validate the input json
|
||||
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
|
||||
pub(crate) fn input_validation(input: Vec<u8>) -> Result<bool, EZKLError> {
|
||||
let _: crate::graph::input::GraphData =
|
||||
serde_json::from_slice(&input[..]).map_err(InnerEZKLError::from)?;
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
/// Validate the proof json
|
||||
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
|
||||
pub(crate) fn proof_validation(proof: Vec<u8>) -> Result<bool, EZKLError> {
|
||||
let _: crate::pfsys::Snark<Fr, G1Affine> =
|
||||
serde_json::from_slice(&proof[..]).map_err(InnerEZKLError::from)?;
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
/// Validate the verifying key given the settings json
|
||||
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
|
||||
pub(crate) fn vk_validation(vk: Vec<u8>, settings: Vec<u8>) -> Result<bool, EZKLError> {
|
||||
let circuit_settings: GraphSettings =
|
||||
serde_json::from_slice(&settings[..]).map_err(InnerEZKLError::from)?;
|
||||
|
||||
let mut reader = BufReader::new(&vk[..]);
|
||||
let _ = VerifyingKey::<G1Affine>::read::<_, GraphCircuit>(
|
||||
&mut reader,
|
||||
halo2_proofs::SerdeFormat::RawBytes,
|
||||
circuit_settings,
|
||||
)
|
||||
.map_err(|e| EZKLError::InternalError(format!("Failed to deserialize verifying key: {}", e)))?;
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
/// Validate the proving key given the settings json
|
||||
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
|
||||
pub(crate) fn pk_validation(pk: Vec<u8>, settings: Vec<u8>) -> Result<bool, EZKLError> {
|
||||
let circuit_settings: GraphSettings =
|
||||
serde_json::from_slice(&settings[..]).map_err(InnerEZKLError::from)?;
|
||||
|
||||
let mut reader = BufReader::new(&pk[..]);
|
||||
let _ = ProvingKey::<G1Affine>::read::<_, GraphCircuit>(
|
||||
&mut reader,
|
||||
halo2_proofs::SerdeFormat::RawBytes,
|
||||
circuit_settings,
|
||||
)
|
||||
.map_err(|e| EZKLError::InternalError(format!("Failed to deserialize proving key: {}", e)))?;
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
/// Validate the settings json
|
||||
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
|
||||
pub(crate) fn settings_validation(settings: Vec<u8>) -> Result<bool, EZKLError> {
|
||||
let _: GraphSettings = serde_json::from_slice(&settings[..]).map_err(InnerEZKLError::from)?;
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
/// Validate the srs
|
||||
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
|
||||
pub(crate) fn srs_validation(srs: Vec<u8>) -> Result<bool, EZKLError> {
|
||||
let mut reader = BufReader::new(&srs[..]);
|
||||
let _: ParamsKZG<Bn256> =
|
||||
halo2_proofs::poly::commitment::Params::<'_, G1Affine>::read(&mut reader).map_err(|e| {
|
||||
EZKLError::InternalError(format!("Failed to deserialize params: {}", e))
|
||||
})?;
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
// HELPER FUNCTIONS
|
||||
|
||||
fn get_params<
|
||||
Scheme: for<'a> halo2_proofs::poly::commitment::Params<'a, halo2curves::bn256::G1Affine>,
|
||||
>(
|
||||
mut reader: &mut BufReader<&[u8]>,
|
||||
) -> Result<Scheme, EZKLError> {
|
||||
halo2_proofs::poly::commitment::Params::<G1Affine>::read(&mut reader)
|
||||
.map_err(|e| EZKLError::InternalError(format!("Failed to deserialize params: {}", e)))
|
||||
}
|
||||
|
||||
/// Creates a [ProvingKey] for a [GraphCircuit] (`circuit`) with specific [CommitmentScheme] parameters (`params`) for the WASM target
|
||||
pub fn create_vk_lean<Scheme: CommitmentScheme, F: PrimeField + TensorType, C: Circuit<F>>(
|
||||
circuit: &C,
|
||||
params: &'_ Scheme::ParamsProver,
|
||||
compress_selectors: bool,
|
||||
) -> Result<VerifyingKey<Scheme::Curve>, halo2_proofs::plonk::Error>
|
||||
where
|
||||
C: Circuit<Scheme::Scalar>,
|
||||
<Scheme as CommitmentScheme>::Scalar: FromUniformBytes<64>,
|
||||
{
|
||||
// Real proof
|
||||
let empty_circuit = <C as Circuit<F>>::without_witnesses(circuit);
|
||||
|
||||
// Initialize the verifying key
|
||||
let vk = keygen_vk_custom(params, &empty_circuit, compress_selectors)?;
|
||||
Ok(vk)
|
||||
}
|
||||
/// Creates a [ProvingKey] from a [VerifyingKey] for a [GraphCircuit] (`circuit`) with specific [CommitmentScheme] parameters (`params`) for the WASM target
|
||||
pub fn create_pk_lean<Scheme: CommitmentScheme, F: PrimeField + TensorType, C: Circuit<F>>(
|
||||
vk: VerifyingKey<Scheme::Curve>,
|
||||
circuit: &C,
|
||||
params: &'_ Scheme::ParamsProver,
|
||||
) -> Result<ProvingKey<Scheme::Curve>, halo2_proofs::plonk::Error>
|
||||
where
|
||||
C: Circuit<Scheme::Scalar>,
|
||||
<Scheme as CommitmentScheme>::Scalar: FromUniformBytes<64>,
|
||||
{
|
||||
// Real proof
|
||||
let empty_circuit = <C as Circuit<F>>::without_witnesses(circuit);
|
||||
|
||||
// Initialize the proving key
|
||||
let pk = keygen_pk(params, vk, &empty_circuit)?;
|
||||
Ok(pk)
|
||||
}
|
||||
372
src/bindings/wasm.rs
Normal file
372
src/bindings/wasm.rs
Normal file
@@ -0,0 +1,372 @@
|
||||
use crate::{
|
||||
circuit::modules::{
|
||||
polycommit::PolyCommitChip,
|
||||
poseidon::{
|
||||
spec::{PoseidonSpec, POSEIDON_RATE, POSEIDON_WIDTH},
|
||||
PoseidonChip,
|
||||
},
|
||||
Module,
|
||||
},
|
||||
fieldutils::{felt_to_integer_rep, integer_rep_to_felt},
|
||||
graph::{
|
||||
modules::POSEIDON_LEN_GRAPH, quantize_float, scale_to_multiplier, GraphCircuit,
|
||||
GraphSettings,
|
||||
},
|
||||
};
|
||||
use console_error_panic_hook;
|
||||
use halo2_proofs::{
|
||||
plonk::*,
|
||||
poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG},
|
||||
};
|
||||
use halo2curves::{
|
||||
bn256::{Bn256, Fr, G1Affine},
|
||||
ff::PrimeField,
|
||||
};
|
||||
use wasm_bindgen::prelude::*;
|
||||
use wasm_bindgen_console_logger::DEFAULT_LOGGER;
|
||||
|
||||
use crate::bindings::universal::{
|
||||
compiled_circuit_validation, encode_verifier_calldata, gen_pk, gen_vk, gen_witness,
|
||||
input_validation, pk_validation, proof_validation, settings_validation, srs_validation,
|
||||
verify_aggr, vk_validation, witness_validation, EZKLError as ExternalEZKLError,
|
||||
};
|
||||
#[cfg(feature = "web")]
|
||||
pub use wasm_bindgen_rayon::init_thread_pool;
|
||||
|
||||
impl From<ExternalEZKLError> for JsError {
|
||||
fn from(e: ExternalEZKLError) -> Self {
|
||||
JsError::new(&format!("{}", e))
|
||||
}
|
||||
}
|
||||
|
||||
#[wasm_bindgen]
|
||||
/// Initialize logger for wasm
|
||||
pub fn init_logger() {
|
||||
log::set_logger(&DEFAULT_LOGGER).unwrap();
|
||||
}
|
||||
|
||||
#[wasm_bindgen]
|
||||
/// Initialize panic hook for wasm
|
||||
pub fn init_panic_hook() {
|
||||
console_error_panic_hook::set_once();
|
||||
}
|
||||
|
||||
/// Wrapper around the halo2 encode call data method
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn encodeVerifierCalldata(
|
||||
proof: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
vk_address: Option<Vec<u8>>,
|
||||
) -> Result<Vec<u8>, JsError> {
|
||||
encode_verifier_calldata(proof.0, vk_address).map_err(JsError::from)
|
||||
}
|
||||
|
||||
/// Converts a hex string to a byte array
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn feltToBigEndian(array: wasm_bindgen::Clamped<Vec<u8>>) -> Result<String, JsError> {
|
||||
let felt: Fr = serde_json::from_slice(&array[..])
|
||||
.map_err(|e| JsError::new(&format!("Failed to deserialize field element: {}", e)))?;
|
||||
Ok(format!("{:?}", felt))
|
||||
}
|
||||
|
||||
/// Converts a felt to a little endian string
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn feltToLittleEndian(array: wasm_bindgen::Clamped<Vec<u8>>) -> Result<String, JsError> {
|
||||
let felt: Fr = serde_json::from_slice(&array[..])
|
||||
.map_err(|e| JsError::new(&format!("Failed to deserialize field element: {}", e)))?;
|
||||
let repr = serde_json::to_string(&felt).unwrap();
|
||||
let b: String = serde_json::from_str(&repr).unwrap();
|
||||
Ok(b)
|
||||
}
|
||||
|
||||
/// Converts a hex string to a byte array
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn feltToInt(
|
||||
array: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
) -> Result<wasm_bindgen::Clamped<Vec<u8>>, JsError> {
|
||||
let felt: Fr = serde_json::from_slice(&array[..])
|
||||
.map_err(|e| JsError::new(&format!("Failed to deserialize field element: {}", e)))?;
|
||||
Ok(wasm_bindgen::Clamped(
|
||||
serde_json::to_vec(&felt_to_integer_rep(felt))
|
||||
.map_err(|e| JsError::new(&format!("Failed to serialize integer: {}", e)))?,
|
||||
))
|
||||
}
|
||||
|
||||
/// Converts felts to a floating point element
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn feltToFloat(
|
||||
array: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
scale: crate::Scale,
|
||||
) -> Result<f64, JsError> {
|
||||
let felt: Fr = serde_json::from_slice(&array[..])
|
||||
.map_err(|e| JsError::new(&format!("Failed to deserialize field element: {}", e)))?;
|
||||
let int_rep = felt_to_integer_rep(felt);
|
||||
let multiplier = scale_to_multiplier(scale);
|
||||
Ok(int_rep as f64 / multiplier)
|
||||
}
|
||||
|
||||
/// Converts a floating point number to a hex string representing a fixed point field element
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn floatToFelt(
|
||||
input: f64,
|
||||
scale: crate::Scale,
|
||||
) -> Result<wasm_bindgen::Clamped<Vec<u8>>, JsError> {
|
||||
let int_rep =
|
||||
quantize_float(&input, 0.0, scale).map_err(|e| JsError::new(&format!("{}", e)))?;
|
||||
let felt = integer_rep_to_felt(int_rep);
|
||||
let vec = crate::pfsys::field_to_string::<halo2curves::bn256::Fr>(&felt);
|
||||
Ok(wasm_bindgen::Clamped(serde_json::to_vec(&vec).map_err(
|
||||
|e| JsError::new(&format!("Failed to serialize a float to felt{}", e)),
|
||||
)?))
|
||||
}
|
||||
|
||||
/// Generate a kzg commitment.
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn kzgCommit(
|
||||
message: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
vk: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
settings: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
params_ser: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
) -> Result<wasm_bindgen::Clamped<Vec<u8>>, JsError> {
|
||||
let message: Vec<Fr> = serde_json::from_slice(&message[..])
|
||||
.map_err(|e| JsError::new(&format!("Failed to deserialize message: {}", e)))?;
|
||||
|
||||
let mut reader = std::io::BufReader::new(¶ms_ser[..]);
|
||||
let params: ParamsKZG<Bn256> =
|
||||
halo2_proofs::poly::commitment::Params::<'_, G1Affine>::read(&mut reader)
|
||||
.map_err(|e| JsError::new(&format!("Failed to deserialize params: {}", e)))?;
|
||||
|
||||
let mut reader = std::io::BufReader::new(&vk[..]);
|
||||
let circuit_settings: GraphSettings = serde_json::from_slice(&settings[..])
|
||||
.map_err(|e| JsError::new(&format!("Failed to deserialize settings: {}", e)))?;
|
||||
let vk = VerifyingKey::<G1Affine>::read::<_, GraphCircuit>(
|
||||
&mut reader,
|
||||
halo2_proofs::SerdeFormat::RawBytes,
|
||||
circuit_settings,
|
||||
)
|
||||
.map_err(|e| JsError::new(&format!("Failed to deserialize vk: {}", e)))?;
|
||||
|
||||
let output = PolyCommitChip::commit::<KZGCommitmentScheme<Bn256>>(
|
||||
message,
|
||||
(vk.cs().blinding_factors() + 1) as u32,
|
||||
¶ms,
|
||||
);
|
||||
|
||||
Ok(wasm_bindgen::Clamped(
|
||||
serde_json::to_vec(&output).map_err(|e| JsError::new(&format!("{}", e)))?,
|
||||
))
|
||||
}
|
||||
|
||||
/// Converts a buffer to vector of 4 u64s representing a fixed point field element
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn bufferToVecOfFelt(
|
||||
buffer: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
) -> Result<wasm_bindgen::Clamped<Vec<u8>>, JsError> {
|
||||
// Convert the buffer to a slice
|
||||
let buffer: &[u8] = &buffer;
|
||||
|
||||
// Divide the buffer into chunks of 64 bytes
|
||||
let chunks = buffer.chunks_exact(16);
|
||||
|
||||
// Get the remainder
|
||||
let remainder = chunks.remainder();
|
||||
|
||||
// Add 0s to the remainder to make it 64 bytes
|
||||
let mut remainder = remainder.to_vec();
|
||||
|
||||
// Collect chunks into a Vec<[u8; 16]>.
|
||||
let chunks: Result<Vec<[u8; 16]>, JsError> = chunks
|
||||
.map(|slice| {
|
||||
let array: [u8; 16] = slice
|
||||
.try_into()
|
||||
.map_err(|_| JsError::new("failed to slice input chunks"))?;
|
||||
Ok(array)
|
||||
})
|
||||
.collect();
|
||||
|
||||
let mut chunks = chunks?;
|
||||
|
||||
if remainder.len() != 0 {
|
||||
remainder.resize(16, 0);
|
||||
// Convert the Vec<u8> to [u8; 16]
|
||||
let remainder_array: [u8; 16] = remainder
|
||||
.try_into()
|
||||
.map_err(|_| JsError::new("failed to slice remainder"))?;
|
||||
// append the remainder to the chunks
|
||||
chunks.push(remainder_array);
|
||||
}
|
||||
|
||||
// Convert each chunk to a field element
|
||||
let field_elements: Vec<Fr> = chunks
|
||||
.iter()
|
||||
.map(|x| PrimeField::from_u128(u8_array_to_u128_le(*x)))
|
||||
.collect();
|
||||
|
||||
Ok(wasm_bindgen::Clamped(
|
||||
serde_json::to_vec(&field_elements)
|
||||
.map_err(|e| JsError::new(&format!("Failed to serialize field elements: {}", e)))?,
|
||||
))
|
||||
}
|
||||
|
||||
/// Generate a poseidon hash in browser. Input message
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn poseidonHash(
|
||||
message: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
) -> Result<wasm_bindgen::Clamped<Vec<u8>>, JsError> {
|
||||
let message: Vec<Fr> = serde_json::from_slice(&message[..])
|
||||
.map_err(|e| JsError::new(&format!("Failed to deserialize message: {}", e)))?;
|
||||
|
||||
let output =
|
||||
PoseidonChip::<PoseidonSpec, POSEIDON_WIDTH, POSEIDON_RATE, POSEIDON_LEN_GRAPH>::run(
|
||||
message.clone(),
|
||||
)
|
||||
.map_err(|e| JsError::new(&format!("{}", e)))?;
|
||||
|
||||
Ok(wasm_bindgen::Clamped(serde_json::to_vec(&output).map_err(
|
||||
|e| JsError::new(&format!("Failed to serialize poseidon hash output: {}", e)),
|
||||
)?))
|
||||
}
|
||||
|
||||
/// Generate a witness file from input.json, compiled model and a settings.json file.
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn genWitness(
|
||||
compiled_circuit: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
input: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
) -> Result<Vec<u8>, JsError> {
|
||||
gen_witness(compiled_circuit.0, input.0).map_err(JsError::from)
|
||||
}
|
||||
|
||||
/// Generate verifying key in browser
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn genVk(
|
||||
compiled_circuit: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
params_ser: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
compress_selectors: bool,
|
||||
) -> Result<Vec<u8>, JsError> {
|
||||
gen_vk(compiled_circuit.0, params_ser.0, compress_selectors).map_err(JsError::from)
|
||||
}
|
||||
|
||||
/// Generate proving key in browser
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn genPk(
|
||||
vk: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
compiled_circuit: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
params_ser: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
) -> Result<Vec<u8>, JsError> {
|
||||
gen_pk(vk.0, compiled_circuit.0, params_ser.0).map_err(JsError::from)
|
||||
}
|
||||
|
||||
/// Verify proof in browser using wasm
|
||||
#[wasm_bindgen]
|
||||
pub fn verify(
|
||||
proof_js: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
vk: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
settings: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
srs: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
) -> Result<bool, JsError> {
|
||||
super::universal::verify(proof_js.0, vk.0, settings.0, srs.0).map_err(JsError::from)
|
||||
}
|
||||
|
||||
/// Verify aggregate proof in browser using wasm
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn verifyAggr(
|
||||
proof_js: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
vk: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
logrows: u64,
|
||||
srs: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
commitment: &str,
|
||||
) -> Result<bool, JsError> {
|
||||
verify_aggr(proof_js.0, vk.0, logrows, srs.0, commitment).map_err(JsError::from)
|
||||
}
|
||||
|
||||
/// Prove in browser using wasm
|
||||
#[wasm_bindgen]
|
||||
pub fn prove(
|
||||
witness: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
pk: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
compiled_circuit: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
srs: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
) -> Result<Vec<u8>, JsError> {
|
||||
super::universal::prove(witness.0, pk.0, compiled_circuit.0, srs.0).map_err(JsError::from)
|
||||
}
|
||||
|
||||
// VALIDATION FUNCTIONS
|
||||
|
||||
/// Witness file validation
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn witnessValidation(witness: wasm_bindgen::Clamped<Vec<u8>>) -> Result<bool, JsError> {
|
||||
witness_validation(witness.0).map_err(JsError::from)
|
||||
}
|
||||
/// Compiled circuit validation
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn compiledCircuitValidation(
|
||||
compiled_circuit: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
) -> Result<bool, JsError> {
|
||||
compiled_circuit_validation(compiled_circuit.0).map_err(JsError::from)
|
||||
}
|
||||
/// Input file validation
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn inputValidation(input: wasm_bindgen::Clamped<Vec<u8>>) -> Result<bool, JsError> {
|
||||
input_validation(input.0).map_err(JsError::from)
|
||||
}
|
||||
/// Proof file validation
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn proofValidation(proof: wasm_bindgen::Clamped<Vec<u8>>) -> Result<bool, JsError> {
|
||||
proof_validation(proof.0).map_err(JsError::from)
|
||||
}
|
||||
/// Vk file validation
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn vkValidation(
|
||||
vk: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
settings: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
) -> Result<bool, JsError> {
|
||||
vk_validation(vk.0, settings.0).map_err(JsError::from)
|
||||
}
|
||||
/// Pk file validation
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn pkValidation(
|
||||
pk: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
settings: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
) -> Result<bool, JsError> {
|
||||
pk_validation(pk.0, settings.0).map_err(JsError::from)
|
||||
}
|
||||
/// Settings file validation
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn settingsValidation(settings: wasm_bindgen::Clamped<Vec<u8>>) -> Result<bool, JsError> {
|
||||
settings_validation(settings.0).map_err(JsError::from)
|
||||
}
|
||||
/// Srs file validation
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn srsValidation(srs: wasm_bindgen::Clamped<Vec<u8>>) -> Result<bool, JsError> {
|
||||
srs_validation(srs.0).map_err(JsError::from)
|
||||
}
|
||||
|
||||
// HELPER FUNCTIONS

/// Interprets a 16-byte array as a little-endian `u128` (byte 0 is the
/// least-significant byte).
pub fn u8_array_to_u128_le(arr: [u8; 16]) -> u128 {
    // Equivalent to the manual reverse-iterate/shift fold, but delegates to
    // the standard library's dedicated little-endian conversion.
    u128::from_le_bytes(arr)
}
|
||||
@@ -219,7 +219,7 @@ mod tests {
|
||||
fn polycommit_chip_for_a_range_of_input_sizes() {
|
||||
let rng = rand::rngs::OsRng;
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
env_logger::init();
|
||||
|
||||
{
|
||||
@@ -247,7 +247,7 @@ mod tests {
|
||||
#[test]
|
||||
#[ignore]
|
||||
fn polycommit_chip_much_longer_input() {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
env_logger::init();
|
||||
|
||||
let rng = rand::rngs::OsRng;
|
||||
|
||||
@@ -560,7 +560,7 @@ mod tests {
|
||||
fn hash_for_a_range_of_input_sizes() {
|
||||
let rng = rand::rngs::OsRng;
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
env_logger::init();
|
||||
|
||||
{
|
||||
|
||||
@@ -14,6 +14,7 @@ use pyo3::{
|
||||
types::PyString,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use tosubcommand::ToFlags;
|
||||
|
||||
use crate::{
|
||||
@@ -49,6 +50,7 @@ impl std::fmt::Display for CheckMode {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
impl ToFlags for CheckMode {
|
||||
/// Convert the struct to a subcommand string
|
||||
fn to_flags(&self) -> Vec<String> {
|
||||
@@ -88,6 +90,7 @@ impl std::fmt::Display for Tolerance {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
impl ToFlags for Tolerance {
|
||||
/// Convert the struct to a subcommand string
|
||||
fn to_flags(&self) -> Vec<String> {
|
||||
@@ -174,7 +177,7 @@ impl<'source> FromPyObject<'source> for Tolerance {
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct DynamicLookups {
|
||||
/// [Selector]s generated when configuring the layer. We use a [BTreeMap] as we expect to configure many dynamic lookup ops.
|
||||
pub lookup_selectors: BTreeMap<(usize, usize), Selector>,
|
||||
pub lookup_selectors: BTreeMap<(usize, (usize, usize)), Selector>,
|
||||
/// Selectors for the dynamic lookup tables
|
||||
pub table_selectors: Vec<Selector>,
|
||||
/// Inputs:
|
||||
@@ -206,7 +209,7 @@ impl DynamicLookups {
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct Shuffles {
|
||||
/// [Selector]s generated when configuring the layer. We use a [BTreeMap] as we expect to configure many dynamic lookup ops.
|
||||
pub input_selectors: BTreeMap<(usize, usize), Selector>,
|
||||
pub input_selectors: BTreeMap<(usize, (usize, usize)), Selector>,
|
||||
/// Selectors for the dynamic lookup tables
|
||||
pub reference_selectors: Vec<Selector>,
|
||||
/// Inputs:
|
||||
@@ -643,57 +646,73 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> BaseConfig<F> {
|
||||
}
|
||||
|
||||
for t in tables.iter() {
|
||||
if !t.is_advice() || t.num_blocks() > 1 || t.num_inner_cols() > 1 {
|
||||
if !t.is_advice() || t.num_inner_cols() > 1 {
|
||||
return Err(CircuitError::WrongDynamicColumnType(t.name().to_string()));
|
||||
}
|
||||
}
|
||||
|
||||
// assert all tables have the same number of inner columns
|
||||
if tables
|
||||
.iter()
|
||||
.map(|t| t.num_blocks())
|
||||
.collect::<Vec<_>>()
|
||||
.windows(2)
|
||||
.any(|w| w[0] != w[1])
|
||||
{
|
||||
return Err(CircuitError::WrongDynamicColumnType(
|
||||
"tables inner cols".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
let one = Expression::Constant(F::ONE);
|
||||
|
||||
let s_ltable = cs.complex_selector();
|
||||
for q in 0..tables[0].num_blocks() {
|
||||
let s_ltable = cs.complex_selector();
|
||||
|
||||
for x in 0..lookups[0].num_blocks() {
|
||||
for y in 0..lookups[0].num_inner_cols() {
|
||||
let s_lookup = cs.complex_selector();
|
||||
for x in 0..lookups[0].num_blocks() {
|
||||
for y in 0..lookups[0].num_inner_cols() {
|
||||
let s_lookup = cs.complex_selector();
|
||||
|
||||
cs.lookup_any("lookup", |cs| {
|
||||
let s_lookupq = cs.query_selector(s_lookup);
|
||||
let mut expression = vec![];
|
||||
let s_ltableq = cs.query_selector(s_ltable);
|
||||
let mut lookup_queries = vec![one.clone()];
|
||||
cs.lookup_any("lookup", |cs| {
|
||||
let s_lookupq = cs.query_selector(s_lookup);
|
||||
let mut expression = vec![];
|
||||
let s_ltableq = cs.query_selector(s_ltable);
|
||||
let mut lookup_queries = vec![one.clone()];
|
||||
|
||||
for lookup in lookups {
|
||||
lookup_queries.push(match lookup {
|
||||
VarTensor::Advice { inner: advices, .. } => {
|
||||
cs.query_advice(advices[x][y], Rotation(0))
|
||||
}
|
||||
_ => unreachable!(),
|
||||
});
|
||||
}
|
||||
for lookup in lookups {
|
||||
lookup_queries.push(match lookup {
|
||||
VarTensor::Advice { inner: advices, .. } => {
|
||||
cs.query_advice(advices[x][y], Rotation(0))
|
||||
}
|
||||
_ => unreachable!(),
|
||||
});
|
||||
}
|
||||
|
||||
let mut table_queries = vec![one.clone()];
|
||||
for table in tables {
|
||||
table_queries.push(match table {
|
||||
VarTensor::Advice { inner: advices, .. } => {
|
||||
cs.query_advice(advices[0][0], Rotation(0))
|
||||
}
|
||||
_ => unreachable!(),
|
||||
});
|
||||
}
|
||||
let mut table_queries = vec![one.clone()];
|
||||
for table in tables {
|
||||
table_queries.push(match table {
|
||||
VarTensor::Advice { inner: advices, .. } => {
|
||||
cs.query_advice(advices[q][0], Rotation(0))
|
||||
}
|
||||
_ => unreachable!(),
|
||||
});
|
||||
}
|
||||
|
||||
let lhs = lookup_queries.into_iter().map(|c| c * s_lookupq.clone());
|
||||
let rhs = table_queries.into_iter().map(|c| c * s_ltableq.clone());
|
||||
expression.extend(lhs.zip(rhs));
|
||||
let lhs = lookup_queries.into_iter().map(|c| c * s_lookupq.clone());
|
||||
let rhs = table_queries.into_iter().map(|c| c * s_ltableq.clone());
|
||||
expression.extend(lhs.zip(rhs));
|
||||
|
||||
expression
|
||||
});
|
||||
self.dynamic_lookups
|
||||
.lookup_selectors
|
||||
.entry((x, y))
|
||||
.or_insert(s_lookup);
|
||||
expression
|
||||
});
|
||||
self.dynamic_lookups
|
||||
.lookup_selectors
|
||||
.entry((q, (x, y)))
|
||||
.or_insert(s_lookup);
|
||||
}
|
||||
}
|
||||
|
||||
self.dynamic_lookups.table_selectors.push(s_ltable);
|
||||
}
|
||||
self.dynamic_lookups.table_selectors.push(s_ltable);
|
||||
|
||||
// if we haven't previously initialized the input/output, do so now
|
||||
if self.dynamic_lookups.tables.is_empty() {
|
||||
@@ -726,57 +745,72 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> BaseConfig<F> {
|
||||
}
|
||||
|
||||
for t in references.iter() {
|
||||
if !t.is_advice() || t.num_blocks() > 1 || t.num_inner_cols() > 1 {
|
||||
if !t.is_advice() || t.num_inner_cols() > 1 {
|
||||
return Err(CircuitError::WrongDynamicColumnType(t.name().to_string()));
|
||||
}
|
||||
}
|
||||
|
||||
// assert all tables have the same number of blocks
|
||||
if references
|
||||
.iter()
|
||||
.map(|t| t.num_blocks())
|
||||
.collect::<Vec<_>>()
|
||||
.windows(2)
|
||||
.any(|w| w[0] != w[1])
|
||||
{
|
||||
return Err(CircuitError::WrongDynamicColumnType(
|
||||
"references inner cols".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
let one = Expression::Constant(F::ONE);
|
||||
|
||||
let s_reference = cs.complex_selector();
|
||||
for q in 0..references[0].num_blocks() {
|
||||
let s_reference = cs.complex_selector();
|
||||
|
||||
for x in 0..inputs[0].num_blocks() {
|
||||
for y in 0..inputs[0].num_inner_cols() {
|
||||
let s_input = cs.complex_selector();
|
||||
for x in 0..inputs[0].num_blocks() {
|
||||
for y in 0..inputs[0].num_inner_cols() {
|
||||
let s_input = cs.complex_selector();
|
||||
|
||||
cs.lookup_any("lookup", |cs| {
|
||||
let s_inputq = cs.query_selector(s_input);
|
||||
let mut expression = vec![];
|
||||
let s_referenceq = cs.query_selector(s_reference);
|
||||
let mut input_queries = vec![one.clone()];
|
||||
cs.lookup_any("lookup", |cs| {
|
||||
let s_inputq = cs.query_selector(s_input);
|
||||
let mut expression = vec![];
|
||||
let s_referenceq = cs.query_selector(s_reference);
|
||||
let mut input_queries = vec![one.clone()];
|
||||
|
||||
for input in inputs {
|
||||
input_queries.push(match input {
|
||||
VarTensor::Advice { inner: advices, .. } => {
|
||||
cs.query_advice(advices[x][y], Rotation(0))
|
||||
}
|
||||
_ => unreachable!(),
|
||||
});
|
||||
}
|
||||
for input in inputs {
|
||||
input_queries.push(match input {
|
||||
VarTensor::Advice { inner: advices, .. } => {
|
||||
cs.query_advice(advices[x][y], Rotation(0))
|
||||
}
|
||||
_ => unreachable!(),
|
||||
});
|
||||
}
|
||||
|
||||
let mut ref_queries = vec![one.clone()];
|
||||
for reference in references {
|
||||
ref_queries.push(match reference {
|
||||
VarTensor::Advice { inner: advices, .. } => {
|
||||
cs.query_advice(advices[0][0], Rotation(0))
|
||||
}
|
||||
_ => unreachable!(),
|
||||
});
|
||||
}
|
||||
let mut ref_queries = vec![one.clone()];
|
||||
for reference in references {
|
||||
ref_queries.push(match reference {
|
||||
VarTensor::Advice { inner: advices, .. } => {
|
||||
cs.query_advice(advices[q][0], Rotation(0))
|
||||
}
|
||||
_ => unreachable!(),
|
||||
});
|
||||
}
|
||||
|
||||
let lhs = input_queries.into_iter().map(|c| c * s_inputq.clone());
|
||||
let rhs = ref_queries.into_iter().map(|c| c * s_referenceq.clone());
|
||||
expression.extend(lhs.zip(rhs));
|
||||
let lhs = input_queries.into_iter().map(|c| c * s_inputq.clone());
|
||||
let rhs = ref_queries.into_iter().map(|c| c * s_referenceq.clone());
|
||||
expression.extend(lhs.zip(rhs));
|
||||
|
||||
expression
|
||||
});
|
||||
self.shuffles
|
||||
.input_selectors
|
||||
.entry((x, y))
|
||||
.or_insert(s_input);
|
||||
expression
|
||||
});
|
||||
self.shuffles
|
||||
.input_selectors
|
||||
.entry((q, (x, y)))
|
||||
.or_insert(s_input);
|
||||
}
|
||||
}
|
||||
self.shuffles.reference_selectors.push(s_reference);
|
||||
}
|
||||
self.shuffles.reference_selectors.push(s_reference);
|
||||
|
||||
// if we haven't previously initialized the input/output, do so now
|
||||
if self.shuffles.references.is_empty() {
|
||||
|
||||
@@ -91,4 +91,7 @@ pub enum CircuitError {
|
||||
/// Missing layout
|
||||
#[error("missing layout for op: {0}")]
|
||||
MissingLayout(String),
|
||||
#[error("[io] {0}")]
|
||||
/// IO error
|
||||
IoError(#[from] std::io::Error),
|
||||
}
|
||||
|
||||
@@ -71,12 +71,17 @@ pub enum HybridOp {
|
||||
},
|
||||
}
|
||||
|
||||
impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash > Op<F> for HybridOp {
|
||||
impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Op<F> for HybridOp {
|
||||
///
|
||||
fn requires_homogenous_input_scales(&self) -> Vec<usize> {
|
||||
match self {
|
||||
HybridOp::Greater | HybridOp::Less | HybridOp::Equals => vec![0, 1],
|
||||
HybridOp::GreaterEqual | HybridOp::LessEqual => vec![0, 1],
|
||||
HybridOp::Greater { .. }
|
||||
| HybridOp::Less { .. }
|
||||
| HybridOp::Equals { .. }
|
||||
| HybridOp::GreaterEqual { .. }
|
||||
| HybridOp::LessEqual { .. } => {
|
||||
vec![0, 1]
|
||||
}
|
||||
_ => vec![],
|
||||
}
|
||||
}
|
||||
@@ -135,10 +140,10 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash > Op<F> for Hybri
|
||||
)
|
||||
}
|
||||
HybridOp::RangeCheck(p) => format!("RANGECHECK (tol={:?})", p),
|
||||
HybridOp::Greater => "GREATER".into(),
|
||||
HybridOp::GreaterEqual => "GREATEREQUAL".into(),
|
||||
HybridOp::Less => "LESS".into(),
|
||||
HybridOp::LessEqual => "LESSEQUAL".into(),
|
||||
HybridOp::Greater => "GREATER".to_string(),
|
||||
HybridOp::GreaterEqual => "GREATEREQUAL".to_string(),
|
||||
HybridOp::Less => "LESS".to_string(),
|
||||
HybridOp::LessEqual => "LESSEQUAL".to_string(),
|
||||
HybridOp::Equals => "EQUALS".into(),
|
||||
HybridOp::Gather { dim, .. } => format!("GATHER (dim={})", dim),
|
||||
HybridOp::TopK { k, dim, largest } => {
|
||||
|
||||
@@ -240,7 +240,7 @@ pub(crate) fn recip<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
/// use ezkl::circuit::layouts::dot;
|
||||
///
|
||||
/// let dummy_config = BaseConfig::dummy(12, 2);
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true());
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2));
|
||||
///
|
||||
/// let x = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(
|
||||
/// Some(&[5, 2, 3, 0, 4, -1, 3, 1, 6]),
|
||||
@@ -374,7 +374,7 @@ pub fn dot<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
/// use ezkl::tensor::ValTensor;
|
||||
///
|
||||
/// let dummy_config = BaseConfig::dummy(12, 2);
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true());
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2));
|
||||
///
|
||||
/// // matmul case
|
||||
/// let x = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(
|
||||
@@ -827,7 +827,7 @@ fn _select_topk<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
/// use ezkl::tensor::ValTensor;
|
||||
///
|
||||
/// let dummy_config = BaseConfig::dummy(12, 2);
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true());
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2));
|
||||
///
|
||||
/// let x = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(
|
||||
/// Some(&[2, 15, 2, 1, 1, 0]),
|
||||
@@ -979,8 +979,16 @@ pub(crate) fn dynamic_lookup<F: PrimeField + TensorType + PartialOrd + std::hash
|
||||
let (lookup_0, lookup_1) = (lookups[0].clone(), lookups[1].clone());
|
||||
let (table_0, table_1) = (tables[0].clone(), tables[1].clone());
|
||||
|
||||
let table_0 = region.assign_dynamic_lookup(&config.dynamic_lookups.tables[0], &table_0)?;
|
||||
let _table_1 = region.assign_dynamic_lookup(&config.dynamic_lookups.tables[1], &table_1)?;
|
||||
let (table_0, flush_len_0) =
|
||||
region.assign_dynamic_lookup(&config.dynamic_lookups.tables[0], &table_0)?;
|
||||
let (_table_1, flush_len_1) =
|
||||
region.assign_dynamic_lookup(&config.dynamic_lookups.tables[1], &table_1)?;
|
||||
if flush_len_0 != flush_len_1 {
|
||||
return Err(CircuitError::MismatchedLookupTableLength(
|
||||
flush_len_0,
|
||||
flush_len_1,
|
||||
));
|
||||
}
|
||||
let table_len = table_0.len();
|
||||
|
||||
trace!("assigning tables took: {:?}", start.elapsed());
|
||||
@@ -1005,13 +1013,21 @@ pub(crate) fn dynamic_lookup<F: PrimeField + TensorType + PartialOrd + std::hash
|
||||
|
||||
trace!("assigning lookup index took: {:?}", start.elapsed());
|
||||
|
||||
let mut lookup_block = 0;
|
||||
|
||||
if !region.is_dummy() {
|
||||
(0..table_len)
|
||||
.map(|i| {
|
||||
let table_selector = config.dynamic_lookups.table_selectors[0];
|
||||
let (_, _, z) = config.dynamic_lookups.tables[0]
|
||||
.cartesian_coord(region.combined_dynamic_shuffle_coord() + i);
|
||||
let (x, _, z) = config.dynamic_lookups.tables[0]
|
||||
.cartesian_coord(region.combined_dynamic_shuffle_coord() + i + flush_len_0);
|
||||
|
||||
if lookup_block != x {
|
||||
lookup_block = x;
|
||||
}
|
||||
|
||||
let table_selector = config.dynamic_lookups.table_selectors[lookup_block];
|
||||
region.enable(Some(&table_selector), z)?;
|
||||
|
||||
Ok(())
|
||||
})
|
||||
.collect::<Result<Vec<_>, CircuitError>>()?;
|
||||
@@ -1023,20 +1039,23 @@ pub(crate) fn dynamic_lookup<F: PrimeField + TensorType + PartialOrd + std::hash
|
||||
.map(|i| {
|
||||
let (x, y, z) =
|
||||
config.dynamic_lookups.inputs[0].cartesian_coord(region.linear_coord() + i);
|
||||
|
||||
let lookup_selector = config
|
||||
.dynamic_lookups
|
||||
.lookup_selectors
|
||||
.get(&(x, y))
|
||||
.get(&(lookup_block, (x, y)))
|
||||
.ok_or(CircuitError::MissingSelectors(format!("{:?}", (x, y))))?;
|
||||
|
||||
region.enable(Some(lookup_selector), z)?;
|
||||
|
||||
// region.enable(Some(lookup_selector), z)?;
|
||||
|
||||
Ok(())
|
||||
})
|
||||
.collect::<Result<Vec<_>, CircuitError>>()?;
|
||||
}
|
||||
|
||||
region.increment_dynamic_lookup_col_coord(table_len);
|
||||
region.increment_dynamic_lookup_col_coord(table_len + flush_len_0);
|
||||
region.increment_dynamic_lookup_index(1);
|
||||
region.increment(lookup_len);
|
||||
|
||||
@@ -1064,22 +1083,33 @@ pub(crate) fn shuffles<F: PrimeField + TensorType + PartialOrd + std::hash::Hash
|
||||
));
|
||||
}
|
||||
|
||||
let reference = region.assign_shuffle(&config.shuffles.references[0], &reference)?;
|
||||
let (reference, flush_len_ref) =
|
||||
region.assign_shuffle(&config.shuffles.references[0], &reference)?;
|
||||
let reference_len = reference.len();
|
||||
|
||||
// now create a vartensor of constants for the shuffle index
|
||||
let index = create_constant_tensor(F::from(shuffle_index as u64), reference_len);
|
||||
let index = region.assign_shuffle(&config.shuffles.references[1], &index)?;
|
||||
let (index, flush_len_index) = region.assign_shuffle(&config.shuffles.references[1], &index)?;
|
||||
|
||||
if flush_len_index != flush_len_ref {
|
||||
return Err(CircuitError::MismatchedShuffleLength(
|
||||
flush_len_index,
|
||||
flush_len_ref,
|
||||
));
|
||||
}
|
||||
|
||||
let input = region.assign(&config.shuffles.inputs[0], &input)?;
|
||||
region.assign(&config.shuffles.inputs[1], &index)?;
|
||||
|
||||
let mut shuffle_block = 0;
|
||||
|
||||
if !region.is_dummy() {
|
||||
(0..reference_len)
|
||||
.map(|i| {
|
||||
let ref_selector = config.shuffles.reference_selectors[0];
|
||||
let (_, _, z) = config.shuffles.references[0]
|
||||
.cartesian_coord(region.combined_dynamic_shuffle_coord() + i);
|
||||
let (x, _, z) = config.shuffles.references[0]
|
||||
.cartesian_coord(region.combined_dynamic_shuffle_coord() + i + flush_len_ref);
|
||||
shuffle_block = x;
|
||||
let ref_selector = config.shuffles.reference_selectors[shuffle_block];
|
||||
region.enable(Some(&ref_selector), z)?;
|
||||
Ok(())
|
||||
})
|
||||
@@ -1095,7 +1125,7 @@ pub(crate) fn shuffles<F: PrimeField + TensorType + PartialOrd + std::hash::Hash
|
||||
let input_selector = config
|
||||
.shuffles
|
||||
.input_selectors
|
||||
.get(&(x, y))
|
||||
.get(&(shuffle_block, (x, y)))
|
||||
.ok_or(CircuitError::MissingSelectors(format!("{:?}", (x, y))))?;
|
||||
|
||||
region.enable(Some(input_selector), z)?;
|
||||
@@ -1105,7 +1135,7 @@ pub(crate) fn shuffles<F: PrimeField + TensorType + PartialOrd + std::hash::Hash
|
||||
.collect::<Result<Vec<_>, CircuitError>>()?;
|
||||
}
|
||||
|
||||
region.increment_shuffle_col_coord(reference_len);
|
||||
region.increment_shuffle_col_coord(reference_len + flush_len_ref);
|
||||
region.increment_shuffle_index(1);
|
||||
region.increment(reference_len);
|
||||
|
||||
@@ -1760,7 +1790,7 @@ pub(crate) fn scatter_nd<F: PrimeField + TensorType + PartialOrd + std::hash::Ha
|
||||
/// use ezkl::tensor::ValTensor;
|
||||
///
|
||||
/// let dummy_config = BaseConfig::dummy(12, 2);
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true());
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2));
|
||||
///
|
||||
/// let x = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(
|
||||
/// Some(&[2, 15, 2, 1, 1, 0]),
|
||||
@@ -1864,7 +1894,7 @@ pub fn sum<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
/// use ezkl::tensor::ValTensor;
|
||||
///
|
||||
/// let dummy_config = BaseConfig::dummy(12, 2);
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true());
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2));
|
||||
///
|
||||
/// let x = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(
|
||||
/// Some(&[2, 15, 2, 1, 1, 0]),
|
||||
@@ -2027,7 +2057,7 @@ fn axes_wise_op<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
/// use ezkl::tensor::ValTensor;
|
||||
///
|
||||
/// let dummy_config = BaseConfig::dummy(12, 2);
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true());
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2));
|
||||
///
|
||||
/// let x = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(
|
||||
/// Some(&[2, 15, 2, 1, 1, 0]),
|
||||
@@ -2063,7 +2093,7 @@ pub fn prod_axes<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
/// use ezkl::tensor::ValTensor;
|
||||
///
|
||||
/// let dummy_config = BaseConfig::dummy(12, 2);
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true());
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2));
|
||||
///
|
||||
/// let x = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(
|
||||
/// Some(&[2, 15, 2, 1, 1, 0]),
|
||||
@@ -2099,7 +2129,7 @@ pub fn sum_axes<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
/// use ezkl::tensor::ValTensor;
|
||||
///
|
||||
/// let dummy_config = BaseConfig::dummy(12, 2);
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true());
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2));
|
||||
///
|
||||
/// let x = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(
|
||||
/// Some(&[2, 15, 2, 1, 1, 0]),
|
||||
@@ -2141,7 +2171,7 @@ pub fn argmax_axes<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
/// use ezkl::tensor::ValTensor;
|
||||
///
|
||||
/// let dummy_config = BaseConfig::dummy(12, 2);
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true());
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2));
|
||||
/// let x = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(
|
||||
/// Some(&[2, 15, 2, 1, 1, 0]),
|
||||
/// &[2, 3],
|
||||
@@ -2177,7 +2207,7 @@ pub fn max_axes<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
/// use ezkl::tensor::ValTensor;
|
||||
///
|
||||
/// let dummy_config = BaseConfig::dummy(12, 2);
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true());
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2));
|
||||
///
|
||||
/// let x = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(
|
||||
/// Some(&[2, 15, 2, 1, 1, 0]),
|
||||
@@ -2223,7 +2253,7 @@ pub fn argmin_axes<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
/// use ezkl::tensor::ValTensor;
|
||||
///
|
||||
/// let dummy_config = BaseConfig::dummy(12, 2);
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true());
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2));
|
||||
///
|
||||
/// let x = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(
|
||||
/// Some(&[2, 15, 2, 1, 1, 0]),
|
||||
@@ -2415,7 +2445,7 @@ pub(crate) fn pairwise<F: PrimeField + TensorType + PartialOrd + std::hash::Hash
|
||||
/// use ezkl::tensor::ValTensor;
|
||||
///
|
||||
/// let dummy_config = BaseConfig::dummy(12, 2);
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true());
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2));
|
||||
///
|
||||
/// let x = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(
|
||||
/// Some(&[2, 15, 2, 1, 1, 0]),
|
||||
@@ -2472,7 +2502,7 @@ pub(crate) fn expand<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
/// use ezkl::tensor::ValTensor;
|
||||
///
|
||||
/// let dummy_config = BaseConfig::dummy(12, 2);
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true());
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128, 2));
|
||||
///
|
||||
/// let a = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(
|
||||
/// Some(&[1, 12, 6, 4, 5, 6]),
|
||||
@@ -2500,12 +2530,9 @@ pub fn greater<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
|
||||
let diff = pairwise(config, region, &[lhs, rhs], BaseOp::Sub)?;
|
||||
|
||||
nonlinearity(
|
||||
config,
|
||||
region,
|
||||
&[diff],
|
||||
&LookupOp::GreaterThan { a: utils::F32(0.) },
|
||||
)
|
||||
let sign = sign(config, region, &[diff])?;
|
||||
|
||||
equals(config, region, &[sign, create_unit_tensor(1)])
|
||||
}
|
||||
|
||||
/// Greater equals than operation.
|
||||
@@ -2524,7 +2551,7 @@ pub fn greater<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
/// use ezkl::tensor::ValTensor;
|
||||
///
|
||||
/// let dummy_config = BaseConfig::dummy(12, 2);
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true());
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128, 2));
|
||||
///
|
||||
///
|
||||
/// let a = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(
|
||||
@@ -2544,21 +2571,17 @@ pub fn greater_equal<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
region: &mut RegionCtx<F>,
|
||||
values: &[ValTensor<F>; 2],
|
||||
) -> Result<ValTensor<F>, CircuitError> {
|
||||
let (mut lhs, mut rhs) = (values[0].clone(), values[1].clone());
|
||||
let (lhs, rhs) = (values[0].clone(), values[1].clone());
|
||||
|
||||
let broadcasted_shape = get_broadcasted_shape(lhs.dims(), rhs.dims())?;
|
||||
|
||||
lhs.expand(&broadcasted_shape)?;
|
||||
rhs.expand(&broadcasted_shape)?;
|
||||
|
||||
let diff = pairwise(config, region, &[lhs, rhs], BaseOp::Sub)?;
|
||||
|
||||
nonlinearity(
|
||||
// add 1 to lhs
|
||||
let lhs_plus_one = pairwise(
|
||||
config,
|
||||
region,
|
||||
&[diff],
|
||||
&LookupOp::GreaterThanEqual { a: utils::F32(0.) },
|
||||
)
|
||||
&[lhs.clone(), create_unit_tensor(1)],
|
||||
BaseOp::Add,
|
||||
)?;
|
||||
|
||||
greater(config, region, &[lhs_plus_one, rhs])
|
||||
}
|
||||
|
||||
/// Less than to operation.
|
||||
@@ -2578,7 +2601,7 @@ pub fn greater_equal<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
/// use ezkl::tensor::ValTensor;
|
||||
///
|
||||
/// let dummy_config = BaseConfig::dummy(12, 2);
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true());
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128, 2));
|
||||
///
|
||||
/// let a = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(
|
||||
/// Some(&[1, 0, 5, 4, 5, 1]),
|
||||
@@ -2619,7 +2642,7 @@ pub fn less<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
/// use ezkl::tensor::ValTensor;
|
||||
///
|
||||
/// let dummy_config = BaseConfig::dummy(12, 2);
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true());
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2));
|
||||
///
|
||||
/// let a = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(
|
||||
/// Some(&[1, 0, 5, 4, 5, 1]),
|
||||
@@ -2660,7 +2683,7 @@ pub fn less_equal<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
/// use ezkl::tensor::ValTensor;
|
||||
///
|
||||
/// let dummy_config = BaseConfig::dummy(12, 2);
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true());
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2));
|
||||
///
|
||||
/// let a = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(
|
||||
/// Some(&[1, 1, 1, 1, 1, 0]),
|
||||
@@ -2704,7 +2727,7 @@ pub fn and<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
/// use ezkl::tensor::ValTensor;
|
||||
///
|
||||
/// let dummy_config = BaseConfig::dummy(12, 2);
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true());
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2));
|
||||
///
|
||||
/// let a = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(
|
||||
/// Some(&[1, 1, 1, 1, 1, 0]),
|
||||
@@ -2752,7 +2775,7 @@ pub fn or<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
/// use ezkl::tensor::ValTensor;
|
||||
///
|
||||
/// let dummy_config = BaseConfig::dummy(12, 2);
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true());
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2));
|
||||
///
|
||||
/// let a = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(
|
||||
/// Some(&[1, 1, 1, 1, 1, 0]),
|
||||
@@ -2826,7 +2849,7 @@ pub(crate) fn equals_zero<F: PrimeField + TensorType + PartialOrd + std::hash::H
|
||||
/// use ezkl::tensor::ValTensor;
|
||||
///
|
||||
/// let dummy_config = BaseConfig::dummy(12, 2);
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true());
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2));
|
||||
///
|
||||
/// let a = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(
|
||||
/// Some(&[1, 1, 1, 1, 1, 0]),
|
||||
@@ -2882,7 +2905,7 @@ pub fn xor<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
/// use ezkl::tensor::ValTensor;
|
||||
///
|
||||
/// let dummy_config = BaseConfig::dummy(12, 2);
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true());
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2));
|
||||
///
|
||||
/// let x = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(
|
||||
/// Some(&[1, 1, 1, 1, 1, 0]),
|
||||
@@ -2925,7 +2948,7 @@ pub fn not<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
/// use ezkl::tensor::ValTensor;
|
||||
///
|
||||
/// let dummy_config = BaseConfig::dummy(12, 2);
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true());
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2));
|
||||
///
|
||||
/// let mask = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(
|
||||
/// Some(&[1, 0, 1, 0, 1, 0]),
|
||||
@@ -2983,7 +3006,7 @@ pub fn iff<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
/// use ezkl::tensor::ValTensor;
|
||||
///
|
||||
/// let dummy_config = BaseConfig::dummy(12, 2);
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true());
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2));
|
||||
///
|
||||
/// let x = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(
|
||||
/// Some(&[2, 1, 2, 1, 1, 1]),
|
||||
@@ -3015,7 +3038,7 @@ pub fn neg<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
/// use ezkl::tensor::ValTensor;
|
||||
///
|
||||
/// let dummy_config = BaseConfig::dummy(12, 2);
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true());
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2));
|
||||
///
|
||||
///
|
||||
/// let x = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(
|
||||
@@ -3097,7 +3120,7 @@ pub fn sumpool<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
/// use ezkl::tensor::ValTensor;
|
||||
///
|
||||
/// let dummy_config = BaseConfig::dummy(12, 2);
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true());
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2));
|
||||
///
|
||||
///
|
||||
/// let x = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(
|
||||
@@ -3198,7 +3221,7 @@ pub fn max_pool<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
/// use ezkl::tensor::ValTensor;
|
||||
///
|
||||
/// let dummy_config = BaseConfig::dummy(12, 2);
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true());
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2));
|
||||
///
|
||||
/// let c = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(Some(&[6, 0, 12, 4, 0, 8, 0, 0, 3, 0, 0, 2]), &[1, 2, 2, 3]).unwrap());
|
||||
/// let x = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(
|
||||
@@ -3426,7 +3449,7 @@ pub fn deconv<
|
||||
/// use ezkl::circuit::BaseConfig;
|
||||
///
|
||||
/// let dummy_config = BaseConfig::dummy(12, 2);
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true());
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2));
|
||||
///
|
||||
/// let x = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(
|
||||
/// Some(&[5, 2, 3, 0, 4, -1, 3, 1, 6]),
|
||||
@@ -4155,6 +4178,128 @@ pub(crate) fn min<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
.map_err(|e| e.into())
|
||||
}
|
||||
|
||||
pub(crate) fn decompose<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
config: &BaseConfig<F>,
|
||||
region: &mut RegionCtx<F>,
|
||||
values: &[ValTensor<F>; 1],
|
||||
base: &usize,
|
||||
n: &usize,
|
||||
) -> Result<ValTensor<F>, CircuitError> {
|
||||
let input = values[0].clone();
|
||||
|
||||
let is_assigned = !input.all_prev_assigned();
|
||||
|
||||
let bases: ValTensor<F> = Tensor::from(
|
||||
(0..*n)
|
||||
.rev()
|
||||
.map(|x| ValType::Constant(integer_rep_to_felt(base.pow(x as u32) as IntegerRep))),
|
||||
)
|
||||
.into();
|
||||
|
||||
let cartesian_coord = input
|
||||
.dims()
|
||||
.iter()
|
||||
.map(|x| 0..*x)
|
||||
.multi_cartesian_product()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let mut output: Tensor<Tensor<ValType<F>>> = Tensor::new(None, input.dims())?;
|
||||
|
||||
let inner_loop_function =
|
||||
|i: usize, region: &mut RegionCtx<F>| -> Result<Tensor<ValType<F>>, CircuitError> {
|
||||
let coord = cartesian_coord[i].clone();
|
||||
let slice = coord.iter().map(|x| *x..*x + 1).collect::<Vec<_>>();
|
||||
let mut sliced_input = input.get_slice(&slice)?;
|
||||
sliced_input.flatten();
|
||||
|
||||
if !is_assigned {
|
||||
sliced_input = region.assign(&config.custom_gates.inputs[0], &sliced_input)?;
|
||||
}
|
||||
|
||||
let mut claimed_output_slice = if region.witness_gen() {
|
||||
sliced_input.decompose(*base, *n)?
|
||||
} else {
|
||||
Tensor::from(vec![ValType::Value(Value::unknown()); *n + 1].into_iter()).into()
|
||||
};
|
||||
|
||||
claimed_output_slice =
|
||||
region.assign(&config.custom_gates.inputs[1], &claimed_output_slice)?;
|
||||
claimed_output_slice.flatten();
|
||||
|
||||
region.increment(claimed_output_slice.len());
|
||||
|
||||
// get the sign bit and make sure it is valid
|
||||
let sign = claimed_output_slice.first()?;
|
||||
let sign = range_check(config, region, &[sign], &(-1, 1))?;
|
||||
|
||||
// get the rest of the thing and make sure it is in the correct range
|
||||
let rest = claimed_output_slice.get_slice(&[1..claimed_output_slice.len()])?;
|
||||
|
||||
let rest = range_check(config, region, &[rest], &(0, (base - 1) as i128))?;
|
||||
|
||||
let prod_decomp = dot(config, region, &[rest, bases.clone()])?;
|
||||
|
||||
let signed_decomp = pairwise(config, region, &[prod_decomp, sign], BaseOp::Mult)?;
|
||||
|
||||
enforce_equality(config, region, &[sliced_input, signed_decomp])?;
|
||||
|
||||
Ok(claimed_output_slice.get_inner_tensor()?.clone())
|
||||
};
|
||||
|
||||
region.apply_in_loop(&mut output, inner_loop_function)?;
|
||||
|
||||
let mut combined_output = output.combine()?;
|
||||
let mut output_dims = input.dims().to_vec();
|
||||
output_dims.push(*n + 1);
|
||||
combined_output.reshape(&output_dims)?;
|
||||
|
||||
Ok(combined_output.into())
|
||||
}
|
||||
|
||||
pub(crate) fn sign<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
config: &BaseConfig<F>,
|
||||
region: &mut RegionCtx<F>,
|
||||
values: &[ValTensor<F>; 1],
|
||||
) -> Result<ValTensor<F>, CircuitError> {
|
||||
let mut decomp = decompose(config, region, values, ®ion.base(), ®ion.legs())?;
|
||||
// get every n elements now, which correspond to the sign bit
|
||||
|
||||
decomp.get_every_n(region.legs() + 1)?;
|
||||
decomp.reshape(values[0].dims())?;
|
||||
|
||||
Ok(decomp)
|
||||
}
|
||||
|
||||
pub(crate) fn abs<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
config: &BaseConfig<F>,
|
||||
region: &mut RegionCtx<F>,
|
||||
values: &[ValTensor<F>; 1],
|
||||
) -> Result<ValTensor<F>, CircuitError> {
|
||||
let sign = sign(config, region, values)?;
|
||||
|
||||
pairwise(config, region, &[values[0].clone(), sign], BaseOp::Mult)
|
||||
}
|
||||
|
||||
pub(crate) fn relu<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
config: &BaseConfig<F>,
|
||||
region: &mut RegionCtx<F>,
|
||||
values: &[ValTensor<F>; 1],
|
||||
) -> Result<ValTensor<F>, CircuitError> {
|
||||
let sign = sign(config, region, values)?;
|
||||
|
||||
let mut unit = create_unit_tensor(sign.len());
|
||||
unit.reshape(sign.dims())?;
|
||||
|
||||
let relu_mask = equals(config, region, &[sign, unit])?;
|
||||
|
||||
pairwise(
|
||||
config,
|
||||
region,
|
||||
&[values[0].clone(), relu_mask],
|
||||
BaseOp::Mult,
|
||||
)
|
||||
}
|
||||
|
||||
fn multi_dim_axes_op<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
config: &BaseConfig<F>,
|
||||
region: &mut RegionCtx<F>,
|
||||
@@ -4324,7 +4469,7 @@ pub(crate) fn percent<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>
|
||||
/// use ezkl::circuit::BaseConfig;
|
||||
///
|
||||
/// let dummy_config = BaseConfig::dummy(12, 2);
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true());
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2));
|
||||
///
|
||||
/// let x = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(
|
||||
/// Some(&[2, 2, 3, 2, 2, 0]),
|
||||
@@ -4372,7 +4517,7 @@ pub fn softmax<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
/// use ezkl::circuit::BaseConfig;
|
||||
///
|
||||
/// let dummy_config = BaseConfig::dummy(12, 2);
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true());
|
||||
/// let mut dummy_region = RegionCtx::new_dummy(0,2,RegionSettings::all_true(128,2));
|
||||
///
|
||||
/// let x = ValTensor::from_integer_rep_tensor(Tensor::<IntegerRep>::new(
|
||||
/// Some(&[100, 200, 300, 400, 500, 600]),
|
||||
|
||||
@@ -15,14 +15,12 @@ use halo2curves::ff::PrimeField;
|
||||
/// An enum representing the operations that can be used to express more complex operations via accumulation
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, Deserialize, Serialize)]
|
||||
pub enum LookupOp {
|
||||
Abs,
|
||||
Div {
|
||||
denom: utils::F32,
|
||||
},
|
||||
Cast {
|
||||
scale: utils::F32,
|
||||
},
|
||||
ReLU,
|
||||
Max {
|
||||
scale: utils::F32,
|
||||
a: utils::F32,
|
||||
@@ -104,19 +102,6 @@ pub enum LookupOp {
|
||||
Erf {
|
||||
scale: utils::F32,
|
||||
},
|
||||
GreaterThan {
|
||||
a: utils::F32,
|
||||
},
|
||||
LessThan {
|
||||
a: utils::F32,
|
||||
},
|
||||
GreaterThanEqual {
|
||||
a: utils::F32,
|
||||
},
|
||||
LessThanEqual {
|
||||
a: utils::F32,
|
||||
},
|
||||
Sign,
|
||||
KroneckerDelta,
|
||||
Pow {
|
||||
scale: utils::F32,
|
||||
@@ -135,97 +120,153 @@ impl LookupOp {
|
||||
(-range, range)
|
||||
}
|
||||
|
||||
/// as path
|
||||
pub fn as_path(&self) -> String {
|
||||
match self {
|
||||
LookupOp::Ceil { scale } => format!("ceil_{}", scale),
|
||||
LookupOp::Floor { scale } => format!("floor_{}", scale),
|
||||
LookupOp::Round { scale } => format!("round_{}", scale),
|
||||
LookupOp::RoundHalfToEven { scale } => format!("round_half_to_even_{}", scale),
|
||||
LookupOp::Pow { scale, a } => format!("pow_{}_{}", scale, a),
|
||||
LookupOp::KroneckerDelta => "kronecker_delta".into(),
|
||||
LookupOp::Max { scale, a } => format!("max_{}_{}", scale, a),
|
||||
LookupOp::Min { scale, a } => format!("min_{}_{}", scale, a),
|
||||
LookupOp::Div { denom } => format!("div_{}", denom),
|
||||
LookupOp::Cast { scale } => format!("cast_{}", scale),
|
||||
LookupOp::Recip {
|
||||
input_scale,
|
||||
output_scale,
|
||||
} => format!("recip_{}_{}", input_scale, output_scale),
|
||||
LookupOp::LeakyReLU { slope: a } => format!("leaky_relu_{}", a),
|
||||
LookupOp::Sigmoid { scale } => format!("sigmoid_{}", scale),
|
||||
LookupOp::Sqrt { scale } => format!("sqrt_{}", scale),
|
||||
LookupOp::Rsqrt { scale } => format!("rsqrt_{}", scale),
|
||||
LookupOp::Erf { scale } => format!("erf_{}", scale),
|
||||
LookupOp::Exp { scale } => format!("exp_{}", scale),
|
||||
LookupOp::Ln { scale } => format!("ln_{}", scale),
|
||||
LookupOp::Cos { scale } => format!("cos_{}", scale),
|
||||
LookupOp::ACos { scale } => format!("acos_{}", scale),
|
||||
LookupOp::Cosh { scale } => format!("cosh_{}", scale),
|
||||
LookupOp::ACosh { scale } => format!("acosh_{}", scale),
|
||||
LookupOp::Sin { scale } => format!("sin_{}", scale),
|
||||
LookupOp::ASin { scale } => format!("asin_{}", scale),
|
||||
LookupOp::Sinh { scale } => format!("sinh_{}", scale),
|
||||
LookupOp::ASinh { scale } => format!("asinh_{}", scale),
|
||||
LookupOp::Tan { scale } => format!("tan_{}", scale),
|
||||
LookupOp::ATan { scale } => format!("atan_{}", scale),
|
||||
LookupOp::ATanh { scale } => format!("atanh_{}", scale),
|
||||
LookupOp::Tanh { scale } => format!("tanh_{}", scale),
|
||||
LookupOp::HardSwish { scale } => format!("hardswish_{}", scale),
|
||||
}
|
||||
}
|
||||
|
||||
/// Matches a [Op] to an operation in the `tensor::ops` module.
|
||||
pub(crate) fn f<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
&self,
|
||||
x: &[Tensor<F>],
|
||||
) -> Result<ForwardResult<F>, TensorError> {
|
||||
let x = x[0].clone().map(|x| felt_to_integer_rep(x));
|
||||
let res = match &self {
|
||||
LookupOp::Abs => Ok(tensor::ops::abs(&x)?),
|
||||
LookupOp::Ceil { scale } => Ok(tensor::ops::nonlinearities::ceil(&x, scale.into())),
|
||||
LookupOp::Floor { scale } => Ok(tensor::ops::nonlinearities::floor(&x, scale.into())),
|
||||
LookupOp::Round { scale } => Ok(tensor::ops::nonlinearities::round(&x, scale.into())),
|
||||
LookupOp::RoundHalfToEven { scale } => Ok(
|
||||
tensor::ops::nonlinearities::round_half_to_even(&x, scale.into()),
|
||||
),
|
||||
LookupOp::Pow { scale, a } => Ok(tensor::ops::nonlinearities::pow(
|
||||
&x,
|
||||
scale.0.into(),
|
||||
a.0.into(),
|
||||
)),
|
||||
LookupOp::KroneckerDelta => Ok(tensor::ops::nonlinearities::kronecker_delta(&x)),
|
||||
LookupOp::Max { scale, a } => Ok(tensor::ops::nonlinearities::max(
|
||||
&x,
|
||||
scale.0.into(),
|
||||
a.0.into(),
|
||||
)),
|
||||
LookupOp::Min { scale, a } => Ok(tensor::ops::nonlinearities::min(
|
||||
&x,
|
||||
scale.0.into(),
|
||||
a.0.into(),
|
||||
)),
|
||||
LookupOp::Sign => Ok(tensor::ops::nonlinearities::sign(&x)),
|
||||
LookupOp::LessThan { a } => Ok(tensor::ops::nonlinearities::less_than(
|
||||
&x,
|
||||
f32::from(*a).into(),
|
||||
)),
|
||||
LookupOp::LessThanEqual { a } => Ok(tensor::ops::nonlinearities::less_than_equal(
|
||||
&x,
|
||||
f32::from(*a).into(),
|
||||
)),
|
||||
LookupOp::GreaterThan { a } => Ok(tensor::ops::nonlinearities::greater_than(
|
||||
&x,
|
||||
f32::from(*a).into(),
|
||||
)),
|
||||
LookupOp::GreaterThanEqual { a } => Ok(
|
||||
tensor::ops::nonlinearities::greater_than_equal(&x, f32::from(*a).into()),
|
||||
),
|
||||
LookupOp::Div { denom } => Ok(tensor::ops::nonlinearities::const_div(
|
||||
&x,
|
||||
f32::from(*denom).into(),
|
||||
)),
|
||||
LookupOp::Cast { scale } => Ok(tensor::ops::nonlinearities::const_div(
|
||||
&x,
|
||||
f32::from(*scale).into(),
|
||||
)),
|
||||
LookupOp::Recip {
|
||||
input_scale,
|
||||
output_scale,
|
||||
} => Ok(tensor::ops::nonlinearities::recip(
|
||||
&x,
|
||||
input_scale.into(),
|
||||
output_scale.into(),
|
||||
)),
|
||||
LookupOp::ReLU => Ok(tensor::ops::nonlinearities::leakyrelu(&x, 0_f64)),
|
||||
|
||||
LookupOp::LeakyReLU { slope: a } => {
|
||||
Ok(tensor::ops::nonlinearities::leakyrelu(&x, a.0.into()))
|
||||
}
|
||||
LookupOp::Sigmoid { scale } => {
|
||||
Ok(tensor::ops::nonlinearities::sigmoid(&x, scale.into()))
|
||||
}
|
||||
LookupOp::Sqrt { scale } => Ok(tensor::ops::nonlinearities::sqrt(&x, scale.into())),
|
||||
LookupOp::Rsqrt { scale } => Ok(tensor::ops::nonlinearities::rsqrt(&x, scale.into())),
|
||||
LookupOp::Erf { scale } => Ok(tensor::ops::nonlinearities::erffunc(&x, scale.into())),
|
||||
LookupOp::Exp { scale } => Ok(tensor::ops::nonlinearities::exp(&x, scale.into())),
|
||||
LookupOp::Ln { scale } => Ok(tensor::ops::nonlinearities::ln(&x, scale.into())),
|
||||
LookupOp::Cos { scale } => Ok(tensor::ops::nonlinearities::cos(&x, scale.into())),
|
||||
LookupOp::ACos { scale } => Ok(tensor::ops::nonlinearities::acos(&x, scale.into())),
|
||||
LookupOp::Cosh { scale } => Ok(tensor::ops::nonlinearities::cosh(&x, scale.into())),
|
||||
LookupOp::ACosh { scale } => Ok(tensor::ops::nonlinearities::acosh(&x, scale.into())),
|
||||
LookupOp::Sin { scale } => Ok(tensor::ops::nonlinearities::sin(&x, scale.into())),
|
||||
LookupOp::ASin { scale } => Ok(tensor::ops::nonlinearities::asin(&x, scale.into())),
|
||||
LookupOp::Sinh { scale } => Ok(tensor::ops::nonlinearities::sinh(&x, scale.into())),
|
||||
LookupOp::ASinh { scale } => Ok(tensor::ops::nonlinearities::asinh(&x, scale.into())),
|
||||
LookupOp::Tan { scale } => Ok(tensor::ops::nonlinearities::tan(&x, scale.into())),
|
||||
LookupOp::ATan { scale } => Ok(tensor::ops::nonlinearities::atan(&x, scale.into())),
|
||||
LookupOp::ATanh { scale } => Ok(tensor::ops::nonlinearities::atanh(&x, scale.into())),
|
||||
LookupOp::Tanh { scale } => Ok(tensor::ops::nonlinearities::tanh(&x, scale.into())),
|
||||
LookupOp::HardSwish { scale } => {
|
||||
Ok(tensor::ops::nonlinearities::hardswish(&x, scale.into()))
|
||||
}
|
||||
}?;
|
||||
let res =
|
||||
match &self {
|
||||
LookupOp::Ceil { scale } => {
|
||||
Ok::<_, TensorError>(tensor::ops::nonlinearities::ceil(&x, scale.into()))
|
||||
}
|
||||
LookupOp::Floor { scale } => {
|
||||
Ok::<_, TensorError>(tensor::ops::nonlinearities::floor(&x, scale.into()))
|
||||
}
|
||||
LookupOp::Round { scale } => {
|
||||
Ok::<_, TensorError>(tensor::ops::nonlinearities::round(&x, scale.into()))
|
||||
}
|
||||
LookupOp::RoundHalfToEven { scale } => Ok::<_, TensorError>(
|
||||
tensor::ops::nonlinearities::round_half_to_even(&x, scale.into()),
|
||||
),
|
||||
LookupOp::Pow { scale, a } => Ok::<_, TensorError>(
|
||||
tensor::ops::nonlinearities::pow(&x, scale.0.into(), a.0.into()),
|
||||
),
|
||||
LookupOp::KroneckerDelta => {
|
||||
Ok::<_, TensorError>(tensor::ops::nonlinearities::kronecker_delta(&x))
|
||||
}
|
||||
LookupOp::Max { scale, a } => Ok::<_, TensorError>(
|
||||
tensor::ops::nonlinearities::max(&x, scale.0.into(), a.0.into()),
|
||||
),
|
||||
LookupOp::Min { scale, a } => Ok::<_, TensorError>(
|
||||
tensor::ops::nonlinearities::min(&x, scale.0.into(), a.0.into()),
|
||||
),
|
||||
LookupOp::Div { denom } => Ok::<_, TensorError>(
|
||||
tensor::ops::nonlinearities::const_div(&x, f32::from(*denom).into()),
|
||||
),
|
||||
LookupOp::Cast { scale } => Ok::<_, TensorError>(
|
||||
tensor::ops::nonlinearities::const_div(&x, f32::from(*scale).into()),
|
||||
),
|
||||
LookupOp::Recip {
|
||||
input_scale,
|
||||
output_scale,
|
||||
} => Ok::<_, TensorError>(tensor::ops::nonlinearities::recip(
|
||||
&x,
|
||||
input_scale.into(),
|
||||
output_scale.into(),
|
||||
)),
|
||||
LookupOp::LeakyReLU { slope: a } => {
|
||||
Ok::<_, TensorError>(tensor::ops::nonlinearities::leakyrelu(&x, a.0.into()))
|
||||
}
|
||||
LookupOp::Sigmoid { scale } => {
|
||||
Ok::<_, TensorError>(tensor::ops::nonlinearities::sigmoid(&x, scale.into()))
|
||||
}
|
||||
LookupOp::Sqrt { scale } => {
|
||||
Ok::<_, TensorError>(tensor::ops::nonlinearities::sqrt(&x, scale.into()))
|
||||
}
|
||||
LookupOp::Rsqrt { scale } => {
|
||||
Ok::<_, TensorError>(tensor::ops::nonlinearities::rsqrt(&x, scale.into()))
|
||||
}
|
||||
LookupOp::Erf { scale } => {
|
||||
Ok::<_, TensorError>(tensor::ops::nonlinearities::erffunc(&x, scale.into()))
|
||||
}
|
||||
LookupOp::Exp { scale } => {
|
||||
Ok::<_, TensorError>(tensor::ops::nonlinearities::exp(&x, scale.into()))
|
||||
}
|
||||
LookupOp::Ln { scale } => {
|
||||
Ok::<_, TensorError>(tensor::ops::nonlinearities::ln(&x, scale.into()))
|
||||
}
|
||||
LookupOp::Cos { scale } => {
|
||||
Ok::<_, TensorError>(tensor::ops::nonlinearities::cos(&x, scale.into()))
|
||||
}
|
||||
LookupOp::ACos { scale } => {
|
||||
Ok::<_, TensorError>(tensor::ops::nonlinearities::acos(&x, scale.into()))
|
||||
}
|
||||
LookupOp::Cosh { scale } => {
|
||||
Ok::<_, TensorError>(tensor::ops::nonlinearities::cosh(&x, scale.into()))
|
||||
}
|
||||
LookupOp::ACosh { scale } => {
|
||||
Ok::<_, TensorError>(tensor::ops::nonlinearities::acosh(&x, scale.into()))
|
||||
}
|
||||
LookupOp::Sin { scale } => {
|
||||
Ok::<_, TensorError>(tensor::ops::nonlinearities::sin(&x, scale.into()))
|
||||
}
|
||||
LookupOp::ASin { scale } => {
|
||||
Ok::<_, TensorError>(tensor::ops::nonlinearities::asin(&x, scale.into()))
|
||||
}
|
||||
LookupOp::Sinh { scale } => {
|
||||
Ok::<_, TensorError>(tensor::ops::nonlinearities::sinh(&x, scale.into()))
|
||||
}
|
||||
LookupOp::ASinh { scale } => {
|
||||
Ok::<_, TensorError>(tensor::ops::nonlinearities::asinh(&x, scale.into()))
|
||||
}
|
||||
LookupOp::Tan { scale } => {
|
||||
Ok::<_, TensorError>(tensor::ops::nonlinearities::tan(&x, scale.into()))
|
||||
}
|
||||
LookupOp::ATan { scale } => {
|
||||
Ok::<_, TensorError>(tensor::ops::nonlinearities::atan(&x, scale.into()))
|
||||
}
|
||||
LookupOp::ATanh { scale } => {
|
||||
Ok::<_, TensorError>(tensor::ops::nonlinearities::atanh(&x, scale.into()))
|
||||
}
|
||||
LookupOp::Tanh { scale } => {
|
||||
Ok::<_, TensorError>(tensor::ops::nonlinearities::tanh(&x, scale.into()))
|
||||
}
|
||||
LookupOp::HardSwish { scale } => {
|
||||
Ok::<_, TensorError>(tensor::ops::nonlinearities::hardswish(&x, scale.into()))
|
||||
}
|
||||
}?;
|
||||
|
||||
let output = res.map(|x| integer_rep_to_felt(x));
|
||||
|
||||
@@ -242,7 +283,6 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Op<F> for Lookup
|
||||
/// Returns the name of the operation
|
||||
fn as_string(&self) -> String {
|
||||
match self {
|
||||
LookupOp::Abs => "ABS".into(),
|
||||
LookupOp::Ceil { scale } => format!("CEIL(scale={})", scale),
|
||||
LookupOp::Floor { scale } => format!("FLOOR(scale={})", scale),
|
||||
LookupOp::Round { scale } => format!("ROUND(scale={})", scale),
|
||||
@@ -251,11 +291,6 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Op<F> for Lookup
|
||||
LookupOp::KroneckerDelta => "K_DELTA".into(),
|
||||
LookupOp::Max { scale, a } => format!("MAX(scale={}, a={})", scale, a),
|
||||
LookupOp::Min { scale, a } => format!("MIN(scale={}, a={})", scale, a),
|
||||
LookupOp::Sign => "SIGN".into(),
|
||||
LookupOp::GreaterThan { a } => format!("GREATER_THAN(a={})", a),
|
||||
LookupOp::GreaterThanEqual { a } => format!("GREATER_THAN_EQUAL(a={})", a),
|
||||
LookupOp::LessThan { a } => format!("LESS_THAN(a={})", a),
|
||||
LookupOp::LessThanEqual { a } => format!("LESS_THAN_EQUAL(a={})", a),
|
||||
LookupOp::Recip {
|
||||
input_scale,
|
||||
output_scale,
|
||||
@@ -266,7 +301,6 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Op<F> for Lookup
|
||||
LookupOp::Div { denom, .. } => format!("DIV(denom={})", denom),
|
||||
LookupOp::Cast { scale } => format!("CAST(scale={})", scale),
|
||||
LookupOp::Ln { scale } => format!("LN(scale={})", scale),
|
||||
LookupOp::ReLU => "RELU".to_string(),
|
||||
LookupOp::LeakyReLU { slope: a } => format!("L_RELU(slope={})", a),
|
||||
LookupOp::Sigmoid { scale } => format!("SIGMOID(scale={})", scale),
|
||||
LookupOp::Sqrt { scale } => format!("SQRT(scale={})", scale),
|
||||
@@ -311,12 +345,7 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Op<F> for Lookup
|
||||
in_scale + multiplier_to_scale(1. / scale.0 as f64)
|
||||
}
|
||||
LookupOp::Recip { output_scale, .. } => multiplier_to_scale(output_scale.into()),
|
||||
LookupOp::Sign
|
||||
| LookupOp::GreaterThan { .. }
|
||||
| LookupOp::LessThan { .. }
|
||||
| LookupOp::GreaterThanEqual { .. }
|
||||
| LookupOp::LessThanEqual { .. }
|
||||
| LookupOp::KroneckerDelta => 0,
|
||||
LookupOp::KroneckerDelta => 0,
|
||||
_ => inputs_scale[0],
|
||||
};
|
||||
Ok(scale)
|
||||
|
||||
@@ -255,7 +255,7 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Constant<F> {
|
||||
self.raw_values = Tensor::new(None, &[0]).unwrap();
|
||||
}
|
||||
|
||||
///
|
||||
/// Pre-assign a value
|
||||
pub fn pre_assign(&mut self, val: ValTensor<F>) {
|
||||
self.pre_assigned_val = Some(val)
|
||||
}
|
||||
|
||||
@@ -9,6 +9,9 @@ use super::{base::BaseOp, *};
|
||||
/// An enum representing the operations that can be expressed as arithmetic (non lookup) operations.
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub enum PolyOp {
|
||||
ReLU,
|
||||
Abs,
|
||||
Sign,
|
||||
GatherElements {
|
||||
dim: usize,
|
||||
constant_idx: Option<Tensor<usize>>,
|
||||
@@ -99,8 +102,7 @@ impl<
|
||||
+ PartialOrd
|
||||
+ std::hash::Hash
|
||||
+ Serialize
|
||||
+ for<'de> Deserialize<'de>
|
||||
,
|
||||
+ for<'de> Deserialize<'de>,
|
||||
> Op<F> for PolyOp
|
||||
{
|
||||
/// Returns a reference to the Any trait.
|
||||
@@ -110,6 +112,9 @@ impl<
|
||||
|
||||
fn as_string(&self) -> String {
|
||||
match &self {
|
||||
PolyOp::Abs => "ABS".to_string(),
|
||||
PolyOp::Sign => "SIGN".to_string(),
|
||||
PolyOp::ReLU => "RELU".to_string(),
|
||||
PolyOp::GatherElements { dim, constant_idx } => format!(
|
||||
"GATHERELEMENTS (dim={}, constant_idx{})",
|
||||
dim,
|
||||
@@ -191,6 +196,9 @@ impl<
|
||||
values: &[ValTensor<F>],
|
||||
) -> Result<Option<ValTensor<F>>, CircuitError> {
|
||||
Ok(Some(match self {
|
||||
PolyOp::Abs => layouts::abs(config, region, values[..].try_into()?)?,
|
||||
PolyOp::Sign => layouts::sign(config, region, values[..].try_into()?)?,
|
||||
PolyOp::ReLU => layouts::relu(config, region, values[..].try_into()?)?,
|
||||
PolyOp::MultiBroadcastTo { shape } => {
|
||||
layouts::expand(config, region, values[..].try_into()?, shape)?
|
||||
}
|
||||
@@ -368,6 +376,7 @@ impl<
|
||||
PolyOp::Reshape(_) | PolyOp::Flatten(_) => in_scales[0],
|
||||
PolyOp::Pow(pow) => in_scales[0] * (*pow as crate::Scale),
|
||||
PolyOp::Identity { out_scale } => out_scale.unwrap_or(in_scales[0]),
|
||||
PolyOp::Sign { .. } => 0,
|
||||
_ => in_scales[0],
|
||||
};
|
||||
Ok(scale)
|
||||
|
||||
@@ -3,7 +3,7 @@ use crate::{
|
||||
fieldutils::IntegerRep,
|
||||
tensor::{Tensor, TensorType, ValTensor, ValType, VarTensor},
|
||||
};
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use colored::Colorize;
|
||||
use halo2_proofs::{
|
||||
circuit::Region,
|
||||
@@ -93,6 +93,10 @@ pub struct RegionSettings {
|
||||
pub witness_gen: bool,
|
||||
/// whether we should check range checks for validity
|
||||
pub check_range: bool,
|
||||
/// base for decompositions
|
||||
pub base: usize,
|
||||
/// number of legs for decompositions
|
||||
pub legs: usize,
|
||||
}
|
||||
|
||||
#[allow(unsafe_code)]
|
||||
@@ -102,26 +106,32 @@ unsafe impl Send for RegionSettings {}
|
||||
|
||||
impl RegionSettings {
|
||||
/// Create a new region settings
|
||||
pub fn new(witness_gen: bool, check_range: bool) -> RegionSettings {
|
||||
pub fn new(witness_gen: bool, check_range: bool, base: usize, legs: usize) -> RegionSettings {
|
||||
RegionSettings {
|
||||
witness_gen,
|
||||
check_range,
|
||||
base,
|
||||
legs,
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a new region settings with all true
|
||||
pub fn all_true() -> RegionSettings {
|
||||
pub fn all_true(base: usize, legs: usize) -> RegionSettings {
|
||||
RegionSettings {
|
||||
witness_gen: true,
|
||||
check_range: true,
|
||||
base,
|
||||
legs,
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a new region settings with all false
|
||||
pub fn all_false() -> RegionSettings {
|
||||
pub fn all_false(base: usize, legs: usize) -> RegionSettings {
|
||||
RegionSettings {
|
||||
witness_gen: false,
|
||||
check_range: false,
|
||||
base,
|
||||
legs,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -170,14 +180,30 @@ pub struct RegionCtx<'a, F: PrimeField + TensorType + PartialOrd + std::hash::Ha
|
||||
statistics: RegionStatistics,
|
||||
settings: RegionSettings,
|
||||
assigned_constants: ConstantsMap<F>,
|
||||
max_dynamic_input_len: usize,
|
||||
}
|
||||
|
||||
impl<'a, F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RegionCtx<'a, F> {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
/// get the region's decomposition base
|
||||
pub fn base(&self) -> usize {
|
||||
self.settings.base
|
||||
}
|
||||
|
||||
/// get the region's decomposition legs
|
||||
pub fn legs(&self) -> usize {
|
||||
self.settings.legs
|
||||
}
|
||||
|
||||
/// get the max dynamic input len
|
||||
pub fn max_dynamic_input_len(&self) -> usize {
|
||||
self.max_dynamic_input_len
|
||||
}
|
||||
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
///
|
||||
pub fn debug_report(&self) {
|
||||
log::debug!(
|
||||
"(rows={}, coord={}, constants={}, max_lookup_inputs={}, min_lookup_inputs={}, max_range_size={}, dynamic_lookup_col_coord={}, shuffle_col_coord={})",
|
||||
"(rows={}, coord={}, constants={}, max_lookup_inputs={}, min_lookup_inputs={}, max_range_size={}, dynamic_lookup_col_coord={}, shuffle_col_coord={}, max_dynamic_input_len={})",
|
||||
self.row().to_string().blue(),
|
||||
self.linear_coord().to_string().yellow(),
|
||||
self.total_constants().to_string().red(),
|
||||
@@ -185,7 +211,9 @@ impl<'a, F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RegionCtx<'a
|
||||
self.min_lookup_inputs().to_string().green(),
|
||||
self.max_range_size().to_string().green(),
|
||||
self.dynamic_lookup_col_coord().to_string().green(),
|
||||
self.shuffle_col_coord().to_string().green());
|
||||
self.shuffle_col_coord().to_string().green(),
|
||||
self.max_dynamic_input_len().to_string().green()
|
||||
);
|
||||
}
|
||||
|
||||
///
|
||||
@@ -203,6 +231,11 @@ impl<'a, F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RegionCtx<'a
|
||||
self.dynamic_lookup_index.index += n;
|
||||
}
|
||||
|
||||
/// increment the max dynamic input len
|
||||
pub fn update_max_dynamic_input_len(&mut self, n: usize) {
|
||||
self.max_dynamic_input_len = self.max_dynamic_input_len.max(n);
|
||||
}
|
||||
|
||||
///
|
||||
pub fn increment_dynamic_lookup_col_coord(&mut self, n: usize) {
|
||||
self.dynamic_lookup_index.col_coord += n;
|
||||
@@ -234,7 +267,13 @@ impl<'a, F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RegionCtx<'a
|
||||
}
|
||||
|
||||
/// Create a new region context
|
||||
pub fn new(region: Region<'a, F>, row: usize, num_inner_cols: usize) -> RegionCtx<'a, F> {
|
||||
pub fn new(
|
||||
region: Region<'a, F>,
|
||||
row: usize,
|
||||
num_inner_cols: usize,
|
||||
decomp_base: usize,
|
||||
decomp_legs: usize,
|
||||
) -> RegionCtx<'a, F> {
|
||||
let region = Some(RefCell::new(region));
|
||||
let linear_coord = row * num_inner_cols;
|
||||
|
||||
@@ -246,8 +285,9 @@ impl<'a, F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RegionCtx<'a
|
||||
dynamic_lookup_index: DynamicLookupIndex::default(),
|
||||
shuffle_index: ShuffleIndex::default(),
|
||||
statistics: RegionStatistics::default(),
|
||||
settings: RegionSettings::all_true(),
|
||||
settings: RegionSettings::all_true(decomp_base, decomp_legs),
|
||||
assigned_constants: HashMap::new(),
|
||||
max_dynamic_input_len: 0,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -256,9 +296,11 @@ impl<'a, F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RegionCtx<'a
|
||||
region: Region<'a, F>,
|
||||
row: usize,
|
||||
num_inner_cols: usize,
|
||||
decomp_base: usize,
|
||||
decomp_legs: usize,
|
||||
constants: ConstantsMap<F>,
|
||||
) -> RegionCtx<'a, F> {
|
||||
let mut new_self = Self::new(region, row, num_inner_cols);
|
||||
let mut new_self = Self::new(region, row, num_inner_cols, decomp_base, decomp_legs);
|
||||
new_self.assigned_constants = constants;
|
||||
new_self
|
||||
}
|
||||
@@ -282,6 +324,7 @@ impl<'a, F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RegionCtx<'a
|
||||
statistics: RegionStatistics::default(),
|
||||
settings,
|
||||
assigned_constants: HashMap::new(),
|
||||
max_dynamic_input_len: 0,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -303,6 +346,7 @@ impl<'a, F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RegionCtx<'a
|
||||
statistics: RegionStatistics::default(),
|
||||
settings,
|
||||
assigned_constants: HashMap::new(),
|
||||
max_dynamic_input_len: 0,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -555,9 +599,12 @@ impl<'a, F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RegionCtx<'a
|
||||
&mut self,
|
||||
var: &VarTensor,
|
||||
values: &ValTensor<F>,
|
||||
) -> Result<ValTensor<F>, CircuitError> {
|
||||
) -> Result<(ValTensor<F>, usize), CircuitError> {
|
||||
|
||||
self.update_max_dynamic_input_len(values.len());
|
||||
|
||||
if let Some(region) = &self.region {
|
||||
Ok(var.assign(
|
||||
Ok(var.assign_exact_column(
|
||||
&mut region.borrow_mut(),
|
||||
self.combined_dynamic_shuffle_coord(),
|
||||
values,
|
||||
@@ -568,7 +615,11 @@ impl<'a, F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RegionCtx<'a
|
||||
let values_map = values.create_constants_map_iterator();
|
||||
self.assigned_constants.par_extend(values_map);
|
||||
}
|
||||
Ok(values.clone())
|
||||
|
||||
let flush_len = var.get_column_flush(self.combined_dynamic_shuffle_coord(), values)?;
|
||||
|
||||
// get the diff between the current column and the next row
|
||||
Ok((values.clone(), flush_len))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -577,7 +628,7 @@ impl<'a, F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RegionCtx<'a
|
||||
&mut self,
|
||||
var: &VarTensor,
|
||||
values: &ValTensor<F>,
|
||||
) -> Result<ValTensor<F>, CircuitError> {
|
||||
) -> Result<(ValTensor<F>, usize), CircuitError> {
|
||||
self.assign_dynamic_lookup(var, values)
|
||||
}
|
||||
|
||||
|
||||
@@ -15,6 +15,9 @@ use crate::{
|
||||
tensor::{Tensor, TensorType},
|
||||
};
|
||||
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use crate::execute::EZKL_REPO_PATH;
|
||||
|
||||
use crate::circuit::lookup::LookupOp;
|
||||
|
||||
/// The range of the lookup table.
|
||||
@@ -25,6 +28,16 @@ pub const RANGE_MULTIPLIER: IntegerRep = 2;
|
||||
/// The safety factor offset for the number of rows in the lookup table.
|
||||
pub const RESERVED_BLINDING_ROWS_PAD: usize = 3;
|
||||
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
lazy_static::lazy_static! {
|
||||
/// an optional directory to read and write the lookup table cache
|
||||
pub static ref LOOKUP_CACHE: String = format!("{}/cache", *EZKL_REPO_PATH);
|
||||
}
|
||||
|
||||
/// The lookup table cache is disabled on wasm32 target.
|
||||
#[cfg(any(not(feature = "ezkl"), target_arch = "wasm32"))]
|
||||
pub const LOOKUP_CACHE: &str = "";
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
///
|
||||
pub struct SelectorConstructor<F: PrimeField> {
|
||||
@@ -137,6 +150,14 @@ pub fn num_cols_required(range_len: IntegerRep, col_size: usize) -> usize {
|
||||
}
|
||||
|
||||
impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Table<F> {
|
||||
fn name(&self) -> String {
|
||||
format!(
|
||||
"{}_{}_{}",
|
||||
self.nonlinearity.as_path(),
|
||||
self.range.0,
|
||||
self.range.1
|
||||
)
|
||||
}
|
||||
/// Configures the table.
|
||||
pub fn configure(
|
||||
cs: &mut ConstraintSystem<F>,
|
||||
@@ -203,8 +224,51 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Table<F> {
|
||||
let smallest = self.range.0;
|
||||
let largest = self.range.1;
|
||||
|
||||
let inputs: Tensor<F> = Tensor::from(smallest..=largest).map(|x| integer_rep_to_felt(x));
|
||||
let evals = self.nonlinearity.f(&[inputs.clone()])?;
|
||||
let gen_table = || -> Result<(Tensor<F>, Tensor<F>), crate::tensor::TensorError> {
|
||||
let inputs = Tensor::from(smallest..=largest)
|
||||
.par_enum_map(|_, x| Ok::<_, crate::tensor::TensorError>(integer_rep_to_felt(x)))?;
|
||||
let evals = self.nonlinearity.f(&[inputs.clone()])?;
|
||||
Ok((inputs, evals.output))
|
||||
};
|
||||
|
||||
let (inputs, evals) = if !LOOKUP_CACHE.is_empty() {
|
||||
let cache = std::path::Path::new(&*LOOKUP_CACHE);
|
||||
let cache_path = cache.join(self.name());
|
||||
let input_path = cache_path.join("inputs");
|
||||
let output_path = cache_path.join("outputs");
|
||||
if cache_path.exists() {
|
||||
log::info!("Loading lookup table from cache: {:?}", cache_path);
|
||||
let (input_cache, output_cache) =
|
||||
(Tensor::load(&input_path)?, Tensor::load(&output_path)?);
|
||||
(input_cache, output_cache)
|
||||
} else {
|
||||
log::info!(
|
||||
"Generating lookup table and saving to cache: {:?}",
|
||||
cache_path
|
||||
);
|
||||
|
||||
// mkdir -p cache_path
|
||||
std::fs::create_dir_all(&cache_path).map_err(|e| {
|
||||
CircuitError::TensorError(crate::tensor::TensorError::FileSaveError(
|
||||
e.to_string(),
|
||||
))
|
||||
})?;
|
||||
|
||||
let (inputs, evals) = gen_table()?;
|
||||
inputs.save(&input_path)?;
|
||||
evals.save(&output_path)?;
|
||||
|
||||
(inputs, evals)
|
||||
}
|
||||
} else {
|
||||
log::info!(
|
||||
"Generating lookup table {} without cache",
|
||||
self.nonlinearity.as_path()
|
||||
);
|
||||
|
||||
gen_table()?
|
||||
};
|
||||
|
||||
let chunked_inputs = inputs.chunks(self.col_size);
|
||||
|
||||
self.is_assigned = true;
|
||||
@@ -236,7 +300,7 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Table<F> {
|
||||
)?;
|
||||
}
|
||||
|
||||
let output = evals.output[row_offset];
|
||||
let output = evals[row_offset];
|
||||
|
||||
table.assign_cell(
|
||||
|| format!("nl_o_col row {}", row_offset),
|
||||
@@ -274,6 +338,11 @@ pub struct RangeCheck<F: PrimeField> {
|
||||
}
|
||||
|
||||
impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RangeCheck<F> {
|
||||
/// as path
|
||||
pub fn as_path(&self) -> String {
|
||||
format!("rangecheck_{}_{}", self.range.0, self.range.1)
|
||||
}
|
||||
|
||||
/// get first_element of column
|
||||
pub fn get_first_element(&self, chunk: usize) -> F {
|
||||
let chunk = chunk as IntegerRep;
|
||||
@@ -351,7 +420,32 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RangeCheck<F> {
|
||||
let smallest = self.range.0;
|
||||
let largest = self.range.1;
|
||||
|
||||
let inputs: Tensor<F> = Tensor::from(smallest..=largest).map(|x| integer_rep_to_felt(x));
|
||||
let inputs: Tensor<F> = if !LOOKUP_CACHE.is_empty() {
|
||||
let cache = std::path::Path::new(&*LOOKUP_CACHE);
|
||||
let cache_path = cache.join(self.as_path());
|
||||
let input_path = cache_path.join("inputs");
|
||||
if cache_path.exists() {
|
||||
log::info!("Loading range check table from cache: {:?}", cache_path);
|
||||
Tensor::load(&input_path)?
|
||||
} else {
|
||||
log::info!(
|
||||
"Generating range check table and saving to cache: {:?}",
|
||||
cache_path
|
||||
);
|
||||
|
||||
// mkdir -p cache_path
|
||||
std::fs::create_dir_all(&cache_path)?;
|
||||
|
||||
let inputs = Tensor::from(smallest..=largest).map(|x| integer_rep_to_felt(x));
|
||||
inputs.save(&input_path)?;
|
||||
inputs
|
||||
}
|
||||
} else {
|
||||
log::info!("Generating range check {} without cache", self.as_path());
|
||||
|
||||
Tensor::from(smallest..=largest).map(|x| integer_rep_to_felt(x))
|
||||
};
|
||||
|
||||
let chunked_inputs = inputs.chunks(self.col_size);
|
||||
|
||||
self.is_assigned = true;
|
||||
|
||||
@@ -8,6 +8,10 @@ use halo2_proofs::{
|
||||
};
|
||||
use halo2curves::bn256::Fr as F;
|
||||
use halo2curves::ff::{Field, PrimeField};
|
||||
#[cfg(not(any(
|
||||
all(target_arch = "wasm32", target_os = "unknown"),
|
||||
not(feature = "ezkl")
|
||||
)))]
|
||||
use ops::lookup::LookupOp;
|
||||
use ops::region::RegionCtx;
|
||||
use rand::rngs::OsRng;
|
||||
@@ -55,7 +59,7 @@ mod matmul {
|
||||
.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, 1);
|
||||
let mut region = RegionCtx::new(region, 0, 1, 128, 2);
|
||||
config
|
||||
.layout(
|
||||
&mut region,
|
||||
@@ -132,7 +136,7 @@ mod matmul_col_overflow_double_col {
|
||||
.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, NUM_INNER_COLS);
|
||||
let mut region = RegionCtx::new(region, 0, NUM_INNER_COLS, 128, 2);
|
||||
config
|
||||
.layout(
|
||||
&mut region,
|
||||
@@ -206,7 +210,7 @@ mod matmul_col_overflow {
|
||||
.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, 1);
|
||||
let mut region = RegionCtx::new(region, 0, 1, 128, 2);
|
||||
config
|
||||
.layout(
|
||||
&mut region,
|
||||
@@ -243,7 +247,10 @@ mod matmul_col_overflow {
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
#[cfg(not(all(target_arch = "wasm32", target_os = "unknown")))]
|
||||
#[cfg(all(
|
||||
feature = "ezkl",
|
||||
not(all(target_arch = "wasm32", target_os = "unknown"))
|
||||
))]
|
||||
mod matmul_col_ultra_overflow_double_col {
|
||||
|
||||
use halo2_proofs::poly::kzg::{
|
||||
@@ -256,7 +263,7 @@ mod matmul_col_ultra_overflow_double_col {
|
||||
use super::*;
|
||||
|
||||
const K: usize = 4;
|
||||
const LEN: usize = 20;
|
||||
const LEN: usize = 10;
|
||||
const NUM_INNER_COLS: usize = 2;
|
||||
|
||||
#[derive(Clone)]
|
||||
@@ -290,7 +297,7 @@ mod matmul_col_ultra_overflow_double_col {
|
||||
.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, NUM_INNER_COLS);
|
||||
let mut region = RegionCtx::new(region, 0, NUM_INNER_COLS, 128, 2);
|
||||
config
|
||||
.layout(
|
||||
&mut region,
|
||||
@@ -361,7 +368,10 @@ mod matmul_col_ultra_overflow_double_col {
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
#[cfg(not(all(target_arch = "wasm32", target_os = "unknown")))]
|
||||
#[cfg(all(
|
||||
feature = "ezkl",
|
||||
not(all(target_arch = "wasm32", target_os = "unknown"))
|
||||
))]
|
||||
mod matmul_col_ultra_overflow {
|
||||
|
||||
use halo2_proofs::poly::kzg::{
|
||||
@@ -374,7 +384,7 @@ mod matmul_col_ultra_overflow {
|
||||
use super::*;
|
||||
|
||||
const K: usize = 4;
|
||||
const LEN: usize = 20;
|
||||
const LEN: usize = 10;
|
||||
|
||||
#[derive(Clone)]
|
||||
struct MatmulCircuit<F: PrimeField + TensorType + PartialOrd> {
|
||||
@@ -407,7 +417,7 @@ mod matmul_col_ultra_overflow {
|
||||
.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, 1);
|
||||
let mut region = RegionCtx::new(region, 0, 1, 128, 2);
|
||||
config
|
||||
.layout(
|
||||
&mut region,
|
||||
@@ -518,7 +528,7 @@ mod dot {
|
||||
.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, 1);
|
||||
let mut region = RegionCtx::new(region, 0, 1, 128, 2);
|
||||
config
|
||||
.layout(
|
||||
&mut region,
|
||||
@@ -595,7 +605,7 @@ mod dot_col_overflow_triple_col {
|
||||
.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, 3);
|
||||
let mut region = RegionCtx::new(region, 0, 3, 128, 2);
|
||||
config
|
||||
.layout(
|
||||
&mut region,
|
||||
@@ -668,7 +678,7 @@ mod dot_col_overflow {
|
||||
.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, 1);
|
||||
let mut region = RegionCtx::new(region, 0, 1, 128, 2);
|
||||
config
|
||||
.layout(
|
||||
&mut region,
|
||||
@@ -741,7 +751,7 @@ mod sum {
|
||||
.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, 1);
|
||||
let mut region = RegionCtx::new(region, 0, 1, 128, 2);
|
||||
config
|
||||
.layout(
|
||||
&mut region,
|
||||
@@ -811,7 +821,7 @@ mod sum_col_overflow_double_col {
|
||||
.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, NUM_INNER_COLS);
|
||||
let mut region = RegionCtx::new(region, 0, NUM_INNER_COLS, 128, 2);
|
||||
config
|
||||
.layout(
|
||||
&mut region,
|
||||
@@ -880,7 +890,7 @@ mod sum_col_overflow {
|
||||
.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, 1);
|
||||
let mut region = RegionCtx::new(region, 0, 1, 128, 2);
|
||||
config
|
||||
.layout(
|
||||
&mut region,
|
||||
@@ -951,7 +961,7 @@ mod composition {
|
||||
.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, 1);
|
||||
let mut region = RegionCtx::new(region, 0, 1, 128, 2);
|
||||
let _ = config
|
||||
.layout(
|
||||
&mut region,
|
||||
@@ -1042,7 +1052,7 @@ mod conv {
|
||||
.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, 1);
|
||||
let mut region = RegionCtx::new(region, 0, 1, 128, 2);
|
||||
config
|
||||
.layout(
|
||||
&mut region,
|
||||
@@ -1144,7 +1154,10 @@ mod conv {
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
#[cfg(not(all(target_arch = "wasm32", target_os = "unknown")))]
|
||||
#[cfg(all(
|
||||
feature = "ezkl",
|
||||
not(all(target_arch = "wasm32", target_os = "unknown"))
|
||||
))]
|
||||
mod conv_col_ultra_overflow {
|
||||
|
||||
use halo2_proofs::poly::{
|
||||
@@ -1159,7 +1172,7 @@ mod conv_col_ultra_overflow {
|
||||
use super::*;
|
||||
|
||||
const K: usize = 4;
|
||||
const LEN: usize = 28;
|
||||
const LEN: usize = 10;
|
||||
|
||||
#[derive(Clone)]
|
||||
struct ConvCircuit<F: PrimeField + TensorType + PartialOrd> {
|
||||
@@ -1193,7 +1206,7 @@ mod conv_col_ultra_overflow {
|
||||
.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, 1);
|
||||
let mut region = RegionCtx::new(region, 0, 1, 128, 2);
|
||||
config
|
||||
.layout(
|
||||
&mut region,
|
||||
@@ -1285,7 +1298,10 @@ mod conv_col_ultra_overflow {
|
||||
|
||||
#[cfg(test)]
|
||||
// not wasm 32 unknown
|
||||
#[cfg(not(all(target_arch = "wasm32", target_os = "unknown")))]
|
||||
#[cfg(all(
|
||||
feature = "ezkl",
|
||||
not(all(target_arch = "wasm32", target_os = "unknown"))
|
||||
))]
|
||||
mod conv_relu_col_ultra_overflow {
|
||||
|
||||
use halo2_proofs::poly::kzg::{
|
||||
@@ -1297,8 +1313,8 @@ mod conv_relu_col_ultra_overflow {
|
||||
|
||||
use super::*;
|
||||
|
||||
const K: usize = 4;
|
||||
const LEN: usize = 28;
|
||||
const K: usize = 8;
|
||||
const LEN: usize = 15;
|
||||
|
||||
#[derive(Clone)]
|
||||
struct ConvCircuit<F: PrimeField + TensorType + PartialOrd> {
|
||||
@@ -1317,15 +1333,23 @@ mod conv_relu_col_ultra_overflow {
|
||||
}
|
||||
|
||||
fn configure(cs: &mut ConstraintSystem<F>) -> Self::Config {
|
||||
let a = VarTensor::new_advice(cs, K, 1, LEN * LEN * LEN);
|
||||
let b = VarTensor::new_advice(cs, K, 1, LEN * LEN * LEN);
|
||||
let output = VarTensor::new_advice(cs, K, 1, LEN * LEN * LEN);
|
||||
let a = VarTensor::new_advice(cs, K, 1, LEN * LEN * LEN * 4);
|
||||
let b = VarTensor::new_advice(cs, K, 1, LEN * LEN * LEN * 4);
|
||||
let output = VarTensor::new_advice(cs, K, 1, LEN * LEN * LEN * 4);
|
||||
let mut base_config =
|
||||
Self::Config::configure(cs, &[a.clone(), b.clone()], &output, CheckMode::SAFE);
|
||||
// sets up a new relu table
|
||||
|
||||
base_config
|
||||
.configure_lookup(cs, &b, &output, &a, (-3, 3), K, &LookupOp::ReLU)
|
||||
.configure_range_check(cs, &a, &b, (-1, 1), K)
|
||||
.unwrap();
|
||||
|
||||
base_config
|
||||
.configure_range_check(cs, &a, &b, (0, 1), K)
|
||||
.unwrap();
|
||||
|
||||
let _constant = VarTensor::constant_cols(cs, K, 8, false);
|
||||
|
||||
base_config.clone()
|
||||
}
|
||||
|
||||
@@ -1334,12 +1358,12 @@ mod conv_relu_col_ultra_overflow {
|
||||
mut config: Self::Config,
|
||||
mut layouter: impl Layouter<F>,
|
||||
) -> Result<(), Error> {
|
||||
config.layout_tables(&mut layouter).unwrap();
|
||||
config.layout_range_checks(&mut layouter).unwrap();
|
||||
layouter
|
||||
.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, 1);
|
||||
let mut region = RegionCtx::new(region, 0, 1, 2, 2);
|
||||
let output = config
|
||||
.layout(
|
||||
&mut region,
|
||||
@@ -1355,7 +1379,7 @@ mod conv_relu_col_ultra_overflow {
|
||||
.layout(
|
||||
&mut region,
|
||||
&[output.unwrap().unwrap()],
|
||||
Box::new(LookupOp::ReLU),
|
||||
Box::new(PolyOp::ReLU),
|
||||
)
|
||||
.unwrap();
|
||||
Ok(())
|
||||
@@ -1476,7 +1500,7 @@ mod add_w_shape_casting {
|
||||
.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, 1);
|
||||
let mut region = RegionCtx::new(region, 0, 1, 128, 2);
|
||||
config
|
||||
.layout(&mut region, &self.inputs.clone(), Box::new(PolyOp::Add))
|
||||
.map_err(|_| Error::Synthesis)
|
||||
@@ -1492,7 +1516,7 @@ mod add_w_shape_casting {
|
||||
// parameters
|
||||
let a = Tensor::from((0..LEN).map(|i| Value::known(F::from(i as u64 + 1))));
|
||||
|
||||
let b = Tensor::from((0..1).map(|i| Value::known(F::from(i as u64 + 1))));
|
||||
let b = Tensor::from((0..1).map(|i| Value::known(F::from(i + 1))));
|
||||
|
||||
let circuit = MyCircuit::<F> {
|
||||
inputs: [ValTensor::from(a), ValTensor::from(b)],
|
||||
@@ -1543,7 +1567,7 @@ mod add {
|
||||
.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, 1);
|
||||
let mut region = RegionCtx::new(region, 0, 1, 128, 2);
|
||||
config
|
||||
.layout(&mut region, &self.inputs.clone(), Box::new(PolyOp::Add))
|
||||
.map_err(|_| Error::Synthesis)
|
||||
@@ -1627,7 +1651,7 @@ mod dynamic_lookup {
|
||||
.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, 1);
|
||||
let mut region = RegionCtx::new(region, 0, 1, 128, 2);
|
||||
for i in 0..NUM_LOOP {
|
||||
layouts::dynamic_lookup(
|
||||
&config,
|
||||
@@ -1769,7 +1793,7 @@ mod shuffle {
|
||||
.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, 1);
|
||||
let mut region = RegionCtx::new(region, 0, 1, 128, 2);
|
||||
for i in 0..NUM_LOOP {
|
||||
layouts::shuffles(
|
||||
&config,
|
||||
@@ -1884,7 +1908,7 @@ mod add_with_overflow {
|
||||
.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, 1);
|
||||
let mut region = RegionCtx::new(region, 0, 1, 128, 2);
|
||||
config
|
||||
.layout(&mut region, &self.inputs.clone(), Box::new(PolyOp::Add))
|
||||
.map_err(|_| Error::Synthesis)
|
||||
@@ -1986,7 +2010,7 @@ mod add_with_overflow_and_poseidon {
|
||||
layouter.assign_region(
|
||||
|| "model",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, 1);
|
||||
let mut region = RegionCtx::new(region, 0, 1, 128, 2);
|
||||
config
|
||||
.base
|
||||
.layout(&mut region, &inputs, Box::new(PolyOp::Add))
|
||||
@@ -2092,7 +2116,7 @@ mod sub {
|
||||
.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, 1);
|
||||
let mut region = RegionCtx::new(region, 0, 1, 128, 2);
|
||||
config
|
||||
.layout(&mut region, &self.inputs.clone(), Box::new(PolyOp::Sub))
|
||||
.map_err(|_| Error::Synthesis)
|
||||
@@ -2159,7 +2183,7 @@ mod mult {
|
||||
.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, 1);
|
||||
let mut region = RegionCtx::new(region, 0, 1, 128, 2);
|
||||
config
|
||||
.layout(&mut region, &self.inputs.clone(), Box::new(PolyOp::Mult))
|
||||
.map_err(|_| Error::Synthesis)
|
||||
@@ -2226,7 +2250,7 @@ mod pow {
|
||||
.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, 1);
|
||||
let mut region = RegionCtx::new(region, 0, 1, 128, 2);
|
||||
config
|
||||
.layout(&mut region, &self.inputs.clone(), Box::new(PolyOp::Pow(5)))
|
||||
.map_err(|_| Error::Synthesis)
|
||||
@@ -2258,7 +2282,6 @@ mod matmul_relu {
|
||||
|
||||
const K: usize = 18;
|
||||
const LEN: usize = 32;
|
||||
use crate::circuit::lookup::LookupOp;
|
||||
|
||||
#[derive(Clone)]
|
||||
struct MyCircuit<F: PrimeField + TensorType + PartialOrd> {
|
||||
@@ -2288,11 +2311,17 @@ mod matmul_relu {
|
||||
|
||||
let mut base_config =
|
||||
BaseConfig::configure(cs, &[a.clone(), b.clone()], &output, CheckMode::SAFE);
|
||||
// sets up a new relu table
|
||||
|
||||
base_config
|
||||
.configure_lookup(cs, &b, &output, &a, (-32768, 32768), K, &LookupOp::ReLU)
|
||||
.configure_range_check(cs, &a, &b, (-1, 1), K)
|
||||
.unwrap();
|
||||
|
||||
base_config
|
||||
.configure_range_check(cs, &a, &b, (0, 1023), K)
|
||||
.unwrap();
|
||||
|
||||
let _constant = VarTensor::constant_cols(cs, K, 8, false);
|
||||
|
||||
MyConfig { base_config }
|
||||
}
|
||||
|
||||
@@ -2301,11 +2330,14 @@ mod matmul_relu {
|
||||
mut config: Self::Config,
|
||||
mut layouter: impl Layouter<F>,
|
||||
) -> Result<(), Error> {
|
||||
config.base_config.layout_tables(&mut layouter).unwrap();
|
||||
config
|
||||
.base_config
|
||||
.layout_range_checks(&mut layouter)
|
||||
.unwrap();
|
||||
layouter.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, 1);
|
||||
let mut region = RegionCtx::new(region, 0, 1, 1024, 2);
|
||||
let op = PolyOp::Einsum {
|
||||
equation: "ij,jk->ik".to_string(),
|
||||
};
|
||||
@@ -2315,7 +2347,7 @@ mod matmul_relu {
|
||||
.unwrap();
|
||||
let _output = config
|
||||
.base_config
|
||||
.layout(&mut region, &[output.unwrap()], Box::new(LookupOp::ReLU))
|
||||
.layout(&mut region, &[output.unwrap()], Box::new(PolyOp::ReLU))
|
||||
.unwrap();
|
||||
Ok(())
|
||||
},
|
||||
@@ -2354,6 +2386,8 @@ mod relu {
|
||||
plonk::{Circuit, ConstraintSystem, Error},
|
||||
};
|
||||
|
||||
const K: u32 = 8;
|
||||
|
||||
#[derive(Clone)]
|
||||
struct ReLUCircuit<F: PrimeField + TensorType + PartialOrd> {
|
||||
pub input: ValTensor<F>,
|
||||
@@ -2370,16 +2404,26 @@ mod relu {
|
||||
|
||||
fn configure(cs: &mut ConstraintSystem<F>) -> Self::Config {
|
||||
let advices = (0..3)
|
||||
.map(|_| VarTensor::new_advice(cs, 4, 1, 3))
|
||||
.map(|_| VarTensor::new_advice(cs, 8, 1, 3))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let nl = LookupOp::ReLU;
|
||||
|
||||
let mut config = BaseConfig::default();
|
||||
let mut config = BaseConfig::configure(
|
||||
cs,
|
||||
&[advices[0].clone(), advices[1].clone()],
|
||||
&advices[2],
|
||||
CheckMode::SAFE,
|
||||
);
|
||||
|
||||
config
|
||||
.configure_lookup(cs, &advices[0], &advices[1], &advices[2], (-6, 6), 4, &nl)
|
||||
.configure_range_check(cs, &advices[0], &advices[1], (-1, 1), K as usize)
|
||||
.unwrap();
|
||||
|
||||
config
|
||||
.configure_range_check(cs, &advices[0], &advices[1], (0, 1), K as usize)
|
||||
.unwrap();
|
||||
|
||||
let _constant = VarTensor::constant_cols(cs, K as usize, 8, false);
|
||||
|
||||
config
|
||||
}
|
||||
|
||||
@@ -2388,15 +2432,15 @@ mod relu {
|
||||
mut config: Self::Config,
|
||||
mut layouter: impl Layouter<F>, // layouter is our 'write buffer' for the circuit
|
||||
) -> Result<(), Error> {
|
||||
config.layout_tables(&mut layouter).unwrap();
|
||||
config.layout_range_checks(&mut layouter).unwrap();
|
||||
layouter
|
||||
.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, 1);
|
||||
config
|
||||
.layout(&mut region, &[self.input.clone()], Box::new(LookupOp::ReLU))
|
||||
.map_err(|_| Error::Synthesis)
|
||||
let mut region = RegionCtx::new(region, 0, 1, 2, 2);
|
||||
Ok(config
|
||||
.layout(&mut region, &[self.input.clone()], Box::new(PolyOp::ReLU))
|
||||
.unwrap())
|
||||
},
|
||||
)
|
||||
.unwrap();
|
||||
@@ -2414,13 +2458,16 @@ mod relu {
|
||||
input: ValTensor::from(input),
|
||||
};
|
||||
|
||||
let prover = MockProver::run(4_u32, &circuit, vec![]).unwrap();
|
||||
let prover = MockProver::run(K, &circuit, vec![]).unwrap();
|
||||
prover.assert_satisfied();
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
#[cfg(not(all(target_arch = "wasm32", target_os = "unknown")))]
|
||||
#[cfg(all(
|
||||
feature = "ezkl",
|
||||
not(all(target_arch = "wasm32", target_os = "unknown"))
|
||||
))]
|
||||
mod lookup_ultra_overflow {
|
||||
use super::*;
|
||||
use halo2_proofs::{
|
||||
@@ -2453,7 +2500,7 @@ mod lookup_ultra_overflow {
|
||||
.map(|_| VarTensor::new_advice(cs, 4, 1, 3))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let nl = LookupOp::ReLU;
|
||||
let nl = LookupOp::LeakyReLU { slope: 0.0.into() };
|
||||
|
||||
let mut config = BaseConfig::default();
|
||||
|
||||
@@ -2481,9 +2528,13 @@ mod lookup_ultra_overflow {
|
||||
.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let mut region = RegionCtx::new(region, 0, 1);
|
||||
let mut region = RegionCtx::new(region, 0, 1, 128, 2);
|
||||
config
|
||||
.layout(&mut region, &[self.input.clone()], Box::new(LookupOp::ReLU))
|
||||
.layout(
|
||||
&mut region,
|
||||
&[self.input.clone()],
|
||||
Box::new(LookupOp::LeakyReLU { slope: 0.0.into() }),
|
||||
)
|
||||
.map_err(|_| Error::Synthesis)
|
||||
},
|
||||
)
|
||||
|
||||
@@ -141,23 +141,23 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn f32_eq() {
|
||||
assert!(F32(std::f32::NAN) == F32(std::f32::NAN));
|
||||
assert!(F32(std::f32::NAN) != F32(5.0));
|
||||
assert!(F32(5.0) != F32(std::f32::NAN));
|
||||
assert!(F32(f32::NAN) == F32(f32::NAN));
|
||||
assert!(F32(f32::NAN) != F32(5.0));
|
||||
assert!(F32(5.0) != F32(f32::NAN));
|
||||
assert!(F32(0.0) == F32(-0.0));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn f32_cmp() {
|
||||
assert!(F32(std::f32::NAN) == F32(std::f32::NAN));
|
||||
assert!(F32(std::f32::NAN) < F32(5.0));
|
||||
assert!(F32(5.0) > F32(std::f32::NAN));
|
||||
assert!(F32(f32::NAN) == F32(f32::NAN));
|
||||
assert!(F32(f32::NAN) < F32(5.0));
|
||||
assert!(F32(5.0) > F32(f32::NAN));
|
||||
assert!(F32(0.0) == F32(-0.0));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn f32_hash() {
|
||||
assert!(calculate_hash(&F32(0.0)) == calculate_hash(&F32(-0.0)));
|
||||
assert!(calculate_hash(&F32(std::f32::NAN)) == calculate_hash(&F32(-std::f32::NAN)));
|
||||
assert!(calculate_hash(&F32(f32::NAN)) == calculate_hash(&F32(-f32::NAN)));
|
||||
}
|
||||
}
|
||||
|
||||
206
src/commands.rs
206
src/commands.rs
@@ -1,4 +1,3 @@
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use alloy::primitives::Address as H160;
|
||||
use clap::{Command, Parser, Subcommand};
|
||||
use clap_complete::{generate, Generator, Shell};
|
||||
@@ -17,7 +16,6 @@ use tosubcommand::{ToFlags, ToSubcommand};
|
||||
use crate::{pfsys::ProofType, Commitments, RunArgs};
|
||||
|
||||
use crate::circuit::CheckMode;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use crate::graph::TestDataSource;
|
||||
use crate::pfsys::TranscriptType;
|
||||
|
||||
@@ -81,8 +79,10 @@ pub const DEFAULT_CALIBRATION_FILE: &str = "calibration.json";
|
||||
pub const DEFAULT_LOOKUP_SAFETY_MARGIN: &str = "2";
|
||||
/// Default Compress selectors
|
||||
pub const DEFAULT_DISABLE_SELECTOR_COMPRESSION: &str = "false";
|
||||
/// Default render vk separately
|
||||
pub const DEFAULT_RENDER_VK_SEPERATELY: &str = "false";
|
||||
/// Default render reusable verifier
|
||||
pub const DEFAULT_RENDER_REUSABLE: &str = "false";
|
||||
/// Default contract deployment type
|
||||
pub const DEFAULT_CONTRACT_DEPLOYMENT_TYPE: &str = "verifier";
|
||||
/// Default VK sol path
|
||||
pub const DEFAULT_VK_SOL: &str = "vk.sol";
|
||||
/// Default VK abi path
|
||||
@@ -95,6 +95,9 @@ pub const DEFAULT_USE_REDUCED_SRS_FOR_VERIFICATION: &str = "false";
|
||||
pub const DEFAULT_ONLY_RANGE_CHECK_REBASE: &str = "false";
|
||||
/// Default commitment
|
||||
pub const DEFAULT_COMMITMENT: &str = "kzg";
|
||||
// TODO: In prod this will be the same across all chains we deploy to using the EZKL multisig create2 deployment.
|
||||
/// Default address of the verifier manager.
|
||||
pub const DEFAULT_VERIFIER_MANAGER_ADDRESS: &str = "0xdc64a140aa3e981100a9beca4e685f962f0cf6c9";
|
||||
|
||||
#[cfg(feature = "python-bindings")]
|
||||
/// Converts TranscriptType into a PyObject (Required for TranscriptType to be compatible with Python)
|
||||
@@ -181,28 +184,88 @@ impl From<&str> for CalibrationTarget {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[derive(Debug, Copy, Clone, Serialize, Deserialize, PartialEq, PartialOrd)]
|
||||
/// Determines what type of contract (verifier, verifier/reusable, vka) should be deployed
|
||||
pub enum ContractType {
|
||||
/// Deploys a verifier contrat tailored to the circuit and not reusable
|
||||
Verifier {
|
||||
/// Whether to deploy a reusable verifier. This can reduce state bloat on-chain since you need only deploy a verifying key artifact (vka) for a given circuit which is significantly smaller than the verifier contract (up to 4 times smaller for large circuits)
|
||||
/// Can also be used as an alternative to aggregation for verifiers that are otherwise too large to fit on-chain.
|
||||
reusable: bool,
|
||||
},
|
||||
/// Deploys a verifying key artifact that the reusable verifier loads into memory during runtime. Encodes the circuit specific data that was otherwise hardcoded onto the stack.
|
||||
VerifyingKeyArtifact,
|
||||
/// Manages the deployments of all reusable verifier and verifying artifact keys. Routes all the verification tx to the correct artifacts.
|
||||
VerifierManager
|
||||
}
|
||||
|
||||
impl Default for ContractType {
|
||||
fn default() -> Self {
|
||||
ContractType::Verifier {
|
||||
reusable: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for ContractType {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
match self {
|
||||
ContractType::Verifier { reusable: true } => {
|
||||
"verifier/reusable".to_string()
|
||||
},
|
||||
ContractType::Verifier {
|
||||
reusable: false,
|
||||
} => "verifier".to_string(),
|
||||
ContractType::VerifyingKeyArtifact => "vka".to_string(),
|
||||
ContractType::VerifierManager => "manager".to_string()
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl ToFlags for ContractType {
|
||||
fn to_flags(&self) -> Vec<String> {
|
||||
vec![format!("{}", self)]
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&str> for ContractType {
|
||||
fn from(s: &str) -> Self {
|
||||
match s {
|
||||
"verifier" => ContractType::Verifier { reusable: false },
|
||||
"verifier/reusable" => ContractType::Verifier { reusable: true },
|
||||
"vka" => ContractType::VerifyingKeyArtifact,
|
||||
"manager" => ContractType::VerifierManager,
|
||||
_ => {
|
||||
log::error!("Invalid value for ContractType");
|
||||
log::warn!("Defaulting to verifier");
|
||||
ContractType::default()
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, Serialize, Deserialize, PartialEq, PartialOrd)]
|
||||
/// wrapper for H160 to make it easy to parse into flag vals
|
||||
pub struct H160Flag {
|
||||
inner: H160,
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
impl From<H160Flag> for H160 {
|
||||
fn from(val: H160Flag) -> H160 {
|
||||
val.inner
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
impl ToFlags for H160Flag {
|
||||
fn to_flags(&self) -> Vec<String> {
|
||||
vec![format!("{:#x}", self.inner)]
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
impl From<&str> for H160Flag {
|
||||
fn from(s: &str) -> Self {
|
||||
Self {
|
||||
@@ -243,6 +306,39 @@ impl<'source> FromPyObject<'source> for CalibrationTarget {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "python-bindings")]
|
||||
/// Converts ContractType into a PyObject (Required for ContractType to be compatible with Python)
|
||||
impl IntoPy<PyObject> for ContractType {
|
||||
fn into_py(self, py: Python) -> PyObject {
|
||||
match self {
|
||||
ContractType::Verifier { reusable: true } => {
|
||||
"verifier/reusable".to_object(py)
|
||||
}
|
||||
ContractType::Verifier {
|
||||
reusable: false,
|
||||
} => "verifier".to_object(py),
|
||||
ContractType::VerifyingKeyArtifact => "vka".to_object(py),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "python-bindings")]
|
||||
/// Obtains ContractType from PyObject (Required for ContractType to be compatible with Python)
|
||||
impl<'source> FromPyObject<'source> for ContractType {
|
||||
fn extract(ob: &'source PyAny) -> PyResult<Self> {
|
||||
let trystr = <PyString as PyTryFrom>::try_from(ob)?;
|
||||
let strval = trystr.to_string();
|
||||
match strval.to_lowercase().as_str() {
|
||||
"verifier" => Ok(ContractType::Verifier {
|
||||
reusable: false,
|
||||
}),
|
||||
"verifier/reusable" => Ok(ContractType::Verifier { reusable: true }),
|
||||
"vka" => Ok(ContractType::VerifyingKeyArtifact),
|
||||
_ => Err(PyValueError::new_err("Invalid value for ContractType")),
|
||||
}
|
||||
}
|
||||
}
|
||||
// not wasm
|
||||
use lazy_static::lazy_static;
|
||||
|
||||
@@ -365,8 +461,7 @@ pub enum Commands {
|
||||
},
|
||||
|
||||
/// Calibrates the proving scale, lookup bits and logrows from a circuit settings file.
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
CalibrateSettings {
|
||||
CalibrateSettings {
|
||||
/// The path to the .json calibration data file.
|
||||
#[arg(short = 'D', long, default_value = DEFAULT_CALIBRATION_FILE, value_hint = clap::ValueHint::FilePath)]
|
||||
data: Option<PathBuf>,
|
||||
@@ -416,8 +511,7 @@ pub enum Commands {
|
||||
commitment: Option<Commitments>,
|
||||
},
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
/// Gets an SRS from a circuit settings file.
|
||||
/// Gets an SRS from a circuit settings file.
|
||||
#[command(name = "get-srs")]
|
||||
GetSrs {
|
||||
/// The path to output the desired srs file, if set to None will save to $EZKL_REPO_PATH/srs
|
||||
@@ -552,8 +646,7 @@ pub enum Commands {
|
||||
#[arg(long, default_value = DEFAULT_DISABLE_SELECTOR_COMPRESSION, action = clap::ArgAction::SetTrue)]
|
||||
disable_selector_compression: Option<bool>,
|
||||
},
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
/// Deploys a test contact that the data attester reads from and creates a data attestation formatted input.json file that contains call data information
|
||||
/// Deploys a test contact that the data attester reads from and creates a data attestation formatted input.json file that contains call data information
|
||||
#[command(arg_required_else_help = true)]
|
||||
SetupTestEvmData {
|
||||
/// The path to the .json data file, which should include both the network input (possibly private) and the network output (public input to the proof)
|
||||
@@ -577,8 +670,7 @@ pub enum Commands {
|
||||
#[arg(long, default_value = "on-chain", value_hint = clap::ValueHint::Other)]
|
||||
output_source: TestDataSource,
|
||||
},
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
/// The Data Attestation Verifier contract stores the account calls to fetch data to feed into ezkl. This call data can be updated by an admin account. This tests that admin account is able to update this call data.
|
||||
/// The Data Attestation Verifier contract stores the account calls to fetch data to feed into ezkl. This call data can be updated by an admin account. This tests that admin account is able to update this call data.
|
||||
#[command(arg_required_else_help = true)]
|
||||
TestUpdateAccountCalls {
|
||||
/// The path to the verifier contract's address
|
||||
@@ -591,8 +683,7 @@ pub enum Commands {
|
||||
#[arg(short = 'U', long, value_hint = clap::ValueHint::Url)]
|
||||
rpc_url: Option<String>,
|
||||
},
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
/// Swaps the positions in the transcript that correspond to commitments
|
||||
/// Swaps the positions in the transcript that correspond to commitments
|
||||
SwapProofCommitments {
|
||||
/// The path to the proof file
|
||||
#[arg(short = 'P', long, default_value = DEFAULT_PROOF, value_hint = clap::ValueHint::FilePath)]
|
||||
@@ -602,8 +693,7 @@ pub enum Commands {
|
||||
witness_path: Option<PathBuf>,
|
||||
},
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
/// Loads model, data, and creates proof
|
||||
/// Loads model, data, and creates proof
|
||||
Prove {
|
||||
/// The path to the .json witness file (generated using the gen-witness command)
|
||||
#[arg(short = 'W', long, default_value = DEFAULT_WITNESS, value_hint = clap::ValueHint::FilePath)]
|
||||
@@ -633,8 +723,7 @@ pub enum Commands {
|
||||
#[arg(long, default_value = DEFAULT_CHECKMODE, value_hint = clap::ValueHint::Other)]
|
||||
check_mode: Option<CheckMode>,
|
||||
},
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
/// Encodes a proof into evm calldata
|
||||
/// Encodes a proof into evm calldata
|
||||
#[command(name = "encode-evm-calldata")]
|
||||
EncodeEvmCalldata {
|
||||
/// The path to the proof file (generated using the prove command)
|
||||
@@ -647,8 +736,7 @@ pub enum Commands {
|
||||
#[arg(long, value_hint = clap::ValueHint::Other)]
|
||||
addr_vk: Option<H160Flag>,
|
||||
},
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
/// Creates an Evm verifier for a single proof
|
||||
/// Creates an Evm verifier for a single proof
|
||||
#[command(name = "create-evm-verifier")]
|
||||
CreateEvmVerifier {
|
||||
/// The path to SRS, if None will use $EZKL_REPO_PATH/srs/kzg{logrows}.srs
|
||||
@@ -666,16 +754,13 @@ pub enum Commands {
|
||||
/// The path to output the Solidity verifier ABI
|
||||
#[arg(long, default_value = DEFAULT_VERIFIER_ABI, value_hint = clap::ValueHint::FilePath)]
|
||||
abi_path: Option<PathBuf>,
|
||||
/// Whether the verifier key should be rendered as a separate contract.
|
||||
/// We recommend disabling selector compression if this is enabled.
|
||||
/// To save the verifier key as a separate contract, set this to true and then call the create-evm-vk command.
|
||||
#[arg(long, default_value = DEFAULT_RENDER_VK_SEPERATELY, action = clap::ArgAction::SetTrue)]
|
||||
render_vk_seperately: Option<bool>,
|
||||
/// Whether the to render the verifier as reusable or not. If true, you will need to deploy a VK artifact, passing it as part of the calldata to the verifier.
|
||||
#[arg(long, default_value = DEFAULT_RENDER_REUSABLE, action = clap::ArgAction::SetTrue)]
|
||||
reusable: Option<bool>,
|
||||
},
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
/// Creates an Evm verifier for a single proof
|
||||
#[command(name = "create-evm-vk")]
|
||||
CreateEvmVK {
|
||||
/// Creates an Evm verifier artifact for a single proof to be used by the reusable verifier
|
||||
#[command(name = "create-evm-vka")]
|
||||
CreateEvmVKArtifact {
|
||||
/// The path to SRS, if None will use $EZKL_REPO_PATH/srs/kzg{logrows}.srs
|
||||
#[arg(long, value_hint = clap::ValueHint::FilePath)]
|
||||
srs_path: Option<PathBuf>,
|
||||
@@ -692,8 +777,7 @@ pub enum Commands {
|
||||
#[arg(long, default_value = DEFAULT_VK_ABI, value_hint = clap::ValueHint::FilePath)]
|
||||
abi_path: Option<PathBuf>,
|
||||
},
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
/// Creates an Evm verifier that attests to on-chain inputs for a single proof
|
||||
/// Creates an Evm verifier that attests to on-chain inputs for a single proof
|
||||
#[command(name = "create-evm-da")]
|
||||
CreateEvmDataAttestation {
|
||||
/// The path to load circuit settings .json file from (generated using the gen-settings command)
|
||||
@@ -717,8 +801,7 @@ pub enum Commands {
|
||||
witness: Option<PathBuf>,
|
||||
},
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
/// Creates an Evm verifier for an aggregate proof
|
||||
/// Creates an Evm verifier for an aggregate proof
|
||||
#[command(name = "create-evm-verifier-aggr")]
|
||||
CreateEvmVerifierAggr {
|
||||
/// The path to SRS, if None will use $EZKL_REPO_PATH/srs/kzg{logrows}.srs
|
||||
@@ -739,11 +822,9 @@ pub enum Commands {
|
||||
// logrows used for aggregation circuit
|
||||
#[arg(long, default_value = DEFAULT_AGGREGATED_LOGROWS, value_hint = clap::ValueHint::Other)]
|
||||
logrows: Option<u32>,
|
||||
/// Whether the verifier key should be rendered as a separate contract.
|
||||
/// We recommend disabling selector compression if this is enabled.
|
||||
/// To save the verifier key as a separate contract, set this to true and then call the create-evm-vk command.
|
||||
#[arg(long, default_value = DEFAULT_RENDER_VK_SEPERATELY, action = clap::ArgAction::SetTrue)]
|
||||
render_vk_seperately: Option<bool>,
|
||||
/// Whether the to render the verifier as reusable or not. If true, you will need to deploy a VK artifact, passing it as part of the calldata to the verifier.
|
||||
#[arg(long, default_value = DEFAULT_RENDER_REUSABLE, action = clap::ArgAction::SetTrue)]
|
||||
reusable: Option<bool>,
|
||||
},
|
||||
/// Verifies a proof, returning accept or reject
|
||||
Verify {
|
||||
@@ -784,9 +865,8 @@ pub enum Commands {
|
||||
#[arg(long, default_value = DEFAULT_COMMITMENT, value_hint = clap::ValueHint::Other)]
|
||||
commitment: Option<Commitments>,
|
||||
},
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
/// Deploys an evm verifier that is generated by ezkl
|
||||
DeployEvmVerifier {
|
||||
/// Deploys an evm contract (verifier, reusable verifier, or vk artifact) that is generated by ezkl
|
||||
DeployEvm {
|
||||
/// The path to the Solidity code (generated using the create-evm-verifier command)
|
||||
#[arg(long, default_value = DEFAULT_SOL_CODE, value_hint = clap::ValueHint::FilePath)]
|
||||
sol_code_path: Option<PathBuf>,
|
||||
@@ -802,28 +882,19 @@ pub enum Commands {
|
||||
/// Private secp256K1 key in hex format, 64 chars, no 0x prefix, of the account signing transactions. If None the private key will be generated by Anvil
|
||||
#[arg(short = 'P', long, value_hint = clap::ValueHint::Other)]
|
||||
private_key: Option<String>,
|
||||
/// Deployed verifier manager contract's address
|
||||
/// Used to facilitate reusable verifier and vk artifact deployment
|
||||
#[arg(long, value_hint = clap::ValueHint::Other)]
|
||||
addr_verifier_manager: Option<H160Flag>,
|
||||
/// Deployed reusable verifier contract's address
|
||||
/// Use to facilitate reusable verifier and vk artifact deployment
|
||||
#[arg(long, value_hint = clap::ValueHint::Other)]
|
||||
addr_reusable_verifier: Option<H160Flag>,
|
||||
/// Contract type to be deployed
|
||||
#[arg(long = "contract-type", short = 'C', default_value = DEFAULT_CONTRACT_DEPLOYMENT_TYPE, value_hint = clap::ValueHint::Other)]
|
||||
contract: ContractType,
|
||||
},
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
/// Deploys an evm verifier that is generated by ezkl
|
||||
DeployEvmVK {
|
||||
/// The path to the Solidity code (generated using the create-evm-verifier command)
|
||||
#[arg(long, default_value = DEFAULT_VK_SOL, value_hint = clap::ValueHint::FilePath)]
|
||||
sol_code_path: Option<PathBuf>,
|
||||
/// RPC URL for an Ethereum node, if None will use Anvil but WON'T persist state
|
||||
#[arg(short = 'U', long, value_hint = clap::ValueHint::Url)]
|
||||
rpc_url: Option<String>,
|
||||
#[arg(long, default_value = DEFAULT_CONTRACT_ADDRESS_VK, value_hint = clap::ValueHint::Other)]
|
||||
/// The path to output the contract address
|
||||
addr_path: Option<PathBuf>,
|
||||
/// The optimizer runs to set on the verifier. Lower values optimize for deployment cost, while higher values optimize for gas cost.
|
||||
#[arg(long, default_value = DEFAULT_OPTIMIZER_RUNS, value_hint = clap::ValueHint::Other)]
|
||||
optimizer_runs: usize,
|
||||
/// Private secp256K1 key in hex format, 64 chars, no 0x prefix, of the account signing transactions. If None the private key will be generated by Anvil
|
||||
#[arg(short = 'P', long, value_hint = clap::ValueHint::Other)]
|
||||
private_key: Option<String>,
|
||||
},
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
/// Deploys an evm verifier that allows for data attestation
|
||||
/// Deploys an evm verifier that allows for data attestation
|
||||
#[command(name = "deploy-evm-da")]
|
||||
DeployEvmDataAttestation {
|
||||
/// The path to the .json data file, which should include both the network input (possibly private) and the network output (public input to the proof)
|
||||
@@ -848,8 +919,7 @@ pub enum Commands {
|
||||
#[arg(short = 'P', long, value_hint = clap::ValueHint::Other)]
|
||||
private_key: Option<String>,
|
||||
},
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
/// Verifies a proof using a local Evm executor, returning accept or reject
|
||||
/// Verifies a proof using a local Evm executor, returning accept or reject
|
||||
#[command(name = "verify-evm")]
|
||||
VerifyEvm {
|
||||
/// The path to the proof file (generated using the prove command)
|
||||
|
||||
116
src/eth.rs
116
src/eth.rs
@@ -1,7 +1,6 @@
|
||||
use crate::graph::input::{CallsToAccount, FileSourceInner, GraphData};
|
||||
use crate::graph::modules::POSEIDON_INSTANCES;
|
||||
use crate::graph::DataSource;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use crate::graph::GraphSettings;
|
||||
use crate::pfsys::evm::EvmVerificationError;
|
||||
use crate::pfsys::Snark;
|
||||
@@ -11,8 +10,6 @@ use alloy::core::primitives::Bytes;
|
||||
use alloy::core::primitives::U256;
|
||||
use alloy::dyn_abi::abi::token::{DynSeqToken, PackedSeqToken, WordToken};
|
||||
use alloy::dyn_abi::abi::TokenSeq;
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
use alloy::prelude::Wallet;
|
||||
// use alloy::providers::Middleware;
|
||||
use alloy::json_abi::JsonAbi;
|
||||
use alloy::node_bindings::Anvil;
|
||||
@@ -34,7 +31,7 @@ use alloy::transports::{RpcError, TransportErrorKind};
|
||||
use foundry_compilers::artifacts::Settings as SolcSettings;
|
||||
use foundry_compilers::error::{SolcError, SolcIoError};
|
||||
use foundry_compilers::Solc;
|
||||
use halo2_solidity_verifier::encode_calldata;
|
||||
use halo2_solidity_verifier::{encode_calldata, encode_deploy};
|
||||
use halo2curves::bn256::{Fr, G1Affine};
|
||||
use halo2curves::group::ff::PrimeField;
|
||||
use itertools::Itertools;
|
||||
@@ -216,6 +213,16 @@ abigen!(
|
||||
}
|
||||
);
|
||||
|
||||
// The bytecode here was generated from running solc compiler version 0.8.20 with optimization enabled and runs param set to 1.
|
||||
abigen!(
|
||||
#[allow(missing_docs)]
|
||||
#[sol(
|
||||
rpc,
|
||||
bytecode = "60806040525f80546001600160a01b03191673f39fd6e51aad88f6f4ce6ab8827279cfffb92266179055348015610034575f80fd5b5061003e33610043565b610092565b5f80546001600160a01b038381166001600160a01b0319831681178455604051919092169283917f8be0079c531659141344cd1fd0a4f28419497f9722a3daafe3b4186f6b6457e09190a35050565b6103dc8061009f5f395ff3fe608060405234801561000f575f80fd5b5060043610610060575f3560e01c80635717ecef146100645780635d34fd561461009b578063715018a6146100bb5780637a33ac87146100c55780638da5cb5b14610125578063f2fde38b1461012d575b5f80fd5b6100866100723660046102a7565b60016020525f908152604090205460ff1681565b60405190151581526020015b60405180910390f35b6100ae6100a93660046102e8565b610140565b6040516100929190610392565b6100c36101bf565b005b6100ae6100d33660046102e8565b8051602091820120604080516001600160f81b0319818501523060601b6001600160601b03191660218201525f6035820152605580820193909352815180820390930183526075019052805191012090565b6100ae6101d2565b6100c361013b3660046102a7565b6101e0565b5f610149610226565b5f8251602084015ff59050803b61015e575f80fd5b6001600160a01b0381165f90815260016020819052604091829020805460ff19169091179055517f27bf8213352a1c07513a54703c920b9e437940154edead05874c43279acf166c906101b2908390610392565b60405180910390a1919050565b6101c7610226565b6101d05f610258565b565b5f546001600160a01b031690565b6101e8610226565b6001600160a01b03811661021a575f604051631e4fbdf760e01b81526004016102119190610392565b60405180910390fd5b61022381610258565b50565b3361022f6101d2565b6001600160a01b0316146101d0573360405163118cdaa760e01b81526004016102119190610392565b5f80546001600160a01b038381166001600160a01b0319831681178455604051919092169283917f8be0079c531659141344cd1fd0a4f28419497f9722a3daafe3b4186f6b6457e09190a35050565b5f602082840312156102b7575f80fd5b81356001600160a01b03811681146102cd575f80fd5b9392505050565b634e487b7160e01b5f52604160045260245ffd5b5f602082840312156102f8575f80fd5b81356001600160401b038082111561030e575f80fd5b818401915084601f830112610321575f80fd5b813581811115610333576103336102d4565b604051601f8201601f19908116603f
0116810190838211818310171561035b5761035b6102d4565b81604052828152876020848701011115610373575f80fd5b826020860160208301375f928101602001929092525095945050505050565b6001600160a01b039190911681526020019056fea26469706673582212201d85104628b308554b775f612650220008f8e318f66dc4ace466d82d70bae4e264736f6c63430008140033"
|
||||
)]
|
||||
EZKLVerifierManager,
|
||||
"./abis/EZKLVerifierManager.json"
|
||||
);
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
pub enum EthError {
|
||||
#[error("a transport error occurred: {0}")]
|
||||
@@ -285,7 +292,6 @@ pub type EthersClient = Arc<
|
||||
pub type ContractFactory<M> = CallBuilder<Http<Client>, Arc<M>, ()>;
|
||||
|
||||
/// Return an instance of Anvil and a client for the given RPC URL. If none is provided, a local client is used.
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub async fn setup_eth_backend(
|
||||
rpc_url: Option<&str>,
|
||||
private_key: Option<&str>,
|
||||
@@ -356,6 +362,99 @@ pub async fn deploy_contract_via_solidity(
|
||||
Ok(contract)
|
||||
}
|
||||
|
||||
pub async fn deploy_vka(
|
||||
sol_code_path: PathBuf,
|
||||
rpc_url: Option<&str>,
|
||||
runs: usize,
|
||||
private_key: Option<&str>,
|
||||
contract_name: &str,
|
||||
verifier_manager: H160,
|
||||
reusable_verifier: H160,
|
||||
) -> Result<H160, EthError> {
|
||||
let (client, _) = setup_eth_backend(rpc_url, private_key).await?;
|
||||
|
||||
// Create an instance of the EZKLVerifierManager contract
|
||||
let verifier_manager_contract = EZKLVerifierManager::new(verifier_manager, client.clone());
|
||||
|
||||
// Get the bytecode of the contract to be deployed
|
||||
let (_, bytecode, _run_time_bytecode) =
|
||||
get_contract_artifacts(sol_code_path.clone(), contract_name, runs).await?;
|
||||
|
||||
// Check if the reusable verifier is already deployed
|
||||
let deployed_verifier: bool = verifier_manager_contract
|
||||
.verifierAddresses(reusable_verifier)
|
||||
.call()
|
||||
.await?
|
||||
._0;
|
||||
|
||||
if deployed_verifier == false {
|
||||
panic!("The reusable verifier for this VKA has not been deployed yet.");
|
||||
}
|
||||
|
||||
let encoded = encode_deploy(&bytecode);
|
||||
|
||||
debug!("encoded: {:#?}", hex::encode(&encoded));
|
||||
|
||||
let input: TransactionInput = encoded.into();
|
||||
|
||||
let tx = TransactionRequest::default()
|
||||
.to(reusable_verifier)
|
||||
.input(input);
|
||||
debug!("transaction {:#?}", tx);
|
||||
|
||||
let result = client.call(&tx).await;
|
||||
|
||||
if let Err(e) = result {
|
||||
return Err(EvmVerificationError::SolidityExecution(e.to_string()).into());
|
||||
}
|
||||
|
||||
// Now send the tx
|
||||
let _ = client.send_transaction(tx).await?;
|
||||
|
||||
let result = result?;
|
||||
debug!("result: {:#?}", result.to_vec());
|
||||
|
||||
let contract = H160::from_slice(&result.to_vec()[12..32]);
|
||||
return Ok(contract);
|
||||
}
|
||||
|
||||
pub async fn deploy_reusable_verifier(
|
||||
sol_code_path: PathBuf,
|
||||
rpc_url: Option<&str>,
|
||||
runs: usize,
|
||||
private_key: Option<&str>,
|
||||
contract_name: &str,
|
||||
verifier_manager: H160,
|
||||
) -> Result<H160, EthError> {
|
||||
let (client, _) = setup_eth_backend(rpc_url, private_key).await?;
|
||||
|
||||
// Create an instance of the EZKLVerifierManager contract
|
||||
let verifier_manager_contract = EZKLVerifierManager::new(verifier_manager, client.clone());
|
||||
|
||||
// Get the bytecode of the contract to be deployed
|
||||
let (_, bytecode, _run_time_bytecode) =
|
||||
get_contract_artifacts(sol_code_path.clone(), contract_name, runs).await?;
|
||||
|
||||
// Deploy the contract using the EZKLVerifierManager
|
||||
let output = verifier_manager_contract
|
||||
.deployVerifier(bytecode.clone().into())
|
||||
.call()
|
||||
.await?;
|
||||
let out = verifier_manager_contract
|
||||
.precomputeAddress(bytecode.clone().into())
|
||||
.call()
|
||||
.await?;
|
||||
// assert that out == output
|
||||
assert_eq!(out._0, output.addr);
|
||||
// Get the deployed contract address from the receipt
|
||||
let contract = output.addr;
|
||||
let _ = verifier_manager_contract
|
||||
.deployVerifier(bytecode.into())
|
||||
.send()
|
||||
.await?;
|
||||
return Ok(contract);
|
||||
}
|
||||
|
||||
///
|
||||
pub async fn deploy_da_verifier_via_solidity(
|
||||
settings_path: PathBuf,
|
||||
@@ -614,7 +713,6 @@ pub async fn update_account_calls(
|
||||
}
|
||||
|
||||
/// Verify a proof using a Solidity verifier contract
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub async fn verify_proof_via_solidity(
|
||||
proof: Snark<Fr, G1Affine>,
|
||||
addr: H160,
|
||||
@@ -716,7 +814,6 @@ pub async fn setup_test_contract<M: 'static + Provider<Http<Client>, Ethereum>>(
|
||||
|
||||
/// Verify a proof using a Solidity DataAttestation contract.
|
||||
/// Used for testing purposes.
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub async fn verify_proof_with_data_attestation(
|
||||
proof: Snark<Fr, G1Affine>,
|
||||
addr_verifier: H160,
|
||||
@@ -731,7 +828,7 @@ pub async fn verify_proof_with_data_attestation(
|
||||
|
||||
for val in flattened_instances.clone() {
|
||||
let bytes = val.to_repr();
|
||||
let u = U256::from_le_slice(bytes.as_slice());
|
||||
let u = U256::from_le_slice(bytes.inner().as_slice());
|
||||
public_inputs.push(u);
|
||||
}
|
||||
|
||||
@@ -829,7 +926,6 @@ pub async fn test_on_chain_data<M: 'static + Provider<Http<Client>, Ethereum>>(
|
||||
}
|
||||
|
||||
/// Reads on-chain inputs, returning the raw encoded data returned from making all the calls in on_chain_input_data
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub async fn read_on_chain_inputs<M: 'static + Provider<Http<Client>, Ethereum>>(
|
||||
client: Arc<M>,
|
||||
address: H160,
|
||||
@@ -863,7 +959,6 @@ pub async fn read_on_chain_inputs<M: 'static + Provider<Http<Client>, Ethereum>>
|
||||
}
|
||||
|
||||
///
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub async fn evm_quantize<M: 'static + Provider<Http<Client>, Ethereum>>(
|
||||
client: Arc<M>,
|
||||
scales: Vec<crate::Scale>,
|
||||
@@ -964,7 +1059,6 @@ fn get_sol_contract_factory<'a, M: 'static + Provider<Http<Client>, Ethereum>, T
|
||||
}
|
||||
|
||||
/// Compiles a solidity verifier contract and returns the abi, bytecode, and runtime bytecode
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub async fn get_contract_artifacts(
|
||||
sol_code_path: PathBuf,
|
||||
contract_name: &str,
|
||||
|
||||
218
src/execute.rs
218
src/execute.rs
@@ -1,18 +1,13 @@
|
||||
use crate::circuit::region::RegionSettings;
|
||||
use crate::circuit::CheckMode;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use crate::commands::CalibrationTarget;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use crate::eth::{deploy_contract_via_solidity, deploy_da_verifier_via_solidity};
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[allow(unused_imports)]
|
||||
use crate::eth::{fix_da_sol, get_contract_artifacts, verify_proof_via_solidity};
|
||||
use crate::graph::input::GraphData;
|
||||
use crate::graph::{GraphCircuit, GraphSettings, GraphWitness, Model};
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use crate::graph::{TestDataSource, TestSources};
|
||||
use crate::pfsys::evm::aggregation_kzg::{AggregationCircuit, PoseidonTranscript};
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use crate::pfsys::{
|
||||
create_keys, load_pk, load_vk, save_params, save_pk, Snark, StrategyType, TranscriptType,
|
||||
};
|
||||
@@ -21,11 +16,9 @@ use crate::pfsys::{
|
||||
};
|
||||
use crate::pfsys::{save_vk, srs::*};
|
||||
use crate::tensor::TensorError;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use crate::EZKL_BUF_CAPACITY;
|
||||
use crate::{commands::*, EZKLError};
|
||||
use crate::{Commitments, RunArgs};
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use colored::Colorize;
|
||||
#[cfg(unix)]
|
||||
use gag::Gag;
|
||||
@@ -45,17 +38,13 @@ use halo2_proofs::poly::kzg::{
|
||||
};
|
||||
use halo2_proofs::poly::VerificationStrategy;
|
||||
use halo2_proofs::transcript::{EncodedChallenge, TranscriptReadBuffer};
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use halo2_solidity_verifier;
|
||||
use halo2curves::bn256::{Bn256, Fr, G1Affine};
|
||||
use halo2curves::ff::{FromUniformBytes, WithSmallOrderMulGroup};
|
||||
use halo2curves::serde::SerdeObject;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use indicatif::{ProgressBar, ProgressStyle};
|
||||
use instant::Instant;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use itertools::Itertools;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use log::debug;
|
||||
use log::{info, trace, warn};
|
||||
use serde::de::DeserializeOwned;
|
||||
@@ -65,9 +54,7 @@ use snark_verifier::system::halo2::compile;
|
||||
use snark_verifier::system::halo2::transcript::evm::EvmTranscript;
|
||||
use snark_verifier::system::halo2::Config;
|
||||
use std::fs::File;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use std::io::BufWriter;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use std::io::{Cursor, Write};
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
@@ -128,7 +115,6 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
|
||||
logrows as u32,
|
||||
commitment.unwrap_or(Commitments::from_str(DEFAULT_COMMITMENT).unwrap()),
|
||||
),
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
Commands::GetSrs {
|
||||
srs_path,
|
||||
settings_path,
|
||||
@@ -145,7 +131,6 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
|
||||
settings_path.unwrap_or(DEFAULT_SETTINGS.into()),
|
||||
args,
|
||||
),
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
Commands::CalibrateSettings {
|
||||
model,
|
||||
settings_path,
|
||||
@@ -188,14 +173,13 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
|
||||
model.unwrap_or(DEFAULT_MODEL.into()),
|
||||
witness.unwrap_or(DEFAULT_WITNESS.into()),
|
||||
),
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
Commands::CreateEvmVerifier {
|
||||
vk_path,
|
||||
srs_path,
|
||||
settings_path,
|
||||
sol_code_path,
|
||||
abi_path,
|
||||
render_vk_seperately,
|
||||
reusable,
|
||||
} => {
|
||||
create_evm_verifier(
|
||||
vk_path.unwrap_or(DEFAULT_VK.into()),
|
||||
@@ -203,11 +187,10 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
|
||||
settings_path.unwrap_or(DEFAULT_SETTINGS.into()),
|
||||
sol_code_path.unwrap_or(DEFAULT_SOL_CODE.into()),
|
||||
abi_path.unwrap_or(DEFAULT_VERIFIER_ABI.into()),
|
||||
render_vk_seperately.unwrap_or(DEFAULT_RENDER_VK_SEPERATELY.parse().unwrap()),
|
||||
reusable.unwrap_or(DEFAULT_RENDER_REUSABLE.parse().unwrap()),
|
||||
)
|
||||
.await
|
||||
}
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
Commands::EncodeEvmCalldata {
|
||||
proof_path,
|
||||
calldata_path,
|
||||
@@ -219,14 +202,14 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
|
||||
)
|
||||
.map(|e| serde_json::to_string(&e).unwrap()),
|
||||
|
||||
Commands::CreateEvmVK {
|
||||
Commands::CreateEvmVKArtifact {
|
||||
vk_path,
|
||||
srs_path,
|
||||
settings_path,
|
||||
sol_code_path,
|
||||
abi_path,
|
||||
} => {
|
||||
create_evm_vk(
|
||||
create_evm_vka(
|
||||
vk_path.unwrap_or(DEFAULT_VK.into()),
|
||||
srs_path,
|
||||
settings_path.unwrap_or(DEFAULT_SETTINGS.into()),
|
||||
@@ -235,7 +218,6 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
|
||||
)
|
||||
.await
|
||||
}
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
Commands::CreateEvmDataAttestation {
|
||||
settings_path,
|
||||
sol_code_path,
|
||||
@@ -252,7 +234,6 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
|
||||
)
|
||||
.await
|
||||
}
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
Commands::CreateEvmVerifierAggr {
|
||||
vk_path,
|
||||
srs_path,
|
||||
@@ -260,7 +241,7 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
|
||||
abi_path,
|
||||
aggregation_settings,
|
||||
logrows,
|
||||
render_vk_seperately,
|
||||
reusable,
|
||||
} => {
|
||||
create_evm_aggregate_verifier(
|
||||
vk_path.unwrap_or(DEFAULT_VK.into()),
|
||||
@@ -269,7 +250,7 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
|
||||
abi_path.unwrap_or(DEFAULT_VERIFIER_AGGREGATED_ABI.into()),
|
||||
aggregation_settings,
|
||||
logrows.unwrap_or(DEFAULT_AGGREGATED_LOGROWS.parse().unwrap()),
|
||||
render_vk_seperately.unwrap_or(DEFAULT_RENDER_VK_SEPERATELY.parse().unwrap()),
|
||||
reusable.unwrap_or(DEFAULT_RENDER_REUSABLE.parse().unwrap()),
|
||||
)
|
||||
.await
|
||||
}
|
||||
@@ -298,7 +279,6 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
|
||||
disable_selector_compression
|
||||
.unwrap_or(DEFAULT_DISABLE_SELECTOR_COMPRESSION.parse().unwrap()),
|
||||
),
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
Commands::SetupTestEvmData {
|
||||
data,
|
||||
compiled_circuit,
|
||||
@@ -317,13 +297,11 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
|
||||
)
|
||||
.await
|
||||
}
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
Commands::TestUpdateAccountCalls {
|
||||
addr,
|
||||
data,
|
||||
rpc_url,
|
||||
} => test_update_account_calls(addr, data.unwrap_or(DEFAULT_DATA.into()), rpc_url).await,
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
Commands::SwapProofCommitments {
|
||||
proof_path,
|
||||
witness_path,
|
||||
@@ -333,7 +311,6 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
|
||||
)
|
||||
.map(|e| serde_json::to_string(&e).unwrap()),
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
Commands::Prove {
|
||||
witness,
|
||||
compiled_circuit,
|
||||
@@ -434,38 +411,41 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
|
||||
)
|
||||
.map(|e| serde_json::to_string(&e).unwrap()),
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
Commands::DeployEvmVerifier {
|
||||
Commands::DeployEvm {
|
||||
sol_code_path,
|
||||
rpc_url,
|
||||
addr_path,
|
||||
optimizer_runs,
|
||||
private_key,
|
||||
addr_verifier_manager,
|
||||
addr_reusable_verifier,
|
||||
contract,
|
||||
} => {
|
||||
// if contract type is either verifier/reusable
|
||||
match contract {
|
||||
ContractType::Verifier { reusable: true } => {
|
||||
if addr_verifier_manager.is_none() {
|
||||
panic!("Must pass a verifier manager address for reusable verifier")
|
||||
}
|
||||
}
|
||||
ContractType::VerifyingKeyArtifact => {
|
||||
if addr_verifier_manager.is_none() || addr_reusable_verifier.is_none() {
|
||||
panic!(
|
||||
"Must pass a verifier manager address and reusable verifier address for verifying key artifact"
|
||||
)
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
};
|
||||
deploy_evm(
|
||||
sol_code_path.unwrap_or(DEFAULT_SOL_CODE.into()),
|
||||
rpc_url,
|
||||
addr_path.unwrap_or(DEFAULT_CONTRACT_ADDRESS.into()),
|
||||
optimizer_runs,
|
||||
private_key,
|
||||
"Halo2Verifier",
|
||||
)
|
||||
.await
|
||||
}
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
Commands::DeployEvmVK {
|
||||
sol_code_path,
|
||||
rpc_url,
|
||||
addr_path,
|
||||
optimizer_runs,
|
||||
private_key,
|
||||
} => {
|
||||
deploy_evm(
|
||||
sol_code_path.unwrap_or(DEFAULT_VK_SOL.into()),
|
||||
rpc_url,
|
||||
addr_path.unwrap_or(DEFAULT_CONTRACT_ADDRESS_VK.into()),
|
||||
optimizer_runs,
|
||||
private_key,
|
||||
"Halo2VerifyingKey",
|
||||
addr_verifier_manager.map(|s| s.into()),
|
||||
addr_reusable_verifier.map(|s| s.into()),
|
||||
contract,
|
||||
)
|
||||
.await
|
||||
}
|
||||
@@ -490,7 +470,6 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
|
||||
)
|
||||
.await
|
||||
}
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
Commands::VerifyEvm {
|
||||
proof_path,
|
||||
addr_verifier,
|
||||
@@ -610,7 +589,6 @@ pub(crate) fn gen_srs_cmd(
|
||||
Ok(String::new())
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
async fn fetch_srs(uri: &str) -> Result<Vec<u8>, EZKLError> {
|
||||
let pb = {
|
||||
let pb = init_spinner();
|
||||
@@ -630,7 +608,6 @@ async fn fetch_srs(uri: &str) -> Result<Vec<u8>, EZKLError> {
|
||||
Ok(std::mem::take(&mut buf))
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub(crate) fn get_file_hash(path: &PathBuf) -> Result<String, EZKLError> {
|
||||
use std::io::Read;
|
||||
let file = std::fs::File::open(path)?;
|
||||
@@ -649,7 +626,6 @@ pub(crate) fn get_file_hash(path: &PathBuf) -> Result<String, EZKLError> {
|
||||
Ok(hash)
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
fn check_srs_hash(
|
||||
logrows: u32,
|
||||
srs_path: Option<PathBuf>,
|
||||
@@ -675,7 +651,6 @@ fn check_srs_hash(
|
||||
Ok(hash)
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub(crate) async fn get_srs_cmd(
|
||||
srs_path: Option<PathBuf>,
|
||||
settings_path: Option<PathBuf>,
|
||||
@@ -718,12 +693,9 @@ pub(crate) async fn get_srs_cmd(
|
||||
let srs_uri = format!("{}{}", PUBLIC_SRS_URL, k);
|
||||
let mut reader = Cursor::new(fetch_srs(&srs_uri).await?);
|
||||
// check the SRS
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
let pb = init_spinner();
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pb.set_message("Validating SRS (this may take a while) ...");
|
||||
let params = ParamsKZG::<Bn256>::read(&mut reader)?;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pb.finish_with_message("SRS validated.");
|
||||
|
||||
info!("Saving SRS to disk...");
|
||||
@@ -777,16 +749,16 @@ pub(crate) async fn gen_witness(
|
||||
None
|
||||
};
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
let mut input = circuit.load_graph_input(&data).await?;
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
#[cfg(any(not(feature = "ezkl"), target_arch = "wasm32"))]
|
||||
let mut input = circuit.load_graph_input(&data)?;
|
||||
|
||||
// if any of the settings have kzg visibility then we need to load the srs
|
||||
|
||||
let commitment: Commitments = settings.run_args.commitment.into();
|
||||
|
||||
let region_settings = RegionSettings::all_true();
|
||||
let region_settings =
|
||||
RegionSettings::all_true(settings.run_args.decomp_base, settings.run_args.decomp_legs);
|
||||
|
||||
let start_time = Instant::now();
|
||||
let witness = if settings.module_requires_polycommit() {
|
||||
@@ -874,7 +846,6 @@ pub(crate) fn gen_circuit_settings(
|
||||
}
|
||||
|
||||
// not for wasm targets
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub(crate) fn init_spinner() -> ProgressBar {
|
||||
let pb = indicatif::ProgressBar::new_spinner();
|
||||
pb.set_draw_target(indicatif::ProgressDrawTarget::stdout());
|
||||
@@ -896,7 +867,6 @@ pub(crate) fn init_spinner() -> ProgressBar {
|
||||
}
|
||||
|
||||
// not for wasm targets
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub(crate) fn init_bar(len: u64) -> ProgressBar {
|
||||
let pb = ProgressBar::new(len);
|
||||
pb.set_draw_target(indicatif::ProgressDrawTarget::stdout());
|
||||
@@ -910,7 +880,6 @@ pub(crate) fn init_bar(len: u64) -> ProgressBar {
|
||||
pb
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use colored_json::ToColoredJson;
|
||||
|
||||
#[derive(Debug, Clone, Tabled)]
|
||||
@@ -1010,7 +979,6 @@ impl AccuracyResults {
|
||||
}
|
||||
|
||||
/// Calibrate the circuit parameters to a given a dataset
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[allow(trivial_casts)]
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub(crate) async fn calibrate(
|
||||
@@ -1143,12 +1111,12 @@ pub(crate) async fn calibrate(
|
||||
};
|
||||
|
||||
// if unix get a gag
|
||||
#[cfg(unix)]
|
||||
#[cfg(all(not(not(feature = "ezkl")), unix))]
|
||||
let _r = match Gag::stdout() {
|
||||
Ok(g) => Some(g),
|
||||
_ => None,
|
||||
};
|
||||
#[cfg(unix)]
|
||||
#[cfg(all(not(not(feature = "ezkl")), unix))]
|
||||
let _g = match Gag::stderr() {
|
||||
Ok(g) => Some(g),
|
||||
_ => None,
|
||||
@@ -1178,7 +1146,10 @@ pub(crate) async fn calibrate(
|
||||
&mut data.clone(),
|
||||
None,
|
||||
None,
|
||||
RegionSettings::all_true(),
|
||||
RegionSettings::all_true(
|
||||
settings.run_args.decomp_base,
|
||||
settings.run_args.decomp_legs,
|
||||
),
|
||||
)
|
||||
.map_err(|e| format!("failed to forward: {}", e))?;
|
||||
|
||||
@@ -1204,9 +1175,9 @@ pub(crate) async fn calibrate(
|
||||
}
|
||||
|
||||
// drop the gag
|
||||
#[cfg(unix)]
|
||||
#[cfg(all(not(not(feature = "ezkl")), unix))]
|
||||
drop(_r);
|
||||
#[cfg(unix)]
|
||||
#[cfg(all(not(not(feature = "ezkl")), unix))]
|
||||
drop(_g);
|
||||
|
||||
let result = forward_pass_res.get(&key).ok_or("key not found")?;
|
||||
@@ -1254,6 +1225,7 @@ pub(crate) async fn calibrate(
|
||||
num_rows: new_settings.num_rows,
|
||||
total_assignments: new_settings.total_assignments,
|
||||
total_const_size: new_settings.total_const_size,
|
||||
total_dynamic_col_size: new_settings.total_dynamic_col_size,
|
||||
..settings.clone()
|
||||
};
|
||||
|
||||
@@ -1371,9 +1343,13 @@ pub(crate) async fn calibrate(
|
||||
let lookup_log_rows = best_params.lookup_log_rows_with_blinding();
|
||||
let module_log_row = best_params.module_constraint_logrows_with_blinding();
|
||||
let instance_logrows = best_params.log2_total_instances_with_blinding();
|
||||
let dynamic_lookup_logrows = best_params.dynamic_lookup_and_shuffle_logrows_with_blinding();
|
||||
let dynamic_lookup_logrows =
|
||||
best_params.min_dynamic_lookup_and_shuffle_logrows_with_blinding();
|
||||
|
||||
let range_check_logrows = best_params.range_check_log_rows_with_blinding();
|
||||
|
||||
let mut reduction = std::cmp::max(lookup_log_rows, module_log_row);
|
||||
reduction = std::cmp::max(reduction, range_check_logrows);
|
||||
reduction = std::cmp::max(reduction, instance_logrows);
|
||||
reduction = std::cmp::max(reduction, dynamic_lookup_logrows);
|
||||
reduction = std::cmp::max(reduction, crate::graph::MIN_LOGROWS);
|
||||
@@ -1419,14 +1395,13 @@ pub(crate) fn mock(
|
||||
Ok(String::new())
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub(crate) async fn create_evm_verifier(
|
||||
vk_path: PathBuf,
|
||||
srs_path: Option<PathBuf>,
|
||||
settings_path: PathBuf,
|
||||
sol_code_path: PathBuf,
|
||||
abi_path: PathBuf,
|
||||
render_vk_seperately: bool,
|
||||
reusable: bool,
|
||||
) -> Result<String, EZKLError> {
|
||||
let settings = GraphSettings::load(&settings_path)?;
|
||||
let commitment: Commitments = settings.run_args.commitment.into();
|
||||
@@ -1448,16 +1423,16 @@ pub(crate) async fn create_evm_verifier(
|
||||
halo2_solidity_verifier::BatchOpenScheme::Bdfg21,
|
||||
num_instance,
|
||||
);
|
||||
let verifier_solidity = if render_vk_seperately {
|
||||
generator.render_separately()?.0 // ignore the rendered vk for now and generate it in create_evm_vk
|
||||
let (verifier_solidity, name) = if reusable {
|
||||
(generator.render_separately()?.0, "Halo2VerifierReusable") // ignore the rendered vk artifact for now and generate it in create_evm_vka
|
||||
} else {
|
||||
generator.render()?
|
||||
(generator.render()?, "Halo2Verifier")
|
||||
};
|
||||
|
||||
File::create(sol_code_path.clone())?.write_all(verifier_solidity.as_bytes())?;
|
||||
|
||||
// fetch abi of the contract
|
||||
let (abi, _, _) = get_contract_artifacts(sol_code_path, "Halo2Verifier", 0).await?;
|
||||
let (abi, _, _) = get_contract_artifacts(sol_code_path, name, 0).await?;
|
||||
// save abi to file
|
||||
serde_json::to_writer(std::fs::File::create(abi_path)?, &abi)?;
|
||||
|
||||
@@ -1465,7 +1440,7 @@ pub(crate) async fn create_evm_verifier(
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub(crate) async fn create_evm_vk(
|
||||
pub(crate) async fn create_evm_vka(
|
||||
vk_path: PathBuf,
|
||||
srs_path: Option<PathBuf>,
|
||||
settings_path: PathBuf,
|
||||
@@ -1493,19 +1468,29 @@ pub(crate) async fn create_evm_vk(
|
||||
num_instance,
|
||||
);
|
||||
|
||||
let vk_solidity = generator.render_separately()?.1;
|
||||
let (reusable_verifier, vk_solidity) = generator.render_separately()?;
|
||||
|
||||
File::create(sol_code_path.clone())?.write_all(vk_solidity.as_bytes())?;
|
||||
// Remove the first line of vk_solidity (license identifier). Same license identifier for all contracts in this .sol
|
||||
let vk_solidity = vk_solidity
|
||||
.lines()
|
||||
.skip(1)
|
||||
.collect::<Vec<&str>>()
|
||||
.join("\n");
|
||||
|
||||
// We store each contracts to the same file...
|
||||
// We need to do this so that during the deployment transaction we make sure
|
||||
// verifier manager links the VKA to the correct reusable_verifier.
|
||||
let combined_solidity = format!("{}\n\n{}", reusable_verifier, vk_solidity);
|
||||
File::create(sol_code_path.clone())?.write_all(combined_solidity.as_bytes())?;
|
||||
|
||||
// fetch abi of the contract
|
||||
let (abi, _, _) = get_contract_artifacts(sol_code_path, "Halo2VerifyingKey", 0).await?;
|
||||
let (abi, _, _) = get_contract_artifacts(sol_code_path, "Halo2VerifyingArtifact", 0).await?;
|
||||
// save abi to file
|
||||
serde_json::to_writer(std::fs::File::create(abi_path)?, &abi)?;
|
||||
|
||||
Ok(String::new())
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub(crate) async fn create_evm_data_attestation(
|
||||
settings_path: PathBuf,
|
||||
sol_code_path: PathBuf,
|
||||
@@ -1582,7 +1567,6 @@ pub(crate) async fn create_evm_data_attestation(
|
||||
Ok(String::new())
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub(crate) async fn deploy_da_evm(
|
||||
data: PathBuf,
|
||||
settings_path: PathBuf,
|
||||
@@ -1609,23 +1593,57 @@ pub(crate) async fn deploy_da_evm(
|
||||
Ok(String::new())
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub(crate) async fn deploy_evm(
|
||||
sol_code_path: PathBuf,
|
||||
rpc_url: Option<String>,
|
||||
addr_path: PathBuf,
|
||||
runs: usize,
|
||||
private_key: Option<String>,
|
||||
contract_name: &str,
|
||||
verifier_manager: Option<alloy::primitives::Address>,
|
||||
reusable_verifier: Option<alloy::primitives::Address>,
|
||||
contract: ContractType,
|
||||
) -> Result<String, EZKLError> {
|
||||
let contract_address = deploy_contract_via_solidity(
|
||||
sol_code_path,
|
||||
rpc_url.as_deref(),
|
||||
runs,
|
||||
private_key.as_deref(),
|
||||
contract_name,
|
||||
)
|
||||
.await?;
|
||||
use crate::eth::{deploy_reusable_verifier, deploy_vka};
|
||||
|
||||
let contract_name = match contract {
|
||||
ContractType::Verifier { reusable: false } => "Halo2Verifier",
|
||||
ContractType::Verifier { reusable: true } => "Halo2VerifierReusable",
|
||||
ContractType::VerifyingKeyArtifact => "Halo2VerifyingArtifact",
|
||||
ContractType::VerifierManager => "EZKLVerifierManager",
|
||||
};
|
||||
|
||||
let contract_address = if contract_name == "Halo2VerifierReusable" {
|
||||
// Use VerifierManager to deploy the contract
|
||||
deploy_reusable_verifier(
|
||||
sol_code_path,
|
||||
rpc_url.as_deref(),
|
||||
runs,
|
||||
private_key.as_deref(),
|
||||
contract_name,
|
||||
verifier_manager.unwrap(),
|
||||
)
|
||||
.await?
|
||||
} else if contract_name == "Halo2VerifyingArtifact" {
|
||||
deploy_vka(
|
||||
sol_code_path,
|
||||
rpc_url.as_deref(),
|
||||
runs,
|
||||
private_key.as_deref(),
|
||||
contract_name,
|
||||
verifier_manager.unwrap(),
|
||||
reusable_verifier.unwrap(),
|
||||
)
|
||||
.await?
|
||||
} else {
|
||||
deploy_contract_via_solidity(
|
||||
sol_code_path,
|
||||
rpc_url.as_deref(),
|
||||
runs,
|
||||
private_key.as_deref(),
|
||||
contract_name,
|
||||
)
|
||||
.await?
|
||||
};
|
||||
|
||||
info!("Contract deployed at: {:#?}", contract_address);
|
||||
|
||||
@@ -1660,7 +1678,6 @@ pub(crate) fn encode_evm_calldata(
|
||||
Ok(encoded)
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub(crate) async fn verify_evm(
|
||||
proof_path: PathBuf,
|
||||
addr_verifier: H160Flag,
|
||||
@@ -1700,7 +1717,6 @@ pub(crate) async fn verify_evm(
|
||||
Ok(String::new())
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub(crate) async fn create_evm_aggregate_verifier(
|
||||
vk_path: PathBuf,
|
||||
srs_path: Option<PathBuf>,
|
||||
@@ -1708,7 +1724,7 @@ pub(crate) async fn create_evm_aggregate_verifier(
|
||||
abi_path: PathBuf,
|
||||
circuit_settings: Vec<PathBuf>,
|
||||
logrows: u32,
|
||||
render_vk_seperately: bool,
|
||||
reusable: bool,
|
||||
) -> Result<String, EZKLError> {
|
||||
let srs_path = get_srs_path(logrows, srs_path, Commitments::KZG);
|
||||
let params: ParamsKZG<Bn256> = load_srs_verifier::<KZGCommitmentScheme<Bn256>>(srs_path)?;
|
||||
@@ -1746,8 +1762,8 @@ pub(crate) async fn create_evm_aggregate_verifier(
|
||||
|
||||
generator = generator.set_acc_encoding(Some(acc_encoding));
|
||||
|
||||
let verifier_solidity = if render_vk_seperately {
|
||||
generator.render_separately()?.0 // ignore the rendered vk for now and generate it in create_evm_vk
|
||||
let verifier_solidity = if reusable {
|
||||
generator.render_separately()?.0 // ignore the rendered vk artifact for now and generate it in create_evm_vka
|
||||
} else {
|
||||
generator.render()?
|
||||
};
|
||||
@@ -1824,7 +1840,6 @@ pub(crate) fn setup(
|
||||
Ok(String::new())
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub(crate) async fn setup_test_evm_witness(
|
||||
data_path: PathBuf,
|
||||
compiled_circuit_path: PathBuf,
|
||||
@@ -1860,9 +1875,7 @@ pub(crate) async fn setup_test_evm_witness(
|
||||
Ok(String::new())
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use crate::pfsys::ProofType;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub(crate) async fn test_update_account_calls(
|
||||
addr: H160Flag,
|
||||
data: PathBuf,
|
||||
@@ -1875,7 +1888,6 @@ pub(crate) async fn test_update_account_calls(
|
||||
Ok(String::new())
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub(crate) fn prove(
|
||||
data_path: PathBuf,
|
||||
@@ -2073,7 +2085,6 @@ pub(crate) fn mock_aggregate(
|
||||
}
|
||||
}
|
||||
// proof aggregation
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
let pb = {
|
||||
let pb = init_spinner();
|
||||
pb.set_message("Aggregating (may take a while)...");
|
||||
@@ -2085,7 +2096,6 @@ pub(crate) fn mock_aggregate(
|
||||
let prover = halo2_proofs::dev::MockProver::run(logrows, &circuit, vec![circuit.instances()])
|
||||
.map_err(|e| ExecutionError::MockProverError(e.to_string()))?;
|
||||
prover.verify().map_err(ExecutionError::VerifyError)?;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pb.finish_with_message("Done.");
|
||||
Ok(String::new())
|
||||
}
|
||||
@@ -2180,7 +2190,6 @@ pub(crate) fn aggregate(
|
||||
}
|
||||
|
||||
// proof aggregation
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
let pb = {
|
||||
let pb = init_spinner();
|
||||
pb.set_message("Aggregating (may take a while)...");
|
||||
@@ -2330,7 +2339,6 @@ pub(crate) fn aggregate(
|
||||
);
|
||||
snark.save(&proof_path)?;
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pb.finish_with_message("Done.");
|
||||
|
||||
Ok(snark)
|
||||
|
||||
@@ -48,7 +48,10 @@ pub enum GraphError {
|
||||
#[error("failed to ser/deser model: {0}")]
|
||||
ModelSerialize(#[from] bincode::Error),
|
||||
/// Tract error
|
||||
#[cfg(not(all(target_arch = "wasm32", target_os = "unknown")))]
|
||||
#[cfg(all(
|
||||
feature = "ezkl",
|
||||
not(all(target_arch = "wasm32", target_os = "unknown"))
|
||||
))]
|
||||
#[error("[tract] {0}")]
|
||||
TractError(#[from] tract_onnx::prelude::TractError),
|
||||
/// Packing exponent is too large
|
||||
@@ -85,11 +88,17 @@ pub enum GraphError {
|
||||
#[error("unknown dimension batch_size in model inputs, set batch_size in variables")]
|
||||
MissingBatchSize,
|
||||
/// Tokio postgres error
|
||||
#[cfg(not(all(target_arch = "wasm32", target_os = "unknown")))]
|
||||
#[cfg(all(
|
||||
feature = "ezkl",
|
||||
not(all(target_arch = "wasm32", target_os = "unknown"))
|
||||
))]
|
||||
#[error("[tokio postgres] {0}")]
|
||||
TokioPostgresError(#[from] tokio_postgres::Error),
|
||||
/// Eth error
|
||||
#[cfg(not(all(target_arch = "wasm32", target_os = "unknown")))]
|
||||
#[cfg(all(
|
||||
feature = "ezkl",
|
||||
not(all(target_arch = "wasm32", target_os = "unknown"))
|
||||
))]
|
||||
#[error("[eth] {0}")]
|
||||
EthError(#[from] crate::eth::EthError),
|
||||
/// Json error
|
||||
|
||||
@@ -2,9 +2,9 @@ use super::errors::GraphError;
|
||||
use super::quantize_float;
|
||||
use crate::circuit::InputType;
|
||||
use crate::fieldutils::integer_rep_to_felt;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use crate::graph::postgres::Client;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use crate::tensor::Tensor;
|
||||
use crate::EZKL_BUF_CAPACITY;
|
||||
use halo2curves::bn256::Fr as Fp;
|
||||
@@ -20,12 +20,12 @@ use std::io::BufReader;
|
||||
use std::io::BufWriter;
|
||||
use std::io::Read;
|
||||
use std::panic::UnwindSafe;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use tract_onnx::tract_core::{
|
||||
tract_data::{prelude::Tensor as TractTensor, TVec},
|
||||
value::TValue,
|
||||
};
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use tract_onnx::tract_hir::tract_num_traits::ToPrimitive;
|
||||
|
||||
type Decimals = u8;
|
||||
@@ -171,7 +171,7 @@ impl OnChainSource {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
/// Inner elements of inputs/outputs coming from postgres DB
|
||||
#[derive(Clone, Debug, Deserialize, Serialize, Default, PartialOrd, PartialEq)]
|
||||
pub struct PostgresSource {
|
||||
@@ -189,7 +189,7 @@ pub struct PostgresSource {
|
||||
pub port: String,
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
impl PostgresSource {
|
||||
/// Create a new PostgresSource
|
||||
pub fn new(
|
||||
@@ -268,7 +268,7 @@ impl PostgresSource {
|
||||
}
|
||||
|
||||
impl OnChainSource {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
/// Create dummy local on-chain data to test the OnChain data source
|
||||
pub async fn test_from_file_data(
|
||||
data: &FileSource,
|
||||
@@ -359,7 +359,7 @@ pub enum DataSource {
|
||||
/// On-chain data source. The first element is the calls to the account, and the second is the RPC url.
|
||||
OnChain(OnChainSource),
|
||||
/// Postgres DB
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
DB(PostgresSource),
|
||||
}
|
||||
|
||||
@@ -419,7 +419,7 @@ impl<'de> Deserialize<'de> for DataSource {
|
||||
if let Ok(t) = second_try {
|
||||
return Ok(DataSource::OnChain(t));
|
||||
}
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
{
|
||||
let third_try: Result<PostgresSource, _> = serde_json::from_str(this_json.get());
|
||||
if let Ok(t) = third_try {
|
||||
@@ -445,7 +445,7 @@ impl UnwindSafe for GraphData {}
|
||||
|
||||
impl GraphData {
|
||||
// not wasm
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
/// Convert the input data to tract data
|
||||
pub fn to_tract_data(
|
||||
&self,
|
||||
@@ -530,7 +530,7 @@ impl GraphData {
|
||||
"on-chain data cannot be split into batches".to_string(),
|
||||
))
|
||||
}
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
GraphData {
|
||||
input_data: DataSource::DB(data),
|
||||
output_data: _,
|
||||
|
||||
@@ -7,7 +7,7 @@ pub mod modules;
|
||||
/// Inner elements of a computational graph that represent a single operation / constraints.
|
||||
pub mod node;
|
||||
/// postgres helper functions
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
pub mod postgres;
|
||||
/// Helper functions
|
||||
pub mod utilities;
|
||||
@@ -17,18 +17,19 @@ pub mod vars;
|
||||
/// errors for the graph
|
||||
pub mod errors;
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use colored_json::ToColoredJson;
|
||||
#[cfg(unix)]
|
||||
#[cfg(all(not(not(feature = "ezkl")), unix))]
|
||||
use gag::Gag;
|
||||
use halo2_proofs::plonk::VerifyingKey;
|
||||
use halo2_proofs::poly::commitment::CommitmentScheme;
|
||||
pub use input::DataSource;
|
||||
use itertools::Itertools;
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use tosubcommand::ToFlags;
|
||||
|
||||
use self::errors::GraphError;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use self::input::OnChainSource;
|
||||
use self::input::{FileSource, GraphData};
|
||||
use self::modules::{GraphModules, ModuleConfigs, ModuleForwardResult, ModuleSizes};
|
||||
@@ -48,7 +49,7 @@ use halo2_proofs::{
|
||||
};
|
||||
use halo2curves::bn256::{self, Fr as Fp, G1Affine};
|
||||
use halo2curves::ff::{Field, PrimeField};
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use lazy_static::lazy_static;
|
||||
use log::{debug, error, trace, warn};
|
||||
use maybe_rayon::prelude::{IntoParallelRefIterator, ParallelIterator};
|
||||
@@ -78,7 +79,7 @@ pub const MAX_NUM_LOOKUP_COLS: usize = 12;
|
||||
pub const MAX_LOOKUP_ABS: IntegerRep =
|
||||
(MAX_NUM_LOOKUP_COLS as IntegerRep) * 2_i128.pow(MAX_PUBLIC_SRS);
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
lazy_static! {
|
||||
/// Max circuit area
|
||||
pub static ref EZKL_MAX_CIRCUIT_AREA: Option<usize> =
|
||||
@@ -89,7 +90,7 @@ lazy_static! {
|
||||
};
|
||||
}
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
#[cfg(any(not(feature = "ezkl"), target_arch = "wasm32"))]
|
||||
const EZKL_MAX_CIRCUIT_AREA: Option<usize> = None;
|
||||
|
||||
///
|
||||
@@ -384,7 +385,7 @@ fn insert_poseidon_hash_pydict(pydict: &PyDict, poseidon_hash: &Vec<Fp>) -> Resu
|
||||
|
||||
#[cfg(feature = "python-bindings")]
|
||||
fn insert_polycommit_pydict(pydict: &PyDict, commits: &Vec<Vec<G1Affine>>) -> Result<(), PyErr> {
|
||||
use crate::python::PyG1Affine;
|
||||
use crate::bindings::python::PyG1Affine;
|
||||
let poseidon_hash: Vec<Vec<PyG1Affine>> = commits
|
||||
.iter()
|
||||
.map(|c| c.iter().map(|x| PyG1Affine::from(*x)).collect())
|
||||
@@ -407,6 +408,8 @@ pub struct GraphSettings {
|
||||
pub total_const_size: usize,
|
||||
/// total dynamic column size
|
||||
pub total_dynamic_col_size: usize,
|
||||
/// max dynamic column input length
|
||||
pub max_dynamic_input_len: usize,
|
||||
/// number of dynamic lookups
|
||||
pub num_dynamic_lookups: usize,
|
||||
/// number of shuffles
|
||||
@@ -451,6 +454,18 @@ impl GraphSettings {
|
||||
.ceil() as u32
|
||||
}
|
||||
|
||||
/// Calc the number of rows required for the range checks
|
||||
pub fn range_check_log_rows_with_blinding(&self) -> u32 {
|
||||
let max_range = self
|
||||
.required_range_checks
|
||||
.iter()
|
||||
.map(|x| x.1 - x.0)
|
||||
.max()
|
||||
.unwrap_or(0);
|
||||
|
||||
(max_range as f32).log2().ceil() as u32
|
||||
}
|
||||
|
||||
fn model_constraint_logrows_with_blinding(&self) -> u32 {
|
||||
(self.num_rows as f64 + RESERVED_BLINDING_ROWS as f64)
|
||||
.log2()
|
||||
@@ -472,6 +487,13 @@ impl GraphSettings {
|
||||
.ceil() as u32
|
||||
}
|
||||
|
||||
/// calculate the number of rows required for the dynamic lookup and shuffle
|
||||
pub fn min_dynamic_lookup_and_shuffle_logrows_with_blinding(&self) -> u32 {
|
||||
(self.max_dynamic_input_len as f64 + RESERVED_BLINDING_ROWS as f64)
|
||||
.log2()
|
||||
.ceil() as u32
|
||||
}
|
||||
|
||||
fn dynamic_lookup_and_shuffle_col_size(&self) -> usize {
|
||||
self.total_dynamic_col_size + self.total_shuffle_col_size
|
||||
}
|
||||
@@ -685,6 +707,7 @@ impl std::fmt::Display for TestDataSource {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
impl ToFlags for TestDataSource {}
|
||||
|
||||
impl From<String> for TestDataSource {
|
||||
@@ -873,7 +896,7 @@ impl GraphCircuit {
|
||||
public_inputs.processed_outputs = elements.processed_outputs.clone();
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
debug!(
|
||||
"rescaled and processed public inputs: {}",
|
||||
serde_json::to_string(&public_inputs)?.to_colored_json_auto()?
|
||||
@@ -883,7 +906,7 @@ impl GraphCircuit {
|
||||
}
|
||||
|
||||
///
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
#[cfg(any(not(feature = "ezkl"), target_arch = "wasm32"))]
|
||||
pub fn load_graph_input(&mut self, data: &GraphData) -> Result<Vec<Tensor<Fp>>, GraphError> {
|
||||
let shapes = self.model().graph.input_shapes()?;
|
||||
let scales = self.model().graph.get_input_scales();
|
||||
@@ -910,7 +933,7 @@ impl GraphCircuit {
|
||||
}
|
||||
|
||||
///
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
pub async fn load_graph_input(
|
||||
&mut self,
|
||||
data: &GraphData,
|
||||
@@ -924,7 +947,7 @@ impl GraphCircuit {
|
||||
.await
|
||||
}
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
#[cfg(any(not(feature = "ezkl"), target_arch = "wasm32"))]
|
||||
/// Process the data source for the model
|
||||
fn process_data_source(
|
||||
&mut self,
|
||||
@@ -941,7 +964,7 @@ impl GraphCircuit {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
/// Process the data source for the model
|
||||
async fn process_data_source(
|
||||
&mut self,
|
||||
@@ -971,7 +994,7 @@ impl GraphCircuit {
|
||||
}
|
||||
|
||||
/// Prepare on chain test data
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
pub async fn load_on_chain_data(
|
||||
&mut self,
|
||||
source: OnChainSource,
|
||||
@@ -1190,12 +1213,12 @@ impl GraphCircuit {
|
||||
settings.required_range_checks = vec![(0, max_range_size)];
|
||||
let mut cs = ConstraintSystem::default();
|
||||
// if unix get a gag
|
||||
#[cfg(unix)]
|
||||
#[cfg(all(not(not(feature = "ezkl")), unix))]
|
||||
let _r = match Gag::stdout() {
|
||||
Ok(g) => Some(g),
|
||||
_ => None,
|
||||
};
|
||||
#[cfg(unix)]
|
||||
#[cfg(all(not(not(feature = "ezkl")), unix))]
|
||||
let _g = match Gag::stderr() {
|
||||
Ok(g) => Some(g),
|
||||
_ => None,
|
||||
@@ -1204,9 +1227,9 @@ impl GraphCircuit {
|
||||
Self::configure_with_params(&mut cs, settings);
|
||||
|
||||
// drop the gag
|
||||
#[cfg(unix)]
|
||||
#[cfg(all(not(not(feature = "ezkl")), unix))]
|
||||
drop(_r);
|
||||
#[cfg(unix)]
|
||||
#[cfg(all(not(not(feature = "ezkl")), unix))]
|
||||
drop(_g);
|
||||
|
||||
#[cfg(feature = "mv-lookup")]
|
||||
@@ -1335,7 +1358,7 @@ impl GraphCircuit {
|
||||
visibility,
|
||||
);
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
log::trace!(
|
||||
"witness: \n {}",
|
||||
&witness.as_json()?.to_colored_json_auto()?
|
||||
@@ -1345,7 +1368,7 @@ impl GraphCircuit {
|
||||
}
|
||||
|
||||
/// Create a new circuit from a set of input data and [RunArgs].
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
pub fn from_run_args(
|
||||
run_args: &RunArgs,
|
||||
model_path: &std::path::Path,
|
||||
@@ -1355,7 +1378,7 @@ impl GraphCircuit {
|
||||
}
|
||||
|
||||
/// Create a new circuit from a set of input data and [GraphSettings].
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
pub fn from_settings(
|
||||
params: &GraphSettings,
|
||||
model_path: &std::path::Path,
|
||||
@@ -1370,7 +1393,7 @@ impl GraphCircuit {
|
||||
}
|
||||
|
||||
///
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
pub async fn populate_on_chain_test_data(
|
||||
&mut self,
|
||||
data: &mut GraphData,
|
||||
@@ -1463,7 +1486,7 @@ impl CircuitSize {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
/// Export the ezkl configuration as json
|
||||
pub fn as_json(&self) -> Result<String, GraphError> {
|
||||
let serialized = match serde_json::to_string(&self) {
|
||||
@@ -1551,7 +1574,7 @@ impl Circuit<Fp> for GraphCircuit {
|
||||
|
||||
let circuit_size = CircuitSize::from_cs(cs, params.run_args.logrows);
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
debug!(
|
||||
"circuit size: \n {}",
|
||||
circuit_size
|
||||
|
||||
@@ -21,9 +21,9 @@ use crate::{
|
||||
};
|
||||
use halo2curves::bn256::Fr as Fp;
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use super::input::GraphData;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use colored::Colorize;
|
||||
use halo2_proofs::{
|
||||
circuit::{Layouter, Value},
|
||||
@@ -36,29 +36,29 @@ use log::{debug, info, trace};
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use std::collections::BTreeMap;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::fs;
|
||||
use std::io::Read;
|
||||
use std::path::PathBuf;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use tabled::Table;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use tract_onnx;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use tract_onnx::prelude::{
|
||||
Framework, Graph, InferenceFact, InferenceModelExt, SymbolValues, TypedFact, TypedOp,
|
||||
};
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use tract_onnx::tract_core::internal::DatumType;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use tract_onnx::tract_hir::ops::scan::Scan;
|
||||
use unzip_n::unzip_n;
|
||||
|
||||
unzip_n!(pub 3);
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
type TractResult = (Graph<TypedFact, Box<dyn TypedOp>>, SymbolValues);
|
||||
/// The result of a forward pass.
|
||||
#[derive(Clone, Debug)]
|
||||
@@ -103,6 +103,8 @@ pub struct DummyPassRes {
|
||||
pub num_rows: usize,
|
||||
/// num dynamic lookups
|
||||
pub num_dynamic_lookups: usize,
|
||||
/// max dynamic lookup input len
|
||||
pub max_dynamic_input_len: usize,
|
||||
/// dynamic lookup col size
|
||||
pub dynamic_lookup_col_coord: usize,
|
||||
/// num shuffles
|
||||
@@ -360,6 +362,14 @@ impl NodeType {
|
||||
NodeType::SubGraph { .. } => SupportedOp::Unknown(Unknown),
|
||||
}
|
||||
}
|
||||
|
||||
/// check if it is a softmax
|
||||
pub fn is_softmax(&self) -> bool {
|
||||
match self {
|
||||
NodeType::Node(n) => n.is_softmax(),
|
||||
NodeType::SubGraph { .. } => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
|
||||
@@ -470,7 +480,7 @@ impl Model {
|
||||
/// # Arguments
|
||||
/// * `reader` - A reader for an Onnx file.
|
||||
/// * `run_args` - [RunArgs]
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
pub fn new(reader: &mut dyn std::io::Read, run_args: &RunArgs) -> Result<Self, GraphError> {
|
||||
let visibility = VarVisibility::from_args(run_args)?;
|
||||
|
||||
@@ -517,7 +527,7 @@ impl Model {
|
||||
check_mode: CheckMode,
|
||||
) -> Result<GraphSettings, GraphError> {
|
||||
let instance_shapes = self.instance_shapes()?;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
debug!(
|
||||
"{} {} {}",
|
||||
"model has".blue(),
|
||||
@@ -547,7 +557,11 @@ impl Model {
|
||||
})
|
||||
.collect::<Result<Vec<_>, GraphError>>()?;
|
||||
|
||||
let res = self.dummy_layout(run_args, &inputs, RegionSettings::all_false())?;
|
||||
let res = self.dummy_layout(
|
||||
run_args,
|
||||
&inputs,
|
||||
RegionSettings::all_false(run_args.decomp_base, run_args.decomp_legs),
|
||||
)?;
|
||||
|
||||
// if we're using percentage tolerance, we need to add the necessary range check ops for it.
|
||||
|
||||
@@ -558,6 +572,7 @@ impl Model {
|
||||
num_rows: res.num_rows,
|
||||
total_assignments: res.linear_coord,
|
||||
required_lookups: res.lookup_ops.into_iter().collect(),
|
||||
max_dynamic_input_len: res.max_dynamic_input_len,
|
||||
required_range_checks: res.range_checks.into_iter().collect(),
|
||||
model_output_scales: self.graph.get_output_scales()?,
|
||||
model_input_scales: self.graph.get_input_scales(),
|
||||
@@ -570,13 +585,13 @@ impl Model {
|
||||
version: env!("CARGO_PKG_VERSION").to_string(),
|
||||
num_blinding_factors: None,
|
||||
// unix time timestamp
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
timestamp: Some(
|
||||
instant::SystemTime::now()
|
||||
.duration_since(instant::SystemTime::UNIX_EPOCH)?
|
||||
.as_millis(),
|
||||
),
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
#[cfg(any(not(feature = "ezkl"), target_arch = "wasm32"))]
|
||||
timestamp: None,
|
||||
})
|
||||
}
|
||||
@@ -605,7 +620,7 @@ impl Model {
|
||||
/// * `reader` - A reader for an Onnx file.
|
||||
/// * `scale` - The scale to use for quantization.
|
||||
/// * `public_params` - Whether to make the params public.
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
fn load_onnx_using_tract(
|
||||
reader: &mut dyn std::io::Read,
|
||||
run_args: &RunArgs,
|
||||
@@ -660,7 +675,7 @@ impl Model {
|
||||
/// * `reader` - A reader for an Onnx file.
|
||||
/// * `scale` - The scale to use for quantization.
|
||||
/// * `public_params` - Whether to make the params public.
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
fn load_onnx_model(
|
||||
reader: &mut dyn std::io::Read,
|
||||
run_args: &RunArgs,
|
||||
@@ -696,7 +711,7 @@ impl Model {
|
||||
}
|
||||
|
||||
/// Formats nodes (including subgraphs) into tables !
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
pub fn table_nodes(&self) -> String {
|
||||
let mut node_accumulator = vec![];
|
||||
let mut string = String::new();
|
||||
@@ -738,7 +753,7 @@ impl Model {
|
||||
/// * `visibility` - Which inputs to the model are public and private (params, inputs, outputs) using [VarVisibility].
|
||||
/// * `input_scales` - The scales of the model's inputs.
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
pub fn nodes_from_graph(
|
||||
graph: &Graph<TypedFact, Box<dyn TypedOp>>,
|
||||
run_args: &RunArgs,
|
||||
@@ -883,16 +898,8 @@ impl Model {
|
||||
);
|
||||
}
|
||||
None => {
|
||||
let mut n = Node::new(
|
||||
n.clone(),
|
||||
&mut nodes,
|
||||
scales,
|
||||
&run_args.param_visibility,
|
||||
i,
|
||||
symbol_values,
|
||||
run_args.div_rebasing,
|
||||
run_args.rebase_frac_zero_constants,
|
||||
)?;
|
||||
let mut n =
|
||||
Node::new(n.clone(), &mut nodes, scales, i, symbol_values, run_args)?;
|
||||
if let Some(ref scales) = override_input_scales {
|
||||
if let Some(inp) = n.opkind.get_input() {
|
||||
let scale = scales[input_idx];
|
||||
@@ -935,7 +942,7 @@ impl Model {
|
||||
Ok(nodes)
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
/// Removes all nodes that are consts with 0 uses
|
||||
fn remove_unused_nodes(nodes: &mut BTreeMap<usize, NodeType>) {
|
||||
// remove all nodes that are consts with 0 uses now
|
||||
@@ -954,7 +961,7 @@ impl Model {
|
||||
});
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
/// Run tract onnx model on sample data !
|
||||
pub fn run_onnx_predictions(
|
||||
run_args: &RunArgs,
|
||||
@@ -995,7 +1002,7 @@ impl Model {
|
||||
/// Creates a `Model` from parsed run_args
|
||||
/// # Arguments
|
||||
/// * `params` - A [GraphSettings] struct holding parsed CLI arguments.
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
pub fn from_run_args(run_args: &RunArgs, model: &std::path::Path) -> Result<Self, GraphError> {
|
||||
let mut file = std::fs::File::open(model).map_err(|e| {
|
||||
GraphError::ReadWriteFileError(model.display().to_string(), e.to_string())
|
||||
@@ -1112,6 +1119,8 @@ impl Model {
|
||||
region,
|
||||
0,
|
||||
run_args.num_inner_cols,
|
||||
run_args.decomp_base,
|
||||
run_args.decomp_legs,
|
||||
original_constants.clone(),
|
||||
);
|
||||
// we need to do this as this loop is called multiple times
|
||||
@@ -1168,7 +1177,7 @@ impl Model {
|
||||
})?;
|
||||
}
|
||||
// Then number of columns in the circuits
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
thread_safe_region.debug_report();
|
||||
|
||||
*constants = thread_safe_region.assigned_constants().clone();
|
||||
@@ -1199,7 +1208,7 @@ impl Model {
|
||||
for (idx, node) in self.graph.nodes.iter() {
|
||||
debug!("laying out {}: {}", idx, node.as_str(),);
|
||||
// Then number of columns in the circuits
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
region.debug_report();
|
||||
debug!("input indices: {:?}", node.inputs());
|
||||
debug!("output scales: {:?}", node.out_scales());
|
||||
@@ -1453,7 +1462,7 @@ impl Model {
|
||||
trace!("dummy model layout took: {:?}", duration);
|
||||
|
||||
// Then number of columns in the circuits
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
region.debug_report();
|
||||
|
||||
let outputs = outputs
|
||||
@@ -1467,6 +1476,7 @@ impl Model {
|
||||
let res = DummyPassRes {
|
||||
num_rows: region.row(),
|
||||
linear_coord: region.linear_coord(),
|
||||
max_dynamic_input_len: region.max_dynamic_input_len(),
|
||||
total_const_size: region.total_constants(),
|
||||
lookup_ops: region.used_lookups(),
|
||||
range_checks: region.used_range_checks(),
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
use super::scale_to_multiplier;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use super::utilities::node_output_shapes;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use super::VarScales;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use super::Visibility;
|
||||
use crate::circuit::hybrid::HybridOp;
|
||||
use crate::circuit::lookup::LookupOp;
|
||||
@@ -13,29 +13,29 @@ use crate::circuit::Constant;
|
||||
use crate::circuit::Input;
|
||||
use crate::circuit::Op;
|
||||
use crate::circuit::Unknown;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use crate::graph::errors::GraphError;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use crate::graph::new_op_from_onnx;
|
||||
use crate::tensor::TensorError;
|
||||
use halo2curves::bn256::Fr as Fp;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use log::trace;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use std::collections::BTreeMap;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use std::fmt;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use tabled::Tabled;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use tract_onnx::{
|
||||
self,
|
||||
prelude::{Node as OnnxNode, SymbolValues, TypedFact, TypedOp},
|
||||
};
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
fn display_vector<T: fmt::Debug>(v: &Vec<T>) -> String {
|
||||
if !v.is_empty() {
|
||||
format!("{:?}", v)
|
||||
@@ -44,7 +44,7 @@ fn display_vector<T: fmt::Debug>(v: &Vec<T>) -> String {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
fn display_opkind(v: &SupportedOp) -> String {
|
||||
v.as_string()
|
||||
}
|
||||
@@ -125,6 +125,7 @@ impl RebaseScale {
|
||||
if (op_out_scale > (global_scale * scale_rebase_multiplier as i32))
|
||||
&& !inner.is_constant()
|
||||
&& !inner.is_input()
|
||||
&& !inner.is_identity()
|
||||
{
|
||||
let multiplier =
|
||||
scale_to_multiplier(op_out_scale - global_scale * scale_rebase_multiplier as i32);
|
||||
@@ -302,7 +303,7 @@ impl SupportedOp {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
fn homogenous_rescale(
|
||||
&self,
|
||||
in_scales: Vec<crate::Scale>,
|
||||
@@ -326,6 +327,19 @@ impl SupportedOp {
|
||||
SupportedOp::RebaseScale(op) => op,
|
||||
}
|
||||
}
|
||||
|
||||
/// check if is the identity operation
|
||||
/// # Returns
|
||||
/// * `true` if the operation is the identity operation
|
||||
/// * `false` otherwise
|
||||
pub fn is_identity(&self) -> bool {
|
||||
match self {
|
||||
SupportedOp::Linear(op) => matches!(op, PolyOp::Identity { .. }),
|
||||
SupportedOp::Rescaled(op) => op.inner.is_identity(),
|
||||
SupportedOp::RebaseScale(op) => op.inner.is_identity(),
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Box<dyn Op<Fp>>> for SupportedOp {
|
||||
@@ -427,7 +441,7 @@ pub struct Node {
|
||||
pub num_uses: usize,
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
impl Tabled for Node {
|
||||
const LENGTH: usize = 6;
|
||||
|
||||
@@ -467,17 +481,15 @@ impl Node {
|
||||
/// * `other_nodes` - [BTreeMap] of other previously initialized [Node]s in the computational graph.
|
||||
/// * `public_params` - flag if parameters of model are public
|
||||
/// * `idx` - The node's unique identifier.
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn new(
|
||||
node: OnnxNode<TypedFact, Box<dyn TypedOp>>,
|
||||
other_nodes: &mut BTreeMap<usize, super::NodeType>,
|
||||
scales: &VarScales,
|
||||
param_visibility: &Visibility,
|
||||
idx: usize,
|
||||
symbol_values: &SymbolValues,
|
||||
div_rebasing: bool,
|
||||
rebase_frac_zero_constants: bool,
|
||||
run_args: &crate::RunArgs,
|
||||
) -> Result<Self, GraphError> {
|
||||
trace!("Create {:?}", node);
|
||||
trace!("Create op {:?}", node.op);
|
||||
@@ -517,11 +529,10 @@ impl Node {
|
||||
let (mut opkind, deleted_indices) = new_op_from_onnx(
|
||||
idx,
|
||||
scales,
|
||||
param_visibility,
|
||||
node.clone(),
|
||||
&mut inputs,
|
||||
symbol_values,
|
||||
rebase_frac_zero_constants,
|
||||
run_args,
|
||||
)?; // parses the op name
|
||||
|
||||
// we can only take the inputs as mutable once -- so we need to collect them first
|
||||
@@ -569,7 +580,7 @@ impl Node {
|
||||
rescale_const_with_single_use(
|
||||
constant,
|
||||
in_scales.clone(),
|
||||
param_visibility,
|
||||
&run_args.param_visibility,
|
||||
input_node.num_uses(),
|
||||
)?;
|
||||
input_node.replace_opkind(constant.clone_dyn().into());
|
||||
@@ -589,7 +600,7 @@ impl Node {
|
||||
global_scale,
|
||||
out_scale,
|
||||
scales.rebase_multiplier,
|
||||
div_rebasing,
|
||||
run_args.div_rebasing,
|
||||
);
|
||||
|
||||
out_scale = opkind.out_scale(in_scales)?;
|
||||
@@ -612,9 +623,18 @@ impl Node {
|
||||
num_uses,
|
||||
})
|
||||
}
|
||||
|
||||
/// check if it is a softmax node
|
||||
pub fn is_softmax(&self) -> bool {
|
||||
if let SupportedOp::Hybrid(HybridOp::Softmax { .. }) = self.opkind {
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
fn rescale_const_with_single_use(
|
||||
constant: &mut Constant<Fp>,
|
||||
in_scales: Vec<crate::Scale>,
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use log::{debug, error, info};
|
||||
use std::fmt::Debug;
|
||||
use std::net::IpAddr;
|
||||
#[cfg(unix)]
|
||||
#[cfg(all(not(not(feature = "ezkl")), unix))]
|
||||
use std::path::Path;
|
||||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
@@ -150,7 +150,7 @@ impl Config {
|
||||
/// Adds a Unix socket host to the configuration.
|
||||
///
|
||||
/// Unlike `host`, this method allows non-UTF8 paths.
|
||||
#[cfg(unix)]
|
||||
#[cfg(all(not(not(feature = "ezkl")), unix))]
|
||||
pub fn host_path<T>(&mut self, host: T) -> &mut Config
|
||||
where
|
||||
T: AsRef<Path>,
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
use super::errors::GraphError;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use super::VarScales;
|
||||
use super::{Rescaled, SupportedOp, Visibility};
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use crate::circuit::hybrid::HybridOp;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use crate::circuit::lookup::LookupOp;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use crate::circuit::poly::PolyOp;
|
||||
use crate::circuit::Op;
|
||||
use crate::fieldutils::IntegerRep;
|
||||
@@ -14,13 +14,13 @@ use crate::tensor::{Tensor, TensorError, TensorType};
|
||||
use halo2curves::bn256::Fr as Fp;
|
||||
use halo2curves::ff::PrimeField;
|
||||
use itertools::Itertools;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use log::{debug, warn};
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use std::sync::Arc;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use tract_onnx::prelude::{DatumType, Node as OnnxNode, TypedFact, TypedOp};
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use tract_onnx::tract_core::ops::{
|
||||
array::{
|
||||
Gather, GatherElements, GatherNd, MultiBroadcastTo, OneHot, ScatterElements, ScatterNd,
|
||||
@@ -33,7 +33,7 @@ use tract_onnx::tract_core::ops::{
|
||||
nn::{LeakyRelu, Reduce, Softmax},
|
||||
Downsample,
|
||||
};
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use tract_onnx::tract_hir::{
|
||||
internal::DimLike,
|
||||
ops::array::{Pad, PadMode, TypedConcat},
|
||||
@@ -41,7 +41,7 @@ use tract_onnx::tract_hir::{
|
||||
ops::konst::Const,
|
||||
ops::nn::DataFormat,
|
||||
tract_core::ops::cast::Cast,
|
||||
tract_core::ops::cnn::{conv::KernelFormat, MaxPool, PaddingSpec, SumPool},
|
||||
tract_core::ops::cnn::{conv::KernelFormat, MaxPool, SumPool},
|
||||
};
|
||||
|
||||
/// Quantizes an iterable of f32s to a [Tensor] of i32s using a fixed point representation.
|
||||
@@ -90,25 +90,26 @@ pub fn multiplier_to_scale(mult: f64) -> crate::Scale {
|
||||
mult.log2().round() as crate::Scale
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
/// extract padding from a onnx node.
|
||||
pub fn extract_padding(
|
||||
pool_spec: &PoolSpec,
|
||||
num_dims: usize,
|
||||
image_size: &[usize],
|
||||
) -> Result<Vec<(usize, usize)>, GraphError> {
|
||||
let padding = match &pool_spec.padding {
|
||||
PaddingSpec::Explicit(b, a) | PaddingSpec::ExplicitOnnxPool(b, a, _) => {
|
||||
b.iter().zip(a.iter()).map(|(b, a)| (*b, *a)).collect()
|
||||
}
|
||||
PaddingSpec::Valid => vec![(0, 0); num_dims],
|
||||
_ => {
|
||||
return Err(GraphError::MissingParams("padding".to_string()));
|
||||
}
|
||||
};
|
||||
let num_relevant_dims = pool_spec.kernel_shape.len();
|
||||
|
||||
// get the last num_relevant_dims of the image size
|
||||
let image_size = &image_size[image_size.len() - num_relevant_dims..];
|
||||
|
||||
let dims = pool_spec.computed_padding(image_size);
|
||||
let mut padding = Vec::new();
|
||||
for dim in dims {
|
||||
padding.push((dim.pad_before, dim.pad_after));
|
||||
}
|
||||
Ok(padding)
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
/// Extracts the strides from a onnx node.
|
||||
pub fn extract_strides(pool_spec: &PoolSpec) -> Result<Vec<usize>, GraphError> {
|
||||
Ok(pool_spec
|
||||
@@ -119,7 +120,7 @@ pub fn extract_strides(pool_spec: &PoolSpec) -> Result<Vec<usize>, GraphError> {
|
||||
}
|
||||
|
||||
/// Gets the shape of a onnx node's outlets.
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
pub fn node_output_shapes(
|
||||
node: &OnnxNode<TypedFact, Box<dyn TypedOp>>,
|
||||
symbol_values: &SymbolValues,
|
||||
@@ -134,9 +135,9 @@ pub fn node_output_shapes(
|
||||
}
|
||||
Ok(shapes)
|
||||
}
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use tract_onnx::prelude::SymbolValues;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
/// Extracts the raw values from a tensor.
|
||||
pub fn extract_tensor_value(
|
||||
input: Arc<tract_onnx::prelude::Tensor>,
|
||||
@@ -245,7 +246,7 @@ pub fn extract_tensor_value(
|
||||
Ok(const_value)
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
fn load_op<C: tract_onnx::prelude::Op + Clone>(
|
||||
op: &dyn tract_onnx::prelude::Op,
|
||||
idx: usize,
|
||||
@@ -269,15 +270,14 @@ fn load_op<C: tract_onnx::prelude::Op + Clone>(
|
||||
/// * `param_visibility` - [Visibility] of the node.
|
||||
/// * `node` - the [OnnxNode] to be matched.
|
||||
/// * `inputs` - the node's inputs.
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
pub fn new_op_from_onnx(
|
||||
idx: usize,
|
||||
scales: &VarScales,
|
||||
param_visibility: &Visibility,
|
||||
node: OnnxNode<TypedFact, Box<dyn TypedOp>>,
|
||||
inputs: &mut [super::NodeType],
|
||||
symbol_values: &SymbolValues,
|
||||
rebase_frac_zero_constants: bool,
|
||||
run_args: &crate::RunArgs,
|
||||
) -> Result<(SupportedOp, Vec<usize>), GraphError> {
|
||||
use tract_onnx::tract_core::ops::array::Trilu;
|
||||
|
||||
@@ -664,13 +664,16 @@ pub fn new_op_from_onnx(
|
||||
|
||||
// if all raw_values are round then set scale to 0
|
||||
let all_round = raw_value.iter().all(|x| (x).fract() == 0.0);
|
||||
if all_round && rebase_frac_zero_constants {
|
||||
if all_round && run_args.rebase_frac_zero_constants {
|
||||
constant_scale = 0;
|
||||
}
|
||||
|
||||
// Quantize the raw value
|
||||
let quantized_value =
|
||||
quantize_tensor(raw_value.clone(), constant_scale, param_visibility)?;
|
||||
let quantized_value = quantize_tensor(
|
||||
raw_value.clone(),
|
||||
constant_scale,
|
||||
&run_args.param_visibility,
|
||||
)?;
|
||||
let c = crate::circuit::ops::Constant::new(quantized_value, raw_value);
|
||||
// Create a constant op
|
||||
SupportedOp::Constant(c)
|
||||
@@ -782,7 +785,7 @@ pub fn new_op_from_onnx(
|
||||
deleted_indices.push(const_idx);
|
||||
}
|
||||
if unit == 0. {
|
||||
SupportedOp::Nonlinear(LookupOp::ReLU)
|
||||
SupportedOp::Linear(PolyOp::ReLU)
|
||||
} else {
|
||||
// get the non-constant index
|
||||
let non_const_idx = if const_idx == 0 { 1 } else { 0 };
|
||||
@@ -871,7 +874,7 @@ pub fn new_op_from_onnx(
|
||||
"QuantizeLinearU8" | "DequantizeLinearF32" => {
|
||||
SupportedOp::Linear(PolyOp::Identity { out_scale: None })
|
||||
}
|
||||
"Abs" => SupportedOp::Nonlinear(LookupOp::Abs),
|
||||
"Abs" => SupportedOp::Linear(PolyOp::Abs),
|
||||
"Neg" => SupportedOp::Linear(PolyOp::Neg),
|
||||
"HardSwish" => SupportedOp::Nonlinear(LookupOp::HardSwish {
|
||||
scale: scale_to_multiplier(inputs[0].out_scales()[0]).into(),
|
||||
@@ -1014,8 +1017,13 @@ pub fn new_op_from_onnx(
|
||||
if raw_values.log2().fract() == 0.0 {
|
||||
inputs[const_idx].decrement_use();
|
||||
deleted_indices.push(const_idx);
|
||||
// get the non constant index
|
||||
let non_const_idx = if const_idx == 0 { 1 } else { 0 };
|
||||
|
||||
op = SupportedOp::Linear(PolyOp::Identity {
|
||||
out_scale: Some(input_scales[0] + raw_values.log2() as i32),
|
||||
out_scale: Some(
|
||||
input_scales[non_const_idx] + raw_values.log2() as i32,
|
||||
),
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1106,7 +1114,7 @@ pub fn new_op_from_onnx(
|
||||
}
|
||||
|
||||
let stride = extract_strides(pool_spec)?;
|
||||
let padding = extract_padding(pool_spec, input_dims[0].len())?;
|
||||
let padding = extract_padding(pool_spec, &input_dims[0])?;
|
||||
let kernel_shape = &pool_spec.kernel_shape;
|
||||
|
||||
SupportedOp::Hybrid(HybridOp::MaxPool {
|
||||
@@ -1127,7 +1135,7 @@ pub fn new_op_from_onnx(
|
||||
"RoundHalfToEven" => SupportedOp::Nonlinear(LookupOp::RoundHalfToEven {
|
||||
scale: scale_to_multiplier(inputs[0].out_scales()[0]).into(),
|
||||
}),
|
||||
"Sign" => SupportedOp::Nonlinear(LookupOp::Sign),
|
||||
"Sign" => SupportedOp::Linear(PolyOp::Sign),
|
||||
"Pow" => {
|
||||
// Extract the slope layer hyperparams from a const
|
||||
|
||||
@@ -1176,7 +1184,7 @@ pub fn new_op_from_onnx(
|
||||
let pool_spec = &conv_node.pool_spec;
|
||||
|
||||
let stride = extract_strides(pool_spec)?;
|
||||
let padding = extract_padding(pool_spec, input_dims[0].len())?;
|
||||
let padding = extract_padding(pool_spec, &input_dims[0])?;
|
||||
|
||||
// if bias exists then rescale it to the input + kernel scale
|
||||
if input_scales.len() == 3 {
|
||||
@@ -1234,7 +1242,7 @@ pub fn new_op_from_onnx(
|
||||
let pool_spec = &deconv_node.pool_spec;
|
||||
|
||||
let stride = extract_strides(pool_spec)?;
|
||||
let padding = extract_padding(pool_spec, input_dims[0].len())?;
|
||||
let padding = extract_padding(pool_spec, &input_dims[0])?;
|
||||
// if bias exists then rescale it to the input + kernel scale
|
||||
if input_scales.len() == 3 {
|
||||
let bias_scale = input_scales[2];
|
||||
@@ -1347,7 +1355,7 @@ pub fn new_op_from_onnx(
|
||||
}
|
||||
|
||||
let stride = extract_strides(pool_spec)?;
|
||||
let padding = extract_padding(pool_spec, input_dims[0].len())?;
|
||||
let padding = extract_padding(pool_spec, &input_dims[0])?;
|
||||
|
||||
SupportedOp::Hybrid(HybridOp::SumPool {
|
||||
padding,
|
||||
@@ -1356,11 +1364,6 @@ pub fn new_op_from_onnx(
|
||||
normalized: sumpool_node.normalize,
|
||||
})
|
||||
}
|
||||
// "GlobalAvgPool" => SupportedOp::Linear(PolyOp::SumPool {
|
||||
// padding: [(0, 0); 2],
|
||||
// stride: (1, 1),
|
||||
// kernel_shape: (inputs[0].out_dims()[0][1], inputs[0].out_dims()[0][2]),
|
||||
// }),
|
||||
"Pad" => {
|
||||
let pad_node: &Pad = match node.op().downcast_ref::<Pad>() {
|
||||
Some(b) => b,
|
||||
|
||||
@@ -14,6 +14,7 @@ use pyo3::{
|
||||
};
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use tosubcommand::ToFlags;
|
||||
|
||||
use self::errors::GraphError;
|
||||
@@ -64,6 +65,7 @@ impl Display for Visibility {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
impl ToFlags for Visibility {
|
||||
fn to_flags(&self) -> Vec<String> {
|
||||
vec![format!("{}", self)]
|
||||
@@ -441,7 +443,7 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> ModelVars<F> {
|
||||
let dynamic_lookup =
|
||||
VarTensor::new_advice(cs, logrows, 1, dynamic_lookup_and_shuffle_size);
|
||||
if dynamic_lookup.num_blocks() > 1 {
|
||||
panic!("dynamic lookup or shuffle should only have one block");
|
||||
warn!("dynamic lookup has {} blocks", dynamic_lookup.num_blocks());
|
||||
};
|
||||
advices.push(dynamic_lookup);
|
||||
}
|
||||
|
||||
114
src/lib.rs
114
src/lib.rs
@@ -23,18 +23,23 @@
|
||||
)]
|
||||
// we allow this for our dynamic range based indexing scheme
|
||||
#![allow(clippy::single_range_in_vec_init)]
|
||||
#![feature(buf_read_has_data_left)]
|
||||
#![feature(stmt_expr_attributes)]
|
||||
|
||||
//! A library for turning computational graphs, such as neural networks, into ZK-circuits.
|
||||
//!
|
||||
|
||||
/// Error type
|
||||
// #[cfg_attr(not(feature = "ezkl"), derive(uniffi::Error))]
|
||||
#[derive(thiserror::Error, Debug)]
|
||||
#[allow(missing_docs)]
|
||||
pub enum EZKLError {
|
||||
#[error("[aggregation] {0}")]
|
||||
AggregationError(#[from] pfsys::evm::aggregation_kzg::AggregationError),
|
||||
#[cfg(not(all(target_arch = "wasm32", target_os = "unknown")))]
|
||||
#[cfg(all(
|
||||
feature = "ezkl",
|
||||
not(all(target_arch = "wasm32", target_os = "unknown"))
|
||||
))]
|
||||
#[error("[eth] {0}")]
|
||||
EthError(#[from] eth::EthError),
|
||||
#[error("[graph] {0}")]
|
||||
@@ -53,7 +58,10 @@ pub enum EZKLError {
|
||||
JsonError(#[from] serde_json::Error),
|
||||
#[error("[utf8] {0}")]
|
||||
Utf8Error(#[from] std::str::Utf8Error),
|
||||
#[cfg(not(all(target_arch = "wasm32", target_os = "unknown")))]
|
||||
#[cfg(all(
|
||||
feature = "ezkl",
|
||||
not(all(target_arch = "wasm32", target_os = "unknown"))
|
||||
))]
|
||||
#[error("[reqwest] {0}")]
|
||||
ReqwestError(#[from] reqwest::Error),
|
||||
#[error("[fmt] {0}")]
|
||||
@@ -62,7 +70,10 @@ pub enum EZKLError {
|
||||
Halo2Error(#[from] halo2_proofs::plonk::Error),
|
||||
#[error("[Uncategorized] {0}")]
|
||||
UncategorizedError(String),
|
||||
#[cfg(not(all(target_arch = "wasm32", target_os = "unknown")))]
|
||||
#[cfg(all(
|
||||
feature = "ezkl",
|
||||
not(all(target_arch = "wasm32", target_os = "unknown"))
|
||||
))]
|
||||
#[error("[execute] {0}")]
|
||||
ExecutionError(#[from] execute::ExecutionError),
|
||||
#[error("[srs] {0}")]
|
||||
@@ -84,7 +95,9 @@ impl From<String> for EZKLError {
|
||||
use std::str::FromStr;
|
||||
|
||||
use circuit::{table::Range, CheckMode, Tolerance};
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use clap::Args;
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use fieldutils::IntegerRep;
|
||||
use graph::Visibility;
|
||||
use halo2_proofs::poly::{
|
||||
@@ -92,52 +105,62 @@ use halo2_proofs::poly::{
|
||||
};
|
||||
use halo2curves::bn256::{Bn256, G1Affine};
|
||||
use serde::{Deserialize, Serialize};
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use tosubcommand::ToFlags;
|
||||
|
||||
/// Bindings managment
|
||||
#[cfg(any(
|
||||
feature = "ios-bindings",
|
||||
all(target_arch = "wasm32", target_os = "unknown"),
|
||||
feature = "python-bindings"
|
||||
))]
|
||||
pub mod bindings;
|
||||
/// Methods for configuring tensor operations and assigning values to them in a Halo2 circuit.
|
||||
pub mod circuit;
|
||||
/// CLI commands.
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
pub mod commands;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
// abigen doesn't generate docs for this module
|
||||
#[allow(missing_docs)]
|
||||
/// Utility functions for contracts
|
||||
pub mod eth;
|
||||
/// Command execution
|
||||
///
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
pub mod execute;
|
||||
/// Utilities for converting from Halo2 Field types to integers (and vice-versa).
|
||||
pub mod fieldutils;
|
||||
/// Methods for loading onnx format models and automatically laying them out in
|
||||
/// a Halo2 circuit.
|
||||
#[cfg(feature = "onnx")]
|
||||
#[cfg(any(feature = "onnx", not(feature = "ezkl")))]
|
||||
pub mod graph;
|
||||
/// beautiful logging
|
||||
#[cfg(not(all(target_arch = "wasm32", target_os = "unknown")))]
|
||||
#[cfg(all(
|
||||
feature = "ezkl",
|
||||
not(all(target_arch = "wasm32", target_os = "unknown"))
|
||||
))]
|
||||
pub mod logger;
|
||||
/// Tools for proofs and verification used by cli
|
||||
pub mod pfsys;
|
||||
/// Python bindings
|
||||
#[cfg(feature = "python-bindings")]
|
||||
pub mod python;
|
||||
/// srs sha hashes
|
||||
#[cfg(not(all(target_arch = "wasm32", target_os = "unknown")))]
|
||||
#[cfg(all(
|
||||
feature = "ezkl",
|
||||
not(all(target_arch = "wasm32", target_os = "unknown"))
|
||||
))]
|
||||
pub mod srs_sha;
|
||||
/// An implementation of multi-dimensional tensors.
|
||||
pub mod tensor;
|
||||
/// wasm prover and verifier
|
||||
#[cfg(all(target_arch = "wasm32", target_os = "unknown"))]
|
||||
pub mod wasm;
|
||||
#[cfg(feature = "ios-bindings")]
|
||||
uniffi::setup_scaffolding!();
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use lazy_static::lazy_static;
|
||||
|
||||
/// The denominator in the fixed point representation used when quantizing inputs
|
||||
pub type Scale = i32;
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
// Buf writer capacity
|
||||
lazy_static! {
|
||||
/// The capacity of the buffer used for writing to disk
|
||||
@@ -149,12 +172,13 @@ lazy_static! {
|
||||
/// The serialization format for the keys
|
||||
pub static ref EZKL_KEY_FORMAT: String = std::env::var("EZKL_KEY_FORMAT")
|
||||
.unwrap_or("raw-bytes".to_string());
|
||||
|
||||
}
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
#[cfg(any(not(feature = "ezkl"), target_arch = "wasm32"))]
|
||||
const EZKL_KEY_FORMAT: &str = "raw-bytes";
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
#[cfg(any(not(feature = "ezkl"), target_arch = "wasm32"))]
|
||||
const EZKL_BUF_CAPACITY: &usize = &8000;
|
||||
|
||||
#[derive(
|
||||
@@ -207,6 +231,7 @@ impl std::fmt::Display for Commitments {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
impl ToFlags for Commitments {
|
||||
/// Convert the struct to a subcommand string
|
||||
fn to_flags(&self) -> Vec<String> {
|
||||
@@ -229,53 +254,69 @@ impl From<String> for Commitments {
|
||||
}
|
||||
|
||||
/// Parameters specific to a proving run
|
||||
#[derive(Debug, Args, Deserialize, Serialize, Clone, PartialEq, PartialOrd, ToFlags)]
|
||||
#[derive(Debug, Deserialize, Serialize, Clone, PartialEq, PartialOrd)]
|
||||
#[cfg_attr(
|
||||
all(feature = "ezkl", not(target_arch = "wasm32")),
|
||||
derive(Args, ToFlags)
|
||||
)]
|
||||
pub struct RunArgs {
|
||||
/// The tolerance for error on model outputs
|
||||
#[arg(short = 'T', long, default_value = "0", value_hint = clap::ValueHint::Other)]
|
||||
#[cfg_attr(all(feature = "ezkl", not(target_arch = "wasm32")), arg(short = 'T', long, default_value = "0", value_hint = clap::ValueHint::Other))]
|
||||
pub tolerance: Tolerance,
|
||||
/// The denominator in the fixed point representation used when quantizing inputs
|
||||
#[arg(short = 'S', long, default_value = "7", value_hint = clap::ValueHint::Other)]
|
||||
#[cfg_attr(all(feature = "ezkl", not(target_arch = "wasm32")), arg(short = 'S', long, default_value = "7", value_hint = clap::ValueHint::Other))]
|
||||
pub input_scale: Scale,
|
||||
/// The denominator in the fixed point representation used when quantizing parameters
|
||||
#[arg(long, default_value = "7", value_hint = clap::ValueHint::Other)]
|
||||
#[cfg_attr(all(feature = "ezkl", not(target_arch = "wasm32")), arg(long, default_value = "7", value_hint = clap::ValueHint::Other))]
|
||||
pub param_scale: Scale,
|
||||
/// if the scale is ever > scale_rebase_multiplier * input_scale then the scale is rebased to input_scale (this a more advanced parameter, use with caution)
|
||||
#[arg(long, default_value = "1", value_hint = clap::ValueHint::Other)]
|
||||
#[cfg_attr(all(feature = "ezkl", not(target_arch = "wasm32")), arg(long, default_value = "1", value_hint = clap::ValueHint::Other))]
|
||||
pub scale_rebase_multiplier: u32,
|
||||
/// The min and max elements in the lookup table input column
|
||||
#[arg(short = 'B', long, value_parser = parse_key_val::<IntegerRep, IntegerRep>, default_value = "-32768->32768")]
|
||||
#[cfg_attr(all(feature = "ezkl", not(target_arch = "wasm32")), arg(short = 'B', long, value_parser = parse_key_val::<IntegerRep, IntegerRep>, default_value = "-32768->32768"))]
|
||||
pub lookup_range: Range,
|
||||
/// The log_2 number of rows
|
||||
#[arg(short = 'K', long, default_value = "17", value_hint = clap::ValueHint::Other)]
|
||||
#[cfg_attr(all(feature = "ezkl", not(target_arch = "wasm32")), arg(short = 'K', long, default_value = "17", value_hint = clap::ValueHint::Other))]
|
||||
pub logrows: u32,
|
||||
/// The log_2 number of rows
|
||||
#[arg(short = 'N', long, default_value = "2", value_hint = clap::ValueHint::Other)]
|
||||
#[cfg_attr(all(feature = "ezkl", not(target_arch = "wasm32")), arg(short = 'N', long, default_value = "2", value_hint = clap::ValueHint::Other))]
|
||||
pub num_inner_cols: usize,
|
||||
/// Hand-written parser for graph variables, eg. batch_size=1
|
||||
#[arg(short = 'V', long, value_parser = parse_key_val::<String, usize>, default_value = "batch_size->1", value_delimiter = ',', value_hint = clap::ValueHint::Other)]
|
||||
#[cfg_attr(all(feature = "ezkl", not(target_arch = "wasm32")), arg(short = 'V', long, value_parser = parse_key_val::<String, usize>, default_value = "batch_size->1", value_delimiter = ',', value_hint = clap::ValueHint::Other))]
|
||||
pub variables: Vec<(String, usize)>,
|
||||
/// Flags whether inputs are public, private, fixed, hashed, polycommit
|
||||
#[arg(long, default_value = "private", value_hint = clap::ValueHint::Other)]
|
||||
#[cfg_attr(all(feature = "ezkl", not(target_arch = "wasm32")), arg(long, default_value = "private", value_hint = clap::ValueHint::Other))]
|
||||
pub input_visibility: Visibility,
|
||||
/// Flags whether outputs are public, private, fixed, hashed, polycommit
|
||||
#[arg(long, default_value = "public", value_hint = clap::ValueHint::Other)]
|
||||
#[cfg_attr(all(feature = "ezkl", not(target_arch = "wasm32")), arg(long, default_value = "public", value_hint = clap::ValueHint::Other))]
|
||||
pub output_visibility: Visibility,
|
||||
/// Flags whether params are fixed, private, hashed, polycommit
|
||||
#[arg(long, default_value = "private", value_hint = clap::ValueHint::Other)]
|
||||
#[cfg_attr(all(feature = "ezkl", not(target_arch = "wasm32")), arg(long, default_value = "private", value_hint = clap::ValueHint::Other))]
|
||||
pub param_visibility: Visibility,
|
||||
#[arg(long, default_value = "false")]
|
||||
#[cfg_attr(
|
||||
all(feature = "ezkl", not(target_arch = "wasm32")),
|
||||
arg(long, default_value = "false")
|
||||
)]
|
||||
/// Rebase the scale using lookup table for division instead of using a range check
|
||||
pub div_rebasing: bool,
|
||||
/// Should constants with 0.0 fraction be rebased to scale 0
|
||||
#[arg(long, default_value = "false")]
|
||||
#[cfg_attr(
|
||||
all(feature = "ezkl", not(target_arch = "wasm32")),
|
||||
arg(long, default_value = "false")
|
||||
)]
|
||||
pub rebase_frac_zero_constants: bool,
|
||||
/// check mode (safe, unsafe, etc)
|
||||
#[arg(long, default_value = "unsafe", value_hint = clap::ValueHint::Other)]
|
||||
#[cfg_attr(all(feature = "ezkl", not(target_arch = "wasm32")), arg(long, default_value = "unsafe", value_hint = clap::ValueHint::Other))]
|
||||
pub check_mode: CheckMode,
|
||||
/// commitment scheme
|
||||
#[arg(long, default_value = "kzg", value_hint = clap::ValueHint::Other)]
|
||||
#[cfg_attr(all(feature = "ezkl", not(target_arch = "wasm32")), arg(long, default_value = "kzg", value_hint = clap::ValueHint::Other))]
|
||||
pub commitment: Option<Commitments>,
|
||||
/// the base used for decompositions
|
||||
#[cfg_attr(all(feature = "ezkl", not(target_arch = "wasm32")), arg(long, default_value = "16384", value_hint = clap::ValueHint::Other))]
|
||||
pub decomp_base: usize,
|
||||
#[cfg_attr(all(feature = "ezkl", not(target_arch = "wasm32")), arg(long, default_value = "2", value_hint = clap::ValueHint::Other))]
|
||||
/// the number of legs used for decompositions
|
||||
pub decomp_legs: usize,
|
||||
}
|
||||
|
||||
impl Default for RunArgs {
|
||||
@@ -296,6 +337,8 @@ impl Default for RunArgs {
|
||||
rebase_frac_zero_constants: false,
|
||||
check_mode: CheckMode::UNSAFE,
|
||||
commitment: None,
|
||||
decomp_base: 16384,
|
||||
decomp_legs: 2,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -344,6 +387,7 @@ impl RunArgs {
|
||||
}
|
||||
|
||||
/// Parse a single key-value pair
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
fn parse_key_val<T, U>(
|
||||
s: &str,
|
||||
) -> Result<(T, U), Box<dyn std::error::Error + Send + Sync + 'static>>
|
||||
|
||||
@@ -76,7 +76,7 @@ pub fn init_logger() {
|
||||
prefix_token(&record.level()),
|
||||
// pretty print UTC time
|
||||
chrono::Utc::now()
|
||||
.format("%Y-%m-%d %H:%M:%S")
|
||||
.format("%Y-%m-%d %H:%M:%S:%3f")
|
||||
.to_string()
|
||||
.bright_magenta(),
|
||||
record.metadata().target(),
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use crate::graph::CircuitSize;
|
||||
use crate::pfsys::{Snark, SnarkWitness};
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use colored_json::ToColoredJson;
|
||||
use halo2_proofs::circuit::AssignedCell;
|
||||
use halo2_proofs::plonk::{self};
|
||||
@@ -20,7 +20,7 @@ use halo2_wrong_ecc::{
|
||||
use halo2curves::bn256::{Bn256, Fq, Fr, G1Affine};
|
||||
use halo2curves::ff::PrimeField;
|
||||
use itertools::Itertools;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use log::debug;
|
||||
use log::trace;
|
||||
use rand::rngs::OsRng;
|
||||
@@ -200,7 +200,7 @@ impl AggregationConfig {
|
||||
let range_config =
|
||||
RangeChip::<F>::configure(meta, &main_gate_config, composition_bits, overflow_bits);
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
{
|
||||
let circuit_size = CircuitSize::from_cs(meta, 23);
|
||||
|
||||
|
||||
@@ -13,6 +13,7 @@ use crate::circuit::CheckMode;
|
||||
use crate::graph::GraphWitness;
|
||||
use crate::pfsys::evm::aggregation_kzg::PoseidonTranscript;
|
||||
use crate::{Commitments, EZKL_BUF_CAPACITY, EZKL_KEY_FORMAT};
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use clap::ValueEnum;
|
||||
use halo2_proofs::circuit::Value;
|
||||
use halo2_proofs::plonk::{
|
||||
@@ -42,6 +43,7 @@ use std::io::{self, BufReader, BufWriter, Cursor, Write};
|
||||
use std::ops::Deref;
|
||||
use std::path::PathBuf;
|
||||
use thiserror::Error as thisError;
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
use tosubcommand::ToFlags;
|
||||
|
||||
use halo2curves::bn256::{Bn256, Fr, G1Affine};
|
||||
@@ -56,8 +58,10 @@ fn serde_format_from_str(s: &str) -> halo2_proofs::SerdeFormat {
|
||||
}
|
||||
|
||||
#[allow(missing_docs)]
|
||||
#[derive(
|
||||
ValueEnum, Copy, Clone, Default, Debug, PartialEq, Eq, Deserialize, Serialize, PartialOrd,
|
||||
#[derive(Copy, Clone, Default, Debug, PartialEq, Eq, Deserialize, Serialize, PartialOrd)]
|
||||
#[cfg_attr(
|
||||
all(feature = "ezkl", not(target_arch = "wasm32")),
|
||||
derive(ValueEnum)
|
||||
)]
|
||||
pub enum ProofType {
|
||||
#[default]
|
||||
@@ -77,7 +81,7 @@ impl std::fmt::Display for ProofType {
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
impl ToFlags for ProofType {
|
||||
fn to_flags(&self) -> Vec<String> {
|
||||
vec![format!("{}", self)]
|
||||
@@ -129,17 +133,38 @@ impl<'source> pyo3::FromPyObject<'source> for ProofType {
|
||||
}
|
||||
|
||||
#[allow(missing_docs)]
|
||||
#[derive(ValueEnum, Copy, Clone, Debug, PartialEq, Eq, Deserialize, Serialize)]
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Deserialize, Serialize)]
|
||||
#[cfg_attr(
|
||||
all(feature = "ezkl", not(target_arch = "wasm32")),
|
||||
derive(ValueEnum)
|
||||
)]
|
||||
pub enum StrategyType {
|
||||
Single,
|
||||
Accum,
|
||||
}
|
||||
impl std::fmt::Display for StrategyType {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
self.to_possible_value()
|
||||
.expect("no values are skipped")
|
||||
.get_name()
|
||||
.fmt(f)
|
||||
// When the `ezkl` feature is disabled or we're targeting `wasm32`, use basic string representation.
|
||||
#[cfg(any(not(feature = "ezkl"), target_arch = "wasm32"))]
|
||||
{
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
match self {
|
||||
StrategyType::Single => "single",
|
||||
StrategyType::Accum => "accum",
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
// When the `ezkl` feature is enabled and we're not targeting `wasm32`, use `to_possible_value`.
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
{
|
||||
self.to_possible_value()
|
||||
.expect("no values are skipped")
|
||||
.get_name()
|
||||
.fmt(f)
|
||||
}
|
||||
}
|
||||
}
|
||||
#[cfg(feature = "python-bindings")]
|
||||
@@ -177,8 +202,10 @@ pub enum PfSysError {
|
||||
}
|
||||
|
||||
#[allow(missing_docs)]
|
||||
#[derive(
|
||||
ValueEnum, Default, Copy, Clone, Debug, PartialEq, Eq, Deserialize, Serialize, PartialOrd,
|
||||
#[derive(Default, Copy, Clone, Debug, PartialEq, Eq, Deserialize, Serialize, PartialOrd)]
|
||||
#[cfg_attr(
|
||||
all(feature = "ezkl", not(target_arch = "wasm32")),
|
||||
derive(ValueEnum)
|
||||
)]
|
||||
pub enum TranscriptType {
|
||||
Poseidon,
|
||||
@@ -198,7 +225,7 @@ impl std::fmt::Display for TranscriptType {
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
impl ToFlags for TranscriptType {
|
||||
fn to_flags(&self) -> Vec<String> {
|
||||
vec![format!("{}", self)]
|
||||
@@ -558,7 +585,8 @@ where
|
||||
+ PrimeField
|
||||
+ FromUniformBytes<64>
|
||||
+ WithSmallOrderMulGroup<3>,
|
||||
Scheme::Curve: Serialize + DeserializeOwned,
|
||||
Scheme::Curve: Serialize + DeserializeOwned + SerdeObject,
|
||||
Scheme::ParamsProver: Send + Sync,
|
||||
{
|
||||
let strategy = Strategy::new(params.verifier_params());
|
||||
let mut transcript = TranscriptWriterBuffer::<_, Scheme::Curve, _>::init(vec![]);
|
||||
@@ -861,7 +889,7 @@ pub fn save_params<Scheme: CommitmentScheme>(
|
||||
////////////////////////
|
||||
|
||||
#[cfg(test)]
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
mod tests {
|
||||
|
||||
use super::*;
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
use thiserror::Error;
|
||||
|
||||
use super::ops::DecompositionError;
|
||||
|
||||
/// A wrapper for tensor related errors.
|
||||
#[derive(Debug, Error)]
|
||||
pub enum TensorError {
|
||||
@@ -27,4 +29,13 @@ pub enum TensorError {
|
||||
/// Unset visibility
|
||||
#[error("unset visibility")]
|
||||
UnsetVisibility,
|
||||
/// File save error
|
||||
#[error("save error: {0}")]
|
||||
FileSaveError(String),
|
||||
/// File load error
|
||||
#[error("load error: {0}")]
|
||||
FileLoadError(String),
|
||||
/// Decomposition error
|
||||
#[error("decomposition error: {0}")]
|
||||
DecompositionError(#[from] DecompositionError),
|
||||
}
|
||||
|
||||
@@ -18,6 +18,9 @@ use maybe_rayon::{
|
||||
slice::ParallelSliceMut,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::io::BufRead;
|
||||
use std::io::Write;
|
||||
use std::path::PathBuf;
|
||||
pub use val::*;
|
||||
pub use var::*;
|
||||
|
||||
@@ -41,6 +44,7 @@ use itertools::Itertools;
|
||||
use metal::{Device, MTLResourceOptions, MTLSize};
|
||||
use std::error::Error;
|
||||
use std::fmt::Debug;
|
||||
use std::io::Read;
|
||||
use std::iter::Iterator;
|
||||
use std::ops::{Add, Deref, DerefMut, Div, Mul, Neg, Range, Sub};
|
||||
use std::{cmp::max, ops::Rem};
|
||||
@@ -409,6 +413,45 @@ impl<'data, T: Clone + TensorType + std::marker::Send + std::marker::Sync>
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Clone + TensorType + PrimeField> Tensor<T> {
|
||||
/// save to a file
|
||||
pub fn save(&self, path: &PathBuf) -> Result<(), TensorError> {
|
||||
let writer =
|
||||
std::fs::File::create(path).map_err(|e| TensorError::FileSaveError(e.to_string()))?;
|
||||
let mut buf_writer = std::io::BufWriter::new(writer);
|
||||
|
||||
self.inner.iter().copied().for_each(|x| {
|
||||
let x = x.to_repr();
|
||||
buf_writer.write_all(x.as_ref()).unwrap();
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// load from a file
|
||||
pub fn load(path: &PathBuf) -> Result<Self, TensorError> {
|
||||
let reader =
|
||||
std::fs::File::open(path).map_err(|e| TensorError::FileLoadError(e.to_string()))?;
|
||||
let mut buf_reader = std::io::BufReader::new(reader);
|
||||
|
||||
let mut inner = Vec::new();
|
||||
while let Ok(true) = buf_reader.has_data_left() {
|
||||
let mut repr = T::Repr::default();
|
||||
match buf_reader.read_exact(repr.as_mut()) {
|
||||
Ok(_) => {
|
||||
inner.push(T::from_repr(repr).unwrap());
|
||||
}
|
||||
Err(_) => {
|
||||
return Err(TensorError::FileLoadError(
|
||||
"Failed to read tensor".to_string(),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(Tensor::new(Some(&inner), &[inner.len()]).unwrap())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: Clone + TensorType> Tensor<T> {
|
||||
/// Sets (copies) the tensor values to the provided ones.
|
||||
pub fn new(values: Option<&[T]>, dims: &[usize]) -> Result<Self, TensorError> {
|
||||
@@ -721,6 +764,53 @@ impl<T: Clone + TensorType> Tensor<T> {
|
||||
index
|
||||
}
|
||||
|
||||
/// Fetches every nth element
|
||||
///
|
||||
/// ```
|
||||
/// use ezkl::tensor::Tensor;
|
||||
/// use ezkl::fieldutils::IntegerRep;
|
||||
/// let a = Tensor::<IntegerRep>::new(Some(&[1, 2, 3, 4, 5, 6]), &[6]).unwrap();
|
||||
/// let expected = Tensor::<IntegerRep>::new(Some(&[1, 3, 5]), &[3]).unwrap();
|
||||
/// assert_eq!(a.get_every_n(2).unwrap(), expected);
|
||||
/// assert_eq!(a.get_every_n(1).unwrap(), a);
|
||||
///
|
||||
/// let expected = Tensor::<IntegerRep>::new(Some(&[1, 6]), &[2]).unwrap();
|
||||
/// assert_eq!(a.get_every_n(5).unwrap(), expected);
|
||||
///
|
||||
/// ```
|
||||
pub fn get_every_n(&self, n: usize) -> Result<Tensor<T>, TensorError> {
|
||||
let mut inner: Vec<T> = vec![];
|
||||
for (i, elem) in self.inner.clone().into_iter().enumerate() {
|
||||
if i % n == 0 {
|
||||
inner.push(elem.clone());
|
||||
}
|
||||
}
|
||||
Tensor::new(Some(&inner), &[inner.len()])
|
||||
}
|
||||
|
||||
/// Excludes every nth element
|
||||
///
|
||||
/// ```
|
||||
/// use ezkl::tensor::Tensor;
|
||||
/// use ezkl::fieldutils::IntegerRep;
|
||||
/// let a = Tensor::<IntegerRep>::new(Some(&[1, 2, 3, 4, 5, 6]), &[6]).unwrap();
|
||||
/// let expected = Tensor::<IntegerRep>::new(Some(&[2, 4, 6]), &[3]).unwrap();
|
||||
/// assert_eq!(a.exclude_every_n(2).unwrap(), expected);
|
||||
///
|
||||
/// let expected = Tensor::<IntegerRep>::new(Some(&[2, 3, 4, 5]), &[4]).unwrap();
|
||||
/// assert_eq!(a.exclude_every_n(5).unwrap(), expected);
|
||||
///
|
||||
/// ```
|
||||
pub fn exclude_every_n(&self, n: usize) -> Result<Tensor<T>, TensorError> {
|
||||
let mut inner: Vec<T> = vec![];
|
||||
for (i, elem) in self.inner.clone().into_iter().enumerate() {
|
||||
if i % n != 0 {
|
||||
inner.push(elem.clone());
|
||||
}
|
||||
}
|
||||
Tensor::new(Some(&inner), &[inner.len()])
|
||||
}
|
||||
|
||||
/// Duplicates every nth element
|
||||
///
|
||||
/// ```
|
||||
@@ -1174,6 +1264,31 @@ impl<T: Clone + TensorType> Tensor<T> {
|
||||
Tensor::new(Some(&[res]), &[1])
|
||||
}
|
||||
|
||||
/// Get first elem from Tensor
|
||||
/// ```
|
||||
/// use ezkl::tensor::Tensor;
|
||||
/// use ezkl::fieldutils::IntegerRep;
|
||||
/// let mut a = Tensor::<IntegerRep>::new(Some(&[1, 2, 3]), &[3]).unwrap();
|
||||
/// let mut b = Tensor::<IntegerRep>::new(Some(&[1]), &[1]).unwrap();
|
||||
///
|
||||
/// assert_eq!(a.first().unwrap(), b);
|
||||
/// ```
|
||||
pub fn first(&self) -> Result<Tensor<T>, TensorError>
|
||||
where
|
||||
T: Send + Sync,
|
||||
{
|
||||
let res = match self.inner.first() {
|
||||
Some(e) => e.clone(),
|
||||
None => {
|
||||
return Err(TensorError::DimError(
|
||||
"Cannot get first element of empty tensor".to_string(),
|
||||
))
|
||||
}
|
||||
};
|
||||
|
||||
Tensor::new(Some(&[res]), &[1])
|
||||
}
|
||||
|
||||
/// Maps a function to tensors and enumerates in parallel
|
||||
/// ```
|
||||
/// use ezkl::tensor::{Tensor, TensorError};
|
||||
|
||||
@@ -7,6 +7,131 @@ use itertools::Itertools;
|
||||
use maybe_rayon::{iter::ParallelIterator, prelude::IntoParallelRefIterator};
|
||||
pub use std::ops::{Add, Mul, Neg, Sub};
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, thiserror::Error)]
|
||||
/// Decomposition error
|
||||
pub enum DecompositionError {
|
||||
/// Integer is too large to be represented by base and n
|
||||
#[error("integer {} is too large to be represented by base {} and n {}", .0, .1, .2)]
|
||||
TooLarge(IntegerRep, usize, usize),
|
||||
}
|
||||
|
||||
/// Helper function to get the base decomp of an integer
|
||||
/// # Arguments
|
||||
/// * `x` - IntegerRep
|
||||
/// * `n` - usize
|
||||
/// * `base` - usize
|
||||
///
|
||||
pub fn get_rep(
|
||||
x: &IntegerRep,
|
||||
base: usize,
|
||||
n: usize,
|
||||
) -> Result<Vec<IntegerRep>, DecompositionError> {
|
||||
// check if x is too large
|
||||
if x.abs() > (base.pow(n as u32) as IntegerRep) {
|
||||
return Err(DecompositionError::TooLarge(*x, base, n));
|
||||
}
|
||||
let mut rep = vec![0; n + 1];
|
||||
// sign bit
|
||||
rep[0] = if *x < 0 {
|
||||
-1
|
||||
} else if *x > 0 {
|
||||
1
|
||||
} else {
|
||||
0
|
||||
};
|
||||
|
||||
let mut x = x.abs();
|
||||
//
|
||||
for i in (1..rep.len()).rev() {
|
||||
rep[i] = x % base as i128;
|
||||
x /= base as i128;
|
||||
}
|
||||
|
||||
Ok(rep)
|
||||
}
|
||||
|
||||
/// Decompose a tensor of integers into a larger tensor with added dimension of size `n + 1` with the binary (or OTHER base) representation of the integer.
|
||||
/// # Arguments
|
||||
/// * `x` - Tensor
|
||||
/// * `n` - usize
|
||||
/// * `base` - usize
|
||||
/// # Examples
|
||||
/// ```
|
||||
/// use ezkl::tensor::Tensor;
|
||||
/// use ezkl::fieldutils::IntegerRep;
|
||||
/// use ezkl::tensor::ops::decompose;
|
||||
/// let x = Tensor::<IntegerRep>::new(
|
||||
/// Some(&[0, 1, 2, -1]),
|
||||
/// &[2, 2]).unwrap();
|
||||
///
|
||||
/// let result = decompose(&x, 2, 2).unwrap();
|
||||
/// // result will have dims [2, 2, 3]
|
||||
/// let expected = Tensor::<IntegerRep>::new(Some(&[0, 0, 0,
|
||||
/// 1, 0, 1,
|
||||
/// 1, 1, 0,
|
||||
/// -1, 0, 1]), &[2, 2, 3]).unwrap();
|
||||
/// assert_eq!(result, expected);
|
||||
///
|
||||
/// let result = decompose(&x, 3, 1).unwrap();
|
||||
///
|
||||
///
|
||||
/// // result will have dims [2, 2, 2]
|
||||
/// let expected = Tensor::<IntegerRep>::new(Some(&[0, 0,
|
||||
/// 1, 1,
|
||||
/// 1, 2,
|
||||
/// -1, 1]), &[2, 2, 2]).unwrap();
|
||||
///
|
||||
/// assert_eq!(result, expected);
|
||||
///
|
||||
/// let x = Tensor::<IntegerRep>::new(
|
||||
/// Some(&[0, 11, 23, -1]),
|
||||
/// &[2, 2]).unwrap();
|
||||
///
|
||||
/// let result = decompose(&x, 2, 5).unwrap();
|
||||
/// // result will have dims [2, 2, 6]
|
||||
/// let expected = Tensor::<IntegerRep>::new(Some(&[0, 0, 0, 0, 0, 0,
|
||||
/// 1, 0, 1, 0, 1, 1,
|
||||
/// 1, 1, 0, 1, 1, 1,
|
||||
/// -1, 0, 0, 0, 0, 1]), &[2, 2, 6]).unwrap();
|
||||
/// assert_eq!(result, expected);
|
||||
///
|
||||
/// let result = decompose(&x, 16, 2).unwrap();
|
||||
/// // result will have dims [2, 2, 3]
|
||||
/// let expected = Tensor::<IntegerRep>::new(Some(&[0, 0, 0,
|
||||
/// 1, 0, 11,
|
||||
/// 1, 1, 7,
|
||||
/// -1, 0, 1]), &[2, 2, 3]).unwrap();
|
||||
/// assert_eq!(result, expected);
|
||||
/// ```
|
||||
///
|
||||
pub fn decompose(
|
||||
x: &Tensor<IntegerRep>,
|
||||
base: usize,
|
||||
n: usize,
|
||||
) -> Result<Tensor<IntegerRep>, TensorError> {
|
||||
let mut dims = x.dims().to_vec();
|
||||
dims.push(n + 1);
|
||||
|
||||
if n == 0 {
|
||||
let mut x = x.clone();
|
||||
x.reshape(&dims)?;
|
||||
return Ok(x);
|
||||
}
|
||||
|
||||
let resp = x
|
||||
.par_iter()
|
||||
.map(|val| get_rep(val, base, n))
|
||||
// now collect the results into a Result<Vec<Vec<IntegerRep>>, DecompositionError>
|
||||
.collect::<Result<Vec<Vec<IntegerRep>>, DecompositionError>>()?
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.collect::<Vec<IntegerRep>>();
|
||||
|
||||
let output = Tensor::<i128>::new(Some(&resp), &dims)?;
|
||||
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
/// Trilu operation.
|
||||
/// # Arguments
|
||||
/// * `a` - Tensor
|
||||
@@ -429,7 +554,7 @@ pub fn downsample<T: TensorType + Send + Sync>(
|
||||
|
||||
output = output.par_enum_map(|i, _: T| {
|
||||
let coord = indices[i].clone();
|
||||
Ok(input.get(&coord))
|
||||
Ok::<_, TensorError>(input.get(&coord))
|
||||
})?;
|
||||
|
||||
Ok(output)
|
||||
@@ -489,7 +614,7 @@ pub fn gather<T: TensorType + Send + Sync>(
|
||||
.map(|(i, x)| if i == dim { index_val } else { *x })
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
Ok(input.get(&new_coord))
|
||||
Ok::<_, TensorError>(input.get(&new_coord))
|
||||
})?;
|
||||
|
||||
// Reshape the output tensor
|
||||
@@ -613,7 +738,7 @@ pub fn gather_elements<T: TensorType + Send + Sync>(
|
||||
|
||||
let val = input.get(&new_coord);
|
||||
|
||||
Ok(val)
|
||||
Ok::<_, TensorError>(val)
|
||||
})?;
|
||||
|
||||
// Reshape the output tensor
|
||||
@@ -927,7 +1052,7 @@ pub fn scatter_nd<T: TensorType + Send + Sync>(
|
||||
let index_slice = index_val.iter().map(|x| *x..*x + 1).collect::<Vec<_>>();
|
||||
let src_val = src.get_slice(&slice)?;
|
||||
output.set_slice(&index_slice, &src_val)?;
|
||||
Ok(())
|
||||
Ok::<_, TensorError>(())
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
@@ -1234,7 +1359,7 @@ pub fn concat<T: TensorType + Send + Sync>(
|
||||
index += x;
|
||||
}
|
||||
|
||||
Ok(inputs[input_index].get(&input_coord))
|
||||
Ok::<_, TensorError>(inputs[input_index].get(&input_coord))
|
||||
})?;
|
||||
|
||||
// Reshape the output tensor
|
||||
|
||||
@@ -520,6 +520,54 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> ValTensor<F> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the sign of the inner values
|
||||
pub fn sign(&self) -> Result<Self, TensorError> {
|
||||
let evals = self.int_evals()?;
|
||||
Ok(evals
|
||||
.par_enum_map(|_, val| {
|
||||
Ok::<_, TensorError>(ValType::Value(Value::known(integer_rep_to_felt(
|
||||
val.signum(),
|
||||
))))
|
||||
})?
|
||||
.into())
|
||||
}
|
||||
|
||||
/// Decompose the inner values into base `base` and `n` legs.
|
||||
pub fn decompose(&self, base: usize, n: usize) -> Result<Self, TensorError> {
|
||||
let res = self
|
||||
.get_inner()?
|
||||
.par_iter()
|
||||
.map(|x| {
|
||||
let mut is_empty = true;
|
||||
x.map(|_| is_empty = false);
|
||||
if is_empty {
|
||||
Ok::<_, TensorError>(vec![Value::<F>::unknown(); n + 1])
|
||||
} else {
|
||||
let mut res = vec![Value::unknown(); n + 1];
|
||||
let mut int_rep = 0;
|
||||
|
||||
x.map(|f| {
|
||||
int_rep = crate::fieldutils::felt_to_integer_rep(f);
|
||||
});
|
||||
let decompe = crate::tensor::ops::get_rep(&int_rep, base, n)?;
|
||||
|
||||
for (i, x) in decompe.iter().enumerate() {
|
||||
res[i] = Value::known(crate::fieldutils::integer_rep_to_felt(*x));
|
||||
}
|
||||
Ok(res)
|
||||
}
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>();
|
||||
|
||||
let mut tensor = Tensor::from(res?.into_iter().flatten().collect::<Vec<_>>().into_iter());
|
||||
let mut dims = self.dims().to_vec();
|
||||
dims.push(n + 1);
|
||||
|
||||
tensor.reshape(&dims)?;
|
||||
|
||||
Ok(tensor.into())
|
||||
}
|
||||
|
||||
/// Calls `int_evals` on the inner tensor.
|
||||
pub fn int_evals(&self) -> Result<Tensor<IntegerRep>, TensorError> {
|
||||
// finally convert to vector of integers
|
||||
@@ -574,7 +622,7 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> ValTensor<F> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Calls `get_slice` on the inner tensor.
|
||||
/// Calls `last` on the inner tensor.
|
||||
pub fn last(&self) -> Result<ValTensor<F>, TensorError> {
|
||||
let slice = match self {
|
||||
ValTensor::Value {
|
||||
@@ -595,6 +643,27 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> ValTensor<F> {
|
||||
Ok(slice)
|
||||
}
|
||||
|
||||
/// Calls `first`
|
||||
pub fn first(&self) -> Result<ValTensor<F>, TensorError> {
|
||||
let slice = match self {
|
||||
ValTensor::Value {
|
||||
inner: v,
|
||||
dims: _,
|
||||
scale,
|
||||
} => {
|
||||
let inner = v.first()?;
|
||||
let dims = inner.dims().to_vec();
|
||||
ValTensor::Value {
|
||||
inner,
|
||||
dims,
|
||||
scale: *scale,
|
||||
}
|
||||
}
|
||||
_ => return Err(TensorError::WrongMethod),
|
||||
};
|
||||
Ok(slice)
|
||||
}
|
||||
|
||||
/// Calls `get_slice` on the inner tensor.
|
||||
pub fn get_slice(&self, indices: &[Range<usize>]) -> Result<ValTensor<F>, TensorError> {
|
||||
if indices.iter().map(|x| x.end - x.start).collect::<Vec<_>>() == self.dims() {
|
||||
@@ -775,6 +844,38 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> ValTensor<F> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Calls `get_every_n` on the inner [Tensor].
|
||||
pub fn get_every_n(&mut self, n: usize) -> Result<(), TensorError> {
|
||||
match self {
|
||||
ValTensor::Value {
|
||||
inner: v, dims: d, ..
|
||||
} => {
|
||||
*v = v.get_every_n(n)?;
|
||||
*d = v.dims().to_vec();
|
||||
}
|
||||
ValTensor::Instance { .. } => {
|
||||
return Err(TensorError::WrongMethod);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Calls `exclude_every_n` on the inner [Tensor].
|
||||
pub fn exclude_every_n(&mut self, n: usize) -> Result<(), TensorError> {
|
||||
match self {
|
||||
ValTensor::Value {
|
||||
inner: v, dims: d, ..
|
||||
} => {
|
||||
*v = v.exclude_every_n(n)?;
|
||||
*d = v.dims().to_vec();
|
||||
}
|
||||
ValTensor::Instance { .. } => {
|
||||
return Err(TensorError::WrongMethod);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// remove constant zero values constants
|
||||
pub fn remove_const_zero_values(&mut self) {
|
||||
match self {
|
||||
|
||||
@@ -396,6 +396,53 @@ impl VarTensor {
|
||||
Ok(res)
|
||||
}
|
||||
|
||||
/// Helper function to get the remaining size of the column
///
/// Computes how many cells must be skipped ("flushed") at `offset` so that
/// assigning `values` does not straddle a column boundary; returns `0` when
/// the values already fit in the current column.
///
/// # Errors
/// Returns `Error::Synthesis` when `values` is longer than a whole column
/// (it could never fit), or when this tensor has more than one inner column.
pub fn get_column_flush<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
    &self,
    offset: usize,
    values: &ValTensor<F>,
) -> Result<usize, halo2_proofs::plonk::Error> {
    if values.len() > self.col_size() {
        error!("Values are too large for the column");
        return Err(halo2_proofs::plonk::Error::Synthesis);
    }

    // this can only be called on columns that have a single inner column
    if self.num_inner_cols() != 1 {
        error!("This function can only be called on columns with a single inner column");
        return Err(halo2_proofs::plonk::Error::Synthesis);
    }

    // check if the values fit in the remaining space of the column
    // NOTE(review): the .0 / .2 accesses below assume cartesian_coord returns
    // (column index, ..., row within column) — confirm against its definition.
    let current_cartesian = self.cartesian_coord(offset);
    let final_cartesian = self.cartesian_coord(offset + values.len());

    let mut flush_len = 0;
    // A differing column index means the span crosses a column boundary.
    if current_cartesian.0 != final_cartesian.0 {
        debug!("Values overflow the column, flushing to next column");
        // diff is the number of values that overflow the column
        flush_len += self.col_size() - current_cartesian.2;
    }

    Ok(flush_len)
}
|
||||
|
||||
/// Assigns [ValTensor] to the columns of the inner tensor. Whereby the values are assigned to a single column, without overflowing.
|
||||
/// So for instance if we are assigning 10 values and we are at index 18 of the column, and the columns are of length 20, we skip the last 2 values of current column and start from the beginning of the next column.
|
||||
pub fn assign_exact_column<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
|
||||
&self,
|
||||
region: &mut Region<F>,
|
||||
offset: usize,
|
||||
values: &ValTensor<F>,
|
||||
constants: &mut ConstantsMap<F>,
|
||||
) -> Result<(ValTensor<F>, usize), halo2_proofs::plonk::Error> {
|
||||
let flush_len = self.get_column_flush(offset, values)?;
|
||||
|
||||
let assigned_vals = self.assign(region, offset + flush_len, values, constants)?;
|
||||
|
||||
Ok((assigned_vals, flush_len))
|
||||
}
|
||||
|
||||
/// Assigns specific values (`ValTensor`) to the columns of the inner tensor but allows for column wrapping for accumulated operations.
|
||||
/// Duplication occurs by copying the last cell of the column to the first cell next column and creating a copy constraint between the two.
|
||||
pub fn dummy_assign_with_duplication<
|
||||
|
||||
785
src/wasm.rs
785
src/wasm.rs
@@ -1,785 +0,0 @@
|
||||
use crate::{
|
||||
circuit::{
|
||||
modules::{
|
||||
polycommit::PolyCommitChip,
|
||||
poseidon::{
|
||||
spec::{PoseidonSpec, POSEIDON_RATE, POSEIDON_WIDTH},
|
||||
PoseidonChip,
|
||||
},
|
||||
Module,
|
||||
},
|
||||
region::RegionSettings,
|
||||
},
|
||||
fieldutils::{felt_to_integer_rep, integer_rep_to_felt},
|
||||
graph::{
|
||||
modules::POSEIDON_LEN_GRAPH, quantize_float, scale_to_multiplier, GraphCircuit,
|
||||
GraphSettings,
|
||||
},
|
||||
pfsys::{
|
||||
create_proof_circuit,
|
||||
evm::aggregation_kzg::{AggregationCircuit, PoseidonTranscript},
|
||||
verify_proof_circuit, TranscriptType,
|
||||
},
|
||||
tensor::TensorType,
|
||||
CheckMode, Commitments,
|
||||
};
|
||||
use console_error_panic_hook;
|
||||
use halo2_proofs::{
|
||||
plonk::*,
|
||||
poly::{
|
||||
commitment::{CommitmentScheme, ParamsProver},
|
||||
ipa::{
|
||||
commitment::{IPACommitmentScheme, ParamsIPA},
|
||||
multiopen::{ProverIPA, VerifierIPA},
|
||||
strategy::SingleStrategy as IPASingleStrategy,
|
||||
},
|
||||
kzg::{
|
||||
commitment::{KZGCommitmentScheme, ParamsKZG},
|
||||
multiopen::{ProverSHPLONK, VerifierSHPLONK},
|
||||
strategy::SingleStrategy as KZGSingleStrategy,
|
||||
},
|
||||
VerificationStrategy,
|
||||
},
|
||||
};
|
||||
use halo2_solidity_verifier::encode_calldata;
|
||||
use halo2curves::{
|
||||
bn256::{Bn256, Fr, G1Affine},
|
||||
ff::{FromUniformBytes, PrimeField},
|
||||
};
|
||||
use snark_verifier::{loader::native::NativeLoader, system::halo2::transcript::evm::EvmTranscript};
|
||||
use std::str::FromStr;
|
||||
use wasm_bindgen::prelude::*;
|
||||
use wasm_bindgen_console_logger::DEFAULT_LOGGER;
|
||||
|
||||
#[cfg(feature = "web")]
|
||||
pub use wasm_bindgen_rayon::init_thread_pool;
|
||||
|
||||
#[wasm_bindgen]
|
||||
/// Initialize logger for wasm
|
||||
pub fn init_logger() {
|
||||
log::set_logger(&DEFAULT_LOGGER).unwrap();
|
||||
}
|
||||
|
||||
#[wasm_bindgen]
/// Initialize panic hook for wasm
///
/// Forwards Rust panic messages to the browser console via
/// `console_error_panic_hook`; `set_once` makes repeated calls harmless.
pub fn init_panic_hook() {
    console_error_panic_hook::set_once();
}
|
||||
|
||||
/// Wrapper around the halo2 encode call data method
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn encodeVerifierCalldata(
|
||||
proof: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
vk_address: Option<Vec<u8>>,
|
||||
) -> Result<Vec<u8>, JsError> {
|
||||
let snark: crate::pfsys::Snark<Fr, G1Affine> = serde_json::from_slice(&proof[..])
|
||||
.map_err(|e| JsError::new(&format!("Failed to deserialize proof: {}", e)))?;
|
||||
|
||||
let vk_address: Option<[u8; 20]> = if let Some(vk_address) = vk_address {
|
||||
let array: [u8; 20] = serde_json::from_slice(&vk_address[..])
|
||||
.map_err(|e| JsError::new(&format!("Failed to deserialize vk address: {}", e)))?;
|
||||
Some(array)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let flattened_instances = snark.instances.into_iter().flatten();
|
||||
|
||||
let encoded = encode_calldata(
|
||||
vk_address,
|
||||
&snark.proof,
|
||||
&flattened_instances.collect::<Vec<_>>(),
|
||||
);
|
||||
|
||||
Ok(encoded)
|
||||
}
|
||||
|
||||
/// Converts a hex string to a byte array
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn feltToBigEndian(array: wasm_bindgen::Clamped<Vec<u8>>) -> Result<String, JsError> {
|
||||
let felt: Fr = serde_json::from_slice(&array[..])
|
||||
.map_err(|e| JsError::new(&format!("Failed to deserialize field element: {}", e)))?;
|
||||
Ok(format!("{:?}", felt))
|
||||
}
|
||||
|
||||
/// Converts a felt to a little endian string
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn feltToLittleEndian(array: wasm_bindgen::Clamped<Vec<u8>>) -> Result<String, JsError> {
|
||||
let felt: Fr = serde_json::from_slice(&array[..])
|
||||
.map_err(|e| JsError::new(&format!("Failed to deserialize field element: {}", e)))?;
|
||||
let repr = serde_json::to_string(&felt).unwrap();
|
||||
let b: String = serde_json::from_str(&repr).unwrap();
|
||||
Ok(b)
|
||||
}
|
||||
|
||||
/// Converts a hex string to a byte array
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn feltToInt(
|
||||
array: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
) -> Result<wasm_bindgen::Clamped<Vec<u8>>, JsError> {
|
||||
let felt: Fr = serde_json::from_slice(&array[..])
|
||||
.map_err(|e| JsError::new(&format!("Failed to deserialize field element: {}", e)))?;
|
||||
Ok(wasm_bindgen::Clamped(
|
||||
serde_json::to_vec(&felt_to_integer_rep(felt))
|
||||
.map_err(|e| JsError::new(&format!("Failed to serialize integer: {}", e)))?,
|
||||
))
|
||||
}
|
||||
|
||||
/// Converts felts to a floating point element
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn feltToFloat(
|
||||
array: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
scale: crate::Scale,
|
||||
) -> Result<f64, JsError> {
|
||||
let felt: Fr = serde_json::from_slice(&array[..])
|
||||
.map_err(|e| JsError::new(&format!("Failed to deserialize field element: {}", e)))?;
|
||||
let int_rep = felt_to_integer_rep(felt);
|
||||
let multiplier = scale_to_multiplier(scale);
|
||||
Ok(int_rep as f64 / multiplier)
|
||||
}
|
||||
|
||||
/// Converts a floating point number to a hex string representing a fixed point field element
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn floatToFelt(
|
||||
input: f64,
|
||||
scale: crate::Scale,
|
||||
) -> Result<wasm_bindgen::Clamped<Vec<u8>>, JsError> {
|
||||
let int_rep =
|
||||
quantize_float(&input, 0.0, scale).map_err(|e| JsError::new(&format!("{}", e)))?;
|
||||
let felt = integer_rep_to_felt(int_rep);
|
||||
let vec = crate::pfsys::field_to_string::<halo2curves::bn256::Fr>(&felt);
|
||||
Ok(wasm_bindgen::Clamped(serde_json::to_vec(&vec).map_err(
|
||||
|e| JsError::new(&format!("Failed to serialize a float to felt{}", e)),
|
||||
)?))
|
||||
}
|
||||
|
||||
/// Generate a kzg commitment.
///
/// `message` is a JSON-serialized `Vec<Fr>`; `params_ser` is a serialized KZG
/// SRS; `vk` is a raw-bytes verifying key read against the JSON `settings`.
/// Returns the JSON-serialized commitment produced by `PolyCommitChip::commit`.
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn kzgCommit(
    message: wasm_bindgen::Clamped<Vec<u8>>,
    vk: wasm_bindgen::Clamped<Vec<u8>>,
    settings: wasm_bindgen::Clamped<Vec<u8>>,
    params_ser: wasm_bindgen::Clamped<Vec<u8>>,
) -> Result<wasm_bindgen::Clamped<Vec<u8>>, JsError> {
    let message: Vec<Fr> = serde_json::from_slice(&message[..])
        .map_err(|e| JsError::new(&format!("Failed to deserialize message: {}", e)))?;

    let mut reader = std::io::BufReader::new(&params_ser[..]);
    let params: ParamsKZG<Bn256> =
        halo2_proofs::poly::commitment::Params::<'_, G1Affine>::read(&mut reader)
            .map_err(|e| JsError::new(&format!("Failed to deserialize params: {}", e)))?;

    // The settings are needed to parameterize the vk deserialization below.
    let mut reader = std::io::BufReader::new(&vk[..]);
    let circuit_settings: GraphSettings = serde_json::from_slice(&settings[..])
        .map_err(|e| JsError::new(&format!("Failed to deserialize settings: {}", e)))?;
    let vk = VerifyingKey::<G1Affine>::read::<_, GraphCircuit>(
        &mut reader,
        halo2_proofs::SerdeFormat::RawBytes,
        circuit_settings,
    )
    .map_err(|e| JsError::new(&format!("Failed to deserialize vk: {}", e)))?;

    // NOTE(review): blinding_factors() + 1 is passed as the commitment's
    // blinding-row count — presumably must match the prover's; confirm.
    let output = PolyCommitChip::commit::<KZGCommitmentScheme<Bn256>>(
        message,
        (vk.cs().blinding_factors() + 1) as u32,
        &params,
    );

    Ok(wasm_bindgen::Clamped(
        serde_json::to_vec(&output).map_err(|e| JsError::new(&format!("{}", e)))?,
    ))
}
|
||||
|
||||
/// Converts a buffer to vector of 4 u64s representing a fixed point field element
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn bufferToVecOfFelt(
|
||||
buffer: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
) -> Result<wasm_bindgen::Clamped<Vec<u8>>, JsError> {
|
||||
// Convert the buffer to a slice
|
||||
let buffer: &[u8] = &buffer;
|
||||
|
||||
// Divide the buffer into chunks of 64 bytes
|
||||
let chunks = buffer.chunks_exact(16);
|
||||
|
||||
// Get the remainder
|
||||
let remainder = chunks.remainder();
|
||||
|
||||
// Add 0s to the remainder to make it 64 bytes
|
||||
let mut remainder = remainder.to_vec();
|
||||
|
||||
// Collect chunks into a Vec<[u8; 16]>.
|
||||
let chunks: Result<Vec<[u8; 16]>, JsError> = chunks
|
||||
.map(|slice| {
|
||||
let array: [u8; 16] = slice
|
||||
.try_into()
|
||||
.map_err(|_| JsError::new("failed to slice input chunks"))?;
|
||||
Ok(array)
|
||||
})
|
||||
.collect();
|
||||
|
||||
let mut chunks = chunks?;
|
||||
|
||||
if remainder.len() != 0 {
|
||||
remainder.resize(16, 0);
|
||||
// Convert the Vec<u8> to [u8; 16]
|
||||
let remainder_array: [u8; 16] = remainder
|
||||
.try_into()
|
||||
.map_err(|_| JsError::new("failed to slice remainder"))?;
|
||||
// append the remainder to the chunks
|
||||
chunks.push(remainder_array);
|
||||
}
|
||||
|
||||
// Convert each chunk to a field element
|
||||
let field_elements: Vec<Fr> = chunks
|
||||
.iter()
|
||||
.map(|x| PrimeField::from_u128(u8_array_to_u128_le(*x)))
|
||||
.collect();
|
||||
|
||||
Ok(wasm_bindgen::Clamped(
|
||||
serde_json::to_vec(&field_elements)
|
||||
.map_err(|e| JsError::new(&format!("Failed to serialize field elements: {}", e)))?,
|
||||
))
|
||||
}
|
||||
|
||||
/// Generate a poseidon hash in browser. Input message
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn poseidonHash(
|
||||
message: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
) -> Result<wasm_bindgen::Clamped<Vec<u8>>, JsError> {
|
||||
let message: Vec<Fr> = serde_json::from_slice(&message[..])
|
||||
.map_err(|e| JsError::new(&format!("Failed to deserialize message: {}", e)))?;
|
||||
|
||||
let output =
|
||||
PoseidonChip::<PoseidonSpec, POSEIDON_WIDTH, POSEIDON_RATE, POSEIDON_LEN_GRAPH>::run(
|
||||
message.clone(),
|
||||
)
|
||||
.map_err(|e| JsError::new(&format!("{}", e)))?;
|
||||
|
||||
Ok(wasm_bindgen::Clamped(serde_json::to_vec(&output).map_err(
|
||||
|e| JsError::new(&format!("Failed to serialize poseidon hash output: {}", e)),
|
||||
)?))
|
||||
}
|
||||
|
||||
/// Generate a witness file from input.json, compiled model and a settings.json file.
///
/// `compiled_circuit` is bincode-encoded; `input` is JSON. Returns the
/// JSON-serialized witness.
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn genWitness(
    compiled_circuit: wasm_bindgen::Clamped<Vec<u8>>,
    input: wasm_bindgen::Clamped<Vec<u8>>,
) -> Result<Vec<u8>, JsError> {
    let mut circuit: crate::graph::GraphCircuit = bincode::deserialize(&compiled_circuit[..])
        .map_err(|e| JsError::new(&format!("Failed to deserialize compiled model: {}", e)))?;
    let input: crate::graph::input::GraphData = serde_json::from_slice(&input[..])
        .map_err(|e| JsError::new(&format!("Failed to deserialize input: {}", e)))?;

    let mut input = circuit
        .load_graph_input(&input)
        .map_err(|e| JsError::new(&format!("{}", e)))?;

    // Forward pass with no SRS / vk and RegionSettings::all_false()
    // (presumably disables all region-level optimizations — confirm).
    let witness = circuit
        .forward::<KZGCommitmentScheme<Bn256>>(&mut input, None, None, RegionSettings::all_false())
        .map_err(|e| JsError::new(&format!("{}", e)))?;

    serde_json::to_vec(&witness)
        .map_err(|e| JsError::new(&format!("Failed to serialize witness: {}", e)))
}
|
||||
|
||||
/// Generate verifying key in browser
///
/// `compiled_circuit` is bincode-encoded; `params_ser` is a serialized KZG
/// SRS. Returns the verifying key in raw-bytes serde format.
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn genVk(
    compiled_circuit: wasm_bindgen::Clamped<Vec<u8>>,
    params_ser: wasm_bindgen::Clamped<Vec<u8>>,
    compress_selectors: bool,
) -> Result<Vec<u8>, JsError> {
    // Read in kzg params
    let mut reader = std::io::BufReader::new(&params_ser[..]);
    let params: ParamsKZG<Bn256> =
        halo2_proofs::poly::commitment::Params::<'_, G1Affine>::read(&mut reader)
            .map_err(|e| JsError::new(&format!("Failed to deserialize params: {}", e)))?;
    // Read in compiled circuit
    let circuit: crate::graph::GraphCircuit = bincode::deserialize(&compiled_circuit[..])
        .map_err(|e| JsError::new(&format!("Failed to deserialize compiled model: {}", e)))?;

    // Create verifying key
    let vk = create_vk_wasm::<KZGCommitmentScheme<Bn256>, Fr, GraphCircuit>(
        &circuit,
        &params,
        compress_selectors,
    )
    .map_err(Box::<dyn std::error::Error>::from)
    .map_err(|e| JsError::new(&format!("Failed to create verifying key: {}", e)))?;

    let mut serialized_vk = Vec::new();
    vk.write(&mut serialized_vk, halo2_proofs::SerdeFormat::RawBytes)
        .map_err(|e| JsError::new(&format!("Failed to serialize vk: {}", e)))?;

    Ok(serialized_vk)
}
|
||||
|
||||
/// Generate proving key in browser
///
/// `vk` is a raw-bytes verifying key; `compiled_circuit` is bincode-encoded;
/// `params_ser` is a serialized KZG SRS. Returns the proving key in raw-bytes
/// serde format.
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn genPk(
    vk: wasm_bindgen::Clamped<Vec<u8>>,
    compiled_circuit: wasm_bindgen::Clamped<Vec<u8>>,
    params_ser: wasm_bindgen::Clamped<Vec<u8>>,
) -> Result<Vec<u8>, JsError> {
    // Read in kzg params
    let mut reader = std::io::BufReader::new(&params_ser[..]);
    let params: ParamsKZG<Bn256> =
        halo2_proofs::poly::commitment::Params::<'_, G1Affine>::read(&mut reader)
            .map_err(|e| JsError::new(&format!("Failed to deserialize params: {}", e)))?;
    // Read in compiled circuit
    let circuit: crate::graph::GraphCircuit = bincode::deserialize(&compiled_circuit[..])
        .map_err(|e| JsError::new(&format!("Failed to deserialize compiled model: {}", e)))?;

    // Read in verifying key (parameterized by the circuit's own settings)
    let mut reader = std::io::BufReader::new(&vk[..]);
    let vk = VerifyingKey::<G1Affine>::read::<_, GraphCircuit>(
        &mut reader,
        halo2_proofs::SerdeFormat::RawBytes,
        circuit.settings().clone(),
    )
    .map_err(|e| JsError::new(&format!("Failed to deserialize verifying key: {}", e)))?;
    // Create proving key
    let pk = create_pk_wasm::<KZGCommitmentScheme<Bn256>, Fr, GraphCircuit>(vk, &circuit, &params)
        .map_err(Box::<dyn std::error::Error>::from)
        .map_err(|e| JsError::new(&format!("Failed to create proving key: {}", e)))?;

    let mut serialized_pk = Vec::new();
    pk.write(&mut serialized_pk, halo2_proofs::SerdeFormat::RawBytes)
        .map_err(|e| JsError::new(&format!("Failed to serialize pk: {}", e)))?;

    Ok(serialized_pk)
}
|
||||
|
||||
/// Verify proof in browser using wasm
///
/// Dispatches on the settings' commitment scheme (KZG vs IPA) and the proof's
/// transcript type (EVM vs Poseidon), so the SRS in `srs` must match the
/// commitment recorded in `settings`. Returns `Ok(true)` on success; a
/// failed verification surfaces as a `JsError`.
#[wasm_bindgen]
pub fn verify(
    proof_js: wasm_bindgen::Clamped<Vec<u8>>,
    vk: wasm_bindgen::Clamped<Vec<u8>>,
    settings: wasm_bindgen::Clamped<Vec<u8>>,
    srs: wasm_bindgen::Clamped<Vec<u8>>,
) -> Result<bool, JsError> {
    let circuit_settings: GraphSettings = serde_json::from_slice(&settings[..])
        .map_err(|e| JsError::new(&format!("Failed to deserialize settings: {}", e)))?;

    let proof: crate::pfsys::Snark<Fr, G1Affine> = serde_json::from_slice(&proof_js[..])
        .map_err(|e| JsError::new(&format!("Failed to deserialize proof: {}", e)))?;

    let mut reader = std::io::BufReader::new(&vk[..]);
    let vk = VerifyingKey::<G1Affine>::read::<_, GraphCircuit>(
        &mut reader,
        halo2_proofs::SerdeFormat::RawBytes,
        circuit_settings.clone(),
    )
    .map_err(|e| JsError::new(&format!("Failed to deserialize vk: {}", e)))?;

    // Circuit size: 2^logrows rows.
    let orig_n = 1 << circuit_settings.run_args.logrows;

    let commitment = circuit_settings.run_args.commitment.into();

    let mut reader = std::io::BufReader::new(&srs[..]);
    let result = match commitment {
        Commitments::KZG => {
            let params: ParamsKZG<Bn256> =
                halo2_proofs::poly::commitment::Params::<'_, G1Affine>::read(&mut reader)
                    .map_err(|e| JsError::new(&format!("Failed to deserialize params: {}", e)))?;
            let strategy = KZGSingleStrategy::new(params.verifier_params());
            // Same verifier, different transcript per proof type.
            match proof.transcript_type {
                TranscriptType::EVM => verify_proof_circuit::<
                    VerifierSHPLONK<'_, Bn256>,
                    KZGCommitmentScheme<Bn256>,
                    KZGSingleStrategy<_>,
                    _,
                    EvmTranscript<G1Affine, _, _, _>,
                >(&proof, &params, &vk, strategy, orig_n),

                TranscriptType::Poseidon => {
                    verify_proof_circuit::<
                        VerifierSHPLONK<'_, Bn256>,
                        KZGCommitmentScheme<Bn256>,
                        KZGSingleStrategy<_>,
                        _,
                        PoseidonTranscript<NativeLoader, _>,
                    >(&proof, &params, &vk, strategy, orig_n)
                }
            }
        }
        Commitments::IPA => {
            let params: ParamsIPA<_> =
                halo2_proofs::poly::commitment::Params::<'_, G1Affine>::read(&mut reader)
                    .map_err(|e| JsError::new(&format!("Failed to deserialize params: {}", e)))?;
            let strategy = IPASingleStrategy::new(params.verifier_params());
            match proof.transcript_type {
                TranscriptType::EVM => verify_proof_circuit::<
                    VerifierIPA<_>,
                    IPACommitmentScheme<G1Affine>,
                    IPASingleStrategy<_>,
                    _,
                    EvmTranscript<G1Affine, _, _, _>,
                >(&proof, &params, &vk, strategy, orig_n),
                TranscriptType::Poseidon => {
                    verify_proof_circuit::<
                        VerifierIPA<_>,
                        IPACommitmentScheme<G1Affine>,
                        IPASingleStrategy<_>,
                        _,
                        PoseidonTranscript<NativeLoader, _>,
                    >(&proof, &params, &vk, strategy, orig_n)
                }
            }
        }
    };

    match result {
        Ok(_) => Ok(true),
        Err(e) => Err(JsError::new(&format!("{}", e))),
    }
}
|
||||
|
||||
#[wasm_bindgen]
#[allow(non_snake_case)]
/// Verify aggregate proof in browser using wasm
///
/// Like `verify`, but for an `AggregationCircuit` vk (no settings needed —
/// its params type is `()`); `logrows` and the commitment scheme name are
/// passed explicitly instead of coming from settings.
pub fn verifyAggr(
    proof_js: wasm_bindgen::Clamped<Vec<u8>>,
    vk: wasm_bindgen::Clamped<Vec<u8>>,
    logrows: u64,
    srs: wasm_bindgen::Clamped<Vec<u8>>,
    commitment: &str,
) -> Result<bool, JsError> {
    let proof: crate::pfsys::Snark<Fr, G1Affine> = serde_json::from_slice(&proof_js[..])
        .map_err(|e| JsError::new(&format!("Failed to deserialize proof: {}", e)))?;

    let mut reader = std::io::BufReader::new(&vk[..]);
    let vk = VerifyingKey::<G1Affine>::read::<_, AggregationCircuit>(
        &mut reader,
        halo2_proofs::SerdeFormat::RawBytes,
        (),
    )
    .map_err(|e| JsError::new(&format!("Failed to deserialize vk: {}", e)))?;

    // Parse the commitment scheme name ("KZG" / "IPA").
    let commit = Commitments::from_str(commitment).map_err(|e| JsError::new(&format!("{}", e)))?;

    // Circuit size: 2^logrows rows.
    let orig_n = 1 << logrows;

    let mut reader = std::io::BufReader::new(&srs[..]);
    let result = match commit {
        Commitments::KZG => {
            let params: ParamsKZG<Bn256> =
                halo2_proofs::poly::commitment::Params::<'_, G1Affine>::read(&mut reader)
                    .map_err(|e| JsError::new(&format!("Failed to deserialize params: {}", e)))?;
            let strategy = KZGSingleStrategy::new(params.verifier_params());
            // Same verifier, different transcript per proof type.
            match proof.transcript_type {
                TranscriptType::EVM => verify_proof_circuit::<
                    VerifierSHPLONK<'_, Bn256>,
                    KZGCommitmentScheme<Bn256>,
                    KZGSingleStrategy<_>,
                    _,
                    EvmTranscript<G1Affine, _, _, _>,
                >(&proof, &params, &vk, strategy, orig_n),

                TranscriptType::Poseidon => {
                    verify_proof_circuit::<
                        VerifierSHPLONK<'_, Bn256>,
                        KZGCommitmentScheme<Bn256>,
                        KZGSingleStrategy<_>,
                        _,
                        PoseidonTranscript<NativeLoader, _>,
                    >(&proof, &params, &vk, strategy, orig_n)
                }
            }
        }
        Commitments::IPA => {
            let params: ParamsIPA<_> =
                halo2_proofs::poly::commitment::Params::<'_, G1Affine>::read(&mut reader)
                    .map_err(|e| JsError::new(&format!("Failed to deserialize params: {}", e)))?;
            let strategy = IPASingleStrategy::new(params.verifier_params());
            match proof.transcript_type {
                TranscriptType::EVM => verify_proof_circuit::<
                    VerifierIPA<_>,
                    IPACommitmentScheme<G1Affine>,
                    IPASingleStrategy<_>,
                    _,
                    EvmTranscript<G1Affine, _, _, _>,
                >(&proof, &params, &vk, strategy, orig_n),
                TranscriptType::Poseidon => {
                    verify_proof_circuit::<
                        VerifierIPA<_>,
                        IPACommitmentScheme<G1Affine>,
                        IPASingleStrategy<_>,
                        _,
                        PoseidonTranscript<NativeLoader, _>,
                    >(&proof, &params, &vk, strategy, orig_n)
                }
            }
        }
    };

    match result {
        Ok(_) => Ok(true),
        Err(e) => Err(JsError::new(&format!("{}", e))),
    }
}
|
||||
|
||||
/// Prove in browser using wasm
///
/// `witness` is a JSON `GraphWitness`, `pk` a raw-bytes proving key,
/// `compiled_circuit` a bincode-encoded circuit, and `srs` a serialized SRS
/// matching the circuit settings' commitment scheme. Always proves with an
/// EVM transcript and `CheckMode::UNSAFE`. Returns the JSON-serialized proof.
#[wasm_bindgen]
pub fn prove(
    witness: wasm_bindgen::Clamped<Vec<u8>>,
    pk: wasm_bindgen::Clamped<Vec<u8>>,
    compiled_circuit: wasm_bindgen::Clamped<Vec<u8>>,
    srs: wasm_bindgen::Clamped<Vec<u8>>,
) -> Result<Vec<u8>, JsError> {
    // det-prove builds log at Debug for deterministic-proving diagnostics.
    #[cfg(feature = "det-prove")]
    log::set_max_level(log::LevelFilter::Debug);
    #[cfg(not(feature = "det-prove"))]
    log::set_max_level(log::LevelFilter::Info);

    // read in circuit
    let mut circuit: crate::graph::GraphCircuit = bincode::deserialize(&compiled_circuit[..])
        .map_err(|e| JsError::new(&format!("Failed to deserialize circuit: {}", e)))?;

    // read in model input
    let data: crate::graph::GraphWitness = serde_json::from_slice(&witness[..])
        .map_err(|e| JsError::new(&format!("Failed to deserialize witness: {}", e)))?;

    // read in proving key
    let mut reader = std::io::BufReader::new(&pk[..]);
    let pk = ProvingKey::<G1Affine>::read::<_, GraphCircuit>(
        &mut reader,
        halo2_proofs::SerdeFormat::RawBytes,
        circuit.settings().clone(),
    )
    .map_err(|e| JsError::new(&format!("Failed to deserialize proving key: {}", e)))?;

    // prep public inputs
    circuit
        .load_graph_witness(&data)
        .map_err(|e| JsError::new(&format!("{}", e)))?;
    let public_inputs = circuit
        .prepare_public_inputs(&data)
        .map_err(|e| JsError::new(&format!("{}", e)))?;
    let proof_split_commits: Option<crate::pfsys::ProofSplitCommit> = data.into();

    // read in kzg params
    let mut reader = std::io::BufReader::new(&srs[..]);
    let commitment = circuit.settings().run_args.commitment.into();
    // creates and verifies the proof
    let proof = match commitment {
        Commitments::KZG => {
            let params: ParamsKZG<Bn256> =
                halo2_proofs::poly::commitment::Params::<'_, G1Affine>::read(&mut reader)
                    .map_err(|e| JsError::new(&format!("Failed to deserialize srs: {}", e)))?;

            create_proof_circuit::<
                KZGCommitmentScheme<Bn256>,
                _,
                ProverSHPLONK<_>,
                VerifierSHPLONK<_>,
                KZGSingleStrategy<_>,
                _,
                EvmTranscript<_, _, _, _>,
                EvmTranscript<_, _, _, _>,
            >(
                circuit,
                vec![public_inputs],
                &params,
                &pk,
                CheckMode::UNSAFE,
                crate::Commitments::KZG,
                TranscriptType::EVM,
                proof_split_commits,
                None,
            )
        }
        Commitments::IPA => {
            let params: ParamsIPA<_> =
                halo2_proofs::poly::commitment::Params::<'_, G1Affine>::read(&mut reader)
                    .map_err(|e| JsError::new(&format!("Failed to deserialize srs: {}", e)))?;

            create_proof_circuit::<
                IPACommitmentScheme<G1Affine>,
                _,
                ProverIPA<_>,
                VerifierIPA<_>,
                IPASingleStrategy<_>,
                _,
                EvmTranscript<_, _, _, _>,
                EvmTranscript<_, _, _, _>,
            >(
                circuit,
                vec![public_inputs],
                &params,
                &pk,
                CheckMode::UNSAFE,
                crate::Commitments::IPA,
                TranscriptType::EVM,
                proof_split_commits,
                None,
            )
        }
    }
    .map_err(|e| JsError::new(&format!("{}", e)))?;

    Ok(serde_json::to_string(&proof)
        .map_err(|e| JsError::new(&format!("{}", e)))?
        .into_bytes())
}
|
||||
|
||||
// VALIDATION FUNCTIONS
|
||||
|
||||
/// Witness file validation
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn witnessValidation(witness: wasm_bindgen::Clamped<Vec<u8>>) -> Result<bool, JsError> {
|
||||
let _: crate::graph::GraphWitness = serde_json::from_slice(&witness[..])
|
||||
.map_err(|e| JsError::new(&format!("Failed to deserialize witness: {}", e)))?;
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
/// Compiled circuit validation
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn compiledCircuitValidation(
|
||||
compiled_circuit: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
) -> Result<bool, JsError> {
|
||||
let _: crate::graph::GraphCircuit = bincode::deserialize(&compiled_circuit[..])
|
||||
.map_err(|e| JsError::new(&format!("Failed to deserialize compiled circuit: {}", e)))?;
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
/// Input file validation
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn inputValidation(input: wasm_bindgen::Clamped<Vec<u8>>) -> Result<bool, JsError> {
|
||||
let _: crate::graph::input::GraphData = serde_json::from_slice(&input[..])
|
||||
.map_err(|e| JsError::new(&format!("Failed to deserialize input: {}", e)))?;
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
/// Proof file validation
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn proofValidation(proof: wasm_bindgen::Clamped<Vec<u8>>) -> Result<bool, JsError> {
|
||||
let _: crate::pfsys::Snark<Fr, G1Affine> = serde_json::from_slice(&proof[..])
|
||||
.map_err(|e| JsError::new(&format!("Failed to deserialize proof: {}", e)))?;
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
/// Vk file validation
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn vkValidation(
|
||||
vk: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
settings: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
) -> Result<bool, JsError> {
|
||||
let circuit_settings: GraphSettings = serde_json::from_slice(&settings[..])
|
||||
.map_err(|e| JsError::new(&format!("Failed to deserialize settings: {}", e)))?;
|
||||
let mut reader = std::io::BufReader::new(&vk[..]);
|
||||
let _ = VerifyingKey::<G1Affine>::read::<_, GraphCircuit>(
|
||||
&mut reader,
|
||||
halo2_proofs::SerdeFormat::RawBytes,
|
||||
circuit_settings,
|
||||
)
|
||||
.map_err(|e| JsError::new(&format!("Failed to deserialize vk: {}", e)))?;
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
/// Pk file validation
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn pkValidation(
|
||||
pk: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
settings: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
) -> Result<bool, JsError> {
|
||||
let circuit_settings: GraphSettings = serde_json::from_slice(&settings[..])
|
||||
.map_err(|e| JsError::new(&format!("Failed to deserialize settings: {}", e)))?;
|
||||
let mut reader = std::io::BufReader::new(&pk[..]);
|
||||
let _ = ProvingKey::<G1Affine>::read::<_, GraphCircuit>(
|
||||
&mut reader,
|
||||
halo2_proofs::SerdeFormat::RawBytes,
|
||||
circuit_settings,
|
||||
)
|
||||
.map_err(|e| JsError::new(&format!("Failed to deserialize proving key: {}", e)))?;
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
/// Settings file validation
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn settingsValidation(settings: wasm_bindgen::Clamped<Vec<u8>>) -> Result<bool, JsError> {
|
||||
let _: GraphSettings = serde_json::from_slice(&settings[..])
|
||||
.map_err(|e| JsError::new(&format!("Failed to deserialize settings: {}", e)))?;
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
/// Srs file validation
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn srsValidation(srs: wasm_bindgen::Clamped<Vec<u8>>) -> Result<bool, JsError> {
|
||||
let mut reader = std::io::BufReader::new(&srs[..]);
|
||||
let _: ParamsKZG<Bn256> =
|
||||
halo2_proofs::poly::commitment::Params::<'_, G1Affine>::read(&mut reader)
|
||||
.map_err(|e| JsError::new(&format!("Failed to deserialize params: {}", e)))?;
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
// HELPER FUNCTIONS
|
||||
|
||||
/// Creates a [VerifyingKey] for a [GraphCircuit] (`circuit`) with specific [CommitmentScheme] parameters (`params`) for the WASM target
///
/// (Previous doc said ProvingKey; this returns a verifying key.)
#[cfg(target_arch = "wasm32")]
pub fn create_vk_wasm<Scheme: CommitmentScheme, F: PrimeField + TensorType, C: Circuit<F>>(
    circuit: &C,
    params: &'_ Scheme::ParamsProver,
    compress_selectors: bool,
) -> Result<VerifyingKey<Scheme::Curve>, halo2_proofs::plonk::Error>
where
    C: Circuit<Scheme::Scalar>,
    <Scheme as CommitmentScheme>::Scalar: FromUniformBytes<64>,
{
    // Keygen runs on the circuit shape only, not witness values.
    let empty_circuit = <C as Circuit<F>>::without_witnesses(circuit);

    // Initialize the verifying key
    let vk = keygen_vk_custom(params, &empty_circuit, compress_selectors)?;
    Ok(vk)
}
|
||||
/// Creates a [ProvingKey] from a [VerifyingKey] for a [GraphCircuit] (`circuit`) with specific [CommitmentScheme] parameters (`params`) for the WASM target
#[cfg(target_arch = "wasm32")]
pub fn create_pk_wasm<Scheme: CommitmentScheme, F: PrimeField + TensorType, C: Circuit<F>>(
    vk: VerifyingKey<Scheme::Curve>,
    circuit: &C,
    params: &'_ Scheme::ParamsProver,
) -> Result<ProvingKey<Scheme::Curve>, halo2_proofs::plonk::Error>
where
    C: Circuit<Scheme::Scalar>,
    <Scheme as CommitmentScheme>::Scalar: FromUniformBytes<64>,
{
    // Keygen runs on the circuit shape only, not witness values.
    let empty_circuit = <C as Circuit<F>>::without_witnesses(circuit);

    // Initialize the proving key
    let pk = keygen_pk(params, vk, &empty_circuit)?;
    Ok(pk)
}
|
||||
|
||||
///
|
||||
pub fn u8_array_to_u128_le(arr: [u8; 16]) -> u128 {
|
||||
let mut n: u128 = 0;
|
||||
for &b in arr.iter().rev() {
|
||||
n <<= 8;
|
||||
n |= b as u128;
|
||||
}
|
||||
n
|
||||
}
|
||||
Binary file not shown.
BIN
tests/assets/pk.key
Normal file
BIN
tests/assets/pk.key
Normal file
Binary file not shown.
1
tests/assets/proof.json
Normal file
1
tests/assets/proof.json
Normal file
File diff suppressed because one or more lines are too long
@@ -8,7 +8,7 @@
|
||||
"param_scale": 0,
|
||||
"scale_rebase_multiplier": 10,
|
||||
"lookup_range": [
|
||||
-2,
|
||||
0,
|
||||
0
|
||||
],
|
||||
"logrows": 6,
|
||||
@@ -24,15 +24,19 @@
|
||||
"param_visibility": "Private",
|
||||
"div_rebasing": false,
|
||||
"rebase_frac_zero_constants": false,
|
||||
"check_mode": "UNSAFE"
|
||||
"check_mode": "UNSAFE",
|
||||
"commitment": "KZG",
|
||||
"decomp_base": 128,
|
||||
"decomp_legs": 2
|
||||
},
|
||||
"num_rows": 16,
|
||||
"num_rows": 46,
|
||||
"total_assignments": 92,
|
||||
"total_const_size": 3,
|
||||
"total_dynamic_col_size": 0,
|
||||
"max_dynamic_input_len": 0,
|
||||
"num_dynamic_lookups": 0,
|
||||
"num_shuffles": 0,
|
||||
"total_shuffle_col_size": 0,
|
||||
"total_assignments": 32,
|
||||
"total_const_size": 8,
|
||||
"model_instance_shapes": [
|
||||
[
|
||||
1,
|
||||
@@ -54,12 +58,19 @@
|
||||
]
|
||||
]
|
||||
},
|
||||
"required_lookups": [
|
||||
"ReLU"
|
||||
"required_lookups": [],
|
||||
"required_range_checks": [
|
||||
[
|
||||
-1,
|
||||
1
|
||||
],
|
||||
[
|
||||
0,
|
||||
127
|
||||
]
|
||||
],
|
||||
"required_range_checks": [],
|
||||
"check_mode": "UNSAFE",
|
||||
"version": "0.0.0",
|
||||
"num_blinding_factors": null,
|
||||
"timestamp": 1702474230544
|
||||
"timestamp": 1726429587279
|
||||
}
|
||||
BIN
tests/assets/vk.key
Normal file
BIN
tests/assets/vk.key
Normal file
Binary file not shown.
@@ -1 +1 @@
|
||||
{"inputs":[["0200000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000"]],"pretty_elements":{"rescaled_inputs":[["2","1","1"]],"inputs":[["0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000001"]],"processed_inputs":[],"processed_params":[],"processed_outputs":[],"rescaled_outputs":[["0","0","0","0"]],"outputs":[["0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000"]]},"outputs":[["0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]],"processed_inputs":null,"processed_params":null,"processed_outputs":null,"max_lookup_inputs":0,"min_lookup_inputs":-1,"max_range_size":0}
|
||||
{"inputs":[["0200000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000"]],"pretty_elements":{"rescaled_inputs":[["2","1","1"]],"inputs":[["0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000001"]],"processed_inputs":[],"processed_params":[],"processed_outputs":[],"rescaled_outputs":[["0","0","0","0"]],"outputs":[["0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000"]]},"outputs":[["0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]],"processed_inputs":null,"processed_params":null,"processed_outputs":null,"max_lookup_inputs":0,"min_lookup_inputs":0,"max_range_size":127}
|
||||
@@ -1,4 +1,4 @@
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
#[cfg(test)]
|
||||
mod native_tests {
|
||||
|
||||
@@ -7,11 +7,15 @@ mod native_tests {
|
||||
// use ezkl::circuit::table::RESERVED_BLINDING_ROWS_PAD;
|
||||
use ezkl::graph::input::{FileSource, FileSourceInner, GraphData};
|
||||
use ezkl::graph::{DataSource, GraphSettings, GraphWitness};
|
||||
use ezkl::pfsys::Snark;
|
||||
use ezkl::Commitments;
|
||||
use halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme;
|
||||
use halo2curves::bn256::Bn256;
|
||||
use lazy_static::lazy_static;
|
||||
use rand::Rng;
|
||||
use std::env::var;
|
||||
use std::io::{Read, Write};
|
||||
use std::path::PathBuf;
|
||||
use std::process::{Child, Command};
|
||||
use std::sync::Once;
|
||||
static COMPILE: Once = Once::new();
|
||||
@@ -241,8 +245,8 @@ mod native_tests {
|
||||
"1l_conv_transpose",
|
||||
"1l_upsample",
|
||||
"1l_identity", //35
|
||||
"idolmodel",
|
||||
"trig",
|
||||
"idolmodel", // too big evm
|
||||
"trig", // too big evm
|
||||
"prelu_gmm",
|
||||
"lstm",
|
||||
"rnn", //40
|
||||
@@ -983,16 +987,34 @@ mod native_tests {
|
||||
mod tests_evm {
|
||||
use seq_macro::seq;
|
||||
use crate::native_tests::TESTS_EVM;
|
||||
use crate::native_tests::TESTS;
|
||||
use crate::native_tests::TESTS_EVM_AGGR;
|
||||
use test_case::test_case;
|
||||
use crate::native_tests::kzg_evm_prove_and_verify;
|
||||
use crate::native_tests::kzg_evm_prove_and_verify_render_seperately;
|
||||
use crate::native_tests::kzg_evm_prove_and_verify_reusable_verifier;
|
||||
|
||||
use crate::native_tests::kzg_evm_on_chain_input_prove_and_verify;
|
||||
use crate::native_tests::kzg_evm_aggr_prove_and_verify;
|
||||
use tempdir::TempDir;
|
||||
use crate::native_tests::Hardfork;
|
||||
use crate::native_tests::run_js_tests;
|
||||
use ezkl::logger::init_logger;
|
||||
use crate::native_tests::lazy_static;
|
||||
use std::sync::Once;
|
||||
|
||||
// Global variables to store verifier hashes and identical verifiers
|
||||
lazy_static! {
|
||||
static ref ANVIL_INSTANCE: std::sync::Mutex<Option<std::process::Child>> = std::sync::Mutex::new(None);
|
||||
}
|
||||
|
||||
static INIT: Once = Once::new();
|
||||
|
||||
fn initialize() {
|
||||
INIT.call_once(|| {
|
||||
let anvil_child = crate::native_tests::start_anvil(false, Hardfork::Latest);
|
||||
*ANVIL_INSTANCE.lock().unwrap() = Some(anvil_child);
|
||||
});
|
||||
}
|
||||
|
||||
/// Currently only on chain inputs that return a non-negative value are supported.
|
||||
const TESTS_ON_CHAIN_INPUT: [&str; 17] = [
|
||||
@@ -1104,6 +1126,50 @@ mod native_tests {
|
||||
|
||||
});
|
||||
|
||||
seq!(N in 0..=93 {
|
||||
#(#[test_case(TESTS[N])])*
|
||||
fn kzg_evm_prove_and_verify_reusable_verifier_(test: &str) {
|
||||
initialize();
|
||||
crate::native_tests::init_binary();
|
||||
let test_dir = TempDir::new(test).unwrap();
|
||||
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
|
||||
let _anvil_child = crate::native_tests::start_anvil(false, Hardfork::Latest);
|
||||
init_logger();
|
||||
log::error!("Running kzg_evm_prove_and_verify_reusable_verifier_ for test: {}", test);
|
||||
// default vis
|
||||
kzg_evm_prove_and_verify_reusable_verifier(2, path, test.to_string(), "private", "private", "public", false);
|
||||
// public/public vis
|
||||
kzg_evm_prove_and_verify_reusable_verifier(2, path, test.to_string(), "public", "private", "public", false);
|
||||
// hashed input
|
||||
kzg_evm_prove_and_verify_reusable_verifier(2, path, test.to_string(), "hashed", "private", "public", false);
|
||||
|
||||
test_dir.close().unwrap();
|
||||
}
|
||||
|
||||
#(#[test_case(TESTS[N])])*
|
||||
fn kzg_evm_prove_and_verify_reusable_verifier_with_overflow_(test: &str) {
|
||||
initialize();
|
||||
// verifier too big to fit on chain with overflow calibration target
|
||||
if test == "1l_eltwise_div" || test == "lenet_5" || test == "ltsf" || test == "lstm_large" {
|
||||
return;
|
||||
}
|
||||
crate::native_tests::init_binary();
|
||||
let test_dir = TempDir::new(test).unwrap();
|
||||
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
|
||||
let _anvil_child = crate::native_tests::start_anvil(false, Hardfork::Latest);
|
||||
init_logger();
|
||||
log::error!("Running kzg_evm_prove_and_verify_reusable_verifier_with_overflow_ for test: {}", test);
|
||||
// default vis
|
||||
kzg_evm_prove_and_verify_reusable_verifier(2, path, test.to_string(), "private", "private", "public", true);
|
||||
// public/public vis
|
||||
kzg_evm_prove_and_verify_reusable_verifier(2, path, test.to_string(), "public", "private", "public", true);
|
||||
// hashed input
|
||||
kzg_evm_prove_and_verify_reusable_verifier(2, path, test.to_string(), "hashed", "private", "public", true);
|
||||
|
||||
test_dir.close().unwrap();
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
seq!(N in 0..=22 {
|
||||
|
||||
@@ -1120,19 +1186,6 @@ mod native_tests {
|
||||
|
||||
}
|
||||
|
||||
#(#[test_case(TESTS_EVM[N])])*
|
||||
fn kzg_evm_prove_and_verify_render_seperately_(test: &str) {
|
||||
crate::native_tests::init_binary();
|
||||
let test_dir = TempDir::new(test).unwrap();
|
||||
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
|
||||
let _anvil_child = crate::native_tests::start_anvil(false, Hardfork::Latest);
|
||||
kzg_evm_prove_and_verify_render_seperately(2, path, test.to_string(), "private", "private", "public");
|
||||
#[cfg(not(feature = "icicle"))]
|
||||
run_js_tests(path, test.to_string(), "testBrowserEvmVerify", true);
|
||||
test_dir.close().unwrap();
|
||||
|
||||
}
|
||||
|
||||
|
||||
#(#[test_case(TESTS_EVM[N])])*
|
||||
fn kzg_evm_hashed_input_prove_and_verify_(test: &str) {
|
||||
@@ -1883,7 +1936,7 @@ mod native_tests {
|
||||
|
||||
// deploy the verifier
|
||||
let args = vec![
|
||||
"deploy-evm-verifier",
|
||||
"deploy-evm",
|
||||
rpc_arg.as_str(),
|
||||
addr_path_arg.as_str(),
|
||||
"--sol-code-path",
|
||||
@@ -2114,11 +2167,7 @@ mod native_tests {
|
||||
assert!(status.success());
|
||||
|
||||
// deploy the verifier
|
||||
let mut args = vec![
|
||||
"deploy-evm-verifier",
|
||||
rpc_arg.as_str(),
|
||||
addr_path_arg.as_str(),
|
||||
];
|
||||
let mut args = vec!["deploy-evm", rpc_arg.as_str(), addr_path_arg.as_str()];
|
||||
|
||||
args.push("--sol-code-path");
|
||||
args.push(sol_arg.as_str());
|
||||
@@ -2160,13 +2209,14 @@ mod native_tests {
|
||||
}
|
||||
|
||||
// prove-serialize-verify, the usual full path
|
||||
fn kzg_evm_prove_and_verify_render_seperately(
|
||||
fn kzg_evm_prove_and_verify_reusable_verifier(
|
||||
num_inner_columns: usize,
|
||||
test_dir: &str,
|
||||
example_name: String,
|
||||
input_visibility: &str,
|
||||
param_visibility: &str,
|
||||
output_visibility: &str,
|
||||
overflow: bool,
|
||||
) {
|
||||
let anvil_url = ANVIL_URL.as_str();
|
||||
|
||||
@@ -2179,7 +2229,7 @@ mod native_tests {
|
||||
output_visibility,
|
||||
num_inner_columns,
|
||||
None,
|
||||
false,
|
||||
overflow,
|
||||
"single",
|
||||
Commitments::KZG,
|
||||
2,
|
||||
@@ -2190,18 +2240,21 @@ mod native_tests {
|
||||
|
||||
let vk_arg = format!("{}/{}/key.vk", test_dir, example_name);
|
||||
let rpc_arg = format!("--rpc-url={}", anvil_url);
|
||||
// addr path for verifier manager contract
|
||||
let addr_path_arg = format!("--addr-path={}/{}/addr.txt", test_dir, example_name);
|
||||
let verifier_manager_arg: String;
|
||||
let settings_arg = format!("--settings-path={}", settings_path);
|
||||
// reusable verifier sol_arg
|
||||
let sol_arg = format!("--sol-code-path={}/{}/kzg.sol", test_dir, example_name);
|
||||
|
||||
// create the verifier
|
||||
// create the reusable verifier
|
||||
let args = vec![
|
||||
"create-evm-verifier",
|
||||
"--vk-path",
|
||||
&vk_arg,
|
||||
&settings_arg,
|
||||
&sol_arg,
|
||||
"--render-vk-seperately",
|
||||
"--reusable",
|
||||
];
|
||||
|
||||
let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
|
||||
@@ -2210,11 +2263,64 @@ mod native_tests {
|
||||
.expect("failed to execute process");
|
||||
assert!(status.success());
|
||||
|
||||
// deploy the verifier manager
|
||||
let args = vec![
|
||||
"deploy-evm",
|
||||
rpc_arg.as_str(),
|
||||
addr_path_arg.as_str(),
|
||||
// set the sol code path to be contracts/VerifierManager.sol relative to root
|
||||
"--sol-code-path=contracts/VerifierManager.sol",
|
||||
"-C=manager",
|
||||
];
|
||||
|
||||
let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
|
||||
.args(&args)
|
||||
.status()
|
||||
.expect("failed to execute process");
|
||||
assert!(status.success());
|
||||
|
||||
// read in the address of the verifier manager
|
||||
let addr = std::fs::read_to_string(format!("{}/{}/addr.txt", test_dir, example_name))
|
||||
.expect("failed to read address file");
|
||||
|
||||
verifier_manager_arg = format!("--addr-verifier-manager={}", addr);
|
||||
|
||||
// if the reusable verifier address is not set, deploy the verifier manager and then create the verifier
|
||||
let rv_addr = {
|
||||
// addr path for rv contract
|
||||
let addr_path_arg = format!("--addr-path={}/{}/addr_rv.txt", test_dir, example_name);
|
||||
// deploy the reusable verifier via the verifier router.
|
||||
let args = vec![
|
||||
"deploy-evm",
|
||||
rpc_arg.as_str(),
|
||||
addr_path_arg.as_str(),
|
||||
sol_arg.as_str(),
|
||||
verifier_manager_arg.as_str(),
|
||||
"-C=verifier/reusable",
|
||||
];
|
||||
|
||||
let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
|
||||
.args(&args)
|
||||
.status()
|
||||
.expect("failed to execute process");
|
||||
assert!(status.success());
|
||||
|
||||
// read in the address of the verifier manager
|
||||
let addr =
|
||||
std::fs::read_to_string(format!("{}/{}/addr_rv.txt", test_dir, example_name))
|
||||
.expect("failed to read address file");
|
||||
|
||||
addr
|
||||
};
|
||||
|
||||
let addr_path_arg_vk = format!("--addr-path={}/{}/addr_vk.txt", test_dir, example_name);
|
||||
let sol_arg_vk = format!("--sol-code-path={}/{}/vk.sol", test_dir, example_name);
|
||||
let sol_arg_vk: String = format!("--sol-code-path={}/{}/vk.sol", test_dir, example_name);
|
||||
// create the verifier
|
||||
let addr_path_arg_vk = format!("--addr-path={}/{}/addr_vk.txt", test_dir, example_name);
|
||||
let sol_arg_vk: String = format!("--sol-code-path={}/{}/vk.sol", test_dir, example_name);
|
||||
// create the verifier
|
||||
let args = vec![
|
||||
"create-evm-vk",
|
||||
"create-evm-vka",
|
||||
"--vk-path",
|
||||
&vk_arg,
|
||||
&settings_arg,
|
||||
@@ -2227,32 +2333,17 @@ mod native_tests {
|
||||
.expect("failed to execute process");
|
||||
assert!(status.success());
|
||||
|
||||
// deploy the verifier
|
||||
let rv_addr_arg = format!("--addr-reusable-verifier={}", rv_addr);
|
||||
|
||||
// deploy the vka via the "DeployVKA" command on the reusable verifier
|
||||
let args = vec![
|
||||
"deploy-evm-verifier",
|
||||
rpc_arg.as_str(),
|
||||
addr_path_arg.as_str(),
|
||||
sol_arg.as_str(),
|
||||
];
|
||||
|
||||
let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
|
||||
.args(&args)
|
||||
.status()
|
||||
.expect("failed to execute process");
|
||||
assert!(status.success());
|
||||
|
||||
// read in the address
|
||||
let addr = std::fs::read_to_string(format!("{}/{}/addr.txt", test_dir, example_name))
|
||||
.expect("failed to read address file");
|
||||
|
||||
let deployed_addr_arg = format!("--addr-verifier={}", addr);
|
||||
|
||||
// deploy the vk
|
||||
let args = vec![
|
||||
"deploy-evm-vk",
|
||||
"deploy-evm",
|
||||
rpc_arg.as_str(),
|
||||
addr_path_arg_vk.as_str(),
|
||||
verifier_manager_arg.as_str(),
|
||||
rv_addr_arg.as_str(),
|
||||
sol_arg_vk.as_str(),
|
||||
"-C=vka",
|
||||
];
|
||||
|
||||
let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
|
||||
@@ -2280,9 +2371,11 @@ mod native_tests {
|
||||
|
||||
assert!(status.success());
|
||||
|
||||
let deployed_addr_arg = format!("--addr-verifier={}", rv_addr);
|
||||
|
||||
// now verify the proof
|
||||
let pf_arg = format!("{}/{}/proof.pf", test_dir, example_name);
|
||||
let mut args = vec![
|
||||
let args = vec![
|
||||
"verify-evm",
|
||||
"--proof-path",
|
||||
pf_arg.as_str(),
|
||||
@@ -2296,13 +2389,49 @@ mod native_tests {
|
||||
.status()
|
||||
.expect("failed to execute process");
|
||||
assert!(status.success());
|
||||
// As sanity check, add example that should fail.
|
||||
args[2] = PF_FAILURE;
|
||||
let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
|
||||
.args(args)
|
||||
.status()
|
||||
.expect("failed to execute process");
|
||||
assert!(!status.success());
|
||||
// Read the original proof file
|
||||
let original_proof_data: ezkl::pfsys::Snark<
|
||||
halo2curves::bn256::Fr,
|
||||
halo2curves::bn256::G1Affine,
|
||||
> = Snark::load::<KZGCommitmentScheme<Bn256>>(&PathBuf::from(format!(
|
||||
"{}/{}/proof.pf",
|
||||
test_dir, example_name
|
||||
)))
|
||||
.expect("Failed to read proof file");
|
||||
|
||||
for i in 0..1 {
|
||||
// Create a copy of the original proof data
|
||||
let mut modified_proof_data = original_proof_data.clone();
|
||||
|
||||
// Flip a random bit
|
||||
let random_byte = rand::thread_rng().gen_range(0..modified_proof_data.proof.len());
|
||||
let random_bit = rand::thread_rng().gen_range(0..8);
|
||||
modified_proof_data.proof[random_byte] ^= 1 << random_bit;
|
||||
|
||||
// Write the modified proof to a new file
|
||||
let modified_pf_arg = format!("{}/{}/modified_proof_{}.pf", test_dir, example_name, i);
|
||||
modified_proof_data
|
||||
.save(&PathBuf::from(modified_pf_arg.clone()))
|
||||
.expect("Failed to save modified proof file");
|
||||
|
||||
// Verify the modified proof (should fail)
|
||||
let mut args_mod = args.clone();
|
||||
args_mod[2] = &modified_pf_arg;
|
||||
let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
|
||||
.args(&args_mod)
|
||||
.status()
|
||||
.expect("failed to execute process");
|
||||
|
||||
if status.success() {
|
||||
log::error!("Verification unexpectedly succeeded for modified proof {}. Flipped bit {} in byte {}", i, random_bit, random_byte);
|
||||
}
|
||||
|
||||
assert!(
|
||||
!status.success(),
|
||||
"Modified proof {} should have failed verification",
|
||||
i
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// run js browser evm verify tests for a given example
|
||||
@@ -2504,7 +2633,7 @@ mod native_tests {
|
||||
|
||||
// deploy the verifier
|
||||
let mut args = vec![
|
||||
"deploy-evm-verifier",
|
||||
"deploy-evm",
|
||||
rpc_arg.as_str(),
|
||||
addr_path_verifier_arg.as_str(),
|
||||
];
|
||||
|
||||
39
tests/ios/can_verify_aggr.swift
Normal file
39
tests/ios/can_verify_aggr.swift
Normal file
@@ -0,0 +1,39 @@
|
||||
// Write a simple swift test
|
||||
import ezkl
|
||||
import Foundation
|
||||
|
||||
let pathToFile = "../../../../tests/assets/"
|
||||
|
||||
|
||||
func loadFileAsBytes(from path: String) -> Data? {
|
||||
let url = URL(fileURLWithPath: path)
|
||||
return try? Data(contentsOf: url)
|
||||
}
|
||||
|
||||
do {
|
||||
let proofAggrPath = pathToFile + "proof_aggr.json"
|
||||
let vkAggrPath = pathToFile + "vk_aggr.key"
|
||||
let srs1Path = pathToFile + "kzg1.srs"
|
||||
|
||||
guard let proofAggr = loadFileAsBytes(from: proofAggrPath) else {
|
||||
fatalError("Failed to load proofAggr file")
|
||||
}
|
||||
guard let vkAggr = loadFileAsBytes(from: vkAggrPath) else {
|
||||
fatalError("Failed to load vkAggr file")
|
||||
}
|
||||
guard let srs1 = loadFileAsBytes(from: srs1Path) else {
|
||||
fatalError("Failed to load srs1 file")
|
||||
}
|
||||
|
||||
let value = try verifyAggr(
|
||||
proof: proofAggr,
|
||||
vk: vkAggr,
|
||||
logrows: 21,
|
||||
srs: srs1,
|
||||
commitment: "kzg"
|
||||
)
|
||||
|
||||
// should not fail
|
||||
assert(value == true, "Failed the test")
|
||||
|
||||
}
|
||||
42
tests/ios/gen_pk_test.swift
Normal file
42
tests/ios/gen_pk_test.swift
Normal file
@@ -0,0 +1,42 @@
|
||||
// Swift version of gen_pk_test
|
||||
import ezkl
|
||||
import Foundation
|
||||
|
||||
func loadFileAsBytes(from path: String) -> Data? {
|
||||
let url = URL(fileURLWithPath: path)
|
||||
return try? Data(contentsOf: url)
|
||||
}
|
||||
|
||||
do {
|
||||
let pathToFile = "../../../../tests/assets/"
|
||||
let networkCompiledPath = pathToFile + "model.compiled"
|
||||
let srsPath = pathToFile + "kzg"
|
||||
|
||||
// Load necessary files
|
||||
guard let compiledCircuit = loadFileAsBytes(from: networkCompiledPath) else {
|
||||
fatalError("Failed to load network compiled file")
|
||||
}
|
||||
guard let srs = loadFileAsBytes(from: srsPath) else {
|
||||
fatalError("Failed to load SRS file")
|
||||
}
|
||||
|
||||
// Generate the vk (Verifying Key)
|
||||
let vk = try genVk(
|
||||
compiledCircuit: compiledCircuit,
|
||||
srs: srs,
|
||||
compressSelectors: true // Corresponds to the `true` boolean in the Rust code
|
||||
)
|
||||
|
||||
// Generate the pk (Proving Key)
|
||||
let pk = try genPk(
|
||||
vk: vk,
|
||||
compiledCircuit: compiledCircuit,
|
||||
srs: srs
|
||||
)
|
||||
|
||||
// Ensure that the proving key is not empty
|
||||
assert(pk.count > 0, "Proving key generation failed, pk is empty")
|
||||
|
||||
} catch {
|
||||
fatalError("Test failed with error: \(error)")
|
||||
}
|
||||
35
tests/ios/gen_vk_test.swift
Normal file
35
tests/ios/gen_vk_test.swift
Normal file
@@ -0,0 +1,35 @@
|
||||
// Swift version of gen_vk_test
|
||||
import ezkl
|
||||
import Foundation
|
||||
|
||||
func loadFileAsBytes(from path: String) -> Data? {
|
||||
let url = URL(fileURLWithPath: path)
|
||||
return try? Data(contentsOf: url)
|
||||
}
|
||||
|
||||
do {
|
||||
let pathToFile = "../../../../tests/assets/"
|
||||
let networkCompiledPath = pathToFile + "model.compiled"
|
||||
let srsPath = pathToFile + "kzg"
|
||||
|
||||
// Load necessary files
|
||||
guard let compiledCircuit = loadFileAsBytes(from: networkCompiledPath) else {
|
||||
fatalError("Failed to load network compiled file")
|
||||
}
|
||||
guard let srs = loadFileAsBytes(from: srsPath) else {
|
||||
fatalError("Failed to load SRS file")
|
||||
}
|
||||
|
||||
// Generate the vk (Verifying Key)
|
||||
let vk = try genVk(
|
||||
compiledCircuit: compiledCircuit,
|
||||
srs: srs,
|
||||
compressSelectors: true // Corresponds to the `true` boolean in the Rust code
|
||||
)
|
||||
|
||||
// Ensure that the verifying key is not empty
|
||||
assert(vk.count > 0, "Verifying key generation failed, vk is empty")
|
||||
|
||||
} catch {
|
||||
fatalError("Test failed with error: \(error)")
|
||||
}
|
||||
69
tests/ios/pk_is_valid_test.swift
Normal file
69
tests/ios/pk_is_valid_test.swift
Normal file
@@ -0,0 +1,69 @@
|
||||
// Swift version of pk_is_valid_test
|
||||
import ezkl
|
||||
import Foundation
|
||||
|
||||
func loadFileAsBytes(from path: String) -> Data? {
|
||||
let url = URL(fileURLWithPath: path)
|
||||
return try? Data(contentsOf: url)
|
||||
}
|
||||
|
||||
do {
|
||||
let pathToFile = "../../../../tests/assets/"
|
||||
let networkCompiledPath = pathToFile + "model.compiled"
|
||||
let srsPath = pathToFile + "kzg"
|
||||
let witnessPath = pathToFile + "witness.json"
|
||||
let settingsPath = pathToFile + "settings.json"
|
||||
|
||||
// Load necessary files
|
||||
guard let compiledCircuit = loadFileAsBytes(from: networkCompiledPath) else {
|
||||
fatalError("Failed to load network compiled file")
|
||||
}
|
||||
guard let srs = loadFileAsBytes(from: srsPath) else {
|
||||
fatalError("Failed to load SRS file")
|
||||
}
|
||||
guard let witness = loadFileAsBytes(from: witnessPath) else {
|
||||
fatalError("Failed to load witness file")
|
||||
}
|
||||
guard let settings = loadFileAsBytes(from: settingsPath) else {
|
||||
fatalError("Failed to load settings file")
|
||||
}
|
||||
|
||||
// Generate the vk (Verifying Key)
|
||||
let vk = try genVk(
|
||||
compiledCircuit: compiledCircuit,
|
||||
srs: srs,
|
||||
compressSelectors: true // Corresponds to the `true` boolean in the Rust code
|
||||
)
|
||||
|
||||
// Generate the pk (Proving Key)
|
||||
let pk = try genPk(
|
||||
vk: vk,
|
||||
compiledCircuit: compiledCircuit,
|
||||
srs: srs
|
||||
)
|
||||
|
||||
// Prove using the witness and proving key
|
||||
let proof = try prove(
|
||||
witness: witness,
|
||||
pk: pk,
|
||||
compiledCircuit: compiledCircuit,
|
||||
srs: srs
|
||||
)
|
||||
|
||||
// Ensure that the proof is not empty
|
||||
assert(proof.count > 0, "Proof generation failed, proof is empty")
|
||||
|
||||
// Verify the proof
|
||||
let value = try verify(
|
||||
proof: proof,
|
||||
vk: vk,
|
||||
settings: settings,
|
||||
srs: srs
|
||||
)
|
||||
|
||||
// Ensure that the verification passed
|
||||
assert(value == true, "Verification failed")
|
||||
|
||||
} catch {
|
||||
fatalError("Test failed with error: \(error)")
|
||||
}
|
||||
71
tests/ios/verify_encode_verifier_calldata.swift
Normal file
71
tests/ios/verify_encode_verifier_calldata.swift
Normal file
@@ -0,0 +1,71 @@
|
||||
// Swift version of verify_encode_verifier_calldata test
|
||||
import ezkl
|
||||
import Foundation
|
||||
|
||||
func loadFileAsBytes(from path: String) -> Data? {
|
||||
let url = URL(fileURLWithPath: path)
|
||||
return try? Data(contentsOf: url)
|
||||
}
|
||||
|
||||
do {
|
||||
let pathToFile = "../../../../tests/assets/"
|
||||
let proofPath = pathToFile + "proof.json"
|
||||
|
||||
guard let proof = loadFileAsBytes(from: proofPath) else {
|
||||
fatalError("Failed to load proof file")
|
||||
}
|
||||
|
||||
// Test without vk address
|
||||
let calldataNoVk = try encodeVerifierCalldata(
|
||||
proof: proof,
|
||||
vkAddress: nil
|
||||
)
|
||||
|
||||
// Deserialize the proof data
|
||||
struct Snark: Decodable {
|
||||
let proof: Data
|
||||
let instances: Data
|
||||
}
|
||||
|
||||
let snark = try JSONDecoder().decode(Snark.self, from: proof)
|
||||
|
||||
let flattenedInstances = snark.instances.flatMap { $0 }
|
||||
let referenceCalldataNoVk = try encodeCalldata(
|
||||
vk: nil,
|
||||
proof: snark.proof,
|
||||
instances: flattenedInstances
|
||||
)
|
||||
|
||||
// Check if the encoded calldata matches the reference
|
||||
assert(calldataNoVk == referenceCalldataNoVk, "Calldata without vk does not match")
|
||||
|
||||
// Test with vk address
|
||||
let vkAddressString = "0000000000000000000000000000000000000000"
|
||||
let vkAddressData = Data(hexString: vkAddressString)
|
||||
|
||||
guard vkAddressData.count == 20 else {
|
||||
fatalError("Invalid VK address length")
|
||||
}
|
||||
|
||||
let vkAddressArray = [UInt8](vkAddressData)
|
||||
|
||||
// Serialize vkAddress to match JSON serialization in Rust
|
||||
let serializedVkAddress = try JSONEncoder().encode(vkAddressArray)
|
||||
|
||||
let calldataWithVk = try encodeVerifierCalldata(
|
||||
proof: proof,
|
||||
vk: serializedVkAddress
|
||||
)
|
||||
|
||||
let referenceCalldataWithVk = try encodeCalldata(
|
||||
vk: vkAddressArray,
|
||||
proof: snark.proof,
|
||||
instances: flattenedInstances
|
||||
)
|
||||
|
||||
// Check if the encoded calldata matches the reference
|
||||
assert(calldataWithVk == referenceCalldataWithVk, "Calldata with vk does not match")
|
||||
|
||||
} catch {
|
||||
fatalError("Test failed with error: \(error)")
|
||||
}
|
||||
45
tests/ios/verify_gen_witness.swift
Normal file
45
tests/ios/verify_gen_witness.swift
Normal file
@@ -0,0 +1,45 @@
|
||||
// Swift version of verify_gen_witness test
|
||||
import ezkl
|
||||
import Foundation
|
||||
|
||||
func loadFileAsBytes(from path: String) -> Data? {
|
||||
let url = URL(fileURLWithPath: path)
|
||||
return try? Data(contentsOf: url)
|
||||
}
|
||||
|
||||
do {
|
||||
let pathToFile = "../../../../tests/assets/"
|
||||
let networkCompiledPath = pathToFile + "model.compiled"
|
||||
let inputPath = pathToFile + "input.json"
|
||||
let witnessPath = pathToFile + "witness.json"
|
||||
|
||||
// Load necessary files
|
||||
guard let networkCompiled = loadFileAsBytes(from: networkCompiledPath) else {
|
||||
fatalError("Failed to load network compiled file")
|
||||
}
|
||||
guard let input = loadFileAsBytes(from: inputPath) else {
|
||||
fatalError("Failed to load input file")
|
||||
}
|
||||
guard let referenceWitnessData = loadFileAsBytes(from: witnessPath) else {
|
||||
fatalError("Failed to load witness file")
|
||||
}
|
||||
|
||||
// Generate witness using genWitness function
|
||||
let witnessData = try genWitness(
|
||||
compiledCircuit: networkCompiled,
|
||||
input: input
|
||||
)
|
||||
|
||||
// Deserialize the witness
|
||||
struct GraphWitness: Decodable, Equatable {}
|
||||
let witness = try JSONDecoder().decode(GraphWitness.self, from: witnessData)
|
||||
|
||||
// Deserialize the reference witness
|
||||
let referenceWitness = try JSONDecoder().decode(GraphWitness.self, from: referenceWitnessData)
|
||||
|
||||
// Check if the witness matches the reference witness
|
||||
assert(witness == referenceWitness, "Witnesses do not match")
|
||||
|
||||
} catch {
|
||||
fatalError("Test failed with error: \(error)")
|
||||
}
|
||||
64
tests/ios/verify_kzg_commit.swift
Normal file
64
tests/ios/verify_kzg_commit.swift
Normal file
@@ -0,0 +1,64 @@
|
||||
// Swift version of verify_kzg_commit test
|
||||
import ezkl
|
||||
import Foundation
|
||||
|
||||
func loadFileAsBytes(from path: String) -> Data? {
|
||||
let url = URL(fileURLWithPath: path)
|
||||
return try? Data(contentsOf: url)
|
||||
}
|
||||
|
||||
do {
|
||||
let pathToFile = "../../../../tests/assets/"
|
||||
let vkPath = pathToFile + "vk.key"
|
||||
let srsPath = pathToFile + "kzg"
|
||||
let settingsPath = pathToFile + "settings.json"
|
||||
|
||||
guard let vk = loadFileAsBytes(from: vkPath) else {
|
||||
fatalError("Failed to load vk file")
|
||||
}
|
||||
guard let srs = loadFileAsBytes(from: srsPath) else {
|
||||
fatalError("Failed to load srs file")
|
||||
}
|
||||
guard let settings = loadFileAsBytes(from: settingsPath) else {
|
||||
fatalError("Failed to load settings file")
|
||||
}
|
||||
|
||||
// Create a vector of field elements
|
||||
var message: [UInt64] = []
|
||||
for i in 0..<32 {
|
||||
message.append(UInt64(i))
|
||||
}
|
||||
|
||||
// Serialize the message array
|
||||
let messageData = try JSONEncoder().encode(message)
|
||||
|
||||
// Deserialize settings
|
||||
struct GraphSettings: Decodable {}
|
||||
let settingsDecoded = try JSONDecoder().decode(GraphSettings.self, from: settings)
|
||||
|
||||
// Generate commitment
|
||||
let commitmentData = try kzgCommit(
|
||||
message: messageData,
|
||||
vk: vk,
|
||||
settings: settings,
|
||||
srs: srs
|
||||
)
|
||||
|
||||
// Deserialize the resulting commitment
|
||||
struct G1Affine: Decodable {}
|
||||
let commitment = try JSONDecoder().decode([G1Affine].self, from: commitmentData)
|
||||
|
||||
// Reference commitment using params and vk
|
||||
// For Swift, you'd need to implement or link the corresponding methods like in Rust
|
||||
let referenceCommitment = try polyCommit(
|
||||
message: message,
|
||||
vk: vk,
|
||||
srs: srs
|
||||
)
|
||||
|
||||
// Check if the commitment matches the reference
|
||||
assert(commitment == referenceCommitment, "Commitments do not match")
|
||||
|
||||
} catch {
|
||||
fatalError("Test failed with error: \(error)")
|
||||
}
|
||||
103
tests/ios/verify_validations.swift
Normal file
103
tests/ios/verify_validations.swift
Normal file
@@ -0,0 +1,103 @@
|
||||
// Swift version of verify_validations test
|
||||
import ezkl
|
||||
import Foundation
|
||||
|
||||
func loadFileAsBytes(from path: String) -> Data? {
|
||||
let url = URL(fileURLWithPath: path)
|
||||
return try? Data(contentsOf: url)
|
||||
}
|
||||
|
||||
do {
|
||||
let pathToFile = "../../../../tests/assets/"
|
||||
let compiledCircuitPath = pathToFile + "model.compiled"
|
||||
let networkPath = pathToFile + "network.onnx"
|
||||
let witnessPath = pathToFile + "witness.json"
|
||||
let inputPath = pathToFile + "input.json"
|
||||
let proofPath = pathToFile + "proof.json"
|
||||
let vkPath = pathToFile + "vk.key"
|
||||
let pkPath = pathToFile + "pk.key"
|
||||
let settingsPath = pathToFile + "settings.json"
|
||||
let srsPath = pathToFile + "kzg"
|
||||
|
||||
// Load necessary files
|
||||
guard let compiledCircuit = loadFileAsBytes(from: compiledCircuitPath) else {
|
||||
fatalError("Failed to load network compiled file")
|
||||
}
|
||||
guard let network = loadFileAsBytes(from: networkPath) else {
|
||||
fatalError("Failed to load network file")
|
||||
}
|
||||
guard let witness = loadFileAsBytes(from: witnessPath) else {
|
||||
fatalError("Failed to load witness file")
|
||||
}
|
||||
guard let input = loadFileAsBytes(from: inputPath) else {
|
||||
fatalError("Failed to load input file")
|
||||
}
|
||||
guard let proof = loadFileAsBytes(from: proofPath) else {
|
||||
fatalError("Failed to load proof file")
|
||||
}
|
||||
guard let vk = loadFileAsBytes(from: vkPath) else {
|
||||
fatalError("Failed to load vk file")
|
||||
}
|
||||
guard let pk = loadFileAsBytes(from: pkPath) else {
|
||||
fatalError("Failed to load pk file")
|
||||
}
|
||||
guard let settings = loadFileAsBytes(from: settingsPath) else {
|
||||
fatalError("Failed to load settings file")
|
||||
}
|
||||
guard let srs = loadFileAsBytes(from: srsPath) else {
|
||||
fatalError("Failed to load srs file")
|
||||
}
|
||||
|
||||
// Witness validation (should fail for network compiled)
|
||||
let witnessValidationResult1 = try? witnessValidation(witness:compiledCircuit)
|
||||
assert(witnessValidationResult1 == nil, "Witness validation should fail for network compiled")
|
||||
|
||||
// Witness validation (should pass for witness)
|
||||
let witnessValidationResult2 = try? witnessValidation(witness:witness)
|
||||
assert(witnessValidationResult2 != nil, "Witness validation should pass for witness")
|
||||
|
||||
// Compiled circuit validation (should fail for onnx network)
|
||||
let circuitValidationResult1 = try? compiledCircuitValidation(compiledCircuit:network)
|
||||
assert(circuitValidationResult1 == nil, "Compiled circuit validation should fail for onnx network")
|
||||
|
||||
// Compiled circuit validation (should pass for compiled network)
|
||||
let circuitValidationResult2 = try? compiledCircuitValidation(compiledCircuit:compiledCircuit)
|
||||
assert(circuitValidationResult2 != nil, "Compiled circuit validation should pass for compiled network")
|
||||
|
||||
// Input validation (should fail for witness)
|
||||
let inputValidationResult1 = try? inputValidation(input:witness)
|
||||
assert(inputValidationResult1 == nil, "Input validation should fail for witness")
|
||||
|
||||
// Input validation (should pass for input)
|
||||
let inputValidationResult2 = try? inputValidation(input:input)
|
||||
assert(inputValidationResult2 != nil, "Input validation should pass for input")
|
||||
|
||||
// Proof validation (should fail for witness)
|
||||
let proofValidationResult1 = try? proofValidation(proof:witness)
|
||||
assert(proofValidationResult1 == nil, "Proof validation should fail for witness")
|
||||
|
||||
// Proof validation (should pass for proof)
|
||||
let proofValidationResult2 = try? proofValidation(proof:proof)
|
||||
assert(proofValidationResult2 != nil, "Proof validation should pass for proof")
|
||||
|
||||
// Verifying key (vk) validation (should pass)
|
||||
let vkValidationResult = try? vkValidation(vk:vk, settings:settings)
|
||||
assert(vkValidationResult != nil, "VK validation should pass for vk")
|
||||
|
||||
// Proving key (pk) validation (should pass)
|
||||
let pkValidationResult = try? pkValidation(pk:pk, settings:settings)
|
||||
assert(pkValidationResult != nil, "PK validation should pass for pk")
|
||||
|
||||
// Settings validation (should fail for proof)
|
||||
let settingsValidationResult1 = try? settingsValidation(settings:proof)
|
||||
assert(settingsValidationResult1 == nil, "Settings validation should fail for proof")
|
||||
|
||||
// Settings validation (should pass for settings)
|
||||
let settingsValidationResult2 = try? settingsValidation(settings:settings)
|
||||
assert(settingsValidationResult2 != nil, "Settings validation should pass for settings")
|
||||
|
||||
// SRS validation (should pass)
|
||||
let srsValidationResult = try? srsValidation(srs:srs)
|
||||
assert(srsValidationResult != nil, "SRS validation should pass for srs")
|
||||
|
||||
}
|
||||
11
tests/ios_integration_tests.rs
Normal file
11
tests/ios_integration_tests.rs
Normal file
@@ -0,0 +1,11 @@
|
||||
#[cfg(feature = "ios-bindings-test")]
|
||||
uniffi::build_foreign_language_testcases!(
|
||||
"tests/ios/can_verify_aggr.swift",
|
||||
"tests/ios/verify_gen_witness.swift",
|
||||
"tests/ios/gen_pk_test.swift",
|
||||
"tests/ios/gen_vk_test.swift",
|
||||
"tests/ios/pk_is_valid_test.swift",
|
||||
"tests/ios/verify_validations.swift",
|
||||
// "tests/ios/verify_encode_verifier_calldata.swift", // TODO - the function requires rust dependencies to test
|
||||
// "tests/ios/verify_kzg_commit.swift", // TODO - the function is not exported and requires rust dependencies to test
|
||||
);
|
||||
@@ -1,4 +1,4 @@
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
|
||||
#[cfg(test)]
|
||||
mod py_tests {
|
||||
|
||||
@@ -11,7 +11,7 @@ mod py_tests {
|
||||
static ENV_SETUP: Once = Once::new();
|
||||
static DOWNLOAD_VOICE_DATA: Once = Once::new();
|
||||
|
||||
//Sure to run this once
|
||||
// Sure to run this once
|
||||
|
||||
lazy_static! {
|
||||
static ref CARGO_TARGET_DIR: String =
|
||||
@@ -123,7 +123,8 @@ mod py_tests {
|
||||
}
|
||||
}
|
||||
|
||||
const TESTS: [&str; 33] = [
|
||||
const TESTS: [&str; 34] = [
|
||||
"ezkl_demo_batch.ipynb",
|
||||
"proof_splitting.ipynb", // 0
|
||||
"variance.ipynb",
|
||||
"mnist_gan.ipynb",
|
||||
@@ -201,6 +202,18 @@ mod py_tests {
|
||||
anvil_child.kill().unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn reusable_verifier_notebook_() {
|
||||
crate::py_tests::init_binary();
|
||||
let mut anvil_child = crate::py_tests::start_anvil(false);
|
||||
let test_dir: TempDir = TempDir::new("reusable_verifier").unwrap();
|
||||
let path = test_dir.path().to_str().unwrap();
|
||||
crate::py_tests::mv_test_(path, "reusable_verifier.ipynb");
|
||||
run_notebook(path, "reusable_verifier.ipynb");
|
||||
test_dir.close().unwrap();
|
||||
anvil_child.kill().unwrap();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn postgres_notebook_() {
|
||||
crate::py_tests::init_binary();
|
||||
|
||||
@@ -23,7 +23,7 @@ examples_path = os.path.abspath(
|
||||
|
||||
srs_path = os.path.join(folder_path, 'kzg_test.params')
|
||||
params_k17_path = os.path.join(folder_path, 'kzg_test_k17.params')
|
||||
params_k20_path = os.path.join(folder_path, 'kzg_test_k20.params')
|
||||
params_k21_path = os.path.join(folder_path, 'kzg_test_k21.params')
|
||||
anvil_url = "http://localhost:3030"
|
||||
|
||||
|
||||
@@ -104,8 +104,8 @@ def test_gen_srs():
|
||||
ezkl.gen_srs(params_k17_path, 17)
|
||||
assert os.path.isfile(params_k17_path)
|
||||
|
||||
ezkl.gen_srs(params_k20_path, 20)
|
||||
assert os.path.isfile(params_k20_path)
|
||||
ezkl.gen_srs(params_k21_path, 21)
|
||||
assert os.path.isfile(params_k21_path)
|
||||
|
||||
|
||||
|
||||
@@ -450,10 +450,10 @@ async def test_create_evm_verifier_separate_vk():
|
||||
sol_code_path,
|
||||
abi_path,
|
||||
srs_path=srs_path,
|
||||
render_vk_seperately=True
|
||||
reusable=True
|
||||
)
|
||||
|
||||
res = await ezkl.create_evm_vk(
|
||||
res = await ezkl.create_evm_vka(
|
||||
vk_path,
|
||||
settings_path,
|
||||
vk_code_path,
|
||||
@@ -465,9 +465,9 @@ async def test_create_evm_verifier_separate_vk():
|
||||
assert os.path.isfile(sol_code_path)
|
||||
|
||||
|
||||
async def test_deploy_evm_separate_vk():
|
||||
async def test_deploy_evm_reusable_and_vka():
|
||||
"""
|
||||
Test deployment of the separate verifier smart contract + vk
|
||||
Test deployment of the reusable verifier smart contract + vka
|
||||
In order to run this you will need to install solc in your environment
|
||||
"""
|
||||
addr_path_verifier = os.path.join(folder_path, 'address_separate.json')
|
||||
@@ -481,13 +481,15 @@ async def test_deploy_evm_separate_vk():
|
||||
res = await ezkl.deploy_evm(
|
||||
addr_path_verifier,
|
||||
sol_code_path,
|
||||
rpc_url=anvil_url,
|
||||
anvil_url,
|
||||
"verifier/reusable",
|
||||
)
|
||||
|
||||
res = await ezkl.deploy_vk_evm(
|
||||
res = await ezkl.deploy_evm(
|
||||
addr_path_vk,
|
||||
vk_code_path,
|
||||
rpc_url=anvil_url,
|
||||
anvil_url,
|
||||
"vka",
|
||||
)
|
||||
|
||||
assert res == True
|
||||
@@ -506,7 +508,7 @@ async def test_deploy_evm():
|
||||
res = await ezkl.deploy_evm(
|
||||
addr_path,
|
||||
sol_code_path,
|
||||
rpc_url=anvil_url,
|
||||
anvil_url,
|
||||
)
|
||||
|
||||
assert res == True
|
||||
@@ -677,7 +679,7 @@ async def test_aggregate_and_verify_aggr():
|
||||
)
|
||||
|
||||
# mock aggregate
|
||||
res = ezkl.mock_aggregate([proof_path], 20)
|
||||
res = ezkl.mock_aggregate([proof_path], 21)
|
||||
assert res == True
|
||||
|
||||
aggregate_proof_path = os.path.join(folder_path, 'aggr_1l_relu.pf')
|
||||
@@ -688,8 +690,8 @@ async def test_aggregate_and_verify_aggr():
|
||||
[proof_path],
|
||||
aggregate_vk_path,
|
||||
aggregate_pk_path,
|
||||
20,
|
||||
srs_path=params_k20_path,
|
||||
21,
|
||||
srs_path=params_k21_path,
|
||||
)
|
||||
|
||||
res = ezkl.gen_vk_from_pk_aggr(aggregate_pk_path, aggregate_vk_path)
|
||||
@@ -701,9 +703,9 @@ async def test_aggregate_and_verify_aggr():
|
||||
aggregate_proof_path,
|
||||
aggregate_pk_path,
|
||||
"poseidon",
|
||||
20,
|
||||
21,
|
||||
"unsafe",
|
||||
srs_path=params_k20_path,
|
||||
srs_path=params_k21_path,
|
||||
)
|
||||
|
||||
assert res == True
|
||||
@@ -713,8 +715,8 @@ async def test_aggregate_and_verify_aggr():
|
||||
res = ezkl.verify_aggr(
|
||||
aggregate_proof_path,
|
||||
aggregate_vk_path,
|
||||
20,
|
||||
srs_path=params_k20_path,
|
||||
21,
|
||||
srs_path=params_k21_path,
|
||||
)
|
||||
assert res == True
|
||||
|
||||
@@ -793,8 +795,8 @@ async def test_evm_aggregate_and_verify_aggr():
|
||||
[proof_path],
|
||||
aggregate_vk_path,
|
||||
aggregate_pk_path,
|
||||
20,
|
||||
srs_path=params_k20_path,
|
||||
21,
|
||||
srs_path=params_k21_path,
|
||||
)
|
||||
|
||||
res = ezkl.aggregate(
|
||||
@@ -802,9 +804,9 @@ async def test_evm_aggregate_and_verify_aggr():
|
||||
aggregate_proof_path,
|
||||
aggregate_pk_path,
|
||||
"evm",
|
||||
20,
|
||||
21,
|
||||
"unsafe",
|
||||
srs_path=params_k20_path,
|
||||
srs_path=params_k21_path,
|
||||
)
|
||||
|
||||
assert res == True
|
||||
@@ -819,8 +821,8 @@ async def test_evm_aggregate_and_verify_aggr():
|
||||
aggregate_vk_path,
|
||||
sol_code_path,
|
||||
abi_path,
|
||||
logrows=20,
|
||||
srs_path=params_k20_path,
|
||||
logrows=21,
|
||||
srs_path=params_k21_path,
|
||||
)
|
||||
|
||||
assert res == True
|
||||
@@ -838,8 +840,8 @@ async def test_evm_aggregate_and_verify_aggr():
|
||||
res = ezkl.verify_aggr(
|
||||
aggregate_proof_path,
|
||||
aggregate_vk_path,
|
||||
20,
|
||||
srs_path=params_k20_path,
|
||||
21,
|
||||
srs_path=params_k21_path,
|
||||
)
|
||||
assert res == True
|
||||
|
||||
|
||||
@@ -1,6 +1,12 @@
|
||||
#[cfg(all(target_arch = "wasm32", target_os = "unknown"))]
|
||||
#[cfg(test)]
|
||||
mod wasm32 {
|
||||
use ezkl::bindings::wasm::{
|
||||
bufferToVecOfFelt, compiledCircuitValidation, encodeVerifierCalldata, feltToBigEndian,
|
||||
feltToFloat, feltToInt, feltToLittleEndian, genPk, genVk, genWitness, inputValidation,
|
||||
kzgCommit, pkValidation, poseidonHash, proofValidation, prove, settingsValidation,
|
||||
srsValidation, u8_array_to_u128_le, verify, verifyAggr, vkValidation, witnessValidation,
|
||||
};
|
||||
use ezkl::circuit::modules::polycommit::PolyCommitChip;
|
||||
use ezkl::circuit::modules::poseidon::spec::{PoseidonSpec, POSEIDON_RATE, POSEIDON_WIDTH};
|
||||
use ezkl::circuit::modules::poseidon::PoseidonChip;
|
||||
@@ -9,39 +15,31 @@ mod wasm32 {
|
||||
use ezkl::graph::GraphCircuit;
|
||||
use ezkl::graph::{GraphSettings, GraphWitness};
|
||||
use ezkl::pfsys;
|
||||
use ezkl::wasm::{
|
||||
bufferToVecOfFelt, compiledCircuitValidation, encodeVerifierCalldata, feltToBigEndian,
|
||||
feltToFloat, feltToInt, feltToLittleEndian, genPk, genVk, genWitness, inputValidation,
|
||||
kzgCommit, pkValidation, poseidonHash, proofValidation, prove, settingsValidation,
|
||||
srsValidation, u8_array_to_u128_le, verify, verifyAggr, vkValidation, witnessValidation,
|
||||
};
|
||||
use halo2_proofs::plonk::VerifyingKey;
|
||||
use halo2_proofs::poly::commitment::CommitmentScheme;
|
||||
use halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme;
|
||||
use halo2_proofs::poly::kzg::commitment::ParamsKZG;
|
||||
use halo2_solidity_verifier::encode_calldata;
|
||||
use halo2curves::bn256::Bn256;
|
||||
use halo2curves::bn256::{Fr, G1Affine};
|
||||
use snark_verifier::util::arithmetic::PrimeField;
|
||||
use wasm_bindgen::JsError;
|
||||
#[cfg(feature = "web")]
|
||||
pub use wasm_bindgen_rayon::init_thread_pool;
|
||||
use wasm_bindgen_test::*;
|
||||
|
||||
wasm_bindgen_test::wasm_bindgen_test_configure!(run_in_browser);
|
||||
|
||||
pub const WITNESS: &[u8] = include_bytes!("../tests/wasm/witness.json");
|
||||
pub const NETWORK_COMPILED: &[u8] = include_bytes!("../tests/wasm/model.compiled");
|
||||
pub const NETWORK: &[u8] = include_bytes!("../tests/wasm/network.onnx");
|
||||
pub const INPUT: &[u8] = include_bytes!("../tests/wasm/input.json");
|
||||
pub const PROOF: &[u8] = include_bytes!("../tests/wasm/proof.json");
|
||||
pub const PROOF_AGGR: &[u8] = include_bytes!("../tests/wasm/proof_aggr.json");
|
||||
pub const SETTINGS: &[u8] = include_bytes!("../tests/wasm/settings.json");
|
||||
pub const PK: &[u8] = include_bytes!("../tests/wasm/pk.key");
|
||||
pub const VK: &[u8] = include_bytes!("../tests/wasm/vk.key");
|
||||
pub const VK_AGGR: &[u8] = include_bytes!("../tests/wasm/vk_aggr.key");
|
||||
pub const SRS: &[u8] = include_bytes!("../tests/wasm/kzg");
|
||||
pub const SRS1: &[u8] = include_bytes!("../tests/wasm/kzg1.srs");
|
||||
pub const WITNESS: &[u8] = include_bytes!("assets/witness.json");
|
||||
pub const NETWORK_COMPILED: &[u8] = include_bytes!("assets/model.compiled");
|
||||
pub const NETWORK: &[u8] = include_bytes!("assets/network.onnx");
|
||||
pub const INPUT: &[u8] = include_bytes!("assets/input.json");
|
||||
pub const PROOF: &[u8] = include_bytes!("assets/proof.json");
|
||||
pub const PROOF_AGGR: &[u8] = include_bytes!("assets/proof_aggr.json");
|
||||
pub const SETTINGS: &[u8] = include_bytes!("assets/settings.json");
|
||||
pub const PK: &[u8] = include_bytes!("assets/pk.key");
|
||||
pub const VK: &[u8] = include_bytes!("assets/vk.key");
|
||||
pub const VK_AGGR: &[u8] = include_bytes!("assets/vk_aggr.key");
|
||||
pub const SRS: &[u8] = include_bytes!("assets/kzg");
|
||||
pub const SRS1: &[u8] = include_bytes!("assets/kzg1.srs");
|
||||
|
||||
#[wasm_bindgen_test]
|
||||
async fn can_verify_aggr() {
|
||||
|
||||
Binary file not shown.
File diff suppressed because one or more lines are too long
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user