Mirror of https://github.com/zkonduit/ezkl.git (synced 2026-01-13 08:17:57 -05:00)
Compare commits
4 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | 5144991b21 |  |
|  | 64acb1d9d6 |  |
|  | 8cf28456b3 |  |
|  | e70e13a9e3 |  |
194 .github/workflows/engine.yml (vendored)
@@ -1,194 +0,0 @@
name: Build and Publish EZKL Engine npm package

on:
  workflow_dispatch:
    inputs:
      tag:
        description: "The tag to release"
        required: true
  push:
    tags:
      - "*"

defaults:
  run:
    working-directory: .

jobs:
  publish-wasm-bindings:
    permissions:
      contents: read
      packages: write
      id-token: write # Required for provenance
    name: publish-wasm-bindings
    env:
      RELEASE_TAG: ${{ github.ref_name }}
      RUSTFLAGS: "-C target-feature=+atomics,+bulk-memory"
    runs-on: ubuntu-latest
    if: startsWith(github.ref, 'refs/tags/')
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
        with:
          persist-credentials: false
      - uses: actions-rust-lang/setup-rust-toolchain@fb51252c7ba57d633bc668f941da052e410add48 #v1.0.6
        with:
          toolchain: nightly-2025-05-01
          override: true
          components: rustfmt, clippy
          cache: false
      - uses: jetli/wasm-pack-action@0d096b08b4e5a7de8c28de67e11e945404e9eefa #v0.4.0
        with:
          # Pin to version 0.12.1
          version: "v0.12.1"
      - name: Add wasm32-unknown-unknown target
        run: rustup target add wasm32-unknown-unknown

      - name: Add rust-src
        run: rustup component add rust-src --toolchain nightly-2025-05-01-x86_64-unknown-linux-gnu
      - name: Install binaryen
        run: |
          set -e
          curl -L https://github.com/WebAssembly/binaryen/releases/download/version_116/binaryen-version_116-x86_64-linux.tar.gz | tar xzf -
          export PATH=$PATH:$PWD/binaryen-version_116/bin
          wasm-opt --version
      - name: Build wasm files for both web and nodejs compilation targets
        run: |
          wasm-pack build --release --target nodejs --out-dir ./pkg/nodejs . -- -Z build-std="panic_abort,std"
          wasm-pack build --release --target web --out-dir ./pkg/web . -- -Z build-std="panic_abort,std" --features web
      - name: Create package.json in pkg folder
        shell: bash
        run: |
          cat > pkg/package.json << EOF
          {
            "name": "@ezkljs/engine",
            "version": "$RELEASE_TAG",
            "dependencies": {
              "@types/json-bigint": "^1.0.1",
              "json-bigint": "^1.0.0"
            },
            "files": [
              "nodejs/ezkl_bg.wasm",
              "nodejs/ezkl.js",
              "nodejs/ezkl.d.ts",
              "nodejs/package.json",
              "nodejs/utils.js",
              "web/ezkl_bg.wasm",
              "web/ezkl.js",
              "web/ezkl.d.ts",
              "web/snippets/**/*",
              "web/package.json",
              "web/utils.js",
              "ezkl.d.ts"
            ],
            "main": "nodejs/ezkl.js",
            "module": "web/ezkl.js",
            "types": "nodejs/ezkl.d.ts",
            "sideEffects": [
              "web/snippets/*"
            ]
          }
          EOF

      - name: Replace memory definition in nodejs
        run: |
          sed -i "3s|.*|imports['env'] = {memory: new WebAssembly.Memory({initial:21,maximum:65536,shared:true})}|" pkg/nodejs/ezkl.js

      - name: Replace `import.meta.url` with `import.meta.resolve` definition in workerHelpers.js
        run: |
          find ./pkg/web/snippets -type f -name "*.js" -exec sed -i "s|import.meta.url|import.meta.resolve|" {} +

      - name: Add serialize and deserialize methods to nodejs bundle
        run: |
          echo '
          const JSONBig = require("json-bigint");

          function deserialize(buffer) { // buffer is a Uint8ClampedArray | Uint8Array // return a JSON object
            if (buffer instanceof Uint8ClampedArray) {
              buffer = new Uint8Array(buffer.buffer);
            }
            const string = new TextDecoder().decode(buffer);
            const jsonObject = JSONBig.parse(string);
            return jsonObject;
          }

          function serialize(data) { // data is an object // return a Uint8ClampedArray
            // Step 1: Stringify the Object with BigInt support
            if (typeof data === "object") {
              data = JSONBig.stringify(data);
            }
            // Step 2: Encode the JSON String
            const uint8Array = new TextEncoder().encode(data);

            // Step 3: Convert to Uint8ClampedArray
            return new Uint8ClampedArray(uint8Array.buffer);
          }

          module.exports = {
            deserialize,
            serialize
          };
          ' > pkg/nodejs/utils.js
      - name: Add serialize and deserialize methods to web bundle
        run: |
          echo '
          import { parse, stringify } from "json-bigint";

          export function deserialize(buffer) { // buffer is a Uint8ClampedArray | Uint8Array // return a JSON object
            if (buffer instanceof Uint8ClampedArray) {
              buffer = new Uint8Array(buffer.buffer);
            }
            const string = new TextDecoder().decode(buffer);
            const jsonObject = parse(string);
            return jsonObject;
          }

          export function serialize(data) { // data is an object // return a Uint8ClampedArray
            // Step 1: Stringify the Object with BigInt support
            if (typeof data === "object") {
              data = stringify(data);
            }
            // Step 2: Encode the JSON String
            const uint8Array = new TextEncoder().encode(data);

            // Step 3: Convert to Uint8ClampedArray
            return new Uint8ClampedArray(uint8Array.buffer);
          }
          ' > pkg/web/utils.js
      - name: Expose serialize and deserialize imports in nodejs target
        run: |
          sed -i '53i// import serialize and deserialize from utils.js\nconst { serialize, deserialize } = require(`./utils.js`);\nmodule.exports.serialize = serialize;\nmodule.exports.deserialize = deserialize;' pkg/nodejs/ezkl.js
      - name: Expose serialize and deserialize imports in web target
        run: |
          sed -i '51i\
          // import serialize and deserialize from utils.js\
          import { serialize, deserialize } from '\''./utils.js'\'';\
          export { serialize, deserialize };' pkg/web/ezkl.js
      - name: Add serialize and deserialize imports to nodejs ezkl.d.ts
        run: |
          sed -i '1i\
          export declare function serialize(data: object | string): Uint8ClampedArray;\
          export declare function deserialize(buffer: Uint8ClampedArray | Uint8Array): any;' pkg/nodejs/ezkl.d.ts

      - name: Add serialize and deserialize imports to web ezkl.d.ts
        run: |
          sed -i '1i\
          export declare function serialize(data: object | string): Uint8ClampedArray;\
          export declare function deserialize(buffer: Uint8ClampedArray | Uint8Array): any;' pkg/web/ezkl.d.ts

      - name: Create README.md in pkg folder
        run: |
          curl -s "https://raw.githubusercontent.com/zkonduit/ezkljs-engine/main/README.md" > ./pkg/README.md

      # zizmor: ignore cache-poisoning
      - name: Set up Node.js
        uses: actions/setup-node@1a4442cacd436585916779262731d5b162bc6ec7 #v3.8.2
        with:
          node-version: "18.12.1"
          registry-url: "https://registry.npmjs.org"
          package-manager-cache: false

      - name: Publish to npm with provenance
        run: |
          cd pkg
          npm publish --provenance --access public
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
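The serialize/deserialize helpers injected into the (now removed) engine package above simply round-trip JSON, with BigInt-safe parsing, through UTF-8 bytes. A minimal sketch of the same round-trip in Python, for illustration only (the published @ezkljs/engine helpers are the JavaScript shown above; Python ints are arbitrary precision, so no json-bigint equivalent is needed):

    import json

    def serialize(data) -> bytes:
        # Stringify first, mirroring the JS helper's typeof check
        if not isinstance(data, str):
            data = json.dumps(data)
        # Encode the JSON string to UTF-8 bytes
        # (the JS helpers return a Uint8ClampedArray instead)
        return data.encode("utf-8")

    def deserialize(buffer: bytes):
        # Decode bytes back to a string, then parse the JSON
        return json.loads(buffer.decode("utf-8"))

    # Round trip: witness-style object -> bytes -> object
    obj = {"inputs": [[1, 2, 3]], "big": 2**53 - 1}
    assert deserialize(serialize(obj)) == obj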
355 .github/workflows/rust.yml (vendored)
@@ -20,7 +20,7 @@ env:

jobs:
  fr-age-test:
-    needs: [build, library-tests, docs, python-tests, python-integration-tests]
+    needs: [build, library-tests, docs]
    permissions:
      contents: read
    runs-on: large-self-hosted
@@ -147,9 +147,6 @@
        with:
          crate: cargo-nextest
          locked: true
-      - uses: mwilliamson/setup-wasmtime-action@bf814d7d8fc3c3a77dfe114bd9fb8a2c575f6ad6 #v2.0.0
-        with:
-          wasmtime-version: "3.0.1"
      - name: Setup GPU dependencies
        run: sudo ./setup-gpu.sh --yes
      - name: Install build dependencies
@@ -201,13 +198,6 @@
        with:
          crate: cargo-nextest
          locked: true
-      - uses: mwilliamson/setup-wasmtime-action@bf814d7d8fc3c3a77dfe114bd9fb8a2c575f6ad6 #v2.0.0
-        with:
-          wasmtime-version: "3.0.1"
-      # - name: Matmul overflow (wasi)
-      #   run: cargo wasi test matmul_col_ultra_overflow -- --include-ignored --nocapture
-      # - name: Conv overflow (wasi)
-      #   run: cargo wasi test conv_col_ultra_overflow -- --include-ignored --nocapture
      - name: lookup overflow
        run: cargo nextest run lookup_ultra_overflow --no-capture -- --include-ignored
      - name: Matmul overflow
@@ -249,54 +239,6 @@
      - name: Model serialization different binary ID
        run: cargo nextest run native_tests::tests::model_serialization_different_binaries_ --test-threads 1

-  wasm32-tests:
-    permissions:
-      contents: read
-    runs-on: ubuntu-22.04
-    env:
-      EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
-      # add `atomics` and `bulk-memory` to RUSTFLAGS to enable wasm-bindgen tests
-      RUSTFLAGS: "-C target-feature=+atomics,+bulk-memory"
-      OPENSSL_NO_VENDOR: 1
-
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
-        with:
-          persist-credentials: false
-
-      - uses: actions-rust-lang/setup-rust-toolchain@fb51252c7ba57d633bc668f941da052e410add48 #v1.0.6
-        with:
-          toolchain: nightly-2025-05-01
-          override: true
-          components: rustfmt, clippy
-      - name: install libc6
-        run: sudo apt-get install -y libc6
-      - name: Install cmake and build dependencies
-        run: sudo apt-get update && sudo apt-get install -y cmake build-essential g++ gcc libclang-dev llvm-dev libstdc++-12-dev libc6-dev libssl-dev pkg-config
-      - name: Force rebuild icicle dependencies
-        run: cargo clean -p icicle-runtime -p icicle-core -p icicle-hash -p icicle-bn254
-      - uses: jetli/wasm-pack-action@0d096b08b4e5a7de8c28de67e11e945404e9eefa #v0.4.0
-        with:
-          # Pin to version 0.13.1
-          version: "v0.13.1"
-      - uses: nanasess/setup-chromedriver@affb1ea8848cbb080be372c1e8d7a5c173e9298f #v2.3.0
-        # with:
-        #   chromedriver-version: "115.0.5790.102"
-      - name: Install wasm32-unknown-unknown
-        run: rustup target add wasm32-unknown-unknown
-      - name: Add rust-src
-        run: rustup component add rust-src --toolchain nightly-2025-05-01-x86_64-unknown-linux-gnu
-      - name: Create webdriver.json to disable timeouts
-        run: |
-          echo '{"args": ["--headless", "--disable-gpu", "--disable-dev-shm-usage", "--no-sandbox"]}' > webdriver.json
-      - name: Run wasm verifier tests
-        run: |
-          ulimit -n 65536
-          WASM_BINDGEN_TEST_THREADS=1 \
-          WASM_BINDGEN_TEST_TIMEOUT=1800 \
-          CHROMEDRIVER_ARGS="--log-level=INFO" \
-          wasm-pack test --chrome --headless -- -Z build-std="panic_abort,std" --features web -- --nocapture

  mock-proving-tests:
    permissions:
      contents: read
@@ -376,7 +318,7 @@
    permissions:
      contents: read
    runs-on: [non-gpu, non-sgx]
-    needs: [build, library-tests, docs, python-tests, python-integration-tests]
+    needs: [build, library-tests, docs]
    env:
      EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
      RUSTFLAGS: "-C target-feature=+atomics,+bulk-memory"
@@ -392,10 +334,6 @@
          toolchain: nightly-2025-05-01
          override: true
          components: rustfmt, clippy
-      - uses: jetli/wasm-pack-action@0d096b08b4e5a7de8c28de67e11e945404e9eefa #v0.4.0
-        with:
-          # Pin to version 0.13.1
-          version: "v0.13.1"
      - uses: baptiste0928/cargo-install@91c5da15570085bcde6f4d7aed98cb82d6769fd3 #v3.3.0
        with:
          crate: cargo-nextest
@@ -405,27 +343,12 @@
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
        with:
          persist-credentials: false
-      - name: Use pnpm 8
-        uses: pnpm/action-setup@eae0cfeb286e66ffb5155f1a79b90583a127a68b #v2.4.1
-        with:
-          version: 8
-      - name: Use Node.js 22.17.1
-        uses: actions/setup-node@1a4442cacd436585916779262731d5b162bc6ec7 #v3.8.2
-        with:
-          node-version: "22.17.1"
-          cache: "pnpm"
      - name: "Add rust-src"
        run: rustup component add rust-src --toolchain nightly-2025-05-01-x86_64-unknown-linux-gnu
-      - name: Install dependencies for js tests and package
-        run: |
-          pnpm install --frozen-lockfile
      # - name: Install solc
      #   run: (hash svm 2>/dev/null || cargo install svm-rs) && svm install 0.8.20 && solc --version
      - name: Install Anvil
        run: cargo install --git https://github.com/foundry-rs/foundry --rev 56b806a3ba7866a3b061093bebd0fa2ace97f1fc --locked anvil --force
-      - name: Build wasm package for nodejs target.
-        run: |
-          wasm-pack build --target nodejs --out-dir ./tests/wasm/nodejs . -- -Z build-std="panic_abort,std"
      - name: KZG prove and verify tests (EVM)
        run: cargo nextest run --verbose "tests_evm::kzg_evm_prove_and_verify_::" --test-threads 1
      # - name: KZG prove and verify tests (EVM + reusable verifier + col-overflow)
@@ -443,40 +366,6 @@
      - name: KZG prove and verify tests (EVM + hashed outputs)
        run: cargo nextest run --verbose tests_evm::kzg_evm_hashed_output_prove_and_verify --test-threads 1

-  # prove-and-verify-tests-metal:
-  #   permissions:
-  #     contents: read
-  #   runs-on: macos-13
-  #   # needs: [build, library-tests, docs]
-  #   steps:
-  #     - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
-  #       with:
-  #         persist-credentials: false
-  #     - uses: actions-rust-lang/setup-rust-toolchain@fb51252c7ba57d633bc668f941da052e410add48 #v1.0.6
-  #       with:
-  #         toolchain: nightly-2025-05-01
-  #         override: true
-  #         components: rustfmt, clippy
-  #     - uses: jetli/wasm-pack-action@0d096b08b4e5a7de8c28de67e11e945404e9eefa #v0.4.0
-  #       with:
-  #         # Pin to version 0.13.1
-  #         version: 'v0.13.1'
-  #     - name: Add rust-src
-  #       run: rustup component add rust-src --toolchain nightly-2025-05-01
-  #     - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
-  #       with:
-  #         persist-credentials: false
-  #     - name: Use pnpm 8
-  #       uses: pnpm/action-setup@eae0cfeb286e66ffb5155f1a79b90583a127a68b #v2.4.1
-  #       with:
-  #         version: 8
-  #     - uses: baptiste0928/cargo-install@91c5da15570085bcde6f4d7aed98cb82d6769fd3 #v3.3.0
-  #       with:
-  #         crate: cargo-nextest
-  #         locked: true
-  #     - name: KZG prove and verify tests (public outputs)
-  #       run: cargo nextest run --features macos-metal --verbose tests::kzg_prove_and_verify_::t --no-capture

  prove-and-verify-tests:
    permissions:
      contents: read
@@ -494,13 +383,6 @@
          toolchain: nightly-2025-05-01
          override: true
          components: rustfmt, clippy
-      - uses: jetli/wasm-pack-action@0d096b08b4e5a7de8c28de67e11e945404e9eefa #v0.4.0
-        with:
-          # Pin to version 0.13.1
-          version: "v0.13.1"
-      - name: Add wasm32-unknown-unknown target
-        run: rustup target add wasm32-unknown-unknown
-
      - name: Add rust-src
        run: rustup component add rust-src --toolchain nightly-2025-05-01-x86_64-unknown-linux-gnu
      - name: Force rebuild icicle dependencies
@@ -508,30 +390,12 @@
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
        with:
          persist-credentials: false
-      - name: Use pnpm 8
-        uses: pnpm/action-setup@eae0cfeb286e66ffb5155f1a79b90583a127a68b #v2.4.1
-        with:
-          version: 8
-      - name: Use Node.js 22.17.1
-        uses: actions/setup-node@1a4442cacd436585916779262731d5b162bc6ec7 #v3.8.2
-        with:
-          node-version: "22.17.1"
-          cache: "pnpm"
-      - name: Install dependencies for js tests
-        run: |
-          pnpm install --frozen-lockfile
-        env:
-          CI: false
-          NODE_ENV: development
      - uses: baptiste0928/cargo-install@91c5da15570085bcde6f4d7aed98cb82d6769fd3 #v3.3.0
        with:
          crate: cargo-nextest
          locked: true
      # - name: Build wasm package for nodejs target.
      #   run: |
      #     wasm-pack build --target nodejs --out-dir ./tests/wasm/nodejs . -- -Z build-std="panic_abort,std"
      - name: KZG prove and verify tests (public outputs + column overflow)
-        run: cargo nextest run --verbose tests::kzg_prove_and_verify_with_overflow_::w
+        run: cargo nextest run --verbose tests::kzg_prove_and_verify_with_overflow_::t
      - name: KZG prove and verify tests (public outputs + fixed params + column overflow)
        run: cargo nextest run --verbose tests::kzg_prove_and_verify_with_overflow_fixed_params_
      - name: KZG prove and verify tests (hashed inputs + column overflow)
@@ -593,7 +457,7 @@
      - name: KZG prove and verify tests (kzg outputs)
        run: cargo nextest run --verbose tests::kzg_prove_and_verify_kzg_output --features gpu-accelerated --test-threads 1
      - name: KZG prove and verify tests (public outputs + column overflow)
-        run: cargo nextest run --verbose tests::kzg_prove_and_verify_with_overflow_::w --features gpu-accelerated --test-threads 1
+        run: cargo nextest run --verbose tests::kzg_prove_and_verify_with_overflow_::t --features gpu-accelerated --test-threads 1
      - name: KZG prove and verify tests (public outputs + fixed params + column overflow)
        run: cargo nextest run --verbose tests::kzg_prove_and_verify_with_overflow_fixed_params_ --features gpu-accelerated --test-threads 1
      - name: KZG prove and verify tests (public outputs)
@@ -605,119 +469,6 @@
      - name: KZG prove and verify tests (hashed outputs)
        run: cargo nextest run --verbose tests::kzg_prove_and_verify_hashed --features gpu-accelerated --test-threads 1

-  prove-and-verify-mock-aggr-tests:
-    permissions:
-      contents: read
-    runs-on: self-hosted
-    needs: [build, library-tests, docs, python-tests, python-integration-tests]
-    env:
-      EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
-
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
-        with:
-          persist-credentials: false
-
-      - uses: dtolnay/rust-toolchain@4f94fbe7e03939b0e674bcc9ca609a16088f63ff #nightly branch, TODO: update when required
-        with:
-          toolchain: nightly-2025-05-01
-          override: true
-          components: rustfmt, clippy
-      - uses: baptiste0928/cargo-install@91c5da15570085bcde6f4d7aed98cb82d6769fd3 #v3.3.0
-        with:
-          crate: cargo-nextest
-          locked: true
-      - name: Mock aggr tests (KZG)
-        run: cargo nextest run --verbose tests_aggr::kzg_aggr_mock_prove_and_verify_ --test-threads 8
-
-  prove-and-verify-aggr-tests-gpu:
-    permissions:
-      contents: read
-    runs-on: gpu
-    needs: [build, library-tests, docs, python-tests, python-integration-tests]
-    env:
-      ENABLE_ICICLE_GPU: true
-      EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
-      RUSTFLAGS: "-C linker=gcc"
-      OPENSSL_NO_VENDOR: 1
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
-        with:
-          persist-credentials: false
-      - uses: actions-rust-lang/setup-rust-toolchain@fb51252c7ba57d633bc668f941da052e410add48 #v1.0.6
-        with:
-          toolchain: nightly-2025-05-01
-          override: true
-          components: rustfmt, clippy
-      - uses: baptiste0928/cargo-install@91c5da15570085bcde6f4d7aed98cb82d6769fd3 #v3.3.0
-        with:
-          crate: cargo-nextest
-          locked: true
-      - name: Setup GPU dependencies
-        run: sudo ./setup-gpu.sh --yes
-      - name: Install build dependencies
-        run: |
-          sudo apt-get update
-          sudo apt-get install -y build-essential g++ gcc cmake libclang-dev llvm-dev libstdc++-12-dev libc6 libc6-dev libssl-dev pkg-config
-      - name: Force rebuild icicle dependencies
-        run: cargo clean -p icicle-runtime -p icicle-core -p icicle-hash -p icicle-bn254
-      - name: KZG tests
-        run: cargo nextest run --verbose tests_aggr::kzg_aggr_prove_and_verify_ --features gpu-accelerated --test-threads 1 -- --include-ignored
-
-  prove-and-verify-aggr-tests:
-    permissions:
-      contents: read
-    runs-on: large-self-hosted
-    needs: [build, library-tests, docs, python-tests, python-integration-tests]
-    env:
-      EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
-
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
-        with:
-          persist-credentials: false
-
-      - uses: actions-rust-lang/setup-rust-toolchain@fb51252c7ba57d633bc668f941da052e410add48 #v1.0.6
-        with:
-          toolchain: nightly-2025-05-01
-          override: true
-          components: rustfmt, clippy
-      - uses: baptiste0928/cargo-install@91c5da15570085bcde6f4d7aed98cb82d6769fd3 #v3.3.0
-        with:
-          crate: cargo-nextest
-          locked: true
-      - name: KZG tests
-        run: cargo nextest run --verbose tests_aggr::kzg_aggr_prove_and_verify_ --test-threads 4 -- --include-ignored
-
-  prove-and-verify-aggr-evm-tests:
-    permissions:
-      contents: read
-    runs-on: large-self-hosted
-    needs: [build, library-tests, docs, python-tests, python-integration-tests]
-    env:
-      EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
-
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
-        with:
-          persist-credentials: false
-
-      - uses: actions-rust-lang/setup-rust-toolchain@fb51252c7ba57d633bc668f941da052e410add48 #v1.0.6
-        with:
-          toolchain: nightly-2025-05-01
-          override: true
-          components: rustfmt, clippy
-      - uses: baptiste0928/cargo-install@91c5da15570085bcde6f4d7aed98cb82d6769fd3 #v3.3.0
-        with:
-          crate: cargo-nextest
-          locked: true
-      # - name: Install solc
-      #   run: (hash svm 2>/dev/null || cargo install svm-rs) && svm install 0.8.20 && solc --version
-      - name: Install Anvil
-        run: cargo install --git https://github.com/foundry-rs/foundry --rev 56b806a3ba7866a3b061093bebd0fa2ace97f1fc --locked anvil --force
-      - name: KZG prove and verify aggr tests
-        run: cargo nextest run --verbose tests_evm::kzg_evm_aggr_prove_and_verify_::t --test-threads 4 -- --include-ignored

  examples:
    permissions:
      contents: read
@@ -789,7 +540,7 @@
    permissions:
      contents: read
    runs-on: [non-gpu, non-sgx]
-    needs: [build, library-tests, docs, python-tests, python-integration-tests]
+    needs: [build, library-tests, docs]
    env:
      EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}

@@ -885,99 +636,3 @@
      # run: source .env/bin/activate; cargo nextest run py_tests::tests::reusable_verifier_ --no-capture
      - name: Reusable verifier tutorial
        run: source .env/bin/activate; cargo nextest run py_tests::tests::reusable_verifier_ --no-capture --test-threads 1
-
-  ios-integration-tests:
-    permissions:
-      contents: read
-    runs-on: macos-latest
-    env:
-      EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
-      RUSTFLAGS: "-C linker=gcc"
-      OPENSSL_NO_VENDOR: 1
-
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
-        with:
-          persist-credentials: false
-
-      - uses: actions-rust-lang/setup-rust-toolchain@fb51252c7ba57d633bc668f941da052e410add48 #v1.0.6
-        with:
-          toolchain: nightly-2025-05-01
-          override: true
-          components: rustfmt, clippy
-      - uses: baptiste0928/cargo-install@91c5da15570085bcde6f4d7aed98cb82d6769fd3 #v3.3.0
-        with:
-          crate: cargo-nextest
-          locked: true
-      - name: Force rebuild icicle dependencies
-        run: cargo clean -p icicle-runtime -p icicle-core -p icicle-hash -p icicle-bn254
-      - name: Run ios tests
-        run: CARGO_BUILD_TARGET=aarch64-apple-darwin RUSTUP_TOOLCHAIN=nightly-2025-05-01-aarch64-apple-darwin cargo test --test ios_integration_tests --features ios-bindings-test --no-default-features
-
-  swift-package-tests:
-    permissions:
-      contents: read
-    runs-on: macos-latest
-    needs: [ios-integration-tests]
-
-    env:
-      EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
-      RUSTFLAGS: "-C linker=gcc"
-      OPENSSL_NO_VENDOR: 1
-
-    steps:
-      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
-        with:
-          persist-credentials: false
-
-      - uses: actions-rust-lang/setup-rust-toolchain@fb51252c7ba57d633bc668f941da052e410add48 #v1.0.6
-        with:
-          toolchain: nightly-2025-05-01
-          override: true
-          components: rustfmt, clippy
-      - name: Force rebuild icicle dependencies
-        run: cargo clean -p icicle-runtime -p icicle-core -p icicle-hash -p icicle-bn254
-      - name: Build EzklCoreBindings
-        run: CONFIGURATION=debug cargo run --bin ios_gen_bindings --features "ios-bindings uuid camino uniffi_bindgen" --no-default-features
-
-      - name: Clone ezkl-swift- repository
-        run: |
-          git clone https://github.com/zkonduit/ezkl-swift-package.git
-
-      - name: Copy EzklCoreBindings
-        run: |
-          rm -rf ezkl-swift-package/Sources/EzklCoreBindings
-          cp -r build/EzklCoreBindings ezkl-swift-package/Sources/
-
-      - name: Copy Test Files
-        run: |
-          rm -rf ezkl-swift-package/Tests/EzklAssets/
-          mkdir -p ezkl-swift-package/Tests/EzklAssets/
-          cp tests/assets/kzg ezkl-swift-package/Tests/EzklAssets/kzg.srs
-          cp tests/assets/input.json ezkl-swift-package/Tests/EzklAssets/input.json
-          cp tests/assets/model.compiled ezkl-swift-package/Tests/EzklAssets/network.ezkl
-          cp tests/assets/settings.json ezkl-swift-package/Tests/EzklAssets/settings.json
-
-      - name: Set up Xcode environment
-        run: |
-          sudo xcode-select -s /Applications/Xcode.app/Contents/Developer
-          sudo xcodebuild -license accept
-
-      - name: Run Package Tests
-        run: |
-          cd ezkl-swift-package
-          xcodebuild test \
-            -scheme EzklPackage \
-            -destination 'platform=iOS Simulator,name=iPhone 16 Pro,OS=18.4' \
-            -resultBundlePath ../testResults
-
-      - name: Run Example App Tests
-        run: |
-          cd ezkl-swift-package/Example
-          xcodebuild test \
-            -project Example.xcodeproj \
-            -scheme EzklApp \
-            -destination 'platform=iOS Simulator,name=iPhone 16 Pro,OS=18.4' \
-            -parallel-testing-enabled NO \
-            -resultBundlePath ../../exampleTestResults \
-            -skip-testing:EzklAppUITests/EzklAppUITests/testButtonClicksInOrder
134 .github/workflows/swift-pm.yml (vendored)
@@ -1,134 +0,0 @@
name: Build and Publish EZKL iOS SPM package

on:
  push:
    tags:
      # Only support SemVer versioning tags
      - 'v[0-9]+.[0-9]+.[0-9]+'
      - '[0-9]+.[0-9]+.[0-9]+'

jobs:
  build-and-update:
    permissions:
      contents: read
      packages: write
    runs-on: macos-latest
    env:
      EZKL_SWIFT_PACKAGE_REPO: github.com/zkonduit/ezkl-swift-package.git
      RELEASE_TAG: ${{ github.ref_name }}

    steps:
      - name: Checkout EZKL
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
        with:
          persist-credentials: false

      - name: Extract TAG from github.ref_name
        run: |
          # github.ref_name is provided by GitHub Actions and contains the tag name directly.
          TAG="${RELEASE_TAG}"
          echo "Original TAG: $TAG"
          # Remove leading 'v' if present to match the Swift Package Manager version format.
          NEW_TAG=${TAG#v}
          echo "Stripped TAG: $NEW_TAG"
          echo "TAG=$NEW_TAG" >> $GITHUB_ENV

      - name: Install Rust (nightly)
        uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
        with:
          toolchain: nightly
          override: true

      - name: Build EzklCoreBindings
        run: CONFIGURATION=release cargo run --bin ios_gen_bindings --features "ios-bindings uuid camino uniffi_bindgen" --no-default-features

      - name: Clone ezkl-swift-package repository
        run: |
          git clone https://${{ env.EZKL_SWIFT_PACKAGE_REPO }}

      - name: Copy EzklCoreBindings
        run: |
          rm -rf ezkl-swift-package/Sources/EzklCoreBindings
          cp -r build/EzklCoreBindings ezkl-swift-package/Sources/

      - name: Copy Test Files
        run: |
          rm -rf ezkl-swift-package/Tests/EzklAssets/
          mkdir -p ezkl-swift-package/Tests/EzklAssets/
          cp tests/assets/kzg ezkl-swift-package/Tests/EzklAssets/kzg.srs
          cp tests/assets/input.json ezkl-swift-package/Tests/EzklAssets/input.json
          cp tests/assets/model.compiled ezkl-swift-package/Tests/EzklAssets/network.ezkl
          cp tests/assets/settings.json ezkl-swift-package/Tests/EzklAssets/settings.json

      - name: Check for changes
        id: check_changes
        run: |
          cd ezkl-swift-package
          if git diff --quiet Sources/EzklCoreBindings Tests/EzklAssets; then
            echo "no_changes=true" >> $GITHUB_OUTPUT
          else
            echo "no_changes=false" >> $GITHUB_OUTPUT
          fi

      - name: Set up Xcode environment
        if: steps.check_changes.outputs.no_changes == 'false'
        run: |
          sudo xcode-select -s /Applications/Xcode.app/Contents/Developer
          sudo xcodebuild -license accept

      - name: Run Package Tests
        if: steps.check_changes.outputs.no_changes == 'false'
        run: |
          cd ezkl-swift-package
          xcodebuild test \
            -scheme EzklPackage \
            -destination 'platform=iOS Simulator,name=iPhone 15 Pro,OS=17.5' \
            -resultBundlePath ../testResults

      - name: Run Example App Tests
        if: steps.check_changes.outputs.no_changes == 'false'
        run: |
          cd ezkl-swift-package/Example
          xcodebuild test \
            -project Example.xcodeproj \
            -scheme EzklApp \
            -destination 'platform=iOS Simulator,name=iPhone 15 Pro,OS=17.5' \
            -parallel-testing-enabled NO \
            -resultBundlePath ../../exampleTestResults \
            -skip-testing:EzklAppUITests/EzklAppUITests/testButtonClicksInOrder

      - name: Setup Git
        run: |
          cd ezkl-swift-package
          git config user.name "GitHub Action"
          git config user.email "action@github.com"
          git remote set-url origin https://zkonduit:${EZKL_SWIFT_PACKAGE_REPO_TOKEN}@${{ env.EZKL_SWIFT_PACKAGE_REPO }}
        env:
          EZKL_SWIFT_PACKAGE_REPO_TOKEN: ${{ secrets.EZKL_PORTER_TOKEN }}

      - name: Commit and Push Changes
        if: steps.check_changes.outputs.no_changes == 'false'
        run: |
          cd ezkl-swift-package
          git add Sources/EzklCoreBindings Tests/EzklAssets
          git commit -m "Automatically updated EzklCoreBindings for EZKL"
          if ! git push origin; then
            echo "::error::Failed to push changes to ${{ env.EZKL_SWIFT_PACKAGE_REPO }}. Please ensure that EZKL_PORTER_TOKEN has the correct permissions."
            exit 1
          fi

      - name: Tag the latest commit
        run: |
          cd ezkl-swift-package
          source $GITHUB_ENV
          # Tag the latest commit on the current branch
          if git rev-parse "$TAG" >/dev/null 2>&1; then
            echo "Tag $TAG already exists locally. Skipping tag creation."
          else
            git tag "$TAG"
          fi

          if ! git push origin "$TAG"; then
            echo "::error::Failed to push tag '$TAG' to ${{ env.EZKL_SWIFT_PACKAGE_REPO }}. Please ensure EZKL_PORTER_TOKEN has correct permissions."
            exit 1
          fi
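The "Extract TAG" step in the deleted workflow above relied on shell parameter expansion: ${TAG#v} deletes a single leading "v" if present, so tags like v1.2.3 and 1.2.3 both map to the SPM-style version 1.2.3. An equivalent sketch in Python, for illustration only (the workflow itself used bash):

    def spm_version(tag: str) -> str:
        # Mirror bash's ${TAG#v}: drop one leading 'v' if present, else leave unchanged
        return tag[1:] if tag.startswith("v") else tag

    assert spm_version("v1.2.3") == "1.2.3"
    assert spm_version("1.2.3") == "1.2.3"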
13 Cargo.lock (generated)
@@ -1941,7 +1941,6 @@ version = "0.0.0"
dependencies = [
 "alloy",
 "bincode",
- "camino",
 "chrono",
 "clap",
 "clap_complete",
@@ -1996,9 +1995,7 @@
 "tosubcommand",
 "tract-onnx",
 "uniffi",
 "uniffi_bindgen",
 "unzip-n",
 "uuid",
 "wasm-bindgen",
 "wasm-bindgen-console-logger",
 "wasm-bindgen-rayon",
@@ -6397,7 +6394,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f31bff6daf87277a9014bcdefbc2842b0553392919d1096843c5aad899ca4588"
dependencies = [
 "anyhow",
- "uniffi_bindgen",
 "uniffi_build",
 "uniffi_core",
 "uniffi_macros",
@@ -6580,15 +6576,6 @@ version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"

-[[package]]
-name = "uuid"
-version = "1.16.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "458f7a779bf54acc9f347480ac654f68407d3aab21269a6e3c9f922acd9e2da9"
-dependencies = [
- "getrandom 0.3.2",
-]
-
[[package]]
name = "valuable"
version = "0.1.1"
20 Cargo.toml
@@ -96,13 +96,6 @@ objc = { version = "0.2.4", optional = true }
pyo3-stub-gen = { version = "0.6.0", optional = true }
jemallocator = { version = "0.5", optional = true }
mimalloc = { version = "0.1", optional = true }
# universal bindings
uniffi = { version = "=0.28.0", optional = true }
getrandom = { version = "0.2.8", optional = true }
uniffi_bindgen = { version = "=0.28.0", optional = true }
camino = { version = "^1.1", optional = true }
uuid = { version = "1.10.0", features = ["v4"], optional = true }

# GPU / device related things (optional - only enabled with gpu-accelerated feature)
icicle-runtime = { git = "https://github.com/ingonyama-zk/icicle", branch="emir/gate_eval_2", package="icicle-runtime", optional = true }

@@ -215,9 +208,7 @@ test = false
bench = false
required-features = ["ezkl"]

[[bin]]
name = "ios_gen_bindings"
required-features = ["ios-bindings", "uuid", "camino", "uniffi_bindgen"]


[[bin]]
name = "py_stub_gen"
@@ -236,16 +227,7 @@ default = [
]
onnx = ["dep:tract-onnx"]
python-bindings = ["pyo3", "pyo3-log", "pyo3-async-runtimes", "pyo3-stub-gen"]
-universal-bindings = [
-    "uniffi",
-    "mv-lookup",
-    "precompute-coset",
-    "parallel-poly-read",
-    "dep:halo2_solidity_verifier"
-]
logging = ["dep:colored", "dep:env_logger", "dep:chrono"]
-ios-bindings = ["universal-bindings"]
-ios-bindings-test = ["ios-bindings", "uniffi/bindgen-tests"]
ezkl = [
"onnx",
"tabled/color",
@@ -4,7 +4,6 @@ use ezkl::circuit::*;
use ezkl::pfsys::create_keys;
use ezkl::pfsys::create_proof_circuit;
use ezkl::pfsys::srs::gen_srs;
-use ezkl::pfsys::TranscriptType;
use ezkl::tensor::*;
use halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme;
use halo2_proofs::poly::kzg::multiopen::ProverSHPLONK;
@@ -153,8 +152,6 @@ fn runcnvrl(c: &mut Criterion) {
&params,
&pk,
CheckMode::UNSAFE,
ezkl::Commitments::KZG,
-TranscriptType::EVM,
-None,
None,
);
@@ -2,7 +2,6 @@ use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Through
use ezkl::circuit::poly::PolyOp;
use ezkl::circuit::*;
use ezkl::pfsys::create_proof_circuit;
-use ezkl::pfsys::TranscriptType;
use ezkl::pfsys::{create_keys, srs::gen_srs};
use ezkl::tensor::*;
use halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme;
@@ -120,8 +119,6 @@ fn rundot(c: &mut Criterion) {
&params,
&pk,
CheckMode::UNSAFE,
ezkl::Commitments::KZG,
-TranscriptType::EVM,
-None,
None,
);
@@ -7,7 +7,7 @@ use ezkl::circuit::einsum::circuit_params::SingleEinsumParams;
use ezkl::circuit::poly::PolyOp;
use ezkl::circuit::*;
use ezkl::pfsys::srs::gen_srs;
-use ezkl::pfsys::{create_keys, create_proof_circuit, TranscriptType};
+use ezkl::pfsys::{create_keys, create_proof_circuit};
use ezkl::tensor::*;
use halo2_proofs::circuit::floor_planner::V1;
use halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme;
@@ -174,8 +174,6 @@ fn runmatmul(c: &mut Criterion) {
&params,
&pk,
CheckMode::UNSAFE,
ezkl::Commitments::KZG,
-TranscriptType::EVM,
-None,
None,
);
@@ -5,7 +5,7 @@ use ezkl::circuit::*;
use ezkl::circuit::lookup::LookupOp;
use ezkl::circuit::poly::PolyOp;
use ezkl::pfsys::create_proof_circuit;
-use ezkl::pfsys::TranscriptType;
+
use ezkl::pfsys::{create_keys, srs::gen_srs};
use ezkl::tensor::*;
use halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme;
@@ -154,8 +154,6 @@ fn runmatmul(c: &mut Criterion) {
&params,
&pk,
CheckMode::UNSAFE,
ezkl::Commitments::KZG,
-TranscriptType::EVM,
-None,
None,
);
@@ -5,7 +5,7 @@ use ezkl::circuit::lookup::LookupOp;
use ezkl::circuit::poly::PolyOp;
use ezkl::circuit::table::Range;
use ezkl::pfsys::create_proof_circuit;
-use ezkl::pfsys::TranscriptType;
+
use ezkl::pfsys::{create_keys, srs::gen_srs};
use ezkl::tensor::*;
use halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme;
@@ -157,8 +157,6 @@ fn runmatmul(c: &mut Criterion) {
&params,
&pk,
CheckMode::UNSAFE,
ezkl::Commitments::KZG,
-TranscriptType::EVM,
-None,
None,
);
@@ -2,7 +2,7 @@ use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Through
use ezkl::circuit::poly::PolyOp;
use ezkl::circuit::*;
use ezkl::pfsys::create_proof_circuit;
-use ezkl::pfsys::TranscriptType;
+
use ezkl::pfsys::{create_keys, srs::gen_srs};
use ezkl::tensor::*;
use halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme;
@@ -116,8 +116,6 @@ fn runsum(c: &mut Criterion) {
&params,
&pk,
CheckMode::UNSAFE,
ezkl::Commitments::KZG,
-TranscriptType::EVM,
-None,
None,
);
@@ -4,7 +4,7 @@ use ezkl::circuit::*;
use ezkl::pfsys::create_keys;
use ezkl::pfsys::create_proof_circuit;
use ezkl::pfsys::srs::gen_srs;
-use ezkl::pfsys::TranscriptType;
+
use ezkl::tensor::*;
use halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme;
use halo2_proofs::poly::kzg::multiopen::ProverSHPLONK;
@@ -131,8 +131,6 @@ fn runsumpool(c: &mut Criterion) {
&params,
&pk,
CheckMode::UNSAFE,
ezkl::Commitments::KZG,
-TranscriptType::EVM,
-None,
None,
);
@@ -2,7 +2,7 @@ use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion, Through
use ezkl::circuit::poly::PolyOp;
use ezkl::circuit::*;
use ezkl::pfsys::create_proof_circuit;
-use ezkl::pfsys::TranscriptType;
+
use ezkl::pfsys::{create_keys, srs::gen_srs};
use ezkl::tensor::*;
use halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme;
@@ -118,8 +118,6 @@ fn runadd(c: &mut Criterion) {
&params,
&pk,
CheckMode::UNSAFE,
ezkl::Commitments::KZG,
-TranscriptType::EVM,
-None,
None,
);
@@ -3,7 +3,7 @@ use ezkl::circuit::poly::PolyOp;
use ezkl::circuit::region::RegionCtx;
use ezkl::circuit::*;
use ezkl::pfsys::create_proof_circuit;
-use ezkl::pfsys::TranscriptType;
+
use ezkl::pfsys::{create_keys, srs::gen_srs};
use ezkl::tensor::*;
use halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme;
@@ -117,8 +117,6 @@ fn runpow(c: &mut Criterion) {
&params,
&pk,
CheckMode::UNSAFE,
ezkl::Commitments::KZG,
-TranscriptType::EVM,
-None,
None,
);
@@ -8,7 +8,7 @@ use ezkl::circuit::*;
use ezkl::pfsys::create_keys;
use ezkl::pfsys::create_proof_circuit;
use ezkl::pfsys::srs::gen_srs;
-use ezkl::pfsys::TranscriptType;
+
use ezkl::tensor::*;
use halo2_proofs::circuit::Value;
use halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme;
@@ -104,8 +104,6 @@ fn runposeidon(c: &mut Criterion) {
&params,
&pk,
CheckMode::UNSAFE,
ezkl::Commitments::KZG,
-TranscriptType::EVM,
-None,
None,
);
@@ -4,7 +4,7 @@ use ezkl::circuit::region::RegionCtx;
use ezkl::circuit::{BaseConfig as Config, CheckMode};
use ezkl::fieldutils::IntegerRep;
use ezkl::pfsys::create_proof_circuit;
-use ezkl::pfsys::TranscriptType;
+
use ezkl::pfsys::{create_keys, srs::gen_srs};
use ezkl::tensor::*;
use halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme;
@@ -130,8 +130,6 @@ fn runrelu(c: &mut Criterion) {
&params,
&pk,
CheckMode::UNSAFE,
ezkl::Commitments::KZG,
-TranscriptType::EVM,
-None,
None,
);
@@ -4,7 +4,7 @@ use ezkl::circuit::table::Range;
use ezkl::circuit::{ops::lookup::LookupOp, BaseConfig as Config, CheckMode};
use ezkl::fieldutils::IntegerRep;
use ezkl::pfsys::create_proof_circuit;
-use ezkl::pfsys::TranscriptType;
+
use ezkl::pfsys::{create_keys, srs::gen_srs};
use ezkl::tensor::*;
use halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme;
@@ -124,8 +124,6 @@ fn runrelu(c: &mut Criterion) {
&params,
&pk,
CheckMode::UNSAFE,
ezkl::Commitments::KZG,
-TranscriptType::EVM,
-None,
None,
);
4 build.rs
@@ -1,7 +1,3 @@
fn main() {
-    if cfg!(feature = "ios-bindings-test") {
-        println!("cargo::rustc-env=UNIFFI_CARGO_BUILD_EXTRA_ARGS=--features=ios-bindings --no-default-features");
-    }
-
    println!("cargo::rerun-if-changed=build.rs");
}
@@ -253,8 +253,6 @@
" compiled_model_path,\n",
" pk_path,\n",
" proof_path,\n",
-" \n",
-" \"single\",\n",
" )\n",
"\n",
"print(res)\n",
@@ -303,4 +301,4 @@
},
"nbformat": 4,
"nbformat_minor": 5
-}
+}
@@ -546,7 +546,7 @@
"\n",
"proof_path = os.path.join('proof.json')\n",
"\n",
-"proof = ezkl.prove(proof_type=\"single\", proof_path=proof_path)\n",
+"proof = ezkl.prove(proof_path=proof_path)\n",
"\n",
"print(proof)\n",
"assert os.path.isfile(proof_path)"
@@ -736,4 +736,4 @@
},
"nbformat": 4,
"nbformat_minor": 0
-}
+}
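Across these notebook diffs, proofs are now generated without the explicit proof_type argument; "single" appears to be the default. A hedged sketch of the updated call (argument names taken from the notebooks; not verified against the full ezkl Python API, and it assumes the usual setup/witness steps have already run):

    import os
    import ezkl  # assumes the ezkl Python package is installed

    proof_path = os.path.join('proof.json')

    # previously: proof = ezkl.prove(proof_type="single", proof_path=proof_path)
    proof = ezkl.prove(proof_path=proof_path)  # "single" is now implied
    assert os.path.isfile(proof_path)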
@@ -574,7 +574,7 @@
"\n",
"proof_path = os.path.join('proof.json')\n",
"\n",
-"proof = ezkl.prove(proof_type=\"single\", proof_path=proof_path)\n",
+"proof = ezkl.prove(proof_path=proof_path)\n",
"\n",
"print(proof)\n",
"assert os.path.isfile(proof_path)"
@@ -768,4 +768,4 @@
},
"nbformat": 4,
"nbformat_minor": 0
-}
+}
@@ -54,7 +54,7 @@
"    gip_run_args.param_scale = 19\n",
"    gip_run_args.logrows = 8\n",
"    run_args = ezkl.gen_settings(py_run_args=gip_run_args)\n",
-"    await ezkl.get_srs(commitment=ezkl.PyCommitments.KZG)\n",
+"    await ezkl.get_srs()\n",
"    ezkl.compile_circuit()\n",
"    res = ezkl.gen_witness()\n",
"    print(res)\n",
@@ -127,4 +127,4 @@
},
"nbformat": 4,
"nbformat_minor": 2
-}
+}
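The same simplification applies to fetching the structured reference string: the commitment argument is dropped, with KZG apparently the default. Notebook-style sketch (top-level await, as in the diff above; the default is an assumption inferred from these diffs, not confirmed against the ezkl API):

    # previously: await ezkl.get_srs(commitment=ezkl.PyCommitments.KZG)
    await ezkl.get_srs()  # KZG is assumed to be the default commitment scheme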
@@ -105,7 +105,7 @@
"\n",
"class GCNConv(Module):\n",
"    def __init__(self, in_channels, out_channels):\n",
-"        super(GCNConv, self).__init__()  # \"Add\" aggregation.\n",
+"        super(GCNConv, self).__init__() \n",
"        self.lin = torch.nn.Linear(in_channels, out_channels)\n",
"\n",
"        self.reset_parameters()\n",
@@ -563,7 +563,6 @@
" compiled_model_path,\n",
" pk_path,\n",
" proof_path,\n",
-" \"single\",\n",
" )\n",
"\n",
"print(res)\n",
@@ -625,4 +624,4 @@
},
"nbformat": 4,
"nbformat_minor": 5
-}
+}
@@ -286,8 +286,6 @@
" compiled_model_path,\n",
" pk_path,\n",
" proof_path,\n",
-" \n",
-" \"single\",\n",
" )\n",
"\n",
"print(res)\n",
@@ -341,4 +339,4 @@
},
"nbformat": 4,
"nbformat_minor": 5
-}
+}
@@ -248,7 +248,7 @@
},
{
"cell_type": "code",
-"execution_count": 10,
+"execution_count": null,
"id": "c384cbc8",
"metadata": {},
"outputs": [],
@@ -263,8 +263,6 @@
" compiled_model_path,\n",
" pk_path,\n",
" proof_path,\n",
-" \n",
-" \"single\",\n",
" )\n",
"\n",
"print(res)\n",
@@ -313,4 +311,4 @@
},
"nbformat": 4,
"nbformat_minor": 5
-}
+}
@@ -368,7 +368,7 @@
" pk_path,\n",
" proof_path,\n",
" \n",
-" \"single\",\n",
+" ",
" )\n",
"\n",
"print(res)\n",
@@ -236,7 +236,7 @@
" pk_path,\n",
" proof_path,\n",
" \n",
-" \"single\",\n",
+" ",
" )\n",
"\n",
"print(res)\n",
@@ -240,7 +240,7 @@
" pk_path,\n",
" proof_path,\n",
" \n",
-" \"single\",\n",
+" ",
" )\n",
"\n",
"print(res)\n",
@@ -358,7 +358,7 @@
" pk_path,\n",
" proof_path,\n",
" \n",
-" \"single\",\n",
+" ",
" )\n",
"\n",
"print(res)\n",
@@ -278,7 +278,7 @@
" pk_path,\n",
" proof_path,\n",
" \n",
-" \"single\",\n",
+" ",
" )\n",
"\n",
"print(res)\n",
@@ -232,7 +232,7 @@
" pk_path,\n",
" proof_path,\n",
" \n",
-" \"single\",\n",
+" ",
" )\n",
"\n",
"print(res)\n",
@@ -442,7 +442,7 @@
" pk_path,\n",
" proof_path,\n",
" \n",
-" \"single\",\n",
+" ",
" )\n",
"\n",
"print(res)\n",
@@ -227,7 +227,7 @@
" pk_path,\n",
" proof_path,\n",
" \n",
-" \"single\",\n",
+" ",
" )\n",
"\n",
"print(res)\n",
@@ -252,7 +252,7 @@
" pk_path,\n",
" proof_path,\n",
" \n",
-" \"single\",\n",
+" ",
" )\n",
"\n",
"print(res)\n",
@@ -422,7 +422,7 @@
" compiled_model_path,\n",
" pk_path,\n",
" proof_path,\n",
-" \"single\",\n",
+" ",
" )\n",
"\n",
"print(res)\n",
@@ -378,7 +378,7 @@
" pk_path,\n",
" proof_path,\n",
" \n",
-" \"single\",\n",
+" ",
" )\n",
"\n",
"print(res)\n",
@@ -301,7 +301,7 @@
"run_args.param_scale = 0\n",
"run_args.logrows = 18\n",
"\n",
-"ezkl.get_srs(logrows=run_args.logrows, commitment=ezkl.PyCommitments.KZG)\n"
+"ezkl.get_srs(logrows=run_args.logrows, )\n"
]
},
{
@@ -399,7 +399,6 @@
" compiled_model_path,\n",
" pk_path,\n",
" proof_path,\n",
-" \"for-aggr\",\n",
" )\n",
"\n",
" print(res)\n",
@@ -438,28 +437,6 @@
" print(\"----- proving split \"+str(i))\n",
" prove_model(i)"
]
},
-{
-"cell_type": "markdown",
-"metadata": {},
-"source": [
-"You can also mock aggregate the split proofs into a single proof. This is useful if you want to verify the proof on chain at a lower cost. Here we mock aggregate the proofs to save time. You can use other notebooks to see how to aggregate in full ! "
-]
-},
-{
-"cell_type": "code",
-"execution_count": null,
-"metadata": {},
-"outputs": [],
-"source": [
-"# now mock aggregate the proofs\n",
-"# proofs = []\n",
-"# for i in range(3):\n",
-"#     proof_path = os.path.join('proof_split_'+str(i)+'.json')\n",
-"#     proofs.append(proof_path)\n",
-"\n",
-"# ezkl.mock_aggregate(proofs, logrows=26, split_proofs = True)"
-]
-}
],
"metadata": {
@@ -484,4 +461,4 @@
},
"nbformat": 4,
"nbformat_minor": 2
-}
+}
@@ -303,7 +303,7 @@
" pk_path,\n",
" proof_path,\n",
" \n",
-" \"single\",\n",
+" ",
" )\n",
"\n",
"print(res)\n",
@@ -543,7 +543,7 @@
" pk_path,\n",
" proof_path,\n",
" \n",
-" \"single\",\n",
+" ",
" )\n",
"\n",
"print(res)\n",
@@ -939,7 +939,7 @@
" pk_path,\n",
" proof_path,\n",
" \n",
-" \"single\",\n",
+" ",
" )\n",
"\n",
"print(res)\n",
@@ -234,7 +234,7 @@
"run_args.input_scale = 2\n",
"run_args.logrows = 15\n",
"\n",
-"ezkl.get_srs(logrows=run_args.logrows, commitment=ezkl.PyCommitments.KZG)"
+"ezkl.get_srs(logrows=run_args.logrows, )"
]
},
{
@@ -330,7 +330,6 @@
" compiled_model_path,\n",
" pk_path,\n",
" proof_path,\n",
-" \"for-aggr\",\n",
" )\n",
"\n",
" print(res)\n",
@@ -426,28 +425,6 @@
"for i in range(2):\n",
" prove_model(i)"
]
},
-{
-"cell_type": "markdown",
-"metadata": {},
-"source": [
-"You can also mock aggregate the split proofs into a single proof. This is useful if you want to verify the proof on chain at a lower cost. Here we mock aggregate the proofs to save time. You can use other notebooks to see how to aggregate in full ! "
-]
-},
-{
-"cell_type": "code",
-"execution_count": null,
-"metadata": {},
-"outputs": [],
-"source": [
-"# now mock aggregate the proofs\n",
-"proofs = []\n",
-"for i in range(2):\n",
-"    proof_path = os.path.join('proof_split_'+str(i)+'.json')\n",
-"    proofs.append(proof_path)\n",
-"\n",
-"ezkl.mock_aggregate(proofs, logrows=22, split_proofs = True)"
-]
-}
],
"metadata": {
@@ -472,4 +449,4 @@
},
"nbformat": 4,
"nbformat_minor": 2
-}
+}
@@ -260,7 +260,7 @@
" pk_path,\n",
" proof_path,\n",
" \n",
-" \"single\",\n",
+" ",
" )\n",
"\n",
"print(res)\n",
@@ -173,7 +173,7 @@
" assert os.path.isfile(settings_path)\n",
"\n",
" # GENERATE A PROOF\n",
-" res = ezkl.prove(witness_path, compiled_model_path, pk_path, proof_path, \"single\")\n",
+" res = ezkl.prove(witness_path, compiled_model_path, pk_path, proof_path)\n",
" assert os.path.isfile(proof_path)\n",
"\n",
" res = await ezkl.create_evm_verifier(vk_path, settings_path, sol_code_path, abi_path, reusable=True)\n",
@@ -384,7 +384,7 @@
" pk_path,\n",
" proof_path,\n",
" \n",
-" \"single\",\n",
+" ",
" )\n",
"\n",
"print(res)\n",
@@ -411,7 +411,7 @@
" pk_path,\n",
" proof_path_faulty,\n",
" \n",
-" \"single\",\n",
+" ",
" )\n",
"\n",
"print(res)\n",
@@ -438,7 +438,7 @@
" pk_path,\n",
" proof_path_truthy,\n",
" \n",
-" \"single\",\n",
+" ",
" )\n",
"\n",
"print(res)\n",
@@ -1,407 +0,0 @@
{
"cells": [
{
"attachments": {},
"cell_type": "markdown",
"id": "cf69bb3f-94e6-4dba-92cd-ce08df117d67",
"metadata": {},
"source": [
"## EZKL Jupyter Notebook Demo (Aggregated Proofs) \n",
"\n",
"Demonstrates how to use EZKL with aggregated proofs"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "95613ee9",
"metadata": {},
"outputs": [],
"source": [
"# check if notebook is in colab\n",
"try:\n",
"    # install ezkl\n",
"    import google.colab\n",
"    import subprocess\n",
"    import sys\n",
"    subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"ezkl\"])\n",
"    subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"onnx\"])\n",
"\n",
"# rely on local installation of ezkl if the notebook is not in colab\n",
"except:\n",
"    pass\n",
"\n",
"\n",
"# here we create and (potentially train a model)\n",
"\n",
"# make sure you have the dependencies required here already installed\n",
"from torch import nn\n",
"import ezkl\n",
"import os\n",
"import json\n",
"import torch\n",
"\n",
"\n",
"# Defines the model\n",
"# we got convs, we got relu, we got linear layers\n",
"# What else could one want ????\n",
"\n",
"class MyModel(nn.Module):\n",
"    def __init__(self):\n",
"        super(MyModel, self).__init__()\n",
"\n",
"        self.conv1 = nn.Conv2d(in_channels=1, out_channels=2, kernel_size=5, stride=2)\n",
"        self.conv2 = nn.Conv2d(in_channels=2, out_channels=3, kernel_size=5, stride=2)\n",
"\n",
"        self.relu = nn.ReLU()\n",
"\n",
"        self.d1 = nn.Linear(48, 48)\n",
"        self.d2 = nn.Linear(48, 10)\n",
"\n",
"    def forward(self, x):\n",
"        # 32x1x28x28 => 32x32x26x26\n",
"        x = self.conv1(x)\n",
"        x = self.relu(x)\n",
"        x = self.conv2(x)\n",
"        x = self.relu(x)\n",
"\n",
"        # flatten => 32 x (32*26*26)\n",
"        x = x.flatten(start_dim = 1)\n",
"\n",
"        # 32 x (32*26*26) => 32x128\n",
"        x = self.d1(x)\n",
"        x = self.relu(x)\n",
"\n",
"        # logits => 32x10\n",
"        logits = self.d2(x)\n",
"\n",
"        return logits\n",
"\n",
"\n",
"circuit = MyModel()\n",
"\n",
"# Train the model as you like here (skipped for brevity)\n",
"\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b37637c4",
"metadata": {},
"outputs": [],
"source": [
"model_path = os.path.join('network.onnx')\n",
"compiled_model_path = os.path.join('network.compiled')\n",
"pk_path = os.path.join('test.pk')\n",
"vk_path = os.path.join('test.vk')\n",
"proof_path = os.path.join('test.pf')\n",
"settings_path = os.path.join('settings.json')\n",
"srs_path = os.path.join('kzg.srs')\n",
"witness_path = os.path.join('witness.json')\n",
"data_path = os.path.join('input.json')\n",
"aggregate_proof_path = os.path.join('aggr.pf')\n",
"aggregate_vk_path = os.path.join('aggr.vk')\n",
"aggregate_pk_path = os.path.join('aggr.pk')"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "82db373a",
"metadata": {},
"outputs": [],
"source": [
"\n",
"shape = [1, 28, 28]\n",
"# After training, export to onnx (network.onnx) and create a data file (input.json)\n",
"x = 0.1*torch.rand(1,*shape, requires_grad=True)\n",
"\n",
"# Flips the neural net into inference mode\n",
"circuit.eval()\n",
"\n",
"    # Export the model\n",
"torch.onnx.export(circuit, # model being run\n",
"    x, # model input (or a tuple for multiple inputs)\n",
"    model_path, # where to save the model (can be a file or file-like object)\n",
"    export_params=True, # store the trained parameter weights inside the model file\n",
"    opset_version=10, # the ONNX version to export the model to\n",
"    do_constant_folding=True, # whether to execute constant folding for optimization\n",
"    input_names = ['input'], # the model's input names\n",
"    output_names = ['output'], # the model's output names\n",
"    dynamic_axes={'input' : {0 : 'batch_size'}, # variable length axes\n",
"                  'output' : {0 : 'batch_size'}})\n",
"\n",
"data_array = ((x).detach().numpy()).reshape([-1]).tolist()\n",
"\n",
"data = dict(input_data = [data_array])\n",
"\n",
"    # Serialize data into file:\n",
"json.dump( data, open(data_path, 'w' ))\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d5e374a2",
"metadata": {},
"outputs": [],
"source": [
"!RUST_LOG=trace\n",
"# TODO: Dictionary outputs\n",
"res = ezkl.gen_settings(model_path, settings_path)\n",
"assert res == True\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"cal_path = os.path.join(\"calibration.json\")\n",
"\n",
"data_array = (torch.rand(20, *shape, requires_grad=True).detach().numpy()).reshape([-1]).tolist()\n",
"\n",
"data = dict(input_data = [data_array])\n",
"\n",
"# Serialize data into file:\n",
"json.dump(data, open(cal_path, 'w'))\n",
"\n",
"\n",
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "3aa4f090",
"metadata": {},
"outputs": [],
"source": [
"res = ezkl.compile_circuit(model_path, compiled_model_path, settings_path)\n",
"assert res == True"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "8b74dcee",
"metadata": {},
"outputs": [],
"source": [
"# srs path\n",
"res = await ezkl.get_srs( settings_path)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "18c8b7c7",
"metadata": {},
"outputs": [],
"source": [
"# now generate the witness file \n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b1c561a8",
"metadata": {},
"outputs": [],
"source": [
"\n",
"# HERE WE SETUP THE CIRCUIT PARAMS\n",
"# WE GOT KEYS\n",
"# WE GOT CIRCUIT PARAMETERS\n",
"# EVERYTHING ANYONE HAS EVER NEEDED FOR ZK\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"res = ezkl.setup(\n",
|
||||
" compiled_model_path,\n",
|
||||
" vk_path,\n",
|
||||
" pk_path,\n",
|
||||
" \n",
|
||||
" )\n",
|
||||
"\n",
|
||||
"assert res == True\n",
|
||||
"assert os.path.isfile(vk_path)\n",
|
||||
"assert os.path.isfile(pk_path)\n",
|
||||
"assert os.path.isfile(settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "c384cbc8",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# GENERATE A PROOF\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"proof_path = os.path.join('test.pf')\n",
|
||||
"\n",
|
||||
"res = ezkl.prove(\n",
|
||||
" witness_path,\n",
|
||||
" compiled_model_path,\n",
|
||||
" pk_path,\n",
|
||||
" proof_path,\n",
|
||||
" \n",
|
||||
" \"for-aggr\", # IMPORTANT NOTE: To produce an aggregated EVM proof you will want to use poseidon for the smaller proofs\n",
|
||||
" )\n",
|
||||
"\n",
|
||||
"print(res)\n",
|
||||
"assert os.path.isfile(proof_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "76f00d41",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# VERIFY IT\n",
|
||||
"\n",
|
||||
"res = ezkl.verify(\n",
|
||||
" proof_path,\n",
|
||||
" settings_path,\n",
|
||||
" vk_path,\n",
|
||||
" )\n",
|
||||
"\n",
|
||||
"assert res == True\n",
|
||||
"print(\"verified\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "0832b909",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Generate a larger SRS. This is needed for the aggregated proof\n",
|
||||
"\n",
|
||||
"res = await ezkl.get_srs(settings_path=None, logrows=21, commitment=ezkl.PyCommitments.KZG)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "c5a64be6",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Run mock aggregate to check whether the proof works\n",
|
||||
"# Use mock to check for validity as it takes a shorter time to check compared to a full aggregated proof\n",
|
||||
"\n",
|
||||
"res = ezkl.mock_aggregate([proof_path], 21)\n",
|
||||
"assert res == True"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "fee8acc6",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Setup the vk and pk for aggregate\n",
|
||||
"res = ezkl.setup_aggregate(\n",
|
||||
" [proof_path],\n",
|
||||
" aggregate_vk_path,\n",
|
||||
" aggregate_pk_path,\n",
|
||||
" 21\n",
|
||||
")\n",
|
||||
"\n",
|
||||
"assert os.path.isfile(aggregate_vk_path)\n",
|
||||
"assert os.path.isfile(aggregate_pk_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 26,
|
||||
"id": "171702d3",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Run aggregate proof\n",
|
||||
"res = ezkl.aggregate(\n",
|
||||
" [proof_path],\n",
|
||||
" aggregate_proof_path,\n",
|
||||
" aggregate_pk_path,\n",
|
||||
" \"evm\",\n",
|
||||
" 21,\n",
|
||||
" \"safe\"\n",
|
||||
")\n",
|
||||
"\n",
|
||||
"assert os.path.isfile(aggregate_proof_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 27,
|
||||
"id": "671dfdd5",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Check if the proof is valid\n",
|
||||
"res = ezkl.verify_aggr(\n",
|
||||
" aggregate_proof_path,\n",
|
||||
" aggregate_vk_path,\n",
|
||||
" 21,\n",
|
||||
")\n",
|
||||
"assert res == True"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 28,
|
||||
"id": "50eba2f4",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Create a smart contract verifier for the aggregated proof\n",
|
||||
"\n",
|
||||
"sol_code_path = os.path.join(\"Verifier.sol\")\n",
|
||||
"abi_path = os.path.join(\"Verifier_ABI.json\")\n",
|
||||
"\n",
|
||||
"res = await ezkl.create_evm_verifier_aggr(\n",
|
||||
" [settings_path],\n",
|
||||
" aggregate_vk_path,\n",
|
||||
" sol_code_path,\n",
|
||||
" abi_path,\n",
|
||||
" logrows=21)"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3 (ipykernel)",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.12.7"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
@@ -255,7 +255,7 @@
|
||||
" pk_path,\n",
|
||||
" proof_path,\n",
|
||||
" \n",
|
||||
" \"single\",\n",
|
||||
" ",
|
||||
" )\n",
|
||||
"\n",
|
||||
"print(res)\n",
|
||||
|
||||
@@ -253,7 +253,7 @@
|
||||
" pk_path,\n",
|
||||
" proof_path,\n",
|
||||
" \n",
|
||||
" \"single\",\n",
|
||||
" ",
|
||||
" )\n",
|
||||
"\n",
|
||||
"print(res)\n",
|
||||
|
||||
@@ -254,7 +254,7 @@
|
||||
" pk_path,\n",
|
||||
" proof_path,\n",
|
||||
" \n",
|
||||
" \"single\",\n",
|
||||
" ",
|
||||
" )\n",
|
||||
"\n",
|
||||
"print(res)\n",
|
||||
|
||||
@@ -233,7 +233,7 @@
|
||||
" pk_path,\n",
|
||||
" proof_path,\n",
|
||||
" \n",
|
||||
" \"single\",\n",
|
||||
" ",
|
||||
" )\n",
|
||||
"\n",
|
||||
"print(res)\n",
|
||||
|
||||
@@ -323,7 +323,7 @@
|
||||
" pk_path,\n",
|
||||
" proof_path,\n",
|
||||
" \n",
|
||||
" \"single\",\n",
|
||||
" ",
|
||||
" )\n",
|
||||
"\n",
|
||||
"assert os.path.isfile(proof_path)\n",
|
||||
@@ -442,7 +442,7 @@
|
||||
" pk_path,\n",
|
||||
" proof_path,\n",
|
||||
" \n",
|
||||
" \"single\",\n",
|
||||
" ",
|
||||
" )\n",
|
||||
"\n",
|
||||
"print(res)\n",
|
||||
|
||||
@@ -271,7 +271,7 @@
|
||||
" pk_path,\n",
|
||||
" proof_path,\n",
|
||||
" \n",
|
||||
" \"single\",\n",
|
||||
" ",
|
||||
" )\n",
|
||||
"\n",
|
||||
"print(res)\n",
|
||||
|
||||
@@ -236,7 +236,7 @@
|
||||
" pk_path,\n",
|
||||
" proof_path,\n",
|
||||
" \n",
|
||||
" \"single\",\n",
|
||||
" ",
|
||||
" )\n",
|
||||
"\n",
|
||||
"print(res)\n",
|
||||
|
||||
@@ -707,7 +707,7 @@
|
||||
" pk_path,\n",
|
||||
" proof_path,\n",
|
||||
" \n",
|
||||
" \"single\",\n",
|
||||
" ",
|
||||
" )\n",
|
||||
"\n",
|
||||
"print(res)\n",
|
||||
|
||||
@@ -596,7 +596,7 @@
|
||||
" pk_path,\n",
|
||||
" proof_path,\n",
|
||||
" \n",
|
||||
" \"single\",\n",
|
||||
" ",
|
||||
" )\n",
|
||||
"\n",
|
||||
"print(res)\n",
|
||||
|
||||
@@ -580,7 +580,7 @@
|
||||
" compiled_filename,\n",
|
||||
" pk_path,\n",
|
||||
" proof_path,\n",
|
||||
" \"single\",\n",
|
||||
" ",
|
||||
" )\n",
|
||||
"\n",
|
||||
"\n",
|
||||
|
||||
@@ -759,7 +759,7 @@
|
||||
" pk_path,\n",
|
||||
" proof_path,\n",
|
||||
" \n",
|
||||
" \"single\",\n",
|
||||
" ",
|
||||
" )\n",
|
||||
"\n",
|
||||
"print(res)\n",
|
||||
|
||||
@@ -277,7 +277,7 @@
|
||||
" pk_path,\n",
|
||||
" proof_path,\n",
|
||||
" \n",
|
||||
" \"single\",\n",
|
||||
" ",
|
||||
" )\n",
|
||||
"\n",
|
||||
"print(res)\n",
|
||||
|
||||
@@ -1,23 +1,23 @@
|
||||
## The worm
|
||||
|
||||
|
||||
This is an onnx file for a [WormVAE](https://github.com/TuragaLab/wormvae?tab=readme-ov-file) model, which is a VAE / latent-space representation of the C. elegans connectome.
|
||||
|
||||
The model "is a large-scale latent variable model with a very high-dimensional latent space
|
||||
consisting of voltage dynamics of 300 neurons over 5 minutes of time at the simulation frequency
|
||||
of 160 Hz. The generative model for these latent variables is described by stochastic differential
|
||||
equations modeling the nonlinear dynamics of the network activity." (see [here](https://openreview.net/pdf?id=CJzi3dRlJE-)).
|
||||
|
||||
|
||||
In effect this is a generative model for a worm's voltage dynamics, which can be used to generate new worm-like voltage dynamics given previous connectome state.
|
||||
|
||||
Using ezkl you can create a zk circuit equivalent to the wormvae model, allowing you to "prove" execution of the worm model. If you're feeling particularly adventurous, you can also use the zk circuit to generate new worm-state that can be verified on chain.
|
||||
|
||||
|
||||
To do so you'll first want to fetch the files using git-lfs (as the onnx file is too large to be stored in git).
|
||||
|
||||
|
||||
```bash
|
||||
git lfs fetch --all
|
||||
```
|
||||
|
||||
You'll then want to use the usual ezkl loop to generate the zk circuit. We recommend using fixed visibility for the model parameters, as the model is quite large and this will prune the circuit significantly.
|
||||
|
||||
|
||||
```bash
|
||||
ezkl gen-settings --param-visibility=fixed
|
||||
@@ -28,17 +28,7 @@ ezkl gen-witness
|
||||
ezkl prove
|
||||
```
|
||||
|
||||
You might also need to aggregate the proof to get it to fit on chain.
|
||||
|
||||
```bash
|
||||
ezkl aggregate
|
||||
```
|
||||
|
||||
You can then create a smart contract that verifies this aggregate proof
|
||||
|
||||
```bash
|
||||
ezkl create-evm-verifier-aggr
|
||||
```
|
||||
|
||||
This can then be deployed on the chain of your choice.
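|
||||
If you prefer the Python bindings, the same loop looks roughly like this. This is a sketch with placeholder paths that mirrors the aggregated-proofs notebook in this repo; the async `ezkl.get_srs` calls for fetching the SRS are omitted:
|
||||
```python
|
||||
import ezkl
|
||||
# use a PyRunArgs with fixed param visibility for large models, per the recommendation above
|
||||
ezkl.gen_settings("network.onnx", "settings.json")
|
||||
ezkl.calibrate_settings("calibration.json", "network.onnx", "settings.json", "resources")
|
||||
ezkl.compile_circuit("network.onnx", "network.compiled", "settings.json")
|
||||
ezkl.gen_witness("input.json", "network.compiled", "witness.json")
|
||||
ezkl.setup("network.compiled", "test.vk", "test.pk")
|
||||
# "for-aggr" produces the poseidon-transcript proofs that aggregation expects
|
||||
ezkl.prove("witness.json", "network.compiled", "test.pk", "test.pf", "for-aggr")
|
||||
ezkl.setup_aggregate(["test.pf"], "aggr.vk", "aggr.pk", 21)
|
||||
ezkl.aggregate(["test.pf"], "aggr.pf", "aggr.pk", "evm", 21, "safe")
|
||||
```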
|
||||
|
||||
|
||||
565
ezkl.pyi
565
ezkl.pyi
@@ -10,30 +10,26 @@ class PyG1:
|
||||
r"""
|
||||
pyclass containing the struct used for G1, this is mostly a helper class
|
||||
"""
|
||||
|
||||
...
|
||||
|
||||
class PyG1Affine:
|
||||
r"""
|
||||
pyclass containing the struct used for G1
|
||||
"""
|
||||
|
||||
...
|
||||
|
||||
class PyRunArgs:
|
||||
r"""
|
||||
Python class containing the struct used for run_args
|
||||
|
||||
|
||||
Returns
|
||||
-------
|
||||
PyRunArgs
|
||||
"""
|
||||
...
|
||||
|
||||
class PyCommitments(Enum):
|
||||
r"""
|
||||
pyclass representing an enum, denoting the type of commitment
|
||||
"""
|
||||
KZG = auto()
|
||||
IPA = auto()
|
||||
...
|
||||
|
||||
class PyInputType(Enum):
|
||||
Bool = auto()
|
||||
@@ -47,57 +43,19 @@ class PyTestDataSource(Enum):
|
||||
r"""
|
||||
pyclass representing an enum
|
||||
"""
|
||||
|
||||
File = auto()
|
||||
OnChain = auto()
|
||||
|
||||
def aggregate(aggregation_snarks:typing.Sequence[str | os.PathLike | pathlib.Path],proof_path:str | os.PathLike | pathlib.Path,vk_path:str | os.PathLike | pathlib.Path,transcript:str,logrows:int,check_mode:str,split_proofs:bool,srs_path:typing.Optional[str | os.PathLike | pathlib.Path],commitment:PyCommitments) -> bool:
|
||||
r"""
|
||||
Creates an aggregated proof
|
||||
|
||||
Arguments
|
||||
---------
|
||||
aggregation_snarks: list[str]
|
||||
List of paths to the various proofs
|
||||
|
||||
proof_path: str
|
||||
Path to output the aggregated proof
|
||||
|
||||
vk_path: str
|
||||
Path to the VK file
|
||||
|
||||
transcript: str
|
||||
Proof transcript type to be used. `evm` used by default. `poseidon` is also supported
|
||||
|
||||
logrows: int
|
||||
Logrows used for aggregation circuit
|
||||
|
||||
check_mode: str
|
||||
Run sanity checks during calculations. Accepts `safe` or `unsafe`
|
||||
|
||||
split_proofs: bool
|
||||
Whether the accumulated proofs are segments of a larger circuit
|
||||
|
||||
srs_path: str
|
||||
Path to the SRS used
|
||||
|
||||
commitment: str
|
||||
Accepts "kzg" or "ipa"
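|
||||
Example (a sketch with hypothetical paths):
|
||||
`aggregate(["proof_0.pf", "proof_1.pf"], "aggr.pf", "aggr.vk", "evm", 21, "safe", False, None, PyCommitments.KZG)`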
|
||||
|
||||
Returns
|
||||
-------
|
||||
bool
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
def buffer_to_felts(buffer: typing.Sequence[int]) -> list[str]:
|
||||
r"""
|
||||
Converts a buffer to vector of field elements
|
||||
|
||||
|
||||
Arguments
|
||||
-------
|
||||
buffer: list[int]
|
||||
List of integers representing a buffer
|
||||
|
||||
|
||||
Returns
|
||||
-------
|
||||
list[str]
|
||||
@@ -105,173 +63,175 @@ def buffer_to_felts(buffer:typing.Sequence[int]) -> list[str]:
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
def calibrate_settings(
|
||||
data: str | os.PathLike | pathlib.Path,
|
||||
model: str | os.PathLike | pathlib.Path,
|
||||
settings: str | os.PathLike | pathlib.Path,
|
||||
target: str,
|
||||
lookup_safety_margin: float,
|
||||
scales: typing.Optional[typing.Sequence[int]],
|
||||
scale_rebase_multiplier: typing.Sequence[int],
|
||||
max_logrows: typing.Optional[int],
|
||||
) -> typing.Any:
|
||||
r"""
|
||||
Calibrates the circuit settings
|
||||
|
||||
|
||||
Arguments
|
||||
---------
|
||||
data: str
|
||||
Path to the calibration data
|
||||
|
||||
|
||||
model: str
|
||||
Path to the onnx file
|
||||
|
||||
|
||||
settings: str
|
||||
Path to the settings file
|
||||
|
||||
|
||||
lookup_safety_margin: float
|
||||
The lookup safety margin to use for calibration. If the max lookup is 2^k, the calibrated max lookup will be 2^k * lookup_safety_margin; larger = safer but slower
|
||||
|
||||
|
||||
scales: list[int]
|
||||
Optional scales to specifically try for calibration
|
||||
|
||||
|
||||
scale_rebase_multiplier: list[int]
|
||||
Optional scale rebase multipliers to specifically try for calibration. This is the multiplier at which we divide to return to the input scale.
|
||||
|
||||
|
||||
max_logrows: int
|
||||
Optional max logrows to use for calibration
|
||||
|
||||
|
||||
|
||||
|
||||
Returns
|
||||
-------
|
||||
bool
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
def compile_circuit(
|
||||
model: str | os.PathLike | pathlib.Path,
|
||||
compiled_circuit: str | os.PathLike | pathlib.Path,
|
||||
settings_path: str | os.PathLike | pathlib.Path,
|
||||
) -> bool:
|
||||
r"""
|
||||
Compiles the circuit for use in other steps
|
||||
|
||||
|
||||
Arguments
|
||||
---------
|
||||
model: str
|
||||
Path to the onnx model file
|
||||
|
||||
|
||||
compiled_circuit: str
|
||||
Path to output the compiled circuit
|
||||
|
||||
|
||||
settings_path: str
|
||||
Path to the settings files
|
||||
|
||||
|
||||
Returns
|
||||
-------
|
||||
bool
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
def create_evm_verifier(
|
||||
vk_path: str | os.PathLike | pathlib.Path,
|
||||
settings_path: str | os.PathLike | pathlib.Path,
|
||||
sol_code_path: str | os.PathLike | pathlib.Path,
|
||||
abi_path: str | os.PathLike | pathlib.Path,
|
||||
srs_path: typing.Optional[str | os.PathLike | pathlib.Path],
|
||||
reusable: bool,
|
||||
) -> typing.Any:
|
||||
r"""
|
||||
Creates an EVM compatible verifier; you will need solc installed in your environment to run this
|
||||
|
||||
|
||||
Arguments
|
||||
---------
|
||||
vk_path: str
|
||||
The path to the verification key file
|
||||
|
||||
|
||||
settings_path: str
|
||||
The path to the settings file
|
||||
|
||||
|
||||
sol_code_path: str
|
||||
The path to create the solidity verifier
|
||||
|
||||
|
||||
abi_path: str
|
||||
The path to create the ABI for the solidity verifier
|
||||
|
||||
|
||||
srs_path: str
|
||||
The path to the SRS file
|
||||
|
||||
|
||||
reusable: bool
|
||||
Whether the verifier should be rendered as a reusable contract. If so, then you will need to deploy the VK artifact separately which you can generate using the create_evm_vka command
|
||||
|
||||
|
||||
Returns
|
||||
-------
|
||||
bool
|
||||
"""
|
||||
...
|
||||
|
||||
def create_evm_verifier_aggr(aggregation_settings:typing.Sequence[str | os.PathLike | pathlib.Path],vk_path:str | os.PathLike | pathlib.Path,sol_code_path:str | os.PathLike | pathlib.Path,abi_path:str | os.PathLike | pathlib.Path,logrows:int,srs_path:typing.Optional[str | os.PathLike | pathlib.Path],reusable:bool) -> typing.Any:
|
||||
r"""
|
||||
Creates an EVM compatible aggregate verifier; you will need solc installed in your environment to run this
|
||||
|
||||
Arguments
|
||||
---------
|
||||
aggregation_settings: str
|
||||
path to the settings file
|
||||
|
||||
vk_path: str
|
||||
The path to load the desired verification key file
|
||||
|
||||
sol_code_path: str
|
||||
The path to the Solidity code
|
||||
|
||||
abi_path: str
|
||||
The path to output the Solidity verifier ABI
|
||||
|
||||
logrows: int
|
||||
Number of logrows used during aggregated setup
|
||||
|
||||
srs_path: str
|
||||
The path to the SRS file
|
||||
|
||||
reusable: bool
|
||||
Whether the verifier should be rendered as a reusable contract. If so, then you will need to deploy the VK artifact separately which you can generate using the create_evm_vka command
|
||||
|
||||
Returns
|
||||
-------
|
||||
bool
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
def create_evm_vka(
|
||||
vk_path: str | os.PathLike | pathlib.Path,
|
||||
settings_path: str | os.PathLike | pathlib.Path,
|
||||
vka_path: str | os.PathLike | pathlib.Path,
|
||||
srs_path: typing.Optional[str | os.PathLike | pathlib.Path],
|
||||
) -> typing.Any:
|
||||
r"""
|
||||
Creates an EVM VK artifact. This command generates a VK with circuit-specific metadata encoded in memory for use by the reusable H2 verifier.
|
||||
This is useful for deploying verifiers that would otherwise be too big to fit on chain and would have required aggregation.
|
||||
|
||||
|
||||
Arguments
|
||||
---------
|
||||
vk_path: str
|
||||
The path to the verification key file
|
||||
|
||||
|
||||
settings_path: str
|
||||
The path to the settings file
|
||||
|
||||
|
||||
vka_path: str
|
||||
The path to create the vka calldata.
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
srs_path: str
|
||||
The path to the SRS file
|
||||
|
||||
|
||||
Returns
|
||||
-------
|
||||
bool
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
def deploy_evm(
|
||||
addr_path: str | os.PathLike | pathlib.Path,
|
||||
sol_code_path: str | os.PathLike | pathlib.Path,
|
||||
rpc_url: typing.Optional[str],
|
||||
contract_type: str,
|
||||
optimizer_runs: int,
|
||||
private_key: typing.Optional[str],
|
||||
) -> typing.Any:
|
||||
r"""
|
||||
deploys the solidity verifier
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
def encode_evm_calldata(
|
||||
proof: str | os.PathLike | pathlib.Path,
|
||||
calldata: str | os.PathLike | pathlib.Path,
|
||||
addr_vk: typing.Optional[str],
|
||||
) -> list[int]:
|
||||
r"""
|
||||
Creates encoded evm calldata from a proof file
|
||||
|
||||
|
||||
Arguments
|
||||
---------
|
||||
proof: str
|
||||
Path to the proof file
|
||||
|
||||
|
||||
calldata: str
|
||||
Path to the calldata file to save
|
||||
|
||||
|
||||
addr_vk: str
|
||||
The address of the verification key contract (if the verifier key is to be rendered as a separate contract)
|
||||
|
||||
|
||||
Returns
|
||||
-------
|
||||
vec[u8]
|
||||
@@ -279,16 +239,16 @@ def encode_evm_calldata(proof:str | os.PathLike | pathlib.Path,calldata:str | os
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
def felt_to_big_endian(felt: str) -> str:
|
||||
r"""
|
||||
Converts a field element hex string to big endian
|
||||
|
||||
|
||||
Arguments
|
||||
-------
|
||||
felt: str
|
||||
The field element represented as a string
|
||||
|
||||
|
||||
|
||||
|
||||
Returns
|
||||
-------
|
||||
str
|
||||
@@ -296,54 +256,54 @@ def felt_to_big_endian(felt:str) -> str:
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
def felt_to_float(felt: str, scale: int) -> float:
|
||||
r"""
|
||||
Converts a field element hex string to a floating point number
|
||||
|
||||
|
||||
Arguments
|
||||
-------
|
||||
felt: str
|
||||
The field element represented as a string
|
||||
|
||||
|
||||
scale: float
|
||||
The scaling factor used to convert the field element into a floating point representation
|
||||
|
||||
|
||||
Returns
|
||||
-------
|
||||
float
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
def felt_to_int(felt: str) -> int:
|
||||
r"""
|
||||
Converts a field element hex string to an integer
|
||||
|
||||
|
||||
Arguments
|
||||
-------
|
||||
felt: str
|
||||
The field element represented as a string
|
||||
|
||||
|
||||
Returns
|
||||
-------
|
||||
int
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
def float_to_felt(input: float, scale: int, input_type: PyInputType) -> str:
|
||||
r"""
|
||||
Converts a floating point element to a field element hex string
|
||||
|
||||
|
||||
Arguments
|
||||
-------
|
||||
input: float
|
||||
The field element represented as a string
|
||||
|
||||
|
||||
scale: float
|
||||
The scaling factor used to quantize the float into a field element
|
||||
|
||||
|
||||
input_type: PyInputType
|
||||
The type of the input
|
||||
|
||||
|
||||
Returns
|
||||
-------
|
||||
str
|
||||
@@ -351,101 +311,97 @@ def float_to_felt(input:float,scale:int,input_type:PyInputType) -> str:
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
def gen_settings(
|
||||
model: str | os.PathLike | pathlib.Path,
|
||||
output: str | os.PathLike | pathlib.Path,
|
||||
py_run_args: typing.Optional[PyRunArgs],
|
||||
) -> bool:
|
||||
r"""
|
||||
Generates the circuit settings
|
||||
|
||||
|
||||
Arguments
|
||||
---------
|
||||
model: str
|
||||
Path to the onnx file
|
||||
|
||||
|
||||
output: str
|
||||
Path to create the settings file
|
||||
|
||||
|
||||
py_run_args: PyRunArgs
|
||||
PyRunArgs object to initialize the settings
|
||||
|
||||
|
||||
Returns
|
||||
-------
|
||||
bool
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
def gen_srs(srs_path: str | os.PathLike | pathlib.Path, logrows: int) -> None:
|
||||
r"""
|
||||
Generates the Structured Reference String (SRS); use this only for testing purposes
|
||||
|
||||
|
||||
Arguments
|
||||
---------
|
||||
srs_path: str
|
||||
Path to the create the SRS file
|
||||
|
||||
|
||||
logrows: int
|
||||
The number of logrows for the SRS file
|
||||
"""
|
||||
...
|
||||
|
||||
def gen_vk_from_pk_aggr(path_to_pk:str | os.PathLike | pathlib.Path,vk_output_path:str | os.PathLike | pathlib.Path) -> bool:
|
||||
r"""
|
||||
Generates a vk from a pk for an aggregate circuit and saves it to a file
|
||||
|
||||
Arguments
|
||||
-------
|
||||
path_to_pk: str
|
||||
Path to the proving key
|
||||
|
||||
vk_output_path: str
|
||||
Path to create the vk file
|
||||
|
||||
Returns
|
||||
-------
|
||||
bool
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
def gen_vk_from_pk_single(
|
||||
path_to_pk: str | os.PathLike | pathlib.Path,
|
||||
circuit_settings_path: str | os.PathLike | pathlib.Path,
|
||||
vk_output_path: str | os.PathLike | pathlib.Path,
|
||||
) -> bool:
|
||||
r"""
|
||||
Generates a vk from a pk for a model circuit and saves it to a file
|
||||
|
||||
|
||||
Arguments
|
||||
-------
|
||||
path_to_pk: str
|
||||
Path to the proving key
|
||||
|
||||
|
||||
circuit_settings_path: str
|
||||
Path to the circuit settings file
|
||||
|
||||
|
||||
vk_output_path: str
|
||||
Path to create the vk file
|
||||
|
||||
|
||||
Returns
|
||||
-------
|
||||
bool
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
def gen_witness(
|
||||
data: str | os.PathLike | pathlib.Path,
|
||||
model: str | os.PathLike | pathlib.Path,
|
||||
output: typing.Optional[str | os.PathLike | pathlib.Path],
|
||||
vk_path: typing.Optional[str | os.PathLike | pathlib.Path],
|
||||
srs_path: typing.Optional[str | os.PathLike | pathlib.Path],
|
||||
) -> typing.Any:
|
||||
r"""
|
||||
Runs the forward pass operation to generate a witness
|
||||
|
||||
|
||||
Arguments
|
||||
---------
|
||||
data: str
|
||||
Path to the data file
|
||||
|
||||
|
||||
model: str
|
||||
Path to the compiled model file
|
||||
|
||||
|
||||
output: str
|
||||
Path to create the witness file
|
||||
|
||||
|
||||
vk_path: str
|
||||
Path to the verification key
|
||||
|
||||
|
||||
srs_path: str
|
||||
Path to the SRS file
|
||||
|
||||
|
||||
Returns
|
||||
-------
|
||||
dict
|
||||
@@ -453,126 +409,89 @@ def gen_witness(data:str | os.PathLike | pathlib.Path,model:str | os.PathLike |
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
def get_srs(
|
||||
settings_path: typing.Optional[str | os.PathLike | pathlib.Path],
|
||||
logrows: typing.Optional[int],
|
||||
srs_path: typing.Optional[str | os.PathLike | pathlib.Path],
|
||||
) -> typing.Any:
|
||||
r"""
|
||||
Gets a public srs
|
||||
|
||||
|
||||
Arguments
|
||||
---------
|
||||
settings_path: str
|
||||
Path to the settings file
|
||||
|
||||
|
||||
logrows: int
|
||||
The number of logrows for the SRS file
|
||||
|
||||
|
||||
srs_path: str
|
||||
Path to the create the SRS file
|
||||
|
||||
commitment: str
|
||||
Specify the commitment used ("kzg", "ipa")
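|
||||
Example (sketch, as awaited in the notebook): `await get_srs(settings_path=None, logrows=21, commitment=PyCommitments.KZG)`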
|
||||
|
||||
|
||||
Returns
|
||||
-------
|
||||
bool
|
||||
"""
|
||||
...
|
||||
|
||||
def ipa_commit(message:typing.Sequence[str],vk_path:str | os.PathLike | pathlib.Path,settings_path:str | os.PathLike | pathlib.Path,srs_path:typing.Optional[str | os.PathLike | pathlib.Path]) -> list[PyG1Affine]:
|
||||
r"""
|
||||
Generate an ipa commitment.
|
||||
|
||||
Arguments
|
||||
-------
|
||||
message: list[str]
|
||||
List of field elements represented as strings
|
||||
|
||||
vk_path: str
|
||||
Path to the verification key
|
||||
|
||||
settings_path: str
|
||||
Path to the settings file
|
||||
|
||||
srs_path: str
|
||||
Path to the Structured Reference String (SRS) file
|
||||
|
||||
Returns
|
||||
-------
|
||||
list[PyG1Affine]
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
def kzg_commit(
|
||||
message: typing.Sequence[str],
|
||||
vk_path: str | os.PathLike | pathlib.Path,
|
||||
settings_path: str | os.PathLike | pathlib.Path,
|
||||
srs_path: typing.Optional[str | os.PathLike | pathlib.Path],
|
||||
) -> list[PyG1Affine]:
|
||||
r"""
|
||||
Generate a kzg commitment.
|
||||
|
||||
|
||||
Arguments
|
||||
-------
|
||||
message: list[str]
|
||||
List of field elements represented as strings
|
||||
|
||||
|
||||
vk_path: str
|
||||
Path to the verification key
|
||||
|
||||
|
||||
settings_path: str
|
||||
Path to the settings file
|
||||
|
||||
|
||||
srs_path: str
|
||||
Path to the Structured Reference String (SRS) file
|
||||
|
||||
|
||||
Returns
|
||||
-------
|
||||
list[PyG1Affine]
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
def mock(
|
||||
witness: str | os.PathLike | pathlib.Path, model: str | os.PathLike | pathlib.Path
|
||||
) -> bool:
|
||||
r"""
|
||||
Mocks the prover
|
||||
|
||||
|
||||
Arguments
|
||||
---------
|
||||
witness: str
|
||||
Path to the witness file
|
||||
|
||||
|
||||
model: str
|
||||
Path to the compiled model file
|
||||
|
||||
|
||||
Returns
|
||||
-------
|
||||
bool
|
||||
"""
|
||||
...
|
||||
|
||||
def mock_aggregate(aggregation_snarks:typing.Sequence[str | os.PathLike | pathlib.Path],logrows:int,split_proofs:bool) -> bool:
|
||||
r"""
|
||||
Mocks the aggregate prover
|
||||
|
||||
Arguments
|
||||
---------
|
||||
aggregation_snarks: list[str]
|
||||
List of paths to the relevant proof files
|
||||
|
||||
logrows: int
|
||||
Number of logrows to use for the aggregation circuit
|
||||
|
||||
split_proofs: bool
|
||||
Indicates whether the accumulated proofs are segments of a larger proof
|
||||
|
||||
Returns
|
||||
-------
|
||||
bool
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
def poseidon_hash(message: typing.Sequence[str]) -> list[str]:
|
||||
r"""
|
||||
Generate a poseidon hash.
|
||||
|
||||
|
||||
Arguments
|
||||
-------
|
||||
message: list[str]
|
||||
List of field elements represented as strings
|
||||
|
||||
|
||||
Returns
|
||||
-------
|
||||
list[str]
|
||||
@@ -580,126 +499,104 @@ def poseidon_hash(message:typing.Sequence[str]) -> list[str]:
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
def prove(
|
||||
witness: str | os.PathLike | pathlib.Path,
|
||||
model: str | os.PathLike | pathlib.Path,
|
||||
pk_path: str | os.PathLike | pathlib.Path,
|
||||
proof_path: typing.Optional[str | os.PathLike | pathlib.Path],
|
||||
srs_path: typing.Optional[str | os.PathLike | pathlib.Path],
|
||||
) -> typing.Any:
|
||||
r"""
|
||||
Runs the prover on a set of inputs
|
||||
|
||||
|
||||
Arguments
|
||||
---------
|
||||
witness: str
|
||||
Path to the witness file
|
||||
|
||||
|
||||
model: str
|
||||
Path to the compiled model file
|
||||
|
||||
|
||||
pk_path: str
|
||||
Path to the proving key file
|
||||
|
||||
|
||||
proof_path: str
|
||||
Path to create the proof file
|
||||
|
||||
proof_type: str
|
||||
Accepts `single`, `for-aggr`
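|
||||
(`for-aggr` yields poseidon-transcript proofs that can later be passed to `aggregate`, as in the aggregated-proofs notebook)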
|
||||
|
||||
|
||||
srs_path: str
|
||||
Path to the SRS file
|
||||
|
||||
|
||||
Returns
|
||||
-------
|
||||
bool
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
def setup(
|
||||
model: str | os.PathLike | pathlib.Path,
|
||||
vk_path: str | os.PathLike | pathlib.Path,
|
||||
pk_path: str | os.PathLike | pathlib.Path,
|
||||
srs_path: typing.Optional[str | os.PathLike | pathlib.Path],
|
||||
witness_path: typing.Optional[str | os.PathLike | pathlib.Path],
|
||||
disable_selector_compression: bool,
|
||||
) -> bool:
|
||||
r"""
|
||||
Runs the setup process
|
||||
|
||||
|
||||
Arguments
|
||||
---------
|
||||
model: str
|
||||
Path to the compiled model file
|
||||
|
||||
|
||||
vk_path: str
|
||||
Path to create the verification key file
|
||||
|
||||
|
||||
pk_path: str
|
||||
Path to create the proving key file
|
||||
|
||||
|
||||
srs_path: str
|
||||
Path to the SRS file
|
||||
|
||||
|
||||
witness_path: str
|
||||
Path to the witness file
|
||||
|
||||
|
||||
disable_selector_compression: bool
|
||||
Whether to disable selector compression
|
||||
|
||||
|
||||
Returns
|
||||
-------
|
||||
bool
|
||||
"""
|
||||
...
|
||||
|
||||
def setup_aggregate(sample_snarks:typing.Sequence[str | os.PathLike | pathlib.Path],vk_path:str | os.PathLike | pathlib.Path,pk_path:str | os.PathLike | pathlib.Path,logrows:int,split_proofs:bool,srs_path:typing.Optional[str | os.PathLike | pathlib.Path],disable_selector_compression:bool,commitment:PyCommitments) -> bool:
|
||||
r"""
|
||||
Runs the setup process for an aggregate setup
|
||||
|
||||
Arguments
|
||||
---------
|
||||
sample_snarks: list[str]
|
||||
List of paths to the various proofs
|
||||
|
||||
vk_path: str
|
||||
Path to create the aggregated VK
|
||||
|
||||
pk_path: str
|
||||
Path to create the aggregated PK
|
||||
|
||||
logrows: int
|
||||
Number of logrows to use
|
||||
|
||||
split_proofs: bool
|
||||
Whether the accumulated proofs are segments of a larger proof
|
||||
|
||||
srs_path: str
|
||||
Path to the SRS file
|
||||
|
||||
disable_selector_compression: bool
|
||||
Whether to disable selector compression
|
||||
|
||||
commitment: str
|
||||
Accepts `kzg`, `ipa`
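|
||||
Example (sketch, mirroring the notebook): `setup_aggregate(["test.pf"], "aggr.vk", "aggr.pk", 21)`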
|
||||
|
||||
Returns
|
||||
-------
|
||||
bool
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
|
||||
def swap_proof_commitments(
|
||||
proof_path: str | os.PathLike | pathlib.Path,
|
||||
witness_path: str | os.PathLike | pathlib.Path,
|
||||
) -> None:
|
||||
r"""
|
||||
Swap the commitments in a proof
|
||||
|
||||
|
||||
Arguments
|
||||
-------
|
||||
proof_path: str
|
||||
Path to the proof file
|
||||
|
||||
|
||||
witness_path: str
|
||||
Path to the witness file
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
def table(
|
||||
model: str | os.PathLike | pathlib.Path, py_run_args: typing.Optional[PyRunArgs]
|
||||
) -> str:
|
||||
r"""
|
||||
Displays the table as a string in python
|
||||
|
||||
|
||||
Arguments
|
||||
---------
|
||||
model: str
|
||||
Path to the onnx file
|
||||
|
||||
|
||||
Returns
|
||||
---------
|
||||
str
|
||||
@@ -707,78 +604,59 @@ def table(model:str | os.PathLike | pathlib.Path,py_run_args:typing.Optional[PyR
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
def verify(
|
||||
proof_path: str | os.PathLike | pathlib.Path,
|
||||
settings_path: str | os.PathLike | pathlib.Path,
|
||||
vk_path: str | os.PathLike | pathlib.Path,
|
||||
srs_path: typing.Optional[str | os.PathLike | pathlib.Path],
|
||||
reduced_srs: bool,
|
||||
) -> bool:
|
||||
r"""
|
||||
Verifies a given proof
|
||||
|
||||
|
||||
Arguments
|
||||
---------
|
||||
proof_path: str
|
||||
Path to create the proof file
|
||||
|
||||
|
||||
settings_path: str
|
||||
Path to the settings file
|
||||
|
||||
|
||||
vk_path: str
|
||||
Path to the verification key file
|
||||
|
||||
|
||||
srs_path: str
|
||||
Path to the SRS file
|
||||
|
||||
|
||||
reduced_srs: bool
|
||||
Whether to reduce the number of SRS logrows to the number of instances rather than the number of logrows used for proofs (only works if the srs were generated in the same ceremony)
|
||||
|
||||
|
||||
Returns
|
||||
-------
|
||||
bool
|
||||
"""
|
||||
...
|
||||
|
||||
def verify_aggr(proof_path:str | os.PathLike | pathlib.Path,vk_path:str | os.PathLike | pathlib.Path,logrows:int,commitment:PyCommitments,reduced_srs:bool,srs_path:typing.Optional[str | os.PathLike | pathlib.Path]) -> bool:
|
||||
r"""
|
||||
Verifies an aggregate proof
|
||||
|
||||
Arguments
|
||||
---------
|
||||
proof_path: str
|
||||
The path to the proof file
|
||||
|
||||
vk_path: str
|
||||
The path to the verification key file
|
||||
|
||||
logrows: int
|
||||
Logrows used for the aggregation circuit
|
||||
|
||||
commitment: str
|
||||
Accepts "kzg" or "ipa"
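|
||||
Example (sketch, mirroring the notebook): `verify_aggr("aggr.pf", "aggr.vk", 21)`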
|
||||
|
||||
reduced_srs: bool
|
||||
Whether to reduce the number of SRS logrows to the number of instances rather than the number of logrows used for proofs (only works if the srs were generated in the same ceremony)
|
||||
|
||||
srs_path: str
|
||||
The path to the SRS file
|
||||
|
||||
Returns
|
||||
-------
|
||||
bool
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
def verify_evm(
|
||||
addr_verifier: str,
|
||||
proof_path: str | os.PathLike | pathlib.Path,
|
||||
rpc_url: typing.Optional[str],
|
||||
vka_path: typing.Optional[str],
|
||||
) -> typing.Any:
|
||||
r"""
|
||||
Verifies an EVM compatible proof; you will need solc installed in your environment to run this
|
||||
|
||||
|
||||
Arguments
|
||||
---------
|
||||
addr_verifier: str
|
||||
The verifier contract's address as a hex string
|
||||
|
||||
|
||||
proof_path: str
|
||||
The path to the proof file (generated using the prove command)
|
||||
|
||||
|
||||
rpc_url: str
|
||||
RPC URL for an Ethereum node, if None will use Anvil but WON'T persist state
|
||||
|
||||
|
||||
vka_path: str
|
||||
The path to the VKA calldata bytes file (generated using the create_evm_vka command)
|
||||
Returns
|
||||
@@ -786,4 +664,3 @@ def verify_evm(addr_verifier:str,proof_path:str | os.PathLike | pathlib.Path,rpc
|
||||
bool
|
||||
"""
|
||||
...
|
||||
|
||||
|
||||
@@ -1,4 +0,0 @@
|
||||
module.exports = {
|
||||
preset: 'ts-jest',
|
||||
testEnvironment: 'node',
|
||||
};
|
||||
30
package.json
30
package.json
@@ -1,30 +0,0 @@
|
||||
{
|
||||
"name": "ezkljs-tests",
|
||||
"version": "0.1.0",
|
||||
"author": "Ethan Cemer",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"test": "jest"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@ezkljs/engine": "^9.4.4",
|
||||
"@ezkljs/verify": "^0.0.6",
|
||||
"@jest/types": "^29.6.3",
|
||||
"@types/file-saver": "^2.0.5",
|
||||
"@types/jest": "^29.5.3",
|
||||
"@types/json-bigint": "^1.0.1",
|
||||
"@types/node": "20.4.5",
|
||||
"buffer": "^6.0.3",
|
||||
"env": "^0.0.2",
|
||||
"fs": "0.0.1-security",
|
||||
"jest": "^29.6.3",
|
||||
"json-bigint": "^1.0.0",
|
||||
"minimist": "^1.2.8",
|
||||
"solc": "^0.8.21",
|
||||
"ts-jest": "^29.1.1",
|
||||
"ts-loader": "^9.4.4",
|
||||
"ts-node": "^10.9.1",
|
||||
"tsconfig-paths": "^4.2.0",
|
||||
"typescript": "5.1.6"
|
||||
}
|
||||
}
|
||||
3596
pnpm-lock.yaml
generated
3596
pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
@@ -1,269 +0,0 @@
|
||||
use camino::Utf8Path;
|
||||
use std::fs;
|
||||
use std::fs::remove_dir_all;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::Command;
|
||||
use uniffi_bindgen::bindings::SwiftBindingGenerator;
|
||||
use uniffi_bindgen::library_mode::generate_bindings;
|
||||
use uuid::Uuid;
|
||||
|
||||
fn main() {
|
||||
let library_name = std::env::var("CARGO_PKG_NAME").expect("CARGO_PKG_NAME is not set");
|
||||
let mode = determine_build_mode();
|
||||
build_bindings(&library_name, mode);
|
||||
}
|
||||
|
||||
/// Determines the build mode based on the CONFIGURATION environment variable.
|
||||
/// Defaults to "release" if not set or unrecognized.
|
||||
/// "release" mode takes longer to build but produces optimized code, which has smaller size and is faster.
|
||||
fn determine_build_mode() -> &'static str {
|
||||
match std::env::var("CONFIGURATION").map(|s| s.to_lowercase()) {
|
||||
Ok(ref config) if config == "debug" => "debug",
|
||||
_ => "release",
|
||||
}
|
||||
}
|
||||
|
||||
/// Builds the Swift bindings and XCFramework for the specified library and build mode.
|
||||
fn build_bindings(library_name: &str, mode: &str) {
|
||||
// Get the root directory of this Cargo project
|
||||
let manifest_dir = std::env::var_os("CARGO_MANIFEST_DIR")
|
||||
.map(PathBuf::from)
|
||||
.unwrap_or_else(|| std::env::current_dir().unwrap());
|
||||
|
||||
// Define the build directory inside the manifest directory
|
||||
let build_dir = manifest_dir.join("build");
|
||||
|
||||
// Create a temporary directory to store the bindings and combined library
|
||||
let tmp_dir = mktemp_local(&build_dir);
|
||||
|
||||
// Define directories for Swift bindings and output bindings
|
||||
let swift_bindings_dir = tmp_dir.join("SwiftBindings");
|
||||
let bindings_out = create_bindings_out_dir(&tmp_dir);
|
||||
let framework_out = bindings_out.join("EzklCore.xcframework");
|
||||
|
||||
// Define target architectures for building
|
||||
// We currently only support iOS devices and simulators running on ARM Macs
|
||||
// This keeps the library size under GitHub's 100MB limit
|
||||
// To support older Macs (Intel), follow the instructions in the comments below
|
||||
#[allow(clippy::useless_vec)]
|
||||
let target_archs = vec![
|
||||
vec!["aarch64-apple-ios"], // iOS device
|
||||
vec!["aarch64-apple-ios-sim"], // iOS simulator ARM Mac
|
||||
// vec!["aarch64-apple-ios-sim", "x86_64-apple-ios"], // TODO - replace the above line with this line to allow running on older Macs (Intel)
|
||||
];
|
||||
|
||||
// Build the library for each architecture and combine them
|
||||
let out_lib_paths: Vec<PathBuf> = target_archs
|
||||
.iter()
|
||||
.map(|archs| build_combined_archs(library_name, archs, &build_dir, mode))
|
||||
.collect();
|
||||
|
||||
// Generate the path to the built dynamic library (.dylib)
|
||||
let out_dylib_path = build_dir.join(format!(
|
||||
"{}/{}/lib{}.dylib",
|
||||
target_archs[0][0], mode, library_name
|
||||
));
|
||||
|
||||
// Generate Swift bindings using uniffi_bindgen
|
||||
generate_ios_bindings(&out_dylib_path, &swift_bindings_dir)
|
||||
.expect("Failed to generate iOS bindings");
|
||||
|
||||
// Move the generated Swift file to the bindings output directory
|
||||
fs::rename(
|
||||
swift_bindings_dir.join(format!("{}.swift", library_name)),
|
||||
bindings_out.join("EzklCore.swift"),
|
||||
)
|
||||
.expect("Failed to copy swift bindings file");
|
||||
|
||||
// Rename the `ios_ezklFFI.modulemap` file to `module.modulemap`
|
||||
fs::rename(
|
||||
swift_bindings_dir.join(format!("{}FFI.modulemap", library_name)),
|
||||
swift_bindings_dir.join("module.modulemap"),
|
||||
)
|
||||
.expect("Failed to rename modulemap file");
|
||||
|
||||
// Create the XCFramework from the combined libraries and Swift bindings
|
||||
create_xcframework(&out_lib_paths, &swift_bindings_dir, &framework_out);
|
||||
|
||||
// Define the destination directory for the bindings
|
||||
let bindings_dest = build_dir.join("EzklCoreBindings");
|
||||
if bindings_dest.exists() {
|
||||
fs::remove_dir_all(&bindings_dest).expect("Failed to remove existing bindings directory");
|
||||
}
|
||||
|
||||
// Move the bindings output to the destination directory
|
||||
fs::rename(&bindings_out, &bindings_dest).expect("Failed to move framework into place");
|
||||
|
||||
// Clean up temporary directories
|
||||
cleanup_temp_dirs(&build_dir);
|
||||
}
|
||||
|
||||
/// Creates the output directory for the bindings.
|
||||
/// Returns the path to the bindings output directory.
|
||||
fn create_bindings_out_dir(base_dir: &Path) -> PathBuf {
|
||||
let bindings_out = base_dir.join("EzklCoreBindings");
|
||||
fs::create_dir_all(&bindings_out).expect("Failed to create bindings output directory");
|
||||
bindings_out
|
||||
}
|
||||
|
||||
/// Builds the library for each architecture and combines them into a single library using lipo.
|
||||
/// Returns the path to the combined library.
|
||||
fn build_combined_archs(
|
||||
library_name: &str,
|
||||
archs: &[&str],
|
||||
build_dir: &Path,
|
||||
mode: &str,
|
||||
) -> PathBuf {
|
||||
// Build the library for each architecture
|
||||
let out_lib_paths: Vec<PathBuf> = archs
|
||||
.iter()
|
||||
.map(|&arch| {
|
||||
build_for_arch(arch, build_dir, mode);
|
||||
build_dir
|
||||
.join(arch)
|
||||
.join(mode)
|
||||
.join(format!("lib{}.a", library_name))
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Create a unique temporary directory for the combined library
|
||||
let lib_out = mktemp_local(build_dir).join(format!("lib{}.a", library_name));
|
||||
|
||||
// Combine the libraries using lipo
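|
||||
// (equivalent to: lipo -create -output lib<name>.a <one static lib per arch>, producing a single fat library)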
|
||||
let mut lipo_cmd = Command::new("lipo");
|
||||
lipo_cmd
|
||||
.arg("-create")
|
||||
.arg("-output")
|
||||
.arg(lib_out.to_str().unwrap());
|
||||
for lib_path in &out_lib_paths {
|
||||
lipo_cmd.arg(lib_path.to_str().unwrap());
|
||||
}
|
||||
|
||||
let status = lipo_cmd.status().expect("Failed to run lipo command");
|
||||
if !status.success() {
|
||||
panic!("lipo command failed with status: {}", status);
|
||||
}
|
||||
|
||||
lib_out
|
||||
}
|
||||
|
||||
/// Builds the library for a specific architecture.
|
||||
fn build_for_arch(arch: &str, build_dir: &Path, mode: &str) {
|
||||
// Ensure the target architecture is installed
|
||||
install_arch(arch);
|
||||
|
||||
// Run cargo build for the specified architecture and mode
|
||||
let mut build_cmd = Command::new("cargo");
|
||||
build_cmd
|
||||
.arg("build")
|
||||
.arg("--no-default-features")
|
||||
.arg("--features")
|
||||
.arg("ios-bindings");
|
||||
|
||||
if mode == "release" {
|
||||
build_cmd.arg("--release");
|
||||
}
|
||||
build_cmd
|
||||
.arg("--lib")
|
||||
.env("CARGO_BUILD_TARGET_DIR", build_dir)
|
||||
.env("CARGO_BUILD_TARGET", arch);
|
||||
|
||||
let status = build_cmd.status().expect("Failed to run cargo build");
|
||||
if !status.success() {
|
||||
panic!("cargo build failed for architecture: {}", arch);
|
||||
}
|
||||
}
|
||||
|
||||
/// Installs the specified target architecture using rustup.
|
||||
fn install_arch(arch: &str) {
|
||||
let status = Command::new("rustup")
|
||||
.arg("target")
|
||||
.arg("add")
|
||||
.arg(arch)
|
||||
.status()
|
||||
.expect("Failed to run rustup command");
|
||||
|
||||
if !status.success() {
|
||||
panic!("Failed to install target architecture: {}", arch);
|
||||
}
|
||||
}
|
||||
|
||||
/// Generates Swift bindings for the iOS library using uniffi_bindgen.
|
||||
fn generate_ios_bindings(dylib_path: &Path, binding_dir: &Path) -> Result<(), std::io::Error> {
|
||||
// Remove existing binding directory if it exists
|
||||
if binding_dir.exists() {
|
||||
remove_dir_all(binding_dir)?;
|
||||
}
|
||||
|
||||
// Generate the Swift bindings using uniffi_bindgen
|
||||
generate_bindings(
|
||||
Utf8Path::from_path(dylib_path).ok_or_else(|| {
|
||||
std::io::Error::new(std::io::ErrorKind::InvalidInput, "Invalid dylib path")
|
||||
})?,
|
||||
None,
|
||||
&SwiftBindingGenerator,
|
||||
None,
|
||||
Utf8Path::from_path(binding_dir).ok_or_else(|| {
|
||||
std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidInput,
|
||||
"Invalid Swift bindings directory",
|
||||
)
|
||||
})?,
|
||||
true,
|
||||
)
|
||||
.map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e.to_string()))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Creates an XCFramework from the combined libraries and Swift bindings.
|
||||
fn create_xcframework(lib_paths: &[PathBuf], swift_bindings_dir: &Path, framework_out: &Path) {
|
||||
let mut xcbuild_cmd = Command::new("xcodebuild");
|
||||
xcbuild_cmd.arg("-create-xcframework");
|
||||
|
||||
// Add each library and its corresponding headers to the xcodebuild command
|
||||
for lib_path in lib_paths {
|
||||
println!("Including library: {:?}", lib_path);
|
||||
xcbuild_cmd.arg("-library");
|
||||
xcbuild_cmd.arg(lib_path.to_str().unwrap());
|
||||
xcbuild_cmd.arg("-headers");
|
||||
xcbuild_cmd.arg(swift_bindings_dir.to_str().unwrap());
|
||||
}
|
||||
|
||||
xcbuild_cmd.arg("-output");
|
||||
xcbuild_cmd.arg(framework_out.to_str().unwrap());
|
||||
|
||||
let status = xcbuild_cmd.status().expect("Failed to run xcodebuild");
|
||||
if !status.success() {
|
||||
panic!("xcodebuild failed with status: {}", status);
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a temporary directory inside the build path with a unique UUID.
|
||||
/// This ensures unique build artifacts for concurrent builds.
|
||||
fn mktemp_local(build_path: &Path) -> PathBuf {
|
||||
let dir = tmp_local(build_path).join(Uuid::new_v4().to_string());
|
||||
fs::create_dir(&dir).expect("Failed to create temporary directory");
|
||||
dir
|
||||
}
|
||||
|
||||
/// Gets the path to the local temporary directory inside the build path.
|
||||
fn tmp_local(build_path: &Path) -> PathBuf {
|
||||
let tmp_path = build_path.join("tmp");
|
||||
if let Ok(metadata) = fs::metadata(&tmp_path) {
|
||||
if !metadata.is_dir() {
|
||||
panic!("Expected 'tmp' to be a directory");
|
||||
}
|
||||
} else {
|
||||
fs::create_dir_all(&tmp_path).expect("Failed to create local temporary directory");
|
||||
}
|
||||
tmp_path
|
||||
}
|
||||
|
||||
/// Cleans up temporary directories inside the build path.
|
||||
fn cleanup_temp_dirs(build_dir: &Path) {
|
||||
let tmp_dir = build_dir.join("tmp");
|
||||
if tmp_dir.exists() {
|
||||
fs::remove_dir_all(tmp_dir).expect("Failed to remove temporary directories");
|
||||
}
|
||||
}
|
||||
@@ -1,12 +1,3 @@
|
||||
/// Python bindings
|
||||
#[cfg(feature = "python-bindings")]
|
||||
pub mod python;
|
||||
/// Universal bindings for all platforms
|
||||
#[cfg(any(
|
||||
feature = "universal-bindings",
|
||||
all(target_arch = "wasm32", target_os = "unknown")
|
||||
))]
|
||||
pub mod universal;
|
||||
/// wasm prover and verifier
|
||||
#[cfg(all(target_arch = "wasm32", target_os = "unknown"))]
|
||||
pub mod wasm;
|
||||
|
||||
@@ -12,14 +12,10 @@ use crate::graph::TestDataSource;
|
||||
use crate::graph::{
|
||||
quantize_float, scale_to_multiplier, GraphCircuit, GraphSettings, Model, Visibility,
|
||||
};
|
||||
use crate::pfsys::evm::aggregation_kzg::AggregationCircuit;
|
||||
use crate::pfsys::{
|
||||
load_pk, load_vk, save_params, save_vk, srs::gen_srs as ezkl_gen_srs, srs::load_srs_prover,
|
||||
ProofType, TranscriptType,
|
||||
};
|
||||
use crate::Commitments;
|
||||
use crate::RunArgs;
|
||||
use halo2_proofs::poly::ipa::commitment::IPACommitmentScheme;
|
||||
use halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme;
|
||||
use halo2curves::bn256::{Bn256, Fq, Fr, G1Affine, G1};
|
||||
use pyo3::exceptions::{PyIOError, PyRuntimeError};
|
||||
@@ -173,9 +169,6 @@ struct PyRunArgs {
|
||||
#[pyo3(get, set)]
|
||||
/// str: check mode, accepts `safe`, `unsafe`
|
||||
pub check_mode: CheckMode,
|
||||
#[pyo3(get, set)]
|
||||
/// str: commitment type, accepts `kzg`, `ipa`
|
||||
pub commitment: PyCommitments,
|
||||
/// int: The base used for decomposition
|
||||
#[pyo3(get, set)]
|
||||
pub decomp_base: usize,
|
||||
@@ -223,7 +216,6 @@ impl From<PyRunArgs> for RunArgs {
|
||||
variables: py_run_args.variables,
|
||||
rebase_frac_zero_constants: py_run_args.rebase_frac_zero_constants,
|
||||
check_mode: py_run_args.check_mode,
|
||||
commitment: Some(py_run_args.commitment.into()),
|
||||
decomp_base: py_run_args.decomp_base,
|
||||
decomp_legs: py_run_args.decomp_legs,
|
||||
ignore_range_check_inputs_outputs: py_run_args.ignore_range_check_inputs_outputs,
|
||||
@@ -251,7 +243,6 @@ impl Into<PyRunArgs> for RunArgs {
|
||||
variables: self.variables,
|
||||
rebase_frac_zero_constants: self.rebase_frac_zero_constants,
|
||||
check_mode: self.check_mode,
|
||||
commitment: self.commitment.into(),
|
||||
decomp_base: self.decomp_base,
|
||||
decomp_legs: self.decomp_legs,
|
||||
ignore_range_check_inputs_outputs: self.ignore_range_check_inputs_outputs,
|
||||
@@ -261,56 +252,6 @@ impl Into<PyRunArgs> for RunArgs {
|
||||
}
|
||||
}
|
||||
|
||||
#[pyclass]
#[derive(Debug, Clone)]
#[gen_stub_pyclass_enum]
/// pyclass representing an enum, denoting the type of commitment
pub enum PyCommitments {
    /// KZG commitment
    KZG,
    /// IPA commitment
    IPA,
}

impl From<Option<Commitments>> for PyCommitments {
    fn from(commitment: Option<Commitments>) -> Self {
        match commitment {
            Some(Commitments::KZG) => PyCommitments::KZG,
            Some(Commitments::IPA) => PyCommitments::IPA,
            None => PyCommitments::KZG,
        }
    }
}

impl From<PyCommitments> for Commitments {
    fn from(py_commitments: PyCommitments) -> Self {
        match py_commitments {
            PyCommitments::KZG => Commitments::KZG,
            PyCommitments::IPA => Commitments::IPA,
        }
    }
}

impl Into<PyCommitments> for Commitments {
    fn into(self) -> PyCommitments {
        match self {
            Commitments::KZG => PyCommitments::KZG,
            Commitments::IPA => PyCommitments::IPA,
        }
    }
}

impl FromStr for PyCommitments {
    type Err = String;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s.to_lowercase().as_str() {
            "kzg" => Ok(PyCommitments::KZG),
            "ipa" => Ok(PyCommitments::IPA),
            _ => Err("Invalid value for Commitments".to_string()),
        }
    }
}
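
From the Python side, this enum and its `FromStr` impl determine how a commitment scheme is selected. A minimal, hedged sketch (it assumes the extension module imports as `ezkl` and that `PyRunArgs`/`PyCommitments` are registered under those names, as the module setup later in this diff suggests):

import ezkl

run_args = ezkl.PyRunArgs()
# choose the polynomial commitment scheme; on the Rust side the FromStr
# impl above also parses the strings "kzg" and "ipa"
run_args.commitment = ezkl.PyCommitments.KZG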

#[pyclass]
#[derive(Debug, Clone)]
#[gen_stub_pyclass_enum]
@@ -623,8 +564,7 @@ fn kzg_commit(
    let settings = GraphSettings::load(&settings_path)
        .map_err(|_| PyIOError::new_err("Failed to load circuit settings"))?;

    let srs_path =
        crate::execute::get_srs_path(settings.run_args.logrows, srs_path, Commitments::KZG);
    let srs_path = crate::execute::get_srs_path(settings.run_args.logrows, srs_path);

    let srs = load_srs_prover::<KZGCommitmentScheme<Bn256>>(srs_path)
        .map_err(|_| PyIOError::new_err("Failed to load srs"))?;
@@ -641,65 +581,6 @@ fn kzg_commit(
    Ok(output.iter().map(|x| (*x).into()).collect::<Vec<_>>())
}
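
For orientation, a hedged Python sketch of calling this binding (the signature is assumed symmetric with `ipa_commit` below, since only hunk fragments of `kzg_commit` appear here; all paths are illustrative):

import ezkl

# message entries are field elements serialized as strings (see string_to_field)
commitments = ezkl.kzg_commit(
    message=["0x1", "0x2"],
    vk_path="vk.key",
    settings_path="settings.json",
    srs_path="kzg.srs",
)  # -> list[PyG1Affine]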

/// Generate an ipa commitment.
///
/// Arguments
/// -------
/// message: list[str]
///     List of field elements represented as strings
///
/// vk_path: str
///     Path to the verification key
///
/// settings_path: str
///     Path to the settings file
///
/// srs_path: str
///     Path to the Structured Reference String (SRS) file
///
/// Returns
/// -------
/// list[PyG1Affine]
///
#[pyfunction(signature = (
    message,
    vk_path=PathBuf::from(DEFAULT_VK),
    settings_path=PathBuf::from(DEFAULT_SETTINGS),
    srs_path=None
))]
#[gen_stub_pyfunction]
fn ipa_commit(
    message: Vec<PyFelt>,
    vk_path: PathBuf,
    settings_path: PathBuf,
    srs_path: Option<PathBuf>,
) -> PyResult<Vec<PyG1Affine>> {
    let message: Vec<Fr> = message
        .iter()
        .map(crate::pfsys::string_to_field::<Fr>)
        .collect::<Vec<_>>();

    let settings = GraphSettings::load(&settings_path)
        .map_err(|_| PyIOError::new_err("Failed to load circuit settings"))?;

    let srs_path =
        crate::execute::get_srs_path(settings.run_args.logrows, srs_path, Commitments::IPA);

    let srs = load_srs_prover::<IPACommitmentScheme<G1Affine>>(srs_path)
        .map_err(|_| PyIOError::new_err("Failed to load srs"))?;

    let vk = load_vk::<IPACommitmentScheme<G1Affine>, GraphCircuit>(vk_path, settings)
        .map_err(|_| PyIOError::new_err("Failed to load vk"))?;

    let output = PolyCommitChip::commit::<IPACommitmentScheme<G1Affine>>(
        message,
        (vk.cs().blinding_factors() + 1) as u32,
        &srs,
    );

    Ok(output.iter().map(|x| (*x).into()).collect::<Vec<_>>())
}
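
The IPA flavor mirrors the KZG call, differing only in the SRS it loads. A hedged usage sketch (paths illustrative):

commitments = ezkl.ipa_commit(
    message=["0x1", "0x2"],
    vk_path="vk.key",
    settings_path="settings.json",
    srs_path="ipa.srs",
)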

/// Swap the commitments in a proof
///
/// Arguments
@@ -765,37 +646,6 @@ fn gen_vk_from_pk_single(
    Ok(true)
}

/// Generates a vk from a pk for an aggregate circuit and saves it to a file
///
/// Arguments
/// -------
/// path_to_pk: str
///     Path to the proving key
///
/// vk_output_path: str
///     Path to create the vk file
///
/// Returns
/// -------
/// bool
#[pyfunction(signature = (
    path_to_pk=PathBuf::from(DEFAULT_PK_AGGREGATED),
    vk_output_path=PathBuf::from(DEFAULT_VK_AGGREGATED),
))]
#[gen_stub_pyfunction]
fn gen_vk_from_pk_aggr(path_to_pk: PathBuf, vk_output_path: PathBuf) -> PyResult<bool> {
    let pk = load_pk::<KZGCommitmentScheme<Bn256>, AggregationCircuit>(path_to_pk, ())
        .map_err(|_| PyIOError::new_err("Failed to load pk"))?;

    let vk = pk.get_vk();

    // now save
    save_vk::<G1Affine>(&vk_output_path, vk)
        .map_err(|_| PyIOError::new_err("Failed to save vk"))?;

    Ok(true)
}
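
This is handy when only the aggregated proving key was kept around. A hedged sketch (file names illustrative):

ok = ezkl.gen_vk_from_pk_aggr(
    path_to_pk="pk_aggr.key",
    vk_output_path="vk_aggr.key",
)
assert ok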

/// Displays the table as a string in python
///
/// Arguments
@@ -858,8 +708,6 @@ fn gen_srs(srs_path: PathBuf, logrows: usize) -> PyResult<()> {
/// srs_path: str
///     Path to create the SRS file
///
/// commitment: str
///     Specify the commitment used ("kzg", "ipa")
///
/// Returns
/// -------
@@ -869,7 +717,6 @@ fn gen_srs(srs_path: PathBuf, logrows: usize) -> PyResult<()> {
    settings_path=PathBuf::from(DEFAULT_SETTINGS),
    logrows=None,
    srs_path=None,
    commitment=None,
))]
#[gen_stub_pyfunction]
fn get_srs(
@@ -877,15 +724,9 @@ fn get_srs(
    settings_path: Option<PathBuf>,
    logrows: Option<u32>,
    srs_path: Option<PathBuf>,
    commitment: Option<PyCommitments>,
) -> PyResult<Bound<'_, PyAny>> {
    let commitment: Option<Commitments> = match commitment {
        Some(c) => Some(c.into()),
        None => None,
    };

    pyo3_async_runtimes::tokio::future_into_py(py, async move {
        crate::execute::get_srs_cmd(srs_path, settings_path, logrows, commitment)
        crate::execute::get_srs_cmd(srs_path, settings_path, logrows)
            .await
            .map_err(|e| {
                let err_str = format!("Failed to get srs: {}", e);
@@ -1120,42 +961,6 @@ fn mock(witness: PathBuf, model: PathBuf) -> PyResult<bool> {
    Ok(true)
}

/// Mocks the aggregate prover
///
/// Arguments
/// ---------
/// aggregation_snarks: list[str]
///     List of paths to the relevant proof files
///
/// logrows: int
///     Number of logrows to use for the aggregation circuit
///
/// split_proofs: bool
///     Indicates whether the accumulated proofs are segments of a larger proof
///
/// Returns
/// -------
/// bool
///
#[pyfunction(signature = (
    aggregation_snarks=vec![PathBuf::from(DEFAULT_PROOF)],
    logrows=DEFAULT_AGGREGATED_LOGROWS.parse().unwrap(),
    split_proofs = false,
))]
#[gen_stub_pyfunction]
fn mock_aggregate(
    aggregation_snarks: Vec<PathBuf>,
    logrows: u32,
    split_proofs: bool,
) -> PyResult<bool> {
    crate::execute::mock_aggregate(aggregation_snarks, logrows, split_proofs).map_err(|e| {
        let err_str = format!("Failed to run mock: {}", e);
        PyRuntimeError::new_err(err_str)
    })?;

    Ok(true)
}
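
Mock aggregation is a cheap dry run before committing to a real aggregated proof. A hedged sketch (the logrows value and file names are illustrative, not defaults read from this source):

ok = ezkl.mock_aggregate(
    aggregation_snarks=["proof_0.json", "proof_1.json"],
    logrows=23,
    split_proofs=False,
)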

/// Runs the setup process
///
/// Arguments
@@ -1231,8 +1036,6 @@ fn setup(
/// proof_path: str
///     Path to create the proof file
///
/// proof_type: str
///     Accepts `single`, `for-aggr`
///
/// srs_path: str
///     Path to the SRS file
@@ -1246,7 +1049,6 @@ fn setup(
    model=PathBuf::from(DEFAULT_COMPILED_CIRCUIT),
    pk_path=PathBuf::from(DEFAULT_PK),
    proof_path=None,
    proof_type=ProofType::default(),
    srs_path=None,
))]
#[gen_stub_pyfunction]
@@ -1255,7 +1057,6 @@ fn prove(
    model: PathBuf,
    pk_path: PathBuf,
    proof_path: Option<PathBuf>,
    proof_type: ProofType,
    srs_path: Option<PathBuf>,
) -> PyResult<PyObject> {
    let snark = crate::execute::prove(
@@ -1264,7 +1065,6 @@ fn prove(
        pk_path,
        proof_path,
        srs_path,
        proof_type,
        CheckMode::UNSAFE,
    )
    .map_err(|e| {
@@ -1323,77 +1123,6 @@ fn verify(
    Ok(true)
}

/// Runs the setup process for an aggregation circuit
///
/// Arguments
/// ---------
/// sample_snarks: list[str]
///     List of paths to the various proofs
///
/// vk_path: str
///     Path to create the aggregated VK
///
/// pk_path: str
///     Path to create the aggregated PK
///
/// logrows: int
///     Number of logrows to use
///
/// split_proofs: bool
///     Whether the accumulated proofs are segments of a larger proof
///
/// srs_path: str
///     Path to the SRS file
///
/// disable_selector_compression: bool
///     Whether to disable selector compression
///
/// commitment: str
///     Accepts `kzg`, `ipa`
///
/// Returns
/// -------
/// bool
///
#[pyfunction(signature = (
    sample_snarks=vec![PathBuf::from(DEFAULT_PROOF)],
    vk_path=PathBuf::from(DEFAULT_VK_AGGREGATED),
    pk_path=PathBuf::from(DEFAULT_PK_AGGREGATED),
    logrows=DEFAULT_AGGREGATED_LOGROWS.parse().unwrap(),
    split_proofs = false,
    srs_path = None,
    disable_selector_compression=DEFAULT_DISABLE_SELECTOR_COMPRESSION.parse().unwrap(),
    commitment=DEFAULT_COMMITMENT.parse().unwrap(),
))]
#[gen_stub_pyfunction]
fn setup_aggregate(
    sample_snarks: Vec<PathBuf>,
    vk_path: PathBuf,
    pk_path: PathBuf,
    logrows: u32,
    split_proofs: bool,
    srs_path: Option<PathBuf>,
    disable_selector_compression: bool,
    commitment: PyCommitments,
) -> Result<bool, PyErr> {
    crate::execute::setup_aggregate(
        sample_snarks,
        vk_path,
        pk_path,
        srs_path,
        logrows,
        split_proofs,
        disable_selector_compression,
        commitment.into(),
    )
    .map_err(|e| {
        let err_str = format!("Failed to setup aggregate: {}", e);
        PyRuntimeError::new_err(err_str)
    })?;

    Ok(true)
}
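
A hedged sketch of the aggregate setup step from Python (file names and the logrows value are illustrative; the commitment argument is shown as the exposed enum, though the string form "kzg" may also coerce via the FromStr impl above):

ok = ezkl.setup_aggregate(
    sample_snarks=["proof.json"],
    vk_path="vk_aggr.key",
    pk_path="pk_aggr.key",
    logrows=23,
    split_proofs=False,
    srs_path="kzg23.srs",
    disable_selector_compression=False,
    commitment=ezkl.PyCommitments.KZG,
)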

/// Compiles the circuit for use in other steps
///
/// Arguments
@@ -1423,144 +1152,7 @@ fn compile_circuit(
    settings_path: PathBuf,
) -> Result<bool, PyErr> {
    crate::execute::compile_circuit(model, compiled_circuit, settings_path).map_err(|e| {
        let err_str = format!("Failed to compile circuit: {}", e);
        PyRuntimeError::new_err(err_str)
    })?;

    Ok(true)
}

/// Creates an aggregated proof
///
/// Arguments
/// ---------
/// aggregation_snarks: list[str]
///     List of paths to the various proofs
///
/// proof_path: str
///     Path to output the aggregated proof
///
/// vk_path: str
///     Path to the VK file
///
/// transcript:
///     Proof transcript type to be used. `evm` used by default. `poseidon` is also supported
///
/// logrows:
///     Logrows used for aggregation circuit
///
/// check_mode: str
///     Run sanity checks during calculations. Accepts `safe` or `unsafe`
///
/// split_proofs: bool
///     Whether the accumulated proofs are segments of a larger circuit
///
/// srs_path: str
///     Path to the SRS used
///
/// commitment: str
///     Accepts "kzg" or "ipa"
///
/// Returns
/// -------
/// bool
///
#[pyfunction(signature = (
    aggregation_snarks=vec![PathBuf::from(DEFAULT_PROOF)],
    proof_path=PathBuf::from(DEFAULT_PROOF_AGGREGATED),
    vk_path=PathBuf::from(DEFAULT_VK_AGGREGATED),
    transcript=TranscriptType::default(),
    logrows=DEFAULT_AGGREGATED_LOGROWS.parse().unwrap(),
    check_mode=CheckMode::UNSAFE,
    split_proofs = false,
    srs_path=None,
    commitment=DEFAULT_COMMITMENT.parse().unwrap(),
))]
#[gen_stub_pyfunction]
fn aggregate(
    aggregation_snarks: Vec<PathBuf>,
    proof_path: PathBuf,
    vk_path: PathBuf,
    transcript: TranscriptType,
    logrows: u32,
    check_mode: CheckMode,
    split_proofs: bool,
    srs_path: Option<PathBuf>,
    commitment: PyCommitments,
) -> Result<bool, PyErr> {
    // the K used for the aggregation circuit
    crate::execute::aggregate(
        proof_path,
        aggregation_snarks,
        vk_path,
        srs_path,
        transcript,
        logrows,
        check_mode,
        split_proofs,
        commitment.into(),
    )
    .map_err(|e| {
        let err_str = format!("Failed to run aggregate: {}", e);
        PyRuntimeError::new_err(err_str)
    })?;

    Ok(true)
}
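
End to end, aggregation consumes snarks proved with `for-aggr` transcripts and emits one proof. A hedged sketch (the `PyStubType` impls later in this diff type `transcript` and `check_mode` as plain strings; values and paths here are illustrative):

ok = ezkl.aggregate(
    aggregation_snarks=["proof_0.json", "proof_1.json"],
    proof_path="proof_aggr.json",
    vk_path="vk_aggr.key",
    transcript="evm",
    logrows=23,
    check_mode="unsafe",
    split_proofs=False,
    srs_path="kzg23.srs",
    commitment=ezkl.PyCommitments.KZG,
)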

/// Verifies an aggregate proof
///
/// Arguments
/// ---------
/// proof_path: str
///     The path to the proof file
///
/// vk_path: str
///     The path to the verification key file
///
/// logrows: int
///     logrows used for aggregation circuit
///
/// commitment: str
///     Accepts "kzg" or "ipa"
///
/// reduced_srs: bool
///     Whether to reduce the number of SRS logrows to the number of instances rather than the number of logrows used for proofs (only works if the srs were generated in the same ceremony)
///
/// srs_path: str
///     The path to the SRS file
///
/// Returns
/// -------
/// bool
///
#[pyfunction(signature = (
    proof_path=PathBuf::from(DEFAULT_PROOF_AGGREGATED),
    vk_path=PathBuf::from(DEFAULT_VK),
    logrows=DEFAULT_AGGREGATED_LOGROWS.parse().unwrap(),
    commitment=DEFAULT_COMMITMENT.parse().unwrap(),
    reduced_srs=DEFAULT_USE_REDUCED_SRS_FOR_VERIFICATION.parse().unwrap(),
    srs_path=None,
))]
#[gen_stub_pyfunction]
fn verify_aggr(
    proof_path: PathBuf,
    vk_path: PathBuf,
    logrows: u32,
    commitment: PyCommitments,
    reduced_srs: bool,
    srs_path: Option<PathBuf>,
) -> Result<bool, PyErr> {
    crate::execute::verify_aggr(
        proof_path,
        vk_path,
        srs_path,
        logrows,
        reduced_srs,
        commitment.into(),
    )
    .map_err(|e| {
        let err_str = format!("Failed to run verify_aggr: {}", e);
        let err_str = format!("Failed to compile circuit: {}", e);
        PyRuntimeError::new_err(err_str)
    })?;

@@ -1667,7 +1259,7 @@ fn create_evm_verifier(

#[cfg(feature = "reusable-verifier")]
/// Creates an EVM VK artifact. This command generates a VK with circuit-specific metadata encoded in memory for use by the reusable H2 verifier.
/// This is useful for deploying verifiers that were otherwise too big to fit on chain and required aggregation.
/// This is useful for deploying verifiers that were otherwise too big to fit on chain.
///
/// Arguments
/// ---------
@@ -1873,75 +1465,6 @@ fn verify_evm<'a>(
    })
}

/// Creates an evm compatible aggregate verifier; you will need solc installed in your environment to run this
///
/// Arguments
/// ---------
/// aggregation_settings: str
///     path to the settings file
///
/// vk_path: str
///     The path to load the desired verification key file
///
/// sol_code_path: str
///     The path to the Solidity code
///
/// abi_path: str
///     The path to output the Solidity verifier ABI
///
/// logrows: int
///     Number of logrows used during aggregated setup
///
/// srs_path: str
///     The path to the SRS file
///
/// reusable: bool
///     Whether the verifier should be rendered as a reusable contract. If so, then you will need to deploy the VK artifact separately, which you can generate using the create_evm_vka command
///
/// Returns
/// -------
/// bool
///
#[pyfunction(signature = (
    aggregation_settings=vec![PathBuf::from(DEFAULT_PROOF)],
    vk_path=PathBuf::from(DEFAULT_VK_AGGREGATED),
    sol_code_path=PathBuf::from(DEFAULT_SOL_CODE),
    abi_path=PathBuf::from(DEFAULT_VERIFIER_ABI),
    logrows=DEFAULT_AGGREGATED_LOGROWS.parse().unwrap(),
    srs_path=None,
    reusable = DEFAULT_RENDER_REUSABLE.parse().unwrap(),
))]
#[gen_stub_pyfunction]
fn create_evm_verifier_aggr(
    py: Python<'_>,
    aggregation_settings: Vec<PathBuf>,
    vk_path: PathBuf,
    sol_code_path: PathBuf,
    abi_path: PathBuf,
    logrows: u32,
    srs_path: Option<PathBuf>,
    reusable: bool,
) -> PyResult<Bound<'_, PyAny>> {
    pyo3_async_runtimes::tokio::future_into_py(py, async move {
        crate::execute::create_evm_aggregate_verifier(
            vk_path,
            srs_path,
            sol_code_path,
            abi_path,
            aggregation_settings,
            logrows,
            reusable,
        )
        .await
        .map_err(|e| {
            let err_str = format!("Failed to run create_evm_verifier_aggr: {}", e);
            PyRuntimeError::new_err(err_str)
        })?;

        Ok(true)
    })
}
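
Because this binding wraps the call in `future_into_py`, the Python side awaits it. A hedged sketch (paths and logrows illustrative; `reusable=False` keeps the classic single-contract output):

import asyncio
import ezkl

async def main():
    ok = await ezkl.create_evm_verifier_aggr(
        aggregation_settings=["settings.json"],
        vk_path="vk_aggr.key",
        sol_code_path="VerifierAggr.sol",
        abi_path="VerifierAggr.abi",
        logrows=23,
        srs_path="kzg23.srs",
        reusable=False,
    )
    assert ok

asyncio.run(main())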

// Define a function to gather stub information.
define_stub_info_gatherer!(stub_info);

@@ -1953,19 +1476,16 @@ fn ezkl(m: &Bound<'_, PyModule>) -> PyResult<()> {
    m.add_class::<PyG1Affine>()?;
    m.add_class::<PyG1>()?;
    m.add_class::<PyTestDataSource>()?;
    m.add_class::<PyCommitments>()?;
    m.add_class::<PyInputType>()?;
    m.add("__version__", env!("CARGO_PKG_VERSION"))?;
    m.add_function(wrap_pyfunction!(felt_to_big_endian, m)?)?;
    m.add_function(wrap_pyfunction!(felt_to_int, m)?)?;
    m.add_function(wrap_pyfunction!(felt_to_float, m)?)?;
    m.add_function(wrap_pyfunction!(kzg_commit, m)?)?;
    m.add_function(wrap_pyfunction!(ipa_commit, m)?)?;
    m.add_function(wrap_pyfunction!(swap_proof_commitments, m)?)?;
    m.add_function(wrap_pyfunction!(poseidon_hash, m)?)?;
    m.add_function(wrap_pyfunction!(float_to_felt, m)?)?;
    m.add_function(wrap_pyfunction!(buffer_to_felts, m)?)?;
    m.add_function(wrap_pyfunction!(gen_vk_from_pk_aggr, m)?)?;
    m.add_function(wrap_pyfunction!(gen_vk_from_pk_single, m)?)?;
    m.add_function(wrap_pyfunction!(table, m)?)?;
    m.add_function(wrap_pyfunction!(mock, m)?)?;
@@ -1978,17 +1498,12 @@ fn ezkl(m: &Bound<'_, PyModule>) -> PyResult<()> {
    m.add_function(wrap_pyfunction!(gen_settings, m)?)?;
    m.add_function(wrap_pyfunction!(gen_random_data, m)?)?;
    m.add_function(wrap_pyfunction!(calibrate_settings, m)?)?;
    m.add_function(wrap_pyfunction!(aggregate, m)?)?;
    m.add_function(wrap_pyfunction!(mock_aggregate, m)?)?;
    m.add_function(wrap_pyfunction!(setup_aggregate, m)?)?;
    m.add_function(wrap_pyfunction!(compile_circuit, m)?)?;
    m.add_function(wrap_pyfunction!(verify_aggr, m)?)?;
    m.add_function(wrap_pyfunction!(create_evm_verifier, m)?)?;
    #[cfg(feature = "reusable-verifier")]
    m.add_function(wrap_pyfunction!(create_evm_vka, m)?)?;
    m.add_function(wrap_pyfunction!(deploy_evm, m)?)?;
    m.add_function(wrap_pyfunction!(verify_evm, m)?)?;
    m.add_function(wrap_pyfunction!(create_evm_verifier_aggr, m)?)?;
    m.add_function(wrap_pyfunction!(encode_evm_calldata, m)?)?;
    #[cfg(feature = "reusable-verifier")]
    m.add_function(wrap_pyfunction!(register_vka, m)?)?;
@@ -2004,24 +1519,6 @@ impl pyo3_stub_gen::PyStubType for CalibrationTarget {
    }
}

impl pyo3_stub_gen::PyStubType for ProofType {
    fn type_output() -> TypeInfo {
        TypeInfo {
            name: "str".to_string(),
            import: HashSet::new(),
        }
    }
}

impl pyo3_stub_gen::PyStubType for TranscriptType {
    fn type_output() -> TypeInfo {
        TypeInfo {
            name: "str".to_string(),
            import: HashSet::new(),
        }
    }
}

impl pyo3_stub_gen::PyStubType for CheckMode {
    fn type_output() -> TypeInfo {
        TypeInfo {

@@ -1,606 +0,0 @@
use halo2_proofs::{
    plonk::*,
    poly::{
        commitment::{CommitmentScheme, ParamsProver},
        ipa::{
            commitment::{IPACommitmentScheme, ParamsIPA},
            multiopen::{ProverIPA, VerifierIPA},
            strategy::SingleStrategy as IPASingleStrategy,
        },
        kzg::{
            commitment::{KZGCommitmentScheme, ParamsKZG},
            multiopen::{ProverSHPLONK, VerifierSHPLONK},
            strategy::SingleStrategy as KZGSingleStrategy,
        },
        VerificationStrategy,
    },
};
use std::fmt::Display;
use std::io::BufReader;
use std::str::FromStr;

use crate::{
    circuit::region::RegionSettings,
    graph::GraphSettings,
    pfsys::{
        create_proof_circuit, encode_calldata,
        evm::aggregation_kzg::{AggregationCircuit, PoseidonTranscript},
        verify_proof_circuit, TranscriptType,
    },
    tensor::TensorType,
    CheckMode, Commitments, EZKLError as InnerEZKLError,
};

use crate::circuit::modules::poseidon::{
    spec::{PoseidonSpec, POSEIDON_RATE, POSEIDON_WIDTH},
    PoseidonChip,
};
use crate::circuit::modules::Module;
use crate::graph::{GraphCircuit, GraphWitness};
use halo2curves::{
    bn256::{Bn256, Fr, G1Affine},
    ff::{FromUniformBytes, PrimeField},
};
use snark_verifier::{loader::native::NativeLoader, system::halo2::transcript::evm::EvmTranscript};

/// Wrapper around the Error Message
#[cfg_attr(feature = "ios-bindings", derive(uniffi::Error))]
#[derive(Debug)]
pub enum EZKLError {
    /// An internal error carrying a descriptive message
    InternalError(String),
}

impl Display for EZKLError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            EZKLError::InternalError(e) => write!(f, "Internal error: {}", e),
        }
    }
}

impl From<InnerEZKLError> for EZKLError {
    fn from(e: InnerEZKLError) -> Self {
        EZKLError::InternalError(e.to_string())
    }
}

/// Hash the input message with poseidon
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn poseidon_hash(message: Vec<u8>) -> Result<Vec<u8>, EZKLError> {
    let message: Vec<Fr> = serde_json::from_slice(&message[..]).map_err(InnerEZKLError::from)?;

    let output = PoseidonChip::<PoseidonSpec, POSEIDON_WIDTH, POSEIDON_RATE>::run(message.clone())
        .map_err(InnerEZKLError::from)?;

    Ok(serde_json::to_vec(&output).map_err(InnerEZKLError::from)?)
}

/// Hash the input message with poseidon without converting to Fr
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn poseidon_hash_no_felt(message: Vec<u8>) -> Result<Vec<u8>, EZKLError> {
    let message: Vec<Fr> = message.iter().map(|x| Fr::from(*x as u64)).collect();

    let output = PoseidonChip::<PoseidonSpec, POSEIDON_WIDTH, POSEIDON_RATE>::run(message.clone())
        .map_err(InnerEZKLError::from)?;

    Ok(serde_json::to_vec(&output).map_err(InnerEZKLError::from)?)
}

/// Encode verifier calldata from proof and ethereum vk_address
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn encode_verifier_calldata(
    // TODO - should it be pub or pub(crate) or pub(super)?
    proof: Vec<u8>,
    vka: Option<Vec<u8>>,
) -> Result<Vec<u8>, EZKLError> {
    let snark: crate::pfsys::Snark<Fr, G1Affine> =
        serde_json::from_slice(&proof[..]).map_err(InnerEZKLError::from)?;

    let vka_buf: Option<Vec<[u8; 32]>> = if let Some(vka) = vka {
        let array: Vec<[u8; 32]> =
            serde_json::from_slice(&vka[..]).map_err(InnerEZKLError::from)?;
        Some(array)
    } else {
        None
    };

    let vka: Option<&[[u8; 32]]> = vka_buf.as_deref();

    let flattened_instances = snark.instances.into_iter().flatten();

    let encoded = encode_calldata(vka, &snark.proof, &flattened_instances.collect::<Vec<_>>());

    Ok(encoded)
}

/// Generate witness from compiled circuit and input json
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn gen_witness(compiled_circuit: Vec<u8>, input: Vec<u8>) -> Result<Vec<u8>, EZKLError> {
    println!("[circuit]");
    let mut circuit: crate::graph::GraphCircuit = bincode::deserialize(&compiled_circuit[..])
        .map_err(|e| {
            EZKLError::InternalError(format!("Failed to deserialize compiled model: {}", e))
        })?;

    println!("[input]");
    let input: crate::graph::input::GraphData = serde_json::from_slice(&input[..])
        .map_err(|e| EZKLError::InternalError(format!("Failed to deserialize input: {}", e)))?;

    println!("[load graph input]");
    let mut input = circuit
        .load_graph_input(&input)
        .map_err(|e| EZKLError::InternalError(format!("{}", e)))?;

    println!("[load graph witness]");
    let witness = circuit
        .forward::<KZGCommitmentScheme<Bn256>>(
            &mut input,
            None,
            None,
            RegionSettings::all_true(
                circuit.settings().run_args.decomp_base,
                circuit.settings().run_args.decomp_legs,
            ),
        )
        .map_err(|e| EZKLError::InternalError(format!("{}", e)))?;

    println!("[serialize witness]");
    serde_json::to_vec(&witness)
        .map_err(|e| EZKLError::InternalError(format!("Failed to serialize witness: {}", e)))
}

/// Generate verifying key from compiled circuit, and parameters srs
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn gen_vk(
    compiled_circuit: Vec<u8>,
    srs: Vec<u8>,
    compress_selectors: bool,
) -> Result<Vec<u8>, EZKLError> {
    let mut reader = BufReader::new(&srs[..]);
    let params: ParamsKZG<Bn256> = get_params(&mut reader)?;

    let circuit: GraphCircuit = bincode::deserialize(&compiled_circuit[..])
        .map_err(|e| EZKLError::InternalError(format!("Failed to deserialize circuit: {}", e)))?;

    let vk = create_vk_lean::<KZGCommitmentScheme<Bn256>, Fr, GraphCircuit>(
        &circuit,
        &params,
        compress_selectors,
    )
    .map_err(|e| EZKLError::InternalError(format!("Failed to create verifying key: {}", e)))?;

    let mut serialized_vk = Vec::new();
    vk.write(
        &mut serialized_vk,
        halo2_proofs::SerdeFormat::RawBytesUnchecked,
    )
    .map_err(|e| EZKLError::InternalError(format!("Failed to serialize verifying key: {}", e)))?;

    Ok(serialized_vk)
}

/// Generate proving key from vk, compiled circuit and parameters srs
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn gen_pk(vk: Vec<u8>, compiled_circuit: Vec<u8>, srs: Vec<u8>) -> Result<Vec<u8>, EZKLError> {
    let mut reader = BufReader::new(&srs[..]);
    let params: ParamsKZG<Bn256> = get_params(&mut reader)?;

    let circuit: GraphCircuit = bincode::deserialize(&compiled_circuit[..])
        .map_err(|e| EZKLError::InternalError(format!("Failed to deserialize circuit: {}", e)))?;

    let mut reader = BufReader::new(&vk[..]);
    let vk = VerifyingKey::<G1Affine>::read::<_, GraphCircuit>(
        &mut reader,
        halo2_proofs::SerdeFormat::RawBytesUnchecked,
        circuit.settings().clone(),
    )
    .map_err(|e| EZKLError::InternalError(format!("Failed to deserialize verifying key: {}", e)))?;

    let pk = create_pk_lean::<KZGCommitmentScheme<Bn256>, Fr, GraphCircuit>(vk, &circuit, &params)
        .map_err(|e| EZKLError::InternalError(format!("Failed to create proving key: {}", e)))?;

    let mut serialized_pk = Vec::new();
    pk.write(&mut serialized_pk, halo2_proofs::SerdeFormat::RawBytes)
        .map_err(|e| EZKLError::InternalError(format!("Failed to serialize proving key: {}", e)))?;

    Ok(serialized_pk)
}

/// Verify proof with vk, proof json, circuit settings json and srs
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn verify(
    proof: Vec<u8>,
    vk: Vec<u8>,
    settings: Vec<u8>,
    srs: Vec<u8>,
) -> Result<bool, EZKLError> {
    let circuit_settings: GraphSettings = serde_json::from_slice(&settings[..])
        .map_err(|e| EZKLError::InternalError(format!("Failed to deserialize settings: {}", e)))?;

    let proof: crate::pfsys::Snark<Fr, G1Affine> = serde_json::from_slice(&proof[..])
        .map_err(|e| EZKLError::InternalError(format!("Failed to deserialize proof: {}", e)))?;

    let mut reader = BufReader::new(&vk[..]);
    let vk = VerifyingKey::<G1Affine>::read::<_, GraphCircuit>(
        &mut reader,
        halo2_proofs::SerdeFormat::RawBytesUnchecked,
        circuit_settings.clone(),
    )
    .map_err(|e| EZKLError::InternalError(format!("Failed to deserialize vk: {}", e)))?;

    let orig_n = 1 << circuit_settings.run_args.logrows;
    let commitment = circuit_settings.run_args.commitment.into();

    let mut reader = BufReader::new(&srs[..]);
    let result = match commitment {
        Commitments::KZG => {
            let params: ParamsKZG<Bn256> = get_params(&mut reader)?;
            let strategy = KZGSingleStrategy::new(params.verifier_params());
            match proof.transcript_type {
                TranscriptType::EVM => verify_proof_circuit::<
                    VerifierSHPLONK<'_, Bn256>,
                    KZGCommitmentScheme<Bn256>,
                    KZGSingleStrategy<_>,
                    _,
                    EvmTranscript<G1Affine, _, _, _>,
                >(&proof, &params, &vk, strategy, orig_n),
                TranscriptType::Poseidon => {
                    verify_proof_circuit::<
                        VerifierSHPLONK<'_, Bn256>,
                        KZGCommitmentScheme<Bn256>,
                        KZGSingleStrategy<_>,
                        _,
                        PoseidonTranscript<NativeLoader, _>,
                    >(&proof, &params, &vk, strategy, orig_n)
                }
            }
        }
        Commitments::IPA => {
            let params: ParamsIPA<_> = get_params(&mut reader)?;
            let strategy = IPASingleStrategy::new(params.verifier_params());
            match proof.transcript_type {
                TranscriptType::EVM => verify_proof_circuit::<
                    VerifierIPA<_>,
                    IPACommitmentScheme<G1Affine>,
                    IPASingleStrategy<_>,
                    _,
                    EvmTranscript<G1Affine, _, _, _>,
                >(&proof, &params, &vk, strategy, orig_n),
                TranscriptType::Poseidon => {
                    verify_proof_circuit::<
                        VerifierIPA<_>,
                        IPACommitmentScheme<G1Affine>,
                        IPASingleStrategy<_>,
                        _,
                        PoseidonTranscript<NativeLoader, _>,
                    >(&proof, &params, &vk, strategy, orig_n)
                }
            }
        }
    };

    match result {
        Ok(_) => Ok(true),
        Err(e) => Err(EZKLError::InternalError(format!(
            "Verification failed: {}",
            e
        ))),
    }
}

/// Verify aggregate proof with vk, proof, circuit settings and srs
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn verify_aggr(
    proof: Vec<u8>,
    vk: Vec<u8>,
    logrows: u64,
    srs: Vec<u8>,
    commitment: &str,
) -> Result<bool, EZKLError> {
    let proof: crate::pfsys::Snark<Fr, G1Affine> = serde_json::from_slice(&proof[..])
        .map_err(|e| EZKLError::InternalError(format!("Failed to deserialize proof: {}", e)))?;

    let mut reader = BufReader::new(&vk[..]);
    let vk = VerifyingKey::<G1Affine>::read::<_, AggregationCircuit>(
        &mut reader,
        halo2_proofs::SerdeFormat::RawBytesUnchecked,
        (),
    )
    .map_err(|e| EZKLError::InternalError(format!("Failed to deserialize vk: {}", e)))?;

    let commit = Commitments::from_str(commitment)
        .map_err(|e| EZKLError::InternalError(format!("Invalid commitment: {}", e)))?;

    let orig_n = 1 << logrows;

    let mut reader = BufReader::new(&srs[..]);
    let result = match commit {
        Commitments::KZG => {
            let params: ParamsKZG<Bn256> = get_params(&mut reader)?;
            let strategy = KZGSingleStrategy::new(params.verifier_params());
            match proof.transcript_type {
                TranscriptType::EVM => verify_proof_circuit::<
                    VerifierSHPLONK<'_, Bn256>,
                    KZGCommitmentScheme<Bn256>,
                    KZGSingleStrategy<_>,
                    _,
                    EvmTranscript<G1Affine, _, _, _>,
                >(&proof, &params, &vk, strategy, orig_n),

                TranscriptType::Poseidon => {
                    verify_proof_circuit::<
                        VerifierSHPLONK<'_, Bn256>,
                        KZGCommitmentScheme<Bn256>,
                        KZGSingleStrategy<_>,
                        _,
                        PoseidonTranscript<NativeLoader, _>,
                    >(&proof, &params, &vk, strategy, orig_n)
                }
            }
        }
        Commitments::IPA => {
            let params: ParamsIPA<_> =
                halo2_proofs::poly::commitment::Params::<'_, G1Affine>::read(&mut reader).map_err(
                    |e| EZKLError::InternalError(format!("Failed to deserialize params: {}", e)),
                )?;
            let strategy = IPASingleStrategy::new(params.verifier_params());
            match proof.transcript_type {
                TranscriptType::EVM => verify_proof_circuit::<
                    VerifierIPA<_>,
                    IPACommitmentScheme<G1Affine>,
                    IPASingleStrategy<_>,
                    _,
                    EvmTranscript<G1Affine, _, _, _>,
                >(&proof, &params, &vk, strategy, orig_n),
                TranscriptType::Poseidon => {
                    verify_proof_circuit::<
                        VerifierIPA<_>,
                        IPACommitmentScheme<G1Affine>,
                        IPASingleStrategy<_>,
                        _,
                        PoseidonTranscript<NativeLoader, _>,
                    >(&proof, &params, &vk, strategy, orig_n)
                }
            }
        }
    };

    result
        .map(|_| true)
        .map_err(|e| EZKLError::InternalError(format!("{}", e)))
}

/// Prove in browser with compiled circuit, witness json, proving key, and srs
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn prove(
    witness: Vec<u8>,
    pk: Vec<u8>,
    compiled_circuit: Vec<u8>,
    srs: Vec<u8>,
) -> Result<Vec<u8>, EZKLError> {
    #[cfg(feature = "det-prove")]
    log::set_max_level(log::LevelFilter::Debug);
    #[cfg(not(feature = "det-prove"))]
    log::set_max_level(log::LevelFilter::Info);

    let mut circuit: GraphCircuit = bincode::deserialize(&compiled_circuit[..])
        .map_err(|e| EZKLError::InternalError(format!("Failed to deserialize circuit: {}", e)))?;

    let data: GraphWitness = serde_json::from_slice(&witness[..]).map_err(InnerEZKLError::from)?;

    let mut reader = BufReader::new(&pk[..]);
    let pk = ProvingKey::<G1Affine>::read::<_, GraphCircuit>(
        &mut reader,
        halo2_proofs::SerdeFormat::RawBytesUnchecked,
        circuit.settings().clone(),
    )
    .map_err(|e| EZKLError::InternalError(format!("Failed to deserialize proving key: {}", e)))?;

    circuit
        .load_graph_witness(&data)
        .map_err(InnerEZKLError::from)?;
    let public_inputs = circuit
        .prepare_public_inputs(&data)
        .map_err(InnerEZKLError::from)?;
    let proof_split_commits: Option<crate::pfsys::ProofSplitCommit> = data.into();

    let mut reader = BufReader::new(&srs[..]);
    let commitment = circuit.settings().run_args.commitment.into();

    let proof = match commitment {
        Commitments::KZG => {
            let params: ParamsKZG<Bn256> =
                halo2_proofs::poly::commitment::Params::<'_, G1Affine>::read(&mut reader).map_err(
                    |e| EZKLError::InternalError(format!("Failed to deserialize srs: {}", e)),
                )?;

            create_proof_circuit::<
                KZGCommitmentScheme<Bn256>,
                _,
                ProverSHPLONK<_>,
                VerifierSHPLONK<_>,
                KZGSingleStrategy<_>,
                _,
                EvmTranscript<_, _, _, _>,
                EvmTranscript<_, _, _, _>,
            >(
                circuit,
                vec![public_inputs],
                &params,
                &pk,
                CheckMode::UNSAFE,
                Commitments::KZG,
                TranscriptType::EVM,
                proof_split_commits,
                None,
            )
        }
        Commitments::IPA => {
            let params: ParamsIPA<_> =
                halo2_proofs::poly::commitment::Params::<'_, G1Affine>::read(&mut reader).map_err(
                    |e| EZKLError::InternalError(format!("Failed to deserialize srs: {}", e)),
                )?;

            create_proof_circuit::<
                IPACommitmentScheme<G1Affine>,
                _,
                ProverIPA<_>,
                VerifierIPA<_>,
                IPASingleStrategy<_>,
                _,
                EvmTranscript<_, _, _, _>,
                EvmTranscript<_, _, _, _>,
            >(
                circuit,
                vec![public_inputs],
                &params,
                &pk,
                CheckMode::UNSAFE,
                Commitments::IPA,
                TranscriptType::EVM,
                proof_split_commits,
                None,
            )
        }
    }
    .map_err(InnerEZKLError::from)?;

    Ok(serde_json::to_vec(&proof).map_err(InnerEZKLError::from)?)
}

/// Validate the witness json
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn witness_validation(witness: Vec<u8>) -> Result<bool, EZKLError> {
    let _: GraphWitness = serde_json::from_slice(&witness[..]).map_err(InnerEZKLError::from)?;

    Ok(true)
}

/// Validate the compiled circuit
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn compiled_circuit_validation(compiled_circuit: Vec<u8>) -> Result<bool, EZKLError> {
    let _: GraphCircuit = bincode::deserialize(&compiled_circuit[..]).map_err(|e| {
        EZKLError::InternalError(format!("Failed to deserialize compiled circuit: {}", e))
    })?;

    Ok(true)
}

/// Validate the input json
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn input_validation(input: Vec<u8>) -> Result<bool, EZKLError> {
    let _: crate::graph::input::GraphData =
        serde_json::from_slice(&input[..]).map_err(InnerEZKLError::from)?;

    Ok(true)
}

/// Validate the proof json
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn proof_validation(proof: Vec<u8>) -> Result<bool, EZKLError> {
    let _: crate::pfsys::Snark<Fr, G1Affine> =
        serde_json::from_slice(&proof[..]).map_err(InnerEZKLError::from)?;

    Ok(true)
}

/// Validate the verifying key given the settings json
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn vk_validation(vk: Vec<u8>, settings: Vec<u8>) -> Result<bool, EZKLError> {
    let circuit_settings: GraphSettings =
        serde_json::from_slice(&settings[..]).map_err(InnerEZKLError::from)?;

    let mut reader = BufReader::new(&vk[..]);
    let _ = VerifyingKey::<G1Affine>::read::<_, GraphCircuit>(
        &mut reader,
        halo2_proofs::SerdeFormat::RawBytesUnchecked,
        circuit_settings,
    )
    .map_err(|e| EZKLError::InternalError(format!("Failed to deserialize verifying key: {}", e)))?;

    Ok(true)
}

/// Validate the proving key given the settings json
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn pk_validation(pk: Vec<u8>, settings: Vec<u8>) -> Result<bool, EZKLError> {
    let circuit_settings: GraphSettings =
        serde_json::from_slice(&settings[..]).map_err(InnerEZKLError::from)?;

    let mut reader = BufReader::new(&pk[..]);
    let _ = ProvingKey::<G1Affine>::read::<_, GraphCircuit>(
        &mut reader,
        halo2_proofs::SerdeFormat::RawBytesUnchecked,
        circuit_settings,
    )
    .map_err(|e| EZKLError::InternalError(format!("Failed to deserialize proving key: {}", e)))?;

    Ok(true)
}

/// Validate the settings json
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn settings_validation(settings: Vec<u8>) -> Result<bool, EZKLError> {
    let _: GraphSettings = serde_json::from_slice(&settings[..]).map_err(InnerEZKLError::from)?;

    Ok(true)
}

/// Validate the srs
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn srs_validation(srs: Vec<u8>) -> Result<bool, EZKLError> {
    let mut reader = BufReader::new(&srs[..]);
    let _: ParamsKZG<Bn256> =
        halo2_proofs::poly::commitment::Params::<'_, G1Affine>::read(&mut reader).map_err(|e| {
            EZKLError::InternalError(format!("Failed to deserialize params: {}", e))
        })?;

    Ok(true)
}

// HELPER FUNCTIONS

fn get_params<
    Scheme: for<'a> halo2_proofs::poly::commitment::Params<'a, halo2curves::bn256::G1Affine>,
>(
    mut reader: &mut BufReader<&[u8]>,
) -> Result<Scheme, EZKLError> {
    halo2_proofs::poly::commitment::Params::<G1Affine>::read(&mut reader)
        .map_err(|e| EZKLError::InternalError(format!("Failed to deserialize params: {}", e)))
}

/// Creates a [VerifyingKey] for a [GraphCircuit] (`circuit`) with specific [CommitmentScheme] parameters (`params`) for the WASM target
pub fn create_vk_lean<Scheme: CommitmentScheme, F: PrimeField + TensorType, C: Circuit<F>>(
    circuit: &C,
    params: &'_ Scheme::ParamsProver,
    compress_selectors: bool,
) -> Result<VerifyingKey<Scheme::Curve>, halo2_proofs::plonk::Error>
where
    C: Circuit<Scheme::Scalar>,
    <Scheme as CommitmentScheme>::Scalar: FromUniformBytes<64>,
{
    // Real proof
    let empty_circuit = <C as Circuit<F>>::without_witnesses(circuit);

    // Initialize the verifying key
    let vk = keygen_vk_custom(params, &empty_circuit, compress_selectors)?;
    Ok(vk)
}
/// Creates a [ProvingKey] from a [VerifyingKey] for a [GraphCircuit] (`circuit`) with specific [CommitmentScheme] parameters (`params`) for the WASM target
pub fn create_pk_lean<Scheme: CommitmentScheme, F: PrimeField + TensorType, C: Circuit<F>>(
    vk: VerifyingKey<Scheme::Curve>,
    circuit: &C,
    params: &'_ Scheme::ParamsProver,
) -> Result<ProvingKey<Scheme::Curve>, halo2_proofs::plonk::Error>
where
    C: Circuit<Scheme::Scalar>,
    <Scheme as CommitmentScheme>::Scalar: FromUniformBytes<64>,
{
    // Real proof
    let empty_circuit = <C as Circuit<F>>::without_witnesses(circuit);

    // Initialize the proving key
    let pk = keygen_pk(params, vk, &empty_circuit)?;
    Ok(pk)
}
@@ -1,398 +0,0 @@
use crate::{
    circuit::modules::polycommit::PolyCommitChip,
    fieldutils::{felt_to_integer_rep, integer_rep_to_felt},
    graph::{quantize_float, scale_to_multiplier, GraphCircuit, GraphSettings},
};
use console_error_panic_hook;
use halo2_proofs::{
    plonk::*,
    poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG},
};
use halo2_solidity_verifier::Evm;
use halo2curves::{
    bn256::{Bn256, Fr, G1Affine},
    ff::PrimeField,
};
use std::str::FromStr;
use wasm_bindgen::prelude::*;
use wasm_bindgen_console_logger::DEFAULT_LOGGER;

use crate::bindings::universal::{
    compiled_circuit_validation, encode_verifier_calldata, gen_pk, gen_vk, gen_witness,
    input_validation, pk_validation, proof_validation, settings_validation, srs_validation,
    verify_aggr, vk_validation, witness_validation, EZKLError as ExternalEZKLError,
};
#[cfg(feature = "web")]
pub use wasm_bindgen_rayon::init_thread_pool;

impl From<ExternalEZKLError> for JsError {
    fn from(e: ExternalEZKLError) -> Self {
        JsError::new(&format!("{}", e))
    }
}

#[wasm_bindgen]
/// Initialize logger for wasm
pub fn init_logger() {
    log::set_logger(&DEFAULT_LOGGER).unwrap();
}

#[wasm_bindgen]
/// Initialize panic hook for wasm
pub fn init_panic_hook() {
    console_error_panic_hook::set_once();
}

/// Wrapper around the halo2 encode call data method
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn encodeVerifierCalldata(
    proof: wasm_bindgen::Clamped<Vec<u8>>,
    vk_address: Option<Vec<u8>>,
) -> Result<Vec<u8>, JsError> {
    encode_verifier_calldata(proof.0, vk_address).map_err(JsError::from)
}

/// Converts a felt to a big endian string
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn feltToBigEndian(array: wasm_bindgen::Clamped<Vec<u8>>) -> Result<String, JsError> {
    let felt: Fr = serde_json::from_slice(&array[..])
        .map_err(|e| JsError::new(&format!("Failed to deserialize field element: {}", e)))?;
    Ok(format!("{:?}", felt))
}

/// Converts a felt to a little endian string
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn feltToLittleEndian(array: wasm_bindgen::Clamped<Vec<u8>>) -> Result<String, JsError> {
    let felt: Fr = serde_json::from_slice(&array[..])
        .map_err(|e| JsError::new(&format!("Failed to deserialize field element: {}", e)))?;
    let repr = serde_json::to_string(&felt).unwrap();
    let b: String = serde_json::from_str(&repr).unwrap();
    Ok(b)
}

/// Converts a felt to its integer representation
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn feltToInt(
    array: wasm_bindgen::Clamped<Vec<u8>>,
) -> Result<wasm_bindgen::Clamped<Vec<u8>>, JsError> {
    let felt: Fr = serde_json::from_slice(&array[..])
        .map_err(|e| JsError::new(&format!("Failed to deserialize field element: {}", e)))?;
    Ok(wasm_bindgen::Clamped(
        serde_json::to_vec(&felt_to_integer_rep(felt))
            .map_err(|e| JsError::new(&format!("Failed to serialize integer: {}", e)))?,
    ))
}

/// Converts felts to a floating point element
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn feltToFloat(
    array: wasm_bindgen::Clamped<Vec<u8>>,
    scale: crate::Scale,
) -> Result<f64, JsError> {
    let felt: Fr = serde_json::from_slice(&array[..])
        .map_err(|e| JsError::new(&format!("Failed to deserialize field element: {}", e)))?;
    let int_rep = felt_to_integer_rep(felt);
    let multiplier = scale_to_multiplier(scale);
    Ok(int_rep as f64 / multiplier)
}

/// Converts a floating point number to a hex string representing a fixed point field element
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn floatToFelt(
    mut input: f64,
    scale: crate::Scale,
    input_type: &str,
) -> Result<wasm_bindgen::Clamped<Vec<u8>>, JsError> {
    crate::circuit::InputType::roundtrip(
        &crate::circuit::InputType::from_str(input_type)
            .map_err(|e| JsError::new(&format!("{}", e)))?,
        &mut input,
    );
    let int_rep =
        quantize_float(&input, 0.0, scale).map_err(|e| JsError::new(&format!("{}", e)))?;
    let felt = integer_rep_to_felt(int_rep);
    let vec = crate::pfsys::field_to_string::<halo2curves::bn256::Fr>(&felt);
    Ok(wasm_bindgen::Clamped(serde_json::to_vec(&vec).map_err(
        |e| JsError::new(&format!("Failed to serialize a float to felt: {}", e)),
    )?))
}

/// Generate a kzg commitment.
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn kzgCommit(
    message: wasm_bindgen::Clamped<Vec<u8>>,
    vk: wasm_bindgen::Clamped<Vec<u8>>,
    settings: wasm_bindgen::Clamped<Vec<u8>>,
    params_ser: wasm_bindgen::Clamped<Vec<u8>>,
) -> Result<wasm_bindgen::Clamped<Vec<u8>>, JsError> {
    let message: Vec<Fr> = serde_json::from_slice(&message[..])
        .map_err(|e| JsError::new(&format!("Failed to deserialize message: {}", e)))?;

    let mut reader = std::io::BufReader::new(&params_ser[..]);
    let params: ParamsKZG<Bn256> =
        halo2_proofs::poly::commitment::Params::<'_, G1Affine>::read(&mut reader)
            .map_err(|e| JsError::new(&format!("Failed to deserialize params: {}", e)))?;

    let mut reader = std::io::BufReader::new(&vk[..]);
    let circuit_settings: GraphSettings = serde_json::from_slice(&settings[..])
        .map_err(|e| JsError::new(&format!("Failed to deserialize settings: {}", e)))?;
    let vk = VerifyingKey::<G1Affine>::read::<_, GraphCircuit>(
        &mut reader,
        halo2_proofs::SerdeFormat::RawBytes,
        circuit_settings,
    )
    .map_err(|e| JsError::new(&format!("Failed to deserialize vk: {}", e)))?;

    let output = PolyCommitChip::commit::<KZGCommitmentScheme<Bn256>>(
        message,
        (vk.cs().blinding_factors() + 1) as u32,
        &params,
    );

    Ok(wasm_bindgen::Clamped(
        serde_json::to_vec(&output).map_err(|e| JsError::new(&format!("{}", e)))?,
    ))
}

/// Converts a buffer to a vector of field elements, one per 16-byte little-endian chunk
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn bufferToVecOfFelt(
    buffer: wasm_bindgen::Clamped<Vec<u8>>,
) -> Result<wasm_bindgen::Clamped<Vec<u8>>, JsError> {
    // Convert the buffer to a slice
    let buffer: &[u8] = &buffer;

    // Divide the buffer into chunks of 16 bytes
    let chunks = buffer.chunks_exact(16);

    // Get the remainder
    let remainder = chunks.remainder();

    // Copy the remainder so it can be zero-padded to 16 bytes below
    let mut remainder = remainder.to_vec();

    // Collect chunks into a Vec<[u8; 16]>.
    let chunks: Result<Vec<[u8; 16]>, JsError> = chunks
        .map(|slice| {
            let array: [u8; 16] = slice
                .try_into()
                .map_err(|_| JsError::new("failed to slice input chunks"))?;
            Ok(array)
        })
        .collect();

    let mut chunks = chunks?;

    if remainder.len() != 0 {
        remainder.resize(16, 0);
        // Convert the Vec<u8> to [u8; 16]
        let remainder_array: [u8; 16] = remainder
            .try_into()
            .map_err(|_| JsError::new("failed to slice remainder"))?;
        // append the remainder to the chunks
        chunks.push(remainder_array);
    }

    // Convert each chunk to a field element
    let field_elements: Vec<Fr> = chunks
        .iter()
        .map(|x| PrimeField::from_u128(u8_array_to_u128_le(*x)))
        .collect();

    Ok(wasm_bindgen::Clamped(
        serde_json::to_vec(&field_elements)
            .map_err(|e| JsError::new(&format!("Failed to serialize field elements: {}", e)))?,
    ))
}

/// Generate a poseidon hash in the browser from an input message
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn poseidonHash(
    message: wasm_bindgen::Clamped<Vec<u8>>,
) -> Result<wasm_bindgen::Clamped<Vec<u8>>, JsError> {
    super::universal::poseidon_hash(message.0)
        .map_err(JsError::from)
        .map(|x| wasm_bindgen::Clamped(x.clone()))
}

/// Generate a witness file from input.json, compiled model and a settings.json file.
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn genWitness(
    compiled_circuit: wasm_bindgen::Clamped<Vec<u8>>,
    input: wasm_bindgen::Clamped<Vec<u8>>,
) -> Result<Vec<u8>, JsError> {
    gen_witness(compiled_circuit.0, input.0).map_err(JsError::from)
}

/// Generate verifying key in browser
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn genVk(
    compiled_circuit: wasm_bindgen::Clamped<Vec<u8>>,
    params_ser: wasm_bindgen::Clamped<Vec<u8>>,
    compress_selectors: bool,
) -> Result<Vec<u8>, JsError> {
    gen_vk(compiled_circuit.0, params_ser.0, compress_selectors).map_err(JsError::from)
}

/// Generate proving key in browser
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn genPk(
    vk: wasm_bindgen::Clamped<Vec<u8>>,
    compiled_circuit: wasm_bindgen::Clamped<Vec<u8>>,
    params_ser: wasm_bindgen::Clamped<Vec<u8>>,
) -> Result<Vec<u8>, JsError> {
    gen_pk(vk.0, compiled_circuit.0, params_ser.0).map_err(JsError::from)
}

/// Verify proof in browser using wasm
#[wasm_bindgen]
pub fn verify(
    proof_js: wasm_bindgen::Clamped<Vec<u8>>,
    vk: wasm_bindgen::Clamped<Vec<u8>>,
    settings: wasm_bindgen::Clamped<Vec<u8>>,
    srs: wasm_bindgen::Clamped<Vec<u8>>,
) -> Result<bool, JsError> {
    super::universal::verify(proof_js.0, vk.0, settings.0, srs.0).map_err(JsError::from)
}

/// Verify proof in browser evm using wasm
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn verifyEVM(
    proof_js: wasm_bindgen::Clamped<Vec<u8>>,
    bytecode_verifier: Vec<u8>,
    bytecode_vka: Option<Vec<u8>>,
) -> Result<bool, JsError> {
    let mut evm = Evm::unlimited();
    let decoded_verifier = utf8_bytes_to_hex_decoded(&bytecode_verifier)?;
    let (verifier_address, _) = evm.create(decoded_verifier);
    // if bytecode_vka is Some, then create the vk contract
    let vk_address = if let Some(bytecode_vka) = bytecode_vka {
        let decoded_vka = utf8_bytes_to_hex_decoded(&bytecode_vka)?;
        let (address, _) = evm.create(decoded_vka);
        Some(address.as_slice().to_vec())
    // check if bytecode_verifier is none and if so then generate the
    // reusable verifier
    } else {
        None
    };
    let calldata = encode_verifier_calldata(proof_js.0, vk_address).map_err(JsError::from);
    let output = evm.call(verifier_address, calldata?).1;
    let true_word = [vec![0; 31], vec![1]].concat();
    Ok(output == true_word)
}
|
||||
|
||||
/// Verify aggregate proof in browser using wasm
|
||||
#[wasm_bindgen]
|
||||
#[allow(non_snake_case)]
|
||||
pub fn verifyAggr(
|
||||
proof_js: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
vk: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
logrows: u64,
|
||||
srs: wasm_bindgen::Clamped<Vec<u8>>,
|
||||
commitment: &str,
|
||||
) -> Result<bool, JsError> {
|
||||
verify_aggr(proof_js.0, vk.0, logrows, srs.0, commitment).map_err(JsError::from)
|
||||
}

/// Prove in browser using wasm
#[wasm_bindgen]
pub fn prove(
    witness: wasm_bindgen::Clamped<Vec<u8>>,
    pk: wasm_bindgen::Clamped<Vec<u8>>,
    compiled_circuit: wasm_bindgen::Clamped<Vec<u8>>,
    srs: wasm_bindgen::Clamped<Vec<u8>>,
) -> Result<Vec<u8>, JsError> {
    super::universal::prove(witness.0, pk.0, compiled_circuit.0, srs.0).map_err(JsError::from)
}

// VALIDATION FUNCTIONS

/// Witness file validation
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn witnessValidation(witness: wasm_bindgen::Clamped<Vec<u8>>) -> Result<bool, JsError> {
    witness_validation(witness.0).map_err(JsError::from)
}
/// Compiled circuit validation
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn compiledCircuitValidation(
    compiled_circuit: wasm_bindgen::Clamped<Vec<u8>>,
) -> Result<bool, JsError> {
    compiled_circuit_validation(compiled_circuit.0).map_err(JsError::from)
}
/// Input file validation
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn inputValidation(input: wasm_bindgen::Clamped<Vec<u8>>) -> Result<bool, JsError> {
    input_validation(input.0).map_err(JsError::from)
}
/// Proof file validation
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn proofValidation(proof: wasm_bindgen::Clamped<Vec<u8>>) -> Result<bool, JsError> {
    proof_validation(proof.0).map_err(JsError::from)
}
/// Vk file validation
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn vkValidation(
    vk: wasm_bindgen::Clamped<Vec<u8>>,
    settings: wasm_bindgen::Clamped<Vec<u8>>,
) -> Result<bool, JsError> {
    vk_validation(vk.0, settings.0).map_err(JsError::from)
}
/// Pk file validation
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn pkValidation(
    pk: wasm_bindgen::Clamped<Vec<u8>>,
    settings: wasm_bindgen::Clamped<Vec<u8>>,
) -> Result<bool, JsError> {
    pk_validation(pk.0, settings.0).map_err(JsError::from)
}
/// Settings file validation
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn settingsValidation(settings: wasm_bindgen::Clamped<Vec<u8>>) -> Result<bool, JsError> {
    settings_validation(settings.0).map_err(JsError::from)
}
/// Srs file validation
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn srsValidation(srs: wasm_bindgen::Clamped<Vec<u8>>) -> Result<bool, JsError> {
    srs_validation(srs.0).map_err(JsError::from)
}
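Each validator above hands the raw byte payload to the corresponding `*_validation` routine and surfaces the outcome as a `bool`. A hedged sketch of driving one from the Rust side (the file path is hypothetical; `Clamped` is just a transparent wrapper around the buffer):

    // read a settings file produced by gen-settings and check that it parses
    let bytes = std::fs::read("settings.json").expect("file exists");
    let ok = settingsValidation(wasm_bindgen::Clamped(bytes));
    assert!(matches!(ok, Ok(true)));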

/// HELPER FUNCTIONS
pub fn u8_array_to_u128_le(arr: [u8; 16]) -> u128 {
    let mut n: u128 = 0;
    for &b in arr.iter().rev() {
        n <<= 8;
        n |= b as u128;
    }
    n
}
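The loop consumes the array from its most significant end (`rev()`), so the input is interpreted little-endian: byte 0 is the least significant. A quick sanity check using only the function above:

    let mut bytes = [0u8; 16];
    bytes[0] = 1;
    assert_eq!(u8_array_to_u128_le(bytes), 1); // byte 0 is the low byte
    bytes = [0u8; 16];
    bytes[1] = 1;
    assert_eq!(u8_array_to_u128_le(bytes), 256); // byte 1 contributes 2^8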
/// Decodes a UTF-8 byte buffer containing a hex string (with an optional "0x" prefix) into raw bytes
pub fn utf8_bytes_to_hex_decoded(input: &[u8]) -> Result<Vec<u8>, JsError> {
    let string = std::str::from_utf8(input)?.trim();
    let hex_string = if string.starts_with("0x") {
        &string[2..]
    } else {
        string
    };
    hex::decode(hex_string).map_err(JsError::from)
}
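Both the "0x" prefix and surrounding whitespace are tolerated, so the following decode to the same bytes:

    assert!(matches!(utf8_bytes_to_hex_decoded(b"0xff00"), Ok(v) if v == [0xff, 0x00]));
    assert!(matches!(utf8_bytes_to_hex_decoded(b"  ff00  "), Ok(v) if v == [0xff, 0x00]));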

@@ -6229,9 +6229,9 @@ pub(crate) fn recompose<F: PrimeField + TensorType + PartialOrd + std::hash::Has
    (0..num_first_dims)
        .flat_map(|_| {
            (0..n).rev().map(|x| {
                let base = (*base).checked_pow(x as u32);
                let base = (*base as IntegerRep).checked_pow(x as u32);
                if let Some(base) = base {
                    Ok(ValType::Constant(integer_rep_to_felt(base as IntegerRep)))
                    Ok(ValType::Constant(integer_rep_to_felt(base)))
                } else {
                    Err(CircuitError::DecompositionBaseOverflow)
                }
@@ -6341,9 +6341,9 @@ pub(crate) fn decompose<F: PrimeField + TensorType + PartialOrd + std::hash::Has
    (0..input.len())
        .flat_map(|_| {
            (0..*n).rev().map(|x| {
                let base = (*base).checked_pow(x as u32);
                let base = (*base as IntegerRep).checked_pow(x as u32);
                if let Some(base) = base {
                    Ok(ValType::Constant(integer_rep_to_felt(base as IntegerRep)))
                    Ok(ValType::Constant(integer_rep_to_felt(base)))
                } else {
                    Err(CircuitError::DecompositionBaseOverflow)
                }
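In both hunks the fix is the same: the base is cast to `IntegerRep` before `checked_pow`, so exponentiation is overflow-checked at the representation width rather than at the narrower declared type of `base`. A worked sketch of the decomposition these powers serve, assuming `IntegerRep` is a wide signed integer (i128 here) and the default `decomp_base = 16384` with two legs:

    // digits (d0, d1) recompose as d0 * base^1 + d1 * base^0
    let base: i128 = 16384;
    let digits = [3i128, 5];
    let value = digits[0] * base.checked_pow(1).unwrap() + digits[1] * base.checked_pow(0).unwrap();
    assert_eq!(value, 3 * 16384 + 5); // = 49157
    // a checked_pow returning None is what maps to DecompositionBaseOverflow above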

@@ -359,8 +359,6 @@ mod matmul_col_ultra_overflow_double_col {
            &pk,
            // use safe mode to verify that the proof is correct
            CheckMode::SAFE,
            crate::Commitments::KZG,
            crate::pfsys::TranscriptType::EVM,
            None,
            None,
        );
@@ -480,8 +478,6 @@ mod matmul_col_ultra_overflow {
            &pk,
            // use safe mode to verify that the proof is correct
            CheckMode::SAFE,
            crate::Commitments::KZG,
            crate::pfsys::TranscriptType::EVM,
            None,
            None,
        );
@@ -1298,8 +1294,6 @@ mod conv_col_ultra_overflow {
            &pk,
            // use safe mode to verify that the proof is correct
            CheckMode::SAFE,
            crate::Commitments::KZG,
            crate::pfsys::TranscriptType::EVM,
            None,
            None,
        );
@@ -1467,8 +1461,6 @@ mod conv_relu_col_ultra_overflow {
            &params,
            &pk,
            CheckMode::SAFE,
            crate::Commitments::KZG,
            crate::pfsys::TranscriptType::EVM,
            // use safe mode to verify that the proof is correct
            None,
            None,
@@ -2643,8 +2635,6 @@ mod lookup_ultra_overflow {
            &pk,
            // use safe mode to verify that the proof is correct
            CheckMode::SAFE,
            crate::Commitments::KZG,
            crate::pfsys::TranscriptType::EVM,
            None,
            None,
        );

src/commands.rs (184 changed lines)
@@ -9,10 +9,9 @@ use std::path::PathBuf;
use std::str::FromStr;
use tosubcommand::{ToFlags, ToSubcommand};

use crate::{pfsys::ProofType, Commitments, RunArgs};
use crate::RunArgs;

use crate::circuit::CheckMode;
use crate::pfsys::TranscriptType;

/// The default path to the .json data file
pub const DEFAULT_DATA: &str = "input.json";
@@ -28,26 +27,16 @@ pub const DEFAULT_SETTINGS: &str = "settings.json";
pub const DEFAULT_PK: &str = "pk.key";
/// The default path to the verification key file
pub const DEFAULT_VK: &str = "vk.key";
/// The default path to the proving key file for aggregated proofs
pub const DEFAULT_PK_AGGREGATED: &str = "pk_aggr.key";
/// The default path to the verification key file for aggregated proofs
pub const DEFAULT_VK_AGGREGATED: &str = "vk_aggr.key";
/// The default path to the proof file
pub const DEFAULT_PROOF: &str = "proof.json";
/// The default path to the proof file for aggregated proofs
pub const DEFAULT_PROOF_AGGREGATED: &str = "proof_aggr.json";
/// Default for whether to split proofs
pub const DEFAULT_SPLIT: &str = "false";
/// Default verifier abi
pub const DEFAULT_VERIFIER_ABI: &str = "verifier_abi.json";
/// Default verifier abi for aggregated proofs
pub const DEFAULT_VERIFIER_AGGREGATED_ABI: &str = "verifier_aggr_abi.json";
/// Default solidity code
pub const DEFAULT_SOL_CODE: &str = "evm_deploy.sol";
/// Default calldata path
pub const DEFAULT_CALLDATA: &str = "calldata.bytes";
/// Default solidity code for aggregated proofs
pub const DEFAULT_SOL_CODE_AGGREGATED: &str = "evm_deploy_aggr.sol";
/// Default contract address
pub const DEFAULT_CONTRACT_ADDRESS: &str = "contract.address";
/// Default contract address for vk
@@ -56,8 +45,6 @@ pub const DEFAULT_CONTRACT_ADDRESS_VK: &str = "contract_vk.address";
pub const DEFAULT_CHECKMODE: &str = "safe";
/// Default calibration target
pub const DEFAULT_CALIBRATION_TARGET: &str = "resources";
/// Default logrows for aggregated proofs
pub const DEFAULT_AGGREGATED_LOGROWS: &str = "23";
/// Default optimizer runs
pub const DEFAULT_OPTIMIZER_RUNS: &str = "1";
/// Default fuzz runs
@@ -91,35 +78,6 @@ pub const DEFAULT_DECIMALS: &str = "18";
/// Default path for the vka digest file
pub const DEFAULT_VKA_DIGEST: &str = "vka.digest";

#[cfg(feature = "python-bindings")]
/// Converts TranscriptType into a PyObject (Required for TranscriptType to be compatible with Python)
impl<'py> IntoPyObject<'py> for TranscriptType {
    type Target = pyo3::PyAny;
    type Output = pyo3::Bound<'py, Self::Target>;
    type Error = pyo3::PyErr;

    fn into_pyobject(self, py: Python<'py>) -> Result<Self::Output, Self::Error> {
        let result = match self {
            TranscriptType::Poseidon => "poseidon",
            TranscriptType::EVM => "evm",
        };
        Ok(result.into_pyobject(py)?.into_any())
    }
}
#[cfg(feature = "python-bindings")]
/// Obtains TranscriptType from PyObject (Required for TranscriptType to be compatible with Python)
impl<'source> FromPyObject<'source> for TranscriptType {
    fn extract_bound(ob: &pyo3::Bound<'source, pyo3::PyAny>) -> PyResult<Self> {
        let trystr = String::extract_bound(ob)?;
        let strval = trystr.to_string();
        match strval.to_lowercase().as_str() {
            "poseidon" => Ok(TranscriptType::Poseidon),
            "evm" => Ok(TranscriptType::EVM),
            _ => Err(PyValueError::new_err("Invalid value for TranscriptType")),
        }
    }
}

#[derive(Debug, Copy, Clone, Serialize, Deserialize, PartialEq, PartialOrd)]
/// Determines what the calibration pass should optimize for
pub enum CalibrationTarget {
@@ -187,7 +145,6 @@ pub enum ContractType {
    /// Deploys a verifier contract tailored to the circuit and not reusable
    Verifier {
        /// Whether to deploy a reusable verifier. This can reduce state bloat on-chain since you need only deploy a verifying key artifact (vka) for a given circuit which is significantly smaller than the verifier contract (up to 4 times smaller for large circuits)
        /// Can also be used as an alternative to aggregation for verifiers that are otherwise too large to fit on-chain.
        reusable: bool,
    },
}

@@ -535,9 +492,6 @@ pub enum Commands {
        /// number of logrows to use for srs
        #[arg(long, value_hint = clap::ValueHint::Other)]
        logrows: usize,
        /// commitment used
        #[arg(long, default_value = DEFAULT_COMMITMENT, value_hint = clap::ValueHint::Other)]
        commitment: Option<Commitments>,
    },

    /// Gets an SRS from a circuit settings file.
@@ -552,9 +506,6 @@ pub enum Commands {
        /// Number of logrows to use for srs. Overrides settings_path if specified.
        #[arg(long, default_value = None, value_hint = clap::ValueHint::Other)]
        logrows: Option<u32>,
        /// Commitment used
        #[arg(long, default_value = None, value_hint = clap::ValueHint::Other)]
        commitment: Option<Commitments>,
    },
    /// Loads model and input and runs mock prover (for testing)
    Mock {
@@ -566,82 +517,6 @@ pub enum Commands {
        model: Option<PathBuf>,
    },

    /// Mock aggregate proofs
    MockAggregate {
        /// The path to the snarks to aggregate over (generated using the prove command with the --proof-type=for-aggr flag)
        #[arg(long, default_value = DEFAULT_PROOF, value_delimiter = ',', allow_hyphen_values = true, value_hint = clap::ValueHint::FilePath)]
        aggregation_snarks: Vec<PathBuf>,
        /// logrows used for aggregation circuit
        #[arg(long, default_value = DEFAULT_AGGREGATED_LOGROWS, value_hint = clap::ValueHint::Other)]
        logrows: Option<u32>,
        /// whether the accumulated proofs are segments of a larger proof
        #[arg(long, default_value = DEFAULT_SPLIT, action = clap::ArgAction::SetTrue)]
        split_proofs: Option<bool>,
    },

    /// Setup aggregation circuit and generate pk and vk
    SetupAggregate {
        /// The path to samples of snarks that will be aggregated over (generated using the prove command with the --proof-type=for-aggr flag)
        #[arg(long, default_value = DEFAULT_PROOF, value_delimiter = ',', allow_hyphen_values = true, value_hint = clap::ValueHint::FilePath)]
        sample_snarks: Vec<PathBuf>,
        /// The path to save the desired verification key file to
        #[arg(long, default_value = DEFAULT_VK_AGGREGATED, value_hint = clap::ValueHint::FilePath)]
        vk_path: Option<PathBuf>,
        /// The path to save the proving key to
        #[arg(long, default_value = DEFAULT_PK_AGGREGATED, value_hint = clap::ValueHint::FilePath)]
        pk_path: Option<PathBuf>,
        /// The path to SRS, if None will use ~/.ezkl/srs/kzg{logrows}.srs
        #[arg(long, value_hint = clap::ValueHint::FilePath)]
        srs_path: Option<PathBuf>,
        /// logrows used for aggregation circuit
        #[arg(long, default_value = DEFAULT_AGGREGATED_LOGROWS, value_hint = clap::ValueHint::Other)]
        logrows: Option<u32>,
        /// whether the accumulated proofs are segments of a larger proof
        #[arg(long, default_value = DEFAULT_SPLIT, action = clap::ArgAction::SetTrue)]
        split_proofs: Option<bool>,
        /// compress selectors
        #[arg(long, default_value = DEFAULT_DISABLE_SELECTOR_COMPRESSION, action = clap::ArgAction::SetTrue)]
        disable_selector_compression: Option<bool>,
        /// commitment used
        #[arg(long, default_value = DEFAULT_COMMITMENT, value_hint = clap::ValueHint::Other)]
        commitment: Option<Commitments>,
    },
    /// Aggregates proofs
    Aggregate {
        /// The path to the snarks to aggregate over (generated using the prove command with the --proof-type=for-aggr flag)
        #[arg(long, default_value = DEFAULT_PROOF, value_delimiter = ',', allow_hyphen_values = true, value_hint = clap::ValueHint::FilePath)]
        aggregation_snarks: Vec<PathBuf>,
        /// The path to load the desired proving key file (generated using the setup-aggregate command)
        #[arg(long, default_value = DEFAULT_PK_AGGREGATED, value_hint = clap::ValueHint::FilePath)]
        pk_path: Option<PathBuf>,
        /// The path to output the proof file to
        #[arg(long, default_value = DEFAULT_PROOF_AGGREGATED, value_hint = clap::ValueHint::FilePath)]
        proof_path: Option<PathBuf>,
        /// The path to SRS, if None will use ~/.ezkl/srs/kzg{logrows}.srs
        #[arg(long)]
        srs_path: Option<PathBuf>,
        #[arg(
            long,
            require_equals = true,
            num_args = 0..=1,
            default_value_t = TranscriptType::default(),
            value_enum,
            value_hint = clap::ValueHint::Other
        )]
        transcript: TranscriptType,
        /// logrows used for aggregation circuit
        #[arg(long, default_value = DEFAULT_AGGREGATED_LOGROWS, value_hint = clap::ValueHint::Other)]
        logrows: Option<u32>,
        /// run sanity checks during calculations (safe or unsafe)
        #[arg(long, default_value = DEFAULT_CHECKMODE, value_hint = clap::ValueHint::Other)]
        check_mode: Option<CheckMode>,
        /// whether the accumulated proofs are segments of a larger circuit
        #[arg(long, default_value = DEFAULT_SPLIT, action = clap::ArgAction::SetTrue)]
        split_proofs: Option<bool>,
        /// commitment used
        #[arg(long, default_value = DEFAULT_COMMITMENT, value_hint = clap::ValueHint::Other)]
        commitment: Option<Commitments>,
    },
    /// Compiles a circuit from onnx to a simplified graph (einsum + other ops) and parameters as sets of field elements
    CompileCircuit {
        /// The path to the .onnx model file
@@ -702,15 +577,6 @@ pub enum Commands {
        /// The path to SRS, if None will use ~/.ezkl/srs/kzg{logrows}.srs
        #[arg(long, value_hint = clap::ValueHint::FilePath)]
        srs_path: Option<PathBuf>,
        #[arg(
            long,
            require_equals = true,
            num_args = 0..=1,
            default_value_t = ProofType::Single,
            value_enum,
            value_hint = clap::ValueHint::Other
        )]
        proof_type: ProofType,
        /// run sanity checks during calculations (safe or unsafe)
        #[arg(long, default_value = DEFAULT_CHECKMODE, value_hint = clap::ValueHint::Other)]
        check_mode: Option<CheckMode>,
@@ -778,32 +644,6 @@ pub enum Commands {
        decimals: Option<usize>,
    },

    /// Creates an EVM verifier for an aggregate proof
    #[command(name = "create-evm-verifier-aggr")]
    #[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
    CreateEvmVerifierAggr {
        /// The path to SRS, if None will use ~/.ezkl/srs/kzg{logrows}.srs
        #[arg(long, value_hint = clap::ValueHint::FilePath)]
        srs_path: Option<PathBuf>,
        /// The path to load the desired verification key file
        #[arg(long, default_value = DEFAULT_VK_AGGREGATED, value_hint = clap::ValueHint::FilePath)]
        vk_path: Option<PathBuf>,
        /// The path to the Solidity code
        #[arg(long, default_value = DEFAULT_SOL_CODE_AGGREGATED, value_hint = clap::ValueHint::FilePath)]
        sol_code_path: Option<PathBuf>,
        /// The path to output the Solidity verifier ABI
        #[arg(long, default_value = DEFAULT_VERIFIER_AGGREGATED_ABI, value_hint = clap::ValueHint::FilePath)]
        abi_path: Option<PathBuf>,
        // aggregated circuit settings paths, used to calculate the number of instances in the aggregate proof
        #[arg(long, default_value = DEFAULT_SETTINGS, value_delimiter = ',', allow_hyphen_values = true, value_hint = clap::ValueHint::FilePath)]
        aggregation_settings: Vec<PathBuf>,
        // logrows used for aggregation circuit
        #[arg(long, default_value = DEFAULT_AGGREGATED_LOGROWS, value_hint = clap::ValueHint::Other)]
        logrows: Option<u32>,
        /// Whether to render the verifier as reusable or not. If true, you will need to deploy a VK artifact, passing it as part of the calldata to the verifier.
        #[cfg_attr(all(feature = "reusable-verifier", not(target_arch = "wasm32")), arg(short = 'R', long, action = clap::ArgAction::SetTrue))]
        reusable: Option<bool>,
    },
    /// Verifies a proof, returning accept or reject
    Verify {
        /// The path to load circuit settings .json file from (generated using the gen-settings command)
@@ -822,27 +662,7 @@ pub enum Commands {
        #[arg(long, default_value = DEFAULT_USE_REDUCED_SRS_FOR_VERIFICATION, action = clap::ArgAction::SetTrue)]
        reduced_srs: Option<bool>,
    },
    /// Verifies an aggregate proof, returning accept or reject
    VerifyAggr {
        /// The path to the proof file (generated using the prove command)
        #[arg(long, default_value = DEFAULT_PROOF_AGGREGATED, value_hint = clap::ValueHint::FilePath)]
        proof_path: Option<PathBuf>,
        /// The path to the verification key file (generated using the setup-aggregate command)
        #[arg(long, default_value = DEFAULT_VK_AGGREGATED, value_hint = clap::ValueHint::FilePath)]
        vk_path: Option<PathBuf>,
        /// reduced srs
        #[arg(long, default_value = DEFAULT_USE_REDUCED_SRS_FOR_VERIFICATION, action = clap::ArgAction::SetTrue)]
        reduced_srs: Option<bool>,
        /// The path to SRS, if None will use ~/.ezkl/srs/kzg{logrows}.srs
        #[arg(long, value_hint = clap::ValueHint::FilePath)]
        srs_path: Option<PathBuf>,
        /// logrows used for aggregation circuit
        #[arg(long, default_value = DEFAULT_AGGREGATED_LOGROWS, value_hint = clap::ValueHint::Other)]
        logrows: Option<u32>,
        /// commitment
        #[arg(long, default_value = DEFAULT_COMMITMENT, value_hint = clap::ValueHint::Other)]
        commitment: Option<Commitments>,
    },

    /// Deploys an evm contract (verifier, reusable verifier, or vk artifact) that is generated by ezkl
    #[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
    DeployEvm {

src/eth.rs (11 changed lines)
@@ -1,4 +1,3 @@
use crate::pfsys::evm::EvmVerificationError;
use crate::pfsys::{encode_calldata, Snark};
use alloy::contract::CallBuilder;
use alloy::core::primitives::Address as H160;
@@ -57,8 +56,6 @@ pub enum EthError {
    Wallet(#[from] WalletError),
    #[error("failed to parse url {0}")]
    UrlParse(String),
    #[error("evm verification error: {0}")]
    EvmVerification(#[from] EvmVerificationError),
    #[error("Private key must be in hex format, 64 chars, without 0x prefix")]
    PrivateKeyFormat,
    #[error("failed to parse hex: {0}")]
@@ -100,6 +97,8 @@ pub enum EthError {
    VkaData(String),
    #[error("rescaled-instance mismatch: {0}")]
    RescaleCheckError(#[from] RescaleCheckError),
    #[error("evm verification error: {0}")]
    EvmVerificationError(String),
}

pub type EthersClient = Arc<
@@ -198,7 +197,7 @@ pub async fn register_vka_via_rv(
    let result = client.call(&tx).await;

    if let Err(e) = result {
        return Err(EvmVerificationError::SolidityExecution(e.to_string()).into());
        return Err(EthError::EvmVerificationError(e.to_string()).into());
    }
    let result = result?;
    debug!("result: {:#?}", result.to_vec());
@@ -270,7 +269,7 @@ pub async fn verify_proof_via_solidity(
    let result = client.call(&tx).await;

    if let Err(e) = result {
        return Err(EvmVerificationError::SolidityExecution(e.to_string()).into());
        return Err(EthError::EvmVerificationError(e.to_string()).into());
    }
    let result = result?;
    debug!("result: {:#?}", result.to_vec());
@@ -306,7 +305,7 @@ pub async fn verify_proof_via_solidity(
        .ok_or(EthError::NoContractOutput)?
        == &1u8;
    if !result {
        return Err(EvmVerificationError::InvalidProof.into());
        return Err(EthError::EvmVerificationError("Invalid proof".into()));
    }

    let gas = client.estimate_gas(&tx).await?;

src/execute.rs (988 changed lines)
File diff suppressed because it is too large
@@ -1635,10 +1635,15 @@ impl GraphCircuit {
        max_logrows = std::cmp::min(
            max_logrows,
            // max of the model constraint logrows, min_bits, and the constants logrows is the upper limit
            *[model_constraint_logrows, min_bits, constants_logrows, einsum_logrows]
                .iter()
                .max()
                .unwrap(),
            *[
                model_constraint_logrows,
                min_bits,
                constants_logrows,
                einsum_logrows,
            ]
            .iter()
            .max()
            .unwrap(),
        );

        // we now have a min and max logrows
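The hunk above is a pure formatting reflow of the same expression: the cap on max_logrows is still the minimum of its current value and the largest of the candidate lower bounds. In miniature, with hypothetical values:

    let candidates = [20u32, 17, 19, 18]; // model constraints, min bits, constants, einsum
    let max_logrows = std::cmp::min(24, *candidates.iter().max().unwrap());
    assert_eq!(max_logrows, 20);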
src/lib.rs (89 changed lines)
@@ -42,8 +42,6 @@ static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
#[derive(thiserror::Error, Debug)]
#[allow(missing_docs)]
pub enum EZKLError {
    #[error("[aggregation] {0}")]
    AggregationError(#[from] pfsys::evm::aggregation_kzg::AggregationError),
    #[cfg(all(
        feature = "ezkl",
        not(all(target_arch = "wasm32", target_os = "unknown"))
    ))]
@@ -100,18 +98,11 @@ impl From<String> for EZKLError {
        EZKLError::UncategorizedError(s)
    }
}

use std::str::FromStr;

use circuit::{table::Range, CheckMode};
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use clap::Args;
use fieldutils::IntegerRep;
use graph::{Visibility, MAX_PUBLIC_SRS};
use halo2_proofs::poly::{
    ipa::commitment::IPACommitmentScheme, kzg::commitment::KZGCommitmentScheme,
};
use halo2curves::bn256::{Bn256, G1Affine};
use serde::{Deserialize, Serialize};
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use tosubcommand::ToFlags;
@@ -130,7 +121,6 @@ pub fn version() -> &'static str {

/// Bindings management
#[cfg(any(
    feature = "universal-bindings",
    all(target_arch = "wasm32", target_os = "unknown"),
    feature = "python-bindings"
))]
@@ -171,8 +161,6 @@ pub mod pfsys;
pub mod srs_sha;
/// An implementation of multi-dimensional tensors.
pub mod tensor;
#[cfg(feature = "ios-bindings")]
uniffi::setup_scaffolding!();
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use lazy_static::lazy_static;

@@ -198,78 +186,6 @@ const EZKL_KEY_FORMAT: &str = "raw-bytes";
#[cfg(any(not(feature = "ezkl"), target_arch = "wasm32"))]
const EZKL_BUF_CAPACITY: &usize = &8000;

#[derive(
    Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize, Default, Copy,
)]
/// Commitment scheme
pub enum Commitments {
    #[default]
    /// KZG
    KZG,
    /// IPA
    IPA,
}

impl From<Option<Commitments>> for Commitments {
    fn from(value: Option<Commitments>) -> Self {
        value.unwrap_or(Commitments::KZG)
    }
}

impl FromStr for Commitments {
    type Err = String;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s.to_lowercase().as_str() {
            "kzg" => Ok(Commitments::KZG),
            "ipa" => Ok(Commitments::IPA),
            _ => Err("Invalid value for Commitments".to_string()),
        }
    }
}

impl From<KZGCommitmentScheme<Bn256>> for Commitments {
    fn from(_value: KZGCommitmentScheme<Bn256>) -> Self {
        Commitments::KZG
    }
}

impl From<IPACommitmentScheme<G1Affine>> for Commitments {
    fn from(_value: IPACommitmentScheme<G1Affine>) -> Self {
        Commitments::IPA
    }
}

impl std::fmt::Display for Commitments {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Commitments::KZG => write!(f, "kzg"),
            Commitments::IPA => write!(f, "ipa"),
        }
    }
}

#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
impl ToFlags for Commitments {
    /// Convert the struct to a subcommand string
    fn to_flags(&self) -> Vec<String> {
        vec![format!("{}", self)]
    }
}

impl From<String> for Commitments {
    fn from(value: String) -> Self {
        match value.to_lowercase().as_str() {
            "kzg" => Commitments::KZG,
            "ipa" => Commitments::IPA,
            _ => {
                log::error!("Invalid value for Commitments");
                log::warn!("defaulting to KZG");
                Commitments::KZG
            }
        }
    }
}
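All of these conversions funnel into the same two variants, with KZG as the lenient default: `None` and unrecognized strings both fall back to it (the latter with a logged warning), while `FromStr` stays strict. A quick sanity sketch, assuming the impls above are in scope:

    use std::str::FromStr;
    assert_eq!(Commitments::from_str("KZG"), Ok(Commitments::KZG));
    assert!(Commitments::from_str("bulletproofs").is_err()); // FromStr rejects unknown schemes
    assert_eq!(Commitments::from("ipa".to_string()), Commitments::IPA);
    assert_eq!(Commitments::from("???".to_string()), Commitments::KZG); // From<String> is lenient
    assert_eq!(format!("{}", Commitments::IPA), "ipa");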

/// Parameters specific to a proving run
///
/// RunArgs contains all configuration parameters needed to control the proving process,
@@ -336,10 +252,6 @@ pub struct RunArgs {
    /// Controls level of constraint verification
    #[cfg_attr(all(feature = "ezkl", not(target_arch = "wasm32")), arg(long, default_value = "unsafe", value_hint = clap::ValueHint::Other))]
    pub check_mode: CheckMode,
    /// Commitment scheme for circuit proving
    /// Affects proof size and verification time
    #[cfg_attr(all(feature = "ezkl", not(target_arch = "wasm32")), arg(long, default_value = "kzg", value_hint = clap::ValueHint::Other))]
    pub commitment: Option<Commitments>,
    /// Base for number decomposition
    /// Must be a power of 2
    #[cfg_attr(all(feature = "ezkl", not(target_arch = "wasm32")), arg(long, default_value = "16384", value_hint = clap::ValueHint::Other))]
@@ -400,7 +312,6 @@ impl Default for RunArgs {
            param_visibility: Visibility::Fixed,
            rebase_frac_zero_constants: false,
            check_mode: CheckMode::UNSAFE,
            commitment: None,
            decomp_base: 16384,
            decomp_legs: 2,
            ignore_range_check_inputs_outputs: false,

@@ -1,442 +0,0 @@
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use crate::graph::CircuitSize;
use crate::pfsys::{Snark, SnarkWitness};
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use colored_json::ToColoredJson;
use halo2_proofs::circuit::AssignedCell;
use halo2_proofs::plonk::{self};
use halo2_proofs::{
    circuit::{Layouter, SimpleFloorPlanner, Value},
    plonk::{Circuit, ConstraintSystem},
};
use halo2_wrong_ecc::{
    integer::rns::Rns,
    maingate::{
        MainGate, MainGateConfig, MainGateInstructions, RangeChip, RangeConfig, RangeInstructions,
        RegionCtx,
    },
    EccConfig,
};
use halo2curves::bn256::{Bn256, Fq, Fr, G1Affine};
use halo2curves::ff::PrimeField;
use itertools::Itertools;
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use log::debug;
use log::trace;
use rand::rngs::OsRng;
use snark_verifier::loader::native::NativeLoader;
use snark_verifier::loader::EcPointLoader;
use snark_verifier::{
    loader,
    pcs::{
        kzg::{
            Bdfg21, KzgAccumulator, KzgAs, KzgSuccinctVerifyingKey, LimbsEncoding,
            LimbsEncodingInstructions,
        },
        AccumulationScheme, AccumulationSchemeProver,
    },
    system,
    util::arithmetic::fe_to_limbs,
    verifier::{self, SnarkVerifier},
};
use std::rc::Rc;
use thiserror::Error;

const LIMBS: usize = 4;
const BITS: usize = 68;
type As = KzgAs<Bn256, Bdfg21>;
/// Type for aggregator verification
type PlonkSuccinctVerifier = verifier::plonk::PlonkSuccinctVerifier<As, LimbsEncoding<LIMBS, BITS>>;

const T: usize = 5;
const RATE: usize = 4;
const R_F: usize = 8;
const R_P: usize = 60;

type Svk = KzgSuccinctVerifyingKey<G1Affine>;
type BaseFieldEccChip = halo2_wrong_ecc::BaseFieldEccChip<G1Affine, LIMBS, BITS>;
/// The loader type used in the transcript definition
type Halo2Loader<'a> = loader::halo2::Halo2Loader<'a, G1Affine, BaseFieldEccChip>;
/// Application snark transcript
pub type PoseidonTranscript<L, S> =
    system::halo2::transcript::halo2::PoseidonTranscript<G1Affine, L, S, T, RATE, R_F, R_P>;

#[derive(Error, Debug)]
/// Errors related to proof aggregation
pub enum AggregationError {
    /// A KZG proof could not be verified
    #[error("failed to verify KZG proof")]
    KZGProofVerification,
    /// proof read errors
    #[error("Failed to read proof")]
    ProofRead,
    /// proof verification errors
    #[error("Failed to verify proof")]
    ProofVerify,
    /// proof creation errors
    #[error("Failed to create proof")]
    ProofCreate,
}

type AggregationResult<'a> = (
    // accumulator
    KzgAccumulator<G1Affine, Rc<Halo2Loader<'a>>>,
    // the set of assigned cells
    Vec<Vec<AssignedCell<Fr, Fr>>>,
);

type LoadedProof<'a> = verifier::plonk::PlonkProof<
    G1Affine,
    Rc<
        loader::halo2::Halo2Loader<
            'a,
            G1Affine,
            halo2_wrong_ecc::BaseFieldEccChip<G1Affine, 4, 68>,
        >,
    >,
    KzgAs<Bn256, Bdfg21>,
>;

/// Aggregate one or more application snarks of the same shape into a KzgAccumulator
pub fn aggregate<'a>(
    svk: &Svk,
    loader: &Rc<Halo2Loader<'a>>,
    snarks: &[SnarkWitness<Fr, G1Affine>],
    as_proof: Value<&'_ [u8]>,
    split_proofs: bool,
) -> Result<AggregationResult<'a>, plonk::Error> {
    let assign_instances = |instances: &[Vec<Value<Fr>>]| {
        instances
            .iter()
            .map(|instances| {
                instances
                    .iter()
                    .map(|instance| loader.assign_scalar(*instance))
                    .collect_vec()
            })
            .collect_vec()
    };

    let mut accumulators = vec![];
    let mut snark_instances = vec![];
    let mut proofs: Vec<LoadedProof<'_>> = vec![];

    for snark in snarks.iter() {
        let protocol = snark.protocol.as_ref().unwrap().loaded(loader);
        let instances = assign_instances(&snark.instances);

        // get assigned cells
        snark_instances.extend(instances.iter().map(|instance| {
            instance
                .iter()
                .map(|v| v.clone().into_assigned())
                .collect_vec()
        }));

        let mut transcript = PoseidonTranscript::<Rc<Halo2Loader>, _>::new(loader, snark.proof());
        let proof = PlonkSuccinctVerifier::read_proof(svk, &protocol, &instances, &mut transcript)
            .map_err(|_| plonk::Error::Synthesis)?;

        if split_proofs {
            let previous_proof = proofs.last();
            let split_commit = match snark.clone().split {
                Some(split) => split,
                None => {
                    log::error!("Failed to split KZG commit for sequential proofs");
                    return Err(plonk::Error::Synthesis);
                }
            };
            if let Some(previous_proof) = previous_proof {
                // output of previous proof
                let output = &previous_proof.witnesses[split_commit.start..split_commit.end];
                // input of current proof
                let split_commit_len = split_commit.end - split_commit.start;
                let input = &proof.witnesses[..split_commit_len];
                // these points were already assigned previously when loading the transcript so this is safe
                // and equivalent to a copy constraint and an equality constraint
                for (output, input) in output.iter().zip(input.iter()) {
                    loader
                        .ec_point_assert_eq("assert commits match", output, input)
                        .map_err(|e| {
                            log::error!(
                                "Failed to match KZG commits for sequential proofs: {:?}",
                                e
                            );
                            plonk::Error::Synthesis
                        })?;
                }
            }
            proofs.push(proof.clone());
        }

        let mut accum = PlonkSuccinctVerifier::verify(svk, &protocol, &instances, &proof)
            .map_err(|_| plonk::Error::Synthesis)?;
        accumulators.append(&mut accum);
    }
    let accumulator = {
        let mut transcript = PoseidonTranscript::<Rc<Halo2Loader>, _>::new(loader, as_proof);
        let proof = As::read_proof(&Default::default(), &accumulators, &mut transcript).unwrap();
        As::verify(&Default::default(), &accumulators, &proof).map_err(|_| plonk::Error::Synthesis)
    }?;
    Ok((accumulator, snark_instances))
}

/// The Halo2 Config for the aggregation circuit
#[derive(Clone, Debug)]
pub struct AggregationConfig {
    main_gate_config: MainGateConfig,
    range_config: RangeConfig,
}

impl AggregationConfig {
    /// Configure the aggregation circuit
    pub fn configure<F: PrimeField>(
        meta: &mut ConstraintSystem<F>,
        composition_bits: Vec<usize>,
        overflow_bits: Vec<usize>,
    ) -> Self {
        let main_gate_config = MainGate::<F>::configure(meta);
        let range_config =
            RangeChip::<F>::configure(meta, &main_gate_config, composition_bits, overflow_bits);

        #[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
        {
            let circuit_size = CircuitSize::from_cs(meta, 23);

            // not wasm

            debug!(
                "circuit size: \n {}",
                circuit_size
                    .as_json()
                    .unwrap()
                    .to_colored_json_auto()
                    .unwrap()
            );
        }

        AggregationConfig {
            main_gate_config,
            range_config,
        }
    }

    /// Create a MainGate from the aggregation approach
    pub fn main_gate(&self) -> MainGate<Fr> {
        MainGate::new(self.main_gate_config.clone())
    }

    /// Create a range chip to decompose and range check inputs
    pub fn range_chip(&self) -> RangeChip<Fr> {
        RangeChip::new(self.range_config.clone())
    }

    /// Create an ecc chip for ec ops
    pub fn ecc_chip(&self) -> BaseFieldEccChip {
        BaseFieldEccChip::new(EccConfig::new(
            self.range_config.clone(),
            self.main_gate_config.clone(),
        ))
    }
}

/// Aggregation Circuit with a SuccinctVerifyingKey, application snark witnesses (each with a proof and instance variables), and the instance variables and the resulting aggregation circuit proof.
#[derive(Clone, Debug)]
pub struct AggregationCircuit {
    svk: Svk,
    snarks: Vec<SnarkWitness<Fr, G1Affine>>,
    instances: Vec<Fr>,
    as_proof: Value<Vec<u8>>,
    split_proof: bool,
}

impl AggregationCircuit {
    /// Create a new Aggregation Circuit with a SuccinctVerifyingKey, application snark witnesses (each with a proof and instance variables), and the instance variables and the resulting aggregation circuit proof.
    pub fn new(
        svk: &KzgSuccinctVerifyingKey<G1Affine>,
        snarks: impl IntoIterator<Item = Snark<Fr, G1Affine>>,
        split_proof: bool,
    ) -> Result<Self, AggregationError> {
        let snarks = snarks.into_iter().collect_vec();

        let mut accumulators = vec![];

        for snark in snarks.iter() {
            trace!("Aggregating with snark instances {:?}", snark.instances);
            let mut transcript = PoseidonTranscript::<NativeLoader, _>::new(snark.proof.as_slice());
            let proof = PlonkSuccinctVerifier::read_proof(
                svk,
                snark.protocol.as_ref().unwrap(),
                &snark.instances,
                &mut transcript,
            )
            .map_err(|e| {
                log::error!("{:?}", e);
                AggregationError::ProofRead
            })?;
            let mut accum = PlonkSuccinctVerifier::verify(
                svk,
                snark.protocol.as_ref().unwrap(),
                &snark.instances,
                &proof,
            )
            .map_err(|_| AggregationError::ProofVerify)?;
            accumulators.append(&mut accum);
        }

        trace!("Accumulator");
        let (accumulator, as_proof) = {
            let mut transcript = PoseidonTranscript::<NativeLoader, _>::new(Vec::new());
            let accumulator =
                As::create_proof(&Default::default(), &accumulators, &mut transcript, OsRng)
                    .map_err(|_| AggregationError::ProofCreate)?;
            (accumulator, transcript.finalize())
        };

        trace!("KzgAccumulator");
        let KzgAccumulator { lhs, rhs } = accumulator;
        let instances = [lhs.x, lhs.y, rhs.x, rhs.y]
            .map(fe_to_limbs::<_, _, LIMBS, BITS>)
            .concat();

        Ok(Self {
            svk: *svk,
            snarks: snarks.into_iter().map_into().collect(),
            instances,
            as_proof: Value::known(as_proof),
            split_proof,
        })
    }

    /// Number of limbs used for decomposition
    pub fn num_limbs() -> usize {
        LIMBS
    }
    /// Number of bits used for decomposition
    pub fn num_bits() -> usize {
        BITS
    }

    /// Accumulator indices used in generating verifier.
    pub fn accumulator_indices() -> Vec<(usize, usize)> {
        (0..4 * LIMBS).map(|idx| (0, idx)).collect()
    }

    /// Number of instance variables for the aggregation circuit, used in generating verifier.
    pub fn num_instance(orginal_circuit_instances: usize) -> Vec<usize> {
        let accumulation_instances = 4 * LIMBS;
        vec![accumulation_instances + orginal_circuit_instances]
    }
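With LIMBS = 4 the accumulator always contributes 4 * LIMBS = 16 public instances (the x and y coordinates of its two curve points, each split into four limbs), laid out ahead of the application circuit's own instances in a single column:

    // the accumulator occupies indices (0, 0) through (0, 15)...
    assert_eq!(AggregationCircuit::accumulator_indices().len(), 16);
    // ...so an application circuit exposing 3 instances yields one column of 19
    assert_eq!(AggregationCircuit::num_instance(3), vec![16 + 3]);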

    /// Instance variables for the aggregation circuit, fed to verifier.
    pub fn instances(&self) -> Vec<Fr> {
        // also get snark instances here
        let mut snark_instances: Vec<Vec<Vec<Value<Fr>>>> = self
            .snarks
            .iter()
            .map(|snark| snark.instances.clone())
            .collect_vec();

        // reduce from Vec<Vec<Vec<Value<Fr>>>> to Vec<Vec<Value<Fr>>>
        let mut instances: Vec<Fr> = self.instances.clone();
        for snark_instance in snark_instances.iter_mut() {
            for instance in snark_instance.iter_mut() {
                let mut felt_evals = vec![];
                for value in instance.iter_mut() {
                    value.map(|v| felt_evals.push(v));
                }
                instances.extend(felt_evals);
            }
        }

        instances
    }

    fn as_proof(&self) -> Value<&[u8]> {
        self.as_proof.as_ref().map(Vec::as_slice)
    }
}

impl Circuit<Fr> for AggregationCircuit {
    type Config = AggregationConfig;
    type FloorPlanner = SimpleFloorPlanner;
    type Params = ();

    fn without_witnesses(&self) -> Self {
        Self {
            svk: self.svk,
            snarks: self
                .snarks
                .iter()
                .map(SnarkWitness::without_witnesses)
                .collect(),
            instances: Vec::new(),
            as_proof: Value::unknown(),
            split_proof: self.split_proof,
        }
    }

    fn configure(meta: &mut ConstraintSystem<Fr>) -> Self::Config {
        AggregationConfig::configure(
            meta,
            vec![BITS / LIMBS],
            Rns::<Fq, Fr, LIMBS, BITS>::construct().overflow_lengths(),
        )
    }

    fn synthesize(
        &self,
        config: Self::Config,
        mut layouter: impl Layouter<Fr>,
    ) -> Result<(), plonk::Error> {
        let main_gate = config.main_gate();
        let range_chip = config.range_chip();

        range_chip.load_table(&mut layouter)?;

        let (accumulator_limbs, snark_instances) = layouter.assign_region(
            || "",
            |region| {
                let ctx = RegionCtx::new(region, 0);

                let ecc_chip = config.ecc_chip();
                let loader = Halo2Loader::new(ecc_chip, ctx);
                let (accumulator, snark_instances) = aggregate(
                    &self.svk,
                    &loader,
                    &self.snarks,
                    self.as_proof(),
                    self.split_proof,
                )?;

                let accumulator_limbs = [accumulator.lhs, accumulator.rhs]
                    .iter()
                    .map(|ec_point| {
                        loader
                            .ecc_chip()
                            .assign_ec_point_to_limbs(&mut loader.ctx_mut(), ec_point.assigned())
                    })
                    .collect::<Result<Vec<_>, plonk::Error>>()?
                    .into_iter()
                    .flatten();

                Ok((accumulator_limbs, snark_instances))
            },
        )?;

        let mut instance_offset = 0;
        for limb in accumulator_limbs {
            main_gate.expose_public(layouter.namespace(|| ""), limb, instance_offset)?;
            instance_offset += 1;
        }

        for instance in snark_instances.into_iter() {
            for elem in instance.into_iter() {
                main_gate.expose_public(layouter.namespace(|| ""), elem, instance_offset)?;
                instance_offset += 1;
            }
        }

        Ok(())
    }
}
@@ -1,24 +0,0 @@
use thiserror::Error;

/// Aggregate proof generation for EVM using KZG
pub mod aggregation_kzg;

#[derive(Error, Debug)]
/// Errors related to evm verification
pub enum EvmVerificationError {
    /// If the Solidity verifier worked but returned false
    #[error("Solidity verifier found the proof invalid")]
    InvalidProof,
    /// If the Solidity verifier threw an error (e.g. OutOfGas)
    #[error("Execution of Solidity code failed: {0}")]
    SolidityExecution(String),
    /// EVM verify errors
    #[error("evm verification reverted: {0}")]
    Reverted(String),
    /// EVM verify errors
    #[error("evm deployment failed: {0}")]
    DeploymentFailed(String),
    /// Invalid Visibility
    #[error("Invalid visibility")]
    InvalidVisibility,
}
src/pfsys/mod.rs (299 changed lines)
@@ -1,6 +1,3 @@
/// EVM related proving and verification
pub mod evm;

/// SRS generation, processing, verification and downloading
pub mod srs;

@@ -13,17 +10,11 @@ use std::borrow::Borrow;

use crate::circuit::CheckMode;
use crate::graph::GraphWitness;
use crate::pfsys::evm::aggregation_kzg::PoseidonTranscript;
use crate::{Commitments, EZKL_BUF_CAPACITY, EZKL_KEY_FORMAT};
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use clap::ValueEnum;
use halo2_proofs::circuit::Value;
use crate::{EZKL_BUF_CAPACITY, EZKL_KEY_FORMAT};
use halo2_proofs::plonk::{
    create_proof, keygen_pk, keygen_vk_custom, verify_proof, Circuit, ProvingKey, VerifyingKey,
};
use halo2_proofs::poly::commitment::{CommitmentScheme, Params, ParamsProver, Prover, Verifier};
use halo2_proofs::poly::ipa::commitment::IPACommitmentScheme;
use halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme;
use halo2_proofs::poly::VerificationStrategy;
use halo2_proofs::transcript::{EncodedChallenge, TranscriptReadBuffer, TranscriptWriterBuffer};
use halo2curves::ff::{FromUniformBytes, PrimeField, WithSmallOrderMulGroup};
@@ -37,22 +28,16 @@ use rand::rngs::OsRng;
use rand::rngs::StdRng;
use serde::de::DeserializeOwned;
use serde::{Deserialize, Serialize};
use snark_verifier::loader::native::NativeLoader;
use snark_verifier::system::halo2::transcript::evm::EvmTranscript;
use snark_verifier::verifier::plonk::PlonkProtocol;
use std::fs::File;
use std::io::{self, BufReader, BufWriter, Cursor, Write};
use std::ops::Deref;
use std::path::PathBuf;
use thiserror::Error as thisError;
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use tosubcommand::ToFlags;

#[cfg(feature = "python-bindings")]
use pyo3::types::PyDictMethods;

use halo2curves::bn256::{Bn256, Fr, G1Affine};

/// Converts a string to a `SerdeFormat`.
/// # Panics
/// Panics if the provided `s` is not a valid `SerdeFormat` (i.e. not one of "processed", "raw-bytes-unchecked", or "raw-bytes").
@@ -140,144 +125,6 @@ where
    bytes
}

#[allow(missing_docs)]
#[derive(Copy, Clone, Default, Debug, PartialEq, Eq, Deserialize, Serialize, PartialOrd)]
#[cfg_attr(all(feature = "ezkl", not(target_arch = "wasm32")), derive(ValueEnum))]
pub enum ProofType {
    #[default]
    Single,
    ForAggr,
}

impl std::fmt::Display for ProofType {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "{}",
            match self {
                ProofType::Single => "single",
                ProofType::ForAggr => "for-aggr",
            }
        )
    }
}
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
impl ToFlags for ProofType {
    fn to_flags(&self) -> Vec<String> {
        vec![format!("{}", self)]
    }
}

impl From<ProofType> for TranscriptType {
    fn from(val: ProofType) -> Self {
        match val {
            ProofType::Single => TranscriptType::EVM,
            ProofType::ForAggr => TranscriptType::Poseidon,
        }
    }
}

impl From<ProofType> for StrategyType {
    fn from(val: ProofType) -> Self {
        match val {
            ProofType::Single => StrategyType::Single,
            ProofType::ForAggr => StrategyType::Accum,
        }
    }
}
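These two From impls encode the pairing between proof intent and verification machinery: a standalone proof targets the EVM transcript with the single-proof strategy, while a proof destined for aggregation uses the Poseidon transcript with the accumulation strategy. As a quick check:

    assert_eq!(TranscriptType::from(ProofType::Single), TranscriptType::EVM);
    assert_eq!(TranscriptType::from(ProofType::ForAggr), TranscriptType::Poseidon);
    assert_eq!(StrategyType::from(ProofType::ForAggr), StrategyType::Accum);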

#[cfg(feature = "python-bindings")]
impl<'py> pyo3::IntoPyObject<'py> for ProofType {
    type Target = pyo3::PyAny;
    type Output = pyo3::Bound<'py, Self::Target>;
    type Error = pyo3::PyErr;

    fn into_pyobject(self, py: pyo3::Python<'py>) -> Result<Self::Output, Self::Error> {
        let result = match self {
            ProofType::Single => "Single",
            ProofType::ForAggr => "ForAggr",
        };
        Ok(result.into_pyobject(py)?.into_any())
    }
}

#[cfg(feature = "python-bindings")]
/// Obtains ProofType from PyObject (Required for ProofType to be compatible with Python)
impl<'source> pyo3::FromPyObject<'source> for ProofType {
    fn extract_bound(ob: &pyo3::Bound<'source, pyo3::PyAny>) -> pyo3::PyResult<Self> {
        let strval = String::extract_bound(ob)?;
        match strval.to_lowercase().as_str() {
            "single" => Ok(ProofType::Single),
            "for-aggr" => Ok(ProofType::ForAggr),
            _ => Err(pyo3::exceptions::PyValueError::new_err(
                "Invalid value for ProofType",
            )),
        }
    }
}

#[allow(missing_docs)]
#[derive(Copy, Clone, Debug, PartialEq, Eq, Deserialize, Serialize)]
#[cfg_attr(all(feature = "ezkl", not(target_arch = "wasm32")), derive(ValueEnum))]
pub enum StrategyType {
    Single,
    Accum,
}
impl std::fmt::Display for StrategyType {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // When the `ezkl` feature is disabled or we're targeting `wasm32`, use basic string representation.
        #[cfg(any(not(feature = "ezkl"), target_arch = "wasm32"))]
        {
            write!(
                f,
                "{}",
                match self {
                    StrategyType::Single => "single",
                    StrategyType::Accum => "accum",
                }
            )
        }

        // When the `ezkl` feature is enabled and we're not targeting `wasm32`, use `to_possible_value`.
        #[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
        {
            self.to_possible_value()
                .expect("no values are skipped")
                .get_name()
                .fmt(f)
        }
    }
}
#[cfg(feature = "python-bindings")]
/// Converts StrategyType into a PyObject (Required for StrategyType to be compatible with Python)
impl<'py> pyo3::IntoPyObject<'py> for StrategyType {
    type Target = pyo3::PyAny;
    type Output = pyo3::Bound<'py, Self::Target>;
    type Error = pyo3::PyErr;

    fn into_pyobject(self, py: Python<'py>) -> Result<Self::Output, Self::Error> {
        let result = match self {
            StrategyType::Single => "single",
            StrategyType::Accum => "accum",
        };
        Ok(result.into_pyobject(py)?.into_any())
    }
}
#[cfg(feature = "python-bindings")]
/// Obtains StrategyType from PyObject (Required for StrategyType to be compatible with Python)
impl<'source> pyo3::FromPyObject<'source> for StrategyType {
    fn extract_bound(ob: &pyo3::Bound<'source, pyo3::PyAny>) -> pyo3::PyResult<Self> {
        let strval = String::extract_bound(ob)?;
        match strval.to_lowercase().as_str() {
            "single" => Ok(StrategyType::Single),
            "accum" => Ok(StrategyType::Accum),
            _ => Err(pyo3::exceptions::PyValueError::new_err(
                "Invalid value for StrategyType",
            )),
        }
    }
}

#[derive(thisError, Debug)]
/// Errors related to pfsys
pub enum PfSysError {
@@ -286,33 +133,8 @@ pub enum PfSysError {
    PackingExponent,
}

#[allow(missing_docs)]
#[derive(Default, Copy, Clone, Debug, PartialEq, Eq, Deserialize, Serialize, PartialOrd)]
#[cfg_attr(all(feature = "ezkl", not(target_arch = "wasm32")), derive(ValueEnum))]
pub enum TranscriptType {
    Poseidon,
    #[default]
    EVM,
}

impl std::fmt::Display for TranscriptType {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "{}",
            match self {
                TranscriptType::Poseidon => "poseidon",
                TranscriptType::EVM => "evm",
            }
        )
    }
}
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
impl ToFlags for TranscriptType {
    fn to_flags(&self) -> Vec<String> {
        vec![format!("{}", self)]
    }
}
#[cfg(feature = "python-bindings")]
use halo2curves::bn256::G1Affine;

#[cfg(feature = "python-bindings")]
///
@@ -371,7 +193,7 @@ pub struct PrettyElements {
    pub outputs: Vec<Vec<String>>,
}

/// An application snark with proof and instance variables ready for aggregation (raw field element)
/// An application snark with proof and instance variables
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Snark<F: PrimeField + SerdeObject, C: CurveAffine>
where
@@ -386,16 +208,12 @@ where
    pub proof: Vec<u8>,
    /// hex encoded proof
    pub hex_proof: Option<String>,
    /// transcript type
    pub transcript_type: TranscriptType,
    /// the split proof
    pub split: Option<ProofSplitCommit>,
    /// the proof instances as rescaled floats
    pub pretty_public_inputs: Option<PrettyElements>,
    /// timestamp
    pub timestamp: Option<u128>,
    /// commitment
    pub commitment: Option<Commitments>,
    /// (optional) version of ezkl used to generate the proof
    version: Option<String>,
}
@@ -423,8 +241,6 @@ where
        dict.set_item("instances", field_elems).unwrap();
        let hex_proof = hex::encode(&self.proof);
        dict.set_item("proof", format!("0x{}", hex_proof)).unwrap();
        dict.set_item("transcript_type", self.transcript_type.into_pyobject(py)?)
            .unwrap();
        Ok(dict.into_any())
    }
}
@@ -437,24 +253,21 @@ where
    C::Scalar: Serialize + DeserializeOwned,
    C::ScalarExt: Serialize + DeserializeOwned,
{
    /// Create a new application snark from proof and instance variables ready for aggregation
    /// Create a new application snark from proof and instance variables
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        protocol: Option<PlonkProtocol<C>>,
        instances: Vec<Vec<F>>,
        proof: Vec<u8>,
        hex_proof: Option<String>,
        transcript_type: TranscriptType,
        split: Option<ProofSplitCommit>,
        pretty_public_inputs: Option<PrettyElements>,
        commitment: Option<Commitments>,
    ) -> Self {
        Self {
            protocol,
            instances,
            proof,
            hex_proof,
            transcript_type,
            split,
            pretty_public_inputs,
            // unix timestamp
@@ -464,7 +277,6 @@ where
                .unwrap()
                .as_millis(),
            ),
            commitment,
            version: Some(crate::version().to_string()),
        }
    }
@@ -560,53 +372,6 @@ impl From<GraphWitness> for Option<ProofSplitCommit> {
    }
}

/// An application snark with proof and instance variables ready for aggregation (wrapped field element)
#[derive(Clone, Debug)]
pub struct SnarkWitness<F: PrimeField, C: CurveAffine> {
    protocol: Option<PlonkProtocol<C>>,
    instances: Vec<Vec<Value<F>>>,
    proof: Value<Vec<u8>>,
    split: Option<ProofSplitCommit>,
}

impl<F: PrimeField, C: CurveAffine> SnarkWitness<F, C> {
    fn without_witnesses(&self) -> Self {
        SnarkWitness {
            protocol: self.protocol.clone(),
            instances: self
                .instances
                .iter()
                .map(|instances| vec![Value::unknown(); instances.len()])
                .collect(),
            proof: Value::unknown(),
            split: self.split.clone(),
        }
    }

    fn proof(&self) -> Value<&[u8]> {
        self.proof.as_ref().map(Vec::as_slice)
    }
}

impl<F: PrimeField + SerdeObject, C: CurveAffine> From<Snark<F, C>> for SnarkWitness<F, C>
where
    C::Scalar: Serialize + DeserializeOwned,
    C::ScalarExt: Serialize + DeserializeOwned,
{
    fn from(snark: Snark<F, C>) -> Self {
        Self {
            protocol: snark.protocol,
            instances: snark
                .instances
                .into_iter()
                .map(|instances| instances.into_iter().map(Value::known).collect())
                .collect(),
            proof: Value::known(snark.proof),
            split: snark.split,
        }
    }
}
|
||||
|
||||
/// Creates a [VerifyingKey] and [ProvingKey] for a [crate::graph::GraphCircuit] (`circuit`) with specific [CommitmentScheme] parameters (`params`).
|
||||
pub fn create_keys<Scheme: CommitmentScheme, C: Circuit<Scheme::Scalar>>(
|
||||
circuit: &C,
|
||||
@@ -652,8 +417,6 @@ pub fn create_proof_circuit<
|
||||
params: &'params Scheme::ParamsProver,
|
||||
pk: &ProvingKey<Scheme::Curve>,
|
||||
check_mode: CheckMode,
|
||||
commitment: Commitments,
|
||||
transcript_type: TranscriptType,
|
||||
split: Option<ProofSplitCommit>,
|
||||
protocol: Option<PlonkProtocol<Scheme::Curve>>,
|
||||
) -> Result<Snark<Scheme::Scalar, Scheme::Curve>, PfsysError>
|
||||
@@ -701,16 +464,7 @@ where
|
||||
let proof = transcript.finalize();
|
||||
let hex_proof = format!("0x{}", hex::encode(&proof));
|
||||
|
||||
let checkable_pf = Snark::new(
|
||||
protocol,
|
||||
instances,
|
||||
proof,
|
||||
Some(hex_proof),
|
||||
transcript_type,
|
||||
split,
|
||||
None,
|
||||
Some(commitment),
|
||||
);
|
||||
let checkable_pf = Snark::new(protocol, instances, proof, Some(hex_proof), split, None);
|
||||
|
||||
// sanity check that the generated proof is valid
|
||||
if check_mode == CheckMode::SAFE {
|
||||
@@ -799,44 +553,6 @@ where
|
||||
Ok(proof_first_bytes)
|
||||
}
|
||||
|
||||
/// Swap the proof commitments to a new set in the proof for KZG
|
||||
pub fn swap_proof_commitments_polycommit(
|
||||
snark: &Snark<Fr, G1Affine>,
|
||||
commitments: &[G1Affine],
|
||||
) -> Result<Snark<Fr, G1Affine>, PfsysError> {
|
||||
let proof = match snark.commitment {
|
||||
Some(Commitments::KZG) => match snark.transcript_type {
|
||||
TranscriptType::EVM => swap_proof_commitments::<
|
||||
KZGCommitmentScheme<Bn256>,
|
||||
_,
|
||||
EvmTranscript<G1Affine, _, _, _>,
|
||||
>(snark, commitments)?,
|
||||
TranscriptType::Poseidon => swap_proof_commitments::<
|
||||
KZGCommitmentScheme<Bn256>,
|
||||
_,
|
||||
PoseidonTranscript<NativeLoader, _>,
|
||||
>(snark, commitments)?,
|
||||
},
|
||||
Some(Commitments::IPA) => match snark.transcript_type {
|
||||
TranscriptType::EVM => swap_proof_commitments::<
|
||||
IPACommitmentScheme<G1Affine>,
|
||||
_,
|
||||
EvmTranscript<G1Affine, _, _, _>,
|
||||
>(snark, commitments)?,
|
||||
TranscriptType::Poseidon => swap_proof_commitments::<
|
||||
IPACommitmentScheme<G1Affine>,
|
||||
_,
|
||||
PoseidonTranscript<NativeLoader, _>,
|
||||
>(snark, commitments)?,
|
||||
},
|
||||
None => {
|
||||
return Err(PfsysError::InvalidCommitmentScheme);
|
||||
}
|
||||
};
|
||||
|
||||
Ok(proof)
|
||||
}

/// A wrapper around halo2's verify_proof
pub fn verify_proof_circuit<
    'params,
@@ -993,13 +709,11 @@ mod tests {
        let snark = Snark::<Fr, G1Affine> {
            proof: vec![1, 2, 3, 4, 5, 6, 7, 8],
            instances: vec![vec![Fr::from(1)], vec![Fr::from(2)]],
            transcript_type: TranscriptType::EVM,
            protocol: None,
            hex_proof: None,
            split: None,
            pretty_public_inputs: None,
            timestamp: None,
            commitment: None,
            version: None,
        };

@@ -1012,6 +726,5 @@ mod tests {
            .unwrap();
        assert_eq!(snark.instances, snark2.instances);
        assert_eq!(snark.proof, snark2.proof);
        assert_eq!(snark.transcript_type, snark2.transcript_type);
    }
}
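// NOTE (editorial sketch, not part of the diff): the hunk above elides the
// save/load step that produces `snark2` from `snark`; a plain serde_json round
// trip of the same shape would be:
//
//     let json = serde_json::to_string(&snark).unwrap();
//     let snark2: Snark<Fr, G1Affine> = serde_json::from_str(&json).unwrap();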

@@ -6,7 +6,6 @@ mod native_tests {
    use ezkl::graph::input::GraphData;
    use ezkl::graph::GraphSettings;
    use ezkl::pfsys::Snark;
    use ezkl::Commitments;
    use halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme;
    use halo2curves::bn256::Bn256;
    use lazy_static::lazy_static;
@@ -17,8 +16,6 @@ mod native_tests {
    use std::process::{Child, Command};
    use std::sync::Once;
    static COMPILE: Once = Once::new();
    #[allow(dead_code)]
    static COMPILE_WASM: Once = Once::new();
    static ENV_SETUP: Once = Once::new();

    const TEST_BINARY: &str = "test-runs/ezkl";
@@ -75,13 +72,6 @@ mod native_tests {
        });
    }

    #[allow(dead_code)]
    fn init_wasm() {
        COMPILE_WASM.call_once(|| {
            build_wasm_ezkl();
        });
    }

    fn setup_py_env() {
        ENV_SETUP.call_once(|| {
            // supposes that you have a virtualenv called .env and have run the following
@@ -101,16 +91,10 @@ mod native_tests {
        });
    }

    fn download_srs(logrows: u32, commitment: Commitments) {
    fn download_srs(logrows: u32) {
        // if does not exist, download it
        let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
            .args([
                "get-srs",
                "--logrows",
                &format!("{}", logrows),
                "--commitment",
                &commitment.to_string(),
            ])
            .args(["get-srs", "--logrows", &format!("{}", logrows)])
            .status()
            .expect("failed to execute process");
        assert!(status.success());
@@ -125,7 +109,7 @@ mod native_tests {
        let settings: GraphSettings = serde_json::from_str(&settings).unwrap();
        let logrows = settings.run_args.logrows;

        download_srs(logrows, settings.run_args.commitment.into());
        download_srs(logrows);
    }

    fn mv_test_(test_dir: &str, test: &str) {
@@ -185,8 +169,6 @@ mod native_tests {

    const PF_FAILURE: &str = "examples/test_failure_proof.json";

    const PF_FAILURE_AGGR: &str = "examples/test_failure_aggr_proof.json";

    const LARGE_TESTS: [&str; 8] = [
        "self_attention",
        "nanoGPT",
@@ -314,90 +296,6 @@ mod native_tests {
        "large_mlp", // 99
    ];

    const WASM_TESTS: [&str; 44] = [
        "1l_mlp", // 0
        "1l_slice", // 1
        "1l_concat", // 2
        "1l_flatten", // 3
        // "1l_average",
        "1l_div", // 4
        "1l_pad", // 5
        "1l_reshape", // 6
        "1l_eltwise_div", // 7
        "1l_sigmoid", // 8
        "1l_sqrt", // 9
        "1l_softmax", // 10
        // "1l_instance_norm",
        "1l_batch_norm", // 11
        "1l_prelu", // 12
        "1l_leakyrelu", // 13
        "1l_gelu_noappx", // 14
        // "1l_gelu_tanh_appx",
        "1l_relu", // 15
        "1l_downsample", // 16
        "1l_tanh", // 17
        "2l_relu_sigmoid_small", // 18
        "2l_relu_fc", // 19
        "2l_relu_small", // 20
        "2l_relu_sigmoid", // 21
        "1l_conv", // 22
        "2l_sigmoid_small", // 23
        "2l_relu_sigmoid_conv", // 24
        // "3l_relu_conv_fc",
        // "4l_relu_conv_fc",
        "1l_erf", // 25
        "1l_var", // 26
        "1l_elu", // 27
        "min", // 28
        "max", // 29
        "1l_max_pool", // 30
        "1l_conv_transpose", // 31
        "1l_upsample", // 32
        "1l_identity", // 33
        // "idolmodel",
        "trig", // 34
        "prelu_gmm", // 35
        "lstm", // 36
        "rnn", // 37
        "quantize_dequantize", // 38
        "1l_where", // 39
        "boolean", // 40
        "boolean_identity", // 41
        "gradient_boosted_trees", // 42
        "1l_topk", // 43
        // "xgboost",
        // "lightgbm",
        // "hummingbird_decision_tree",
    ];

    #[cfg(not(feature = "gpu-accelerated"))]
    const TESTS_AGGR: [&str; 21] = [
        "1l_mlp",
        "1l_flatten",
        "1l_average",
        "1l_reshape",
        "1l_div",
        "1l_pad",
        "1l_sigmoid",
        "1l_gelu_noappx",
        "1l_sqrt",
        "1l_prelu",
        "1l_var",
        "1l_leakyrelu",
        "1l_relu",
        "1l_tanh",
        "2l_relu_fc",
        "2l_relu_sigmoid_small",
        "2l_relu_small",
        "1l_conv",
        "min",
        "max",
        "1l_max_pool",
    ];

    #[cfg(feature = "gpu-accelerated")]
    const TESTS_AGGR: [&str; 3] = ["1l_mlp", "1l_flatten", "1l_average"];

    const TESTS_EVM: [&str; 23] = [
        "1l_mlp", // 0
        "1l_flatten", // 1
@@ -424,110 +322,23 @@ mod native_tests {
        "quantize_dequantize", // 22
    ];

    const TESTS_EVM_AGGR: [&str; 18] = [
        "1l_mlp",
        "1l_reshape",
        "1l_sigmoid",
        "1l_div",
        "1l_sqrt",
        "1l_prelu",
        "1l_var",
        "1l_leakyrelu",
        "1l_gelu_noappx",
        "1l_relu",
        "1l_tanh",
        "2l_relu_sigmoid_small",
        "2l_relu_small",
        "2l_relu_fc",
        "min",
        "max",
        "idolmodel",
        "1l_identity",
    ];

    const EXAMPLES: [&str; 2] = ["mlp_4d_einsum", "conv2d_mnist"];

    macro_rules! test_func_aggr {
        () => {
            #[cfg(test)]
            mod tests_aggr {
                use seq_macro::seq;
                use crate::native_tests::TESTS_AGGR;
                use test_case::test_case;
                use crate::native_tests::aggr_prove_and_verify;
                #[cfg(not(feature = "gpu-accelerated"))]
                use crate::native_tests::kzg_aggr_mock_prove_and_verify;
                use tempdir::TempDir;
                use ezkl::Commitments;

                #[cfg(not(feature="gpu-accelerated"))]
                seq!(N in 0..=20 {

                    #(#[test_case(TESTS_AGGR[N])])*
                    fn kzg_aggr_mock_prove_and_verify_(test: &str) {
                        crate::native_tests::init_binary();
                        let test_dir = TempDir::new(test).unwrap();
                        let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
                        kzg_aggr_mock_prove_and_verify(path, test.to_string());
                        test_dir.close().unwrap();
                    }



                    #(#[test_case(TESTS_AGGR[N])])*
                    fn kzg_aggr_prove_and_verify_(test: &str) {
                        crate::native_tests::init_binary();
                        let test_dir = TempDir::new(test).unwrap();
                        let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
                        aggr_prove_and_verify(path, test.to_string(), "private", "private", "public", Commitments::KZG);
                        test_dir.close().unwrap();
                    }

                    #(#[test_case(TESTS_AGGR[N])])*
                    fn ipa_aggr_prove_and_verify_(test: &str) {
                        crate::native_tests::init_binary();
                        let test_dir = TempDir::new(test).unwrap();
                        let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
                        aggr_prove_and_verify(path, test.to_string(), "private", "private", "public", Commitments::IPA);
                        test_dir.close().unwrap();
                    }

                });

                #[cfg(feature="gpu-accelerated")]
                seq!(N in 0..=2 {
                    #(#[test_case(TESTS_AGGR[N])])*
                    fn kzg_aggr_prove_and_verify_(test: &str) {
                        crate::native_tests::init_binary();
                        let test_dir = TempDir::new(test).unwrap();
                        let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(test_dir.path().to_str().unwrap(), test);
                        aggr_prove_and_verify(path, test.to_string(), "private", "private", "public", Commitments::KZG);
                        test_dir.close().unwrap();
                    }
                });
            }
        };
    }

    macro_rules! test_func {
        () => {
            #[cfg(test)]
            mod tests {
                use seq_macro::seq;
                use crate::native_tests::TESTS;
                use crate::native_tests::WASM_TESTS;
                use crate::native_tests::ACCURACY_CAL_TESTS;
                use crate::native_tests::LARGE_TESTS;
                use test_case::test_case;
                use crate::native_tests::mock;
                use crate::native_tests::accuracy_measurement;
                use crate::native_tests::prove_and_verify;
                // use crate::native_tests::run_js_tests;
                // use crate::native_tests::render_circuit;
                use crate::native_tests::model_serialization_different_binaries;

                use tempdir::TempDir;
                use ezkl::Commitments;

                #[test]
                fn model_serialization_different_binaries_() {
@@ -806,7 +617,7 @@ mod native_tests {
                    crate::native_tests::init_binary();
                    let test_dir = TempDir::new(test).unwrap();
                    let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
                    prove_and_verify(path, test.to_string(), "safe", "private", "private", "public", 1, None, false, "single", Commitments::KZG, 2);
                    prove_and_verify(path, test.to_string(), "safe", "private", "private", "public", 1, None, false, 2);
                    test_dir.close().unwrap();
                }

@@ -815,7 +626,7 @@ mod native_tests {
                    crate::native_tests::init_binary();
                    let test_dir = TempDir::new(test).unwrap();
                    let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
                    prove_and_verify(path, test.to_string(), "safe", "private", "private", "public", 3, None, false, "single", Commitments::KZG, 2);
                    prove_and_verify(path, test.to_string(), "safe", "private", "private", "public", 3, None, false, 2);
                    test_dir.close().unwrap();
                }

@@ -824,7 +635,7 @@ mod native_tests {
                    crate::native_tests::init_binary();
                    let test_dir = TempDir::new(test).unwrap();
                    let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
                    prove_and_verify(path, test.to_string(), "safe", "private", "private", "public", 4, None, false, "single", Commitments::KZG, 2);
                    prove_and_verify(path, test.to_string(), "safe", "private", "private", "public", 4, None, false, 2);
                    test_dir.close().unwrap();
                }

@@ -833,7 +644,7 @@ mod native_tests {
                    crate::native_tests::init_binary();
                    let test_dir = TempDir::new(test).unwrap();
                    let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
                    prove_and_verify(path, test.to_string(), "safe", "private", "private", "public", 8, None, false, "single", Commitments::KZG, 2);
                    prove_and_verify(path, test.to_string(), "safe", "private", "private", "public", 8, None, false, 2);
                    test_dir.close().unwrap();
                }

@@ -842,7 +653,7 @@ mod native_tests {
                    crate::native_tests::init_binary();
                    let test_dir = TempDir::new(test).unwrap();
                    let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
                    prove_and_verify(path, test.to_string(), "safe", "private", "private", "public", 1, None, false, "single", Commitments::KZG, 2);
                    prove_and_verify(path, test.to_string(), "safe", "private", "private", "public", 1, None, false, 2);
                    test_dir.close().unwrap();
                }

@@ -853,25 +664,17 @@ mod native_tests {
                    let path = test_dir.into_path();
                    let path = path.to_str().unwrap();
                    crate::native_tests::mv_test_(path, test);
                    prove_and_verify(path, test.to_string(), "safe", "private", "private", "public", 1, None, false, "single", Commitments::KZG, 1);
                    prove_and_verify(path, test.to_string(), "safe", "private", "private", "public", 1, None, false, 1);
                    // test_dir.close().unwrap();
                }

                #(#[test_case(TESTS[N])])*
                fn ipa_prove_and_verify_(test: &str) {
                    crate::native_tests::init_binary();
                    let test_dir = TempDir::new(test).unwrap();
                    let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
                    prove_and_verify(path, test.to_string(), "safe", "private", "private", "public", 1, None, false, "single", Commitments::IPA, 2);
                    test_dir.close().unwrap();
                }

                #(#[test_case(TESTS[N])])*
                fn kzg_prove_and_verify_public_input_(test: &str) {
                    crate::native_tests::init_binary();
                    let test_dir = TempDir::new(test).unwrap();
                    let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
                    prove_and_verify(path, test.to_string(), "safe", "public", "private", "public", 1, None, false, "single", Commitments::KZG, 2);
                    prove_and_verify(path, test.to_string(), "safe", "public", "private", "public", 1, None, false, 2);
                    test_dir.close().unwrap();
                }

@@ -880,7 +683,7 @@ mod native_tests {
                    crate::native_tests::init_binary();
                    let test_dir = TempDir::new(test).unwrap();
                    let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
                    prove_and_verify(path, test.to_string(), "safe", "private", "fixed", "public", 1, None, false, "single", Commitments::KZG, 2);
                    prove_and_verify(path, test.to_string(), "safe", "private", "fixed", "public", 1, None, false, 2);
                    test_dir.close().unwrap();
                }

@@ -889,7 +692,7 @@ mod native_tests {
                    crate::native_tests::init_binary();
                    let test_dir = TempDir::new(test).unwrap();
                    let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
                    prove_and_verify(path, test.to_string(), "safe", "private", "private", "hashed", 1, None, false, "single", Commitments::KZG, 2);
                    prove_and_verify(path, test.to_string(), "safe", "private", "private", "hashed", 1, None, false, 2);
                    test_dir.close().unwrap();
                }

@@ -898,16 +701,7 @@ mod native_tests {
                    crate::native_tests::init_binary();
                    let test_dir = TempDir::new(test).unwrap();
                    let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
                    prove_and_verify(path, test.to_string(), "safe", "private", "private", "polycommit", 1, None, false, "single", Commitments::KZG, 2);
                    test_dir.close().unwrap();
                }

                #(#[test_case(TESTS[N])])*
                fn ipa_prove_and_verify_ipa_output(test: &str) {
                    crate::native_tests::init_binary();
                    let test_dir = TempDir::new(test).unwrap();
                    let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
                    prove_and_verify(path, test.to_string(), "safe", "private", "private", "polycommit", 1, None, false, "single", Commitments::IPA, 2);
                    prove_and_verify(path, test.to_string(), "safe", "private", "private", "polycommit", 1, None, false, 2);
                    test_dir.close().unwrap();
                }

@@ -915,42 +709,33 @@ mod native_tests {

                seq!(N in 0..=43 {

                    #(#[test_case(WASM_TESTS[N])])*
                    #(#[test_case(TESTS[N])])*
                    fn kzg_prove_and_verify_with_overflow_(test: &str) {
                        crate::native_tests::init_binary();
                        // crate::native_tests::init_wasm();
                        let test_dir = TempDir::new(test).unwrap();
                        env_logger::init();
                        let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
                        prove_and_verify(path, test.to_string(), "safe", "private", "private", "public", 1, None, true, "single", Commitments::KZG, 2);
                        // #[cfg(not(feature = "gpu-accelerated"))]
                        // run_js_tests(path, test.to_string(), "testWasm", false);
                        prove_and_verify(path, test.to_string(), "safe", "private", "private", "public", 1, None, true, 2);
                        test_dir.close().unwrap();
                    }

                    #(#[test_case(WASM_TESTS[N])])*
                    #(#[test_case(TESTS[N])])*
                    fn kzg_prove_and_verify_with_overflow_hashed_inputs_(test: &str) {
                        crate::native_tests::init_binary();
                        // crate::native_tests::init_wasm();
                        let test_dir = TempDir::new(test).unwrap();
                        env_logger::init();
                        let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
                        prove_and_verify(path, test.to_string(), "safe", "hashed", "private", "public", 1, None, true, "single", Commitments::KZG, 2);
                        // #[cfg(not(feature = "gpu-accelerated"))]
                        // run_js_tests(path, test.to_string(), "testWasm", false);
                        prove_and_verify(path, test.to_string(), "safe", "hashed", "private", "public", 1, None, true, 2);
                        test_dir.close().unwrap();
                    }

                    #(#[test_case(WASM_TESTS[N])])*
                    #(#[test_case(TESTS[N])])*
                    fn kzg_prove_and_verify_with_overflow_fixed_params_(test: &str) {
                        crate::native_tests::init_binary();
                        // crate::native_tests::init_wasm();
                        let test_dir = TempDir::new(test).unwrap();
                        env_logger::init();
                        let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
                        prove_and_verify(path, test.to_string(), "safe", "private", "fixed", "public", 1, None, true, "single", Commitments::KZG, 2);
                        // #[cfg(not(feature = "gpu-accelerated"))]
                        // run_js_tests(path, test.to_string(), "testWasm", false);
                        prove_and_verify(path, test.to_string(), "safe", "private", "fixed", "public", 1, None, true, 2);
                        test_dir.close().unwrap();
                    }

@@ -964,7 +749,7 @@ mod native_tests {
                    crate::native_tests::init_binary();
                    let test_dir = TempDir::new(test).unwrap();
                    let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
                    prove_and_verify(path, test.to_string(), "unsafe", "private", "fixed", "public", 1, None, false, "single", Commitments::KZG, 2);
                    prove_and_verify(path, test.to_string(), "unsafe", "private", "fixed", "public", 1, None, false, 2);
                    test_dir.close().unwrap();
                }

@@ -989,16 +774,11 @@ mod native_tests {
                use seq_macro::seq;
                use crate::native_tests::TESTS_EVM;
                use crate::native_tests::TESTS;
                use crate::native_tests::TESTS_EVM_AGGR;
                use test_case::test_case;
                use crate::native_tests::kzg_evm_prove_and_verify;
                use crate::native_tests::kzg_evm_prove_and_verify_reusable_verifier;

                use crate::native_tests::kzg_evm_aggr_prove_and_verify;
                use tempdir::TempDir;
                use crate::native_tests::Hardfork;
                #[cfg(not(feature = "gpu-accelerated"))]
                use crate::native_tests::run_js_tests;
                use ezkl::logger::init_logger;
                use crate::native_tests::lazy_static;

@@ -1009,20 +789,7 @@ mod native_tests {
                }


                seq!(N in 0..=17 {
                    // these take a particularly long time to run
                    #(#[test_case(TESTS_EVM_AGGR[N])])*
                    #[ignore]
                    fn kzg_evm_aggr_prove_and_verify_(test: &str) {
                        crate::native_tests::init_binary();
                        let test_dir = TempDir::new(test).unwrap();
                        let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
                        let _anvil_child = crate::native_tests::start_anvil(false, Hardfork::Latest);
                        kzg_evm_aggr_prove_and_verify(path, test.to_string(), "private", "private", "public");
                        test_dir.close().unwrap();
                    }

                });

                seq!(N in 0..=99 {
                    #(#[test_case(TESTS[N])])*
@@ -1098,8 +865,6 @@ mod native_tests {
                        let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
                        let _anvil_child = crate::native_tests::start_anvil(false, Hardfork::Latest);
                        kzg_evm_prove_and_verify(2, path, test.to_string(), "private", "private", "public");
                        #[cfg(not(feature = "gpu-accelerated"))]
                        run_js_tests(path, test.to_string(), "testBrowserEvmVerify", false);
                        test_dir.close().unwrap();

                    }
@@ -1112,8 +877,6 @@ mod native_tests {
                        let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
                        let mut _anvil_child = crate::native_tests::start_anvil(false, Hardfork::Latest);
                        kzg_evm_prove_and_verify(2, path, test.to_string(), "hashed", "private", "private");
                        #[cfg(not(feature = "gpu-accelerated"))]
                        run_js_tests(path, test.to_string(), "testBrowserEvmVerify", false);
                        test_dir.close().unwrap();
                    }

@@ -1129,8 +892,6 @@ mod native_tests {
                        let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
                        let mut _anvil_child = crate::native_tests::start_anvil(false, hardfork);
                        kzg_evm_prove_and_verify(2, path, test.to_string(), "polycommit", "private", "public");
                        #[cfg(not(feature = "gpu-accelerated"))]
                        run_js_tests(path, test.to_string(), "testBrowserEvmVerify", false);
                        test_dir.close().unwrap();
                    }

@@ -1142,8 +903,6 @@ mod native_tests {
                        let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
                        let _anvil_child = crate::native_tests::start_anvil(false, Hardfork::Latest);
                        kzg_evm_prove_and_verify(2, path, test.to_string(), "private", "hashed", "public");
                        #[cfg(not(feature = "gpu-accelerated"))]
                        run_js_tests(path, test.to_string(), "testBrowserEvmVerify", false);
                        test_dir.close().unwrap();

                    }
@@ -1155,8 +914,6 @@ mod native_tests {
                        let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
                        let _anvil_child = crate::native_tests::start_anvil(false, Hardfork::Latest);
                        kzg_evm_prove_and_verify(2, path, test.to_string(), "private", "private", "hashed");
                        #[cfg(not(feature = "gpu-accelerated"))]
                        run_js_tests(path, test.to_string(), "testBrowserEvmVerify", false);
                        test_dir.close().unwrap();
                    }

@@ -1168,8 +925,6 @@ mod native_tests {
                        let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
                        let _anvil_child = crate::native_tests::start_anvil(false, Hardfork::Latest);
                        kzg_evm_prove_and_verify(2, path, test.to_string(), "private", "polycommit", "public");
                        #[cfg(not(feature = "gpu-accelerated"))]
                        run_js_tests(path, test.to_string(), "testBrowserEvmVerify", false);
                        test_dir.close().unwrap();
                    }

@@ -1181,8 +936,6 @@ mod native_tests {
                        let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
                        let _anvil_child = crate::native_tests::start_anvil(false, Hardfork::Latest);
                        kzg_evm_prove_and_verify(2, path, test.to_string(), "private", "private", "polycommit");
                        #[cfg(not(feature = "gpu-accelerated"))]
                        run_js_tests(path, test.to_string(), "testBrowserEvmVerify", false);
                        test_dir.close().unwrap();
                    }

@@ -1193,8 +946,6 @@ mod native_tests {
                        let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
                        let _anvil_child = crate::native_tests::start_anvil(false, Hardfork::Latest);
                        kzg_evm_prove_and_verify(2, path, test.to_string(), "polycommit", "polycommit", "polycommit");
                        #[cfg(not(feature = "gpu-accelerated"))]
                        run_js_tests(path, test.to_string(), "testBrowserEvmVerify", false);
                        test_dir.close().unwrap();
                    }

@@ -1204,6 +955,8 @@ mod native_tests {
                });
            }
        };


    }

    macro_rules! test_func_examples {
@@ -1222,13 +975,21 @@ mod native_tests {
            });
        }
    };
    }
    }

    test_func!();
    test_func_aggr!();
    test_func_evm!();
    test_func_examples!();

    // Mock prove (fast, but does not cover some potential issues)
    fn run_example(example_name: String) {
        let status = Command::new("cargo")
            .args(["run", "--release", "--example", example_name.as_str()])
            .status()
            .expect("failed to execute process");
        assert!(status.success());
    }

    fn model_serialization_different_binaries(test_dir: &str, example_name: String) {
        let status = Command::new("cargo")
            .args([
@@ -1335,15 +1096,6 @@ mod native_tests {
        assert!(status.success());
    }

    // Mock prove (fast, but does not cover some potential issues)
    fn run_example(example_name: String) {
        let status = Command::new("cargo")
            .args(["run", "--release", "--example", example_name.as_str()])
            .status()
            .expect("failed to execute process");
        assert!(status.success());
    }

    // Mock prove (fast, but does not cover some potential issues)
    #[allow(clippy::too_many_arguments)]
    fn mock(
@@ -1369,7 +1121,6 @@ mod native_tests {
            cal_target,
            scales_to_use,
            2,
            Commitments::KZG,
            2,
            bounded_lookup_log,
            decomp_base,
@@ -1400,7 +1151,6 @@ mod native_tests {
        cal_target: &str,
        scales_to_use: Option<Vec<u32>>,
        num_inner_columns: usize,
        commitment: Commitments,
        lookup_safety_margin: usize,
        bounded_lookup_log: bool,
        decomp_base: Option<usize>,
@@ -1422,7 +1172,6 @@ mod native_tests {
            format!("--param-visibility={}", param_visibility),
            format!("--output-visibility={}", output_visibility),
            format!("--num-inner-cols={}", num_inner_columns),
            format!("--commitment={}", commitment),
            format!("--logrows={}", 22),
        ];

@@ -1533,7 +1282,6 @@ mod native_tests {
            cal_target,
            None,
            2,
            Commitments::KZG,
            2,
            false,
            None,
@@ -1560,233 +1308,6 @@ mod native_tests {
        assert!(status.success());
    }

    // // Mock prove (fast, but does not cover some potential issues)
    // fn render_circuit(test_dir: &str, example_name: String) {
    //     let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
    //         .args([
    //             "render-circuit",
    //             "-M",
    //             format!("{}/{}/network.onnx", test_dir, example_name).as_str(),
    //             "-O",
    //             format!("{}/{}/render.png", test_dir, example_name).as_str(),
    //             "--lookup-range=-32768->32768",
    //             "-K=17",
    //         ])
    //         .status()
    //         .expect("failed to execute process");
    //     assert!(status.success());
    // }

    // prove-serialize-verify, the usual full path
    #[cfg(not(feature = "gpu-accelerated"))]
    fn kzg_aggr_mock_prove_and_verify(test_dir: &str, example_name: String) {
        prove_and_verify(
            test_dir,
            example_name.clone(),
            "safe",
            "private",
            "private",
            "public",
            2,
            None,
            false,
            "for-aggr",
            Commitments::KZG,
            2,
        );
        let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
            .args([
                "mock-aggregate",
                "--logrows=23",
                "--aggregation-snarks",
                &format!("{}/{}/proof.pf", test_dir, example_name),
            ])
            .status()
            .expect("failed to execute process");
        assert!(status.success());
    }

    // prove-serialize-verify, the usual full path
    fn aggr_prove_and_verify(
        test_dir: &str,
        example_name: String,
        input_visibility: &str,
        param_visibility: &str,
        output_visibility: &str,
        commitment: Commitments,
    ) {
        prove_and_verify(
            test_dir,
            example_name.clone(),
            "safe",
            input_visibility,
            param_visibility,
            output_visibility,
            2,
            None,
            false,
            "for-aggr",
            Commitments::KZG,
            2,
        );

        download_srs(23, commitment);
        // now setup-aggregate
        let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
            .args([
                "setup-aggregate",
                "--sample-snarks",
                &format!("{}/{}/proof.pf", test_dir, example_name),
                "--logrows=23",
                "--vk-path",
                &format!("{}/{}/aggr.vk", test_dir, example_name),
                "--pk-path",
                &format!("{}/{}/aggr.pk", test_dir, example_name),
                &format!("--commitment={}", commitment),
            ])
            .status()
            .expect("failed to execute process");
        assert!(status.success());

        let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
            .args([
                "aggregate",
                "--logrows=23",
                "--aggregation-snarks",
                &format!("{}/{}/proof.pf", test_dir, example_name),
                "--proof-path",
                &format!("{}/{}/aggr.pf", test_dir, example_name),
                "--pk-path",
                &format!("{}/{}/aggr.pk", test_dir, example_name),
                &format!("--commitment={}", commitment),
            ])
            .status()
            .expect("failed to execute process");
        assert!(status.success());
        let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
            .args([
                "verify-aggr",
                "--logrows=23",
                "--proof-path",
                &format!("{}/{}/aggr.pf", test_dir, example_name),
                "--vk-path",
                &format!("{}/{}/aggr.vk", test_dir, example_name),
            ])
            .status()
            .expect("failed to execute process");
        assert!(status.success());
    }

    // prove-serialize-verify, the usual full path
    fn kzg_evm_aggr_prove_and_verify(
        test_dir: &str,
        example_name: String,
        input_visibility: &str,
        param_visibility: &str,
        output_visibility: &str,
    ) {
        aggr_prove_and_verify(
            test_dir,
            example_name.clone(),
            input_visibility,
            param_visibility,
            output_visibility,
            Commitments::KZG,
        );

        download_srs(23, Commitments::KZG);

        let vk_arg = &format!("{}/{}/aggr.vk", test_dir, example_name);

        fn build_args<'a>(base_args: Vec<&'a str>, sol_arg: &'a str) -> Vec<&'a str> {
            let mut args = base_args;

            args.push("--sol-code-path");
            args.push(sol_arg);
            args
        }

        let sol_arg = format!("{}/{}/kzg_aggr.sol", test_dir, example_name);
        let addr_path_arg = format!("--addr-path={}/{}/addr.txt", test_dir, example_name);
        let rpc_arg = format!("--rpc-url={}", *ANVIL_URL);
        let settings_arg = format!("{}/{}/settings.json", test_dir, example_name);
        let private_key = format!("--private-key={}", *ANVIL_DEFAULT_PRIVATE_KEY);

        // create encoded calldata
        let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
            .args([
                "encode-evm-calldata",
                "--proof-path",
                &format!("{}/{}/aggr.pf", test_dir, example_name),
            ])
            .status()
            .expect("failed to execute process");

        assert!(status.success());

        let base_args = vec![
            "create-evm-verifier-aggr",
            "--vk-path",
            vk_arg.as_str(),
            "--aggregation-settings",
            settings_arg.as_str(),
            "--logrows=23",
        ];

        let args = build_args(base_args, &sol_arg);

        let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
            .args(args)
            .status()
            .expect("failed to execute process");
        assert!(status.success());

        // deploy the verifier
        let args = vec![
            "deploy-evm",
            rpc_arg.as_str(),
            addr_path_arg.as_str(),
            "--sol-code-path",
            sol_arg.as_str(),
            private_key.as_str(),
        ];

        let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
            .args(&args)
            .status()
            .expect("failed to execute process");
        assert!(status.success());

        // read in the address
        let addr = std::fs::read_to_string(format!("{}/{}/addr.txt", test_dir, example_name))
            .expect("failed to read address file");

        let deployed_addr_arg = format!("--addr-verifier={}", addr);

        let pf_arg = format!("{}/{}/aggr.pf", test_dir, example_name);

        let mut base_args = vec![
            "verify-evm",
            "--proof-path",
            pf_arg.as_str(),
            deployed_addr_arg.as_str(),
            rpc_arg.as_str(),
        ];

        let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
            .args(&base_args)
            .status()
            .expect("failed to execute process");
        assert!(status.success());
        // As sanity check, add example that should fail.
        base_args[2] = PF_FAILURE_AGGR;
        let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
            .args(base_args)
            .status()
            .expect("failed to execute process");
        assert!(!status.success());
    }

    // prove-serialize-verify, the usual full path
    #[allow(clippy::too_many_arguments)]
    fn prove_and_verify(
@@ -1799,8 +1320,7 @@ mod native_tests {
        num_inner_columns: usize,
        scales_to_use: Option<Vec<u32>>,
        overflow: bool,
        proof_type: &str,
        commitment: Commitments,

        lookup_safety_margin: usize,
    ) {
        let target_str = if overflow {
@@ -1819,7 +1339,6 @@ mod native_tests {
            target_str,
            scales_to_use,
            num_inner_columns,
            commitment,
            lookup_safety_margin,
            false,
            None,
@@ -1857,7 +1376,6 @@ mod native_tests {
                "--pk-path",
                &format!("{}/{}/key.pk", test_dir, example_name),
                &format!("--check-mode={}", checkmode),
                &format!("--proof-type={}", proof_type),
            ])
            .status()
            .expect("failed to execute process");
@@ -1888,15 +1406,8 @@ mod native_tests {
            .expect("failed to execute process");
        assert!(status.success());

        // load settings file
        let settings =
            std::fs::read_to_string(settings_path.clone()).expect("failed to read settings file");

        let graph_settings = serde_json::from_str::<GraphSettings>(&settings)
            .expect("failed to parse settings file");

        // get_srs for the graph_settings_num_instances
        download_srs(1, graph_settings.run_args.commitment.into());
        download_srs(1);

        let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
            .args([
@@ -1934,8 +1445,6 @@ mod native_tests {
            num_inner_columns,
            None,
            false,
            "single",
            Commitments::KZG,
            2,
        );

@@ -2061,8 +1570,6 @@ mod native_tests {
            num_inner_columns,
            None,
            overflow,
            "single",
            Commitments::KZG,
            2,
        );

@@ -2234,24 +1741,6 @@ mod native_tests {
        deployed_addr_arg
    }

    // run js browser evm verify tests for a given example
    #[cfg(not(feature = "gpu-accelerated"))]
    fn run_js_tests(test_dir: &str, example_name: String, js_test: &str, vk: bool) {
        let example = format!("--example={}", example_name);
        let dir = format!("--dir={}", test_dir);
        let mut args = vec!["run", "test", js_test, &example, &dir];
        let vk_string: String;
        if vk {
            vk_string = format!("--vk={}", vk);
            args.push(&vk_string);
        };
        let status = Command::new("pnpm")
            .args(&args)
            .status()
            .expect("failed to execute process");
        assert!(status.success());
    }

    #[allow(unused_variables)]
    fn build_ezkl() {
        #[cfg(feature = "gpu-accelerated")]
@@ -2291,38 +1780,4 @@ mod native_tests {
            .expect("failed to execute process");
        assert!(status.success());
    }

    #[allow(dead_code)]
    fn build_wasm_ezkl() {
        // wasm-pack build --target nodejs --out-dir ./tests/wasm/nodejs . -- -Z build-std="panic_abort,std"
        let status = Command::new("wasm-pack")
            .args([
                "build",
                "--profile=test-runs",
                "--target",
                "nodejs",
                "--out-dir",
                "./tests/wasm/nodejs",
                ".",
                "--",
                "-Z",
                "build-std=panic_abort,std",
            ])
            .status()
            .expect("failed to execute process");
        assert!(status.success());
        // fix the memory size
        // sed -i "3s|.*|imports['env'] = {memory: new WebAssembly.Memory({initial:20,maximum:65536,shared:true})}|" tests/wasm/nodejs/ezkl.js
        let status = Command::new("sed")
            .args([
                "-i",
                // is required on macos
                // "\".js\"",
                "3s|.*|imports['env'] = {memory: new WebAssembly.Memory({initial:20,maximum:65536,shared:true})}|",
                "./tests/wasm/nodejs/ezkl.js",
            ])
            .status()
            .expect("failed to execute process");
        assert!(status.success());
    }
}

@@ -1,39 +0,0 @@
// Write a simple swift test
import ezkl
import Foundation

let pathToFile = "../../../../tests/assets/"


func loadFileAsBytes(from path: String) -> Data? {
    let url = URL(fileURLWithPath: path)
    return try? Data(contentsOf: url)
}

do {
    let proofAggrPath = pathToFile + "proof_aggr.json"
    let vkAggrPath = pathToFile + "vk_aggr.key"
    let srs1Path = pathToFile + "kzg1.srs"

    guard let proofAggr = loadFileAsBytes(from: proofAggrPath) else {
        fatalError("Failed to load proofAggr file")
    }
    guard let vkAggr = loadFileAsBytes(from: vkAggrPath) else {
        fatalError("Failed to load vkAggr file")
    }
    guard let srs1 = loadFileAsBytes(from: srs1Path) else {
        fatalError("Failed to load srs1 file")
    }

    let value = try verifyAggr(
        proof: proofAggr,
        vk: vkAggr,
        logrows: 21,
        srs: srs1,
        commitment: "kzg"
    )

    // should not fail
    assert(value == true, "Failed the test")

}
@@ -1,42 +0,0 @@
// Swift version of gen_pk_test
import ezkl
import Foundation

func loadFileAsBytes(from path: String) -> Data? {
    let url = URL(fileURLWithPath: path)
    return try? Data(contentsOf: url)
}

do {
    let pathToFile = "../../../../tests/assets/"
    let networkCompiledPath = pathToFile + "model.compiled"
    let srsPath = pathToFile + "kzg"

    // Load necessary files
    guard let compiledCircuit = loadFileAsBytes(from: networkCompiledPath) else {
        fatalError("Failed to load network compiled file")
    }
    guard let srs = loadFileAsBytes(from: srsPath) else {
        fatalError("Failed to load SRS file")
    }

    // Generate the vk (Verifying Key)
    let vk = try genVk(
        compiledCircuit: compiledCircuit,
        srs: srs,
        compressSelectors: true // Corresponds to the `true` boolean in the Rust code
    )

    // Generate the pk (Proving Key)
    let pk = try genPk(
        vk: vk,
        compiledCircuit: compiledCircuit,
        srs: srs
    )

    // Ensure that the proving key is not empty
    assert(pk.count > 0, "Proving key generation failed, pk is empty")

} catch {
    fatalError("Test failed with error: \(error)")
}
@@ -1,35 +0,0 @@
// Swift version of gen_vk_test
import ezkl
import Foundation

func loadFileAsBytes(from path: String) -> Data? {
    let url = URL(fileURLWithPath: path)
    return try? Data(contentsOf: url)
}

do {
    let pathToFile = "../../../../tests/assets/"
    let networkCompiledPath = pathToFile + "model.compiled"
    let srsPath = pathToFile + "kzg"

    // Load necessary files
    guard let compiledCircuit = loadFileAsBytes(from: networkCompiledPath) else {
        fatalError("Failed to load network compiled file")
    }
    guard let srs = loadFileAsBytes(from: srsPath) else {
        fatalError("Failed to load SRS file")
    }

    // Generate the vk (Verifying Key)
    let vk = try genVk(
        compiledCircuit: compiledCircuit,
        srs: srs,
        compressSelectors: true // Corresponds to the `true` boolean in the Rust code
    )

    // Ensure that the verifying key is not empty
    assert(vk.count > 0, "Verifying key generation failed, vk is empty")

} catch {
    fatalError("Test failed with error: \(error)")
}
@@ -1,69 +0,0 @@
// Swift version of pk_is_valid_test
import ezkl
import Foundation

func loadFileAsBytes(from path: String) -> Data? {
    let url = URL(fileURLWithPath: path)
    return try? Data(contentsOf: url)
}

do {
    let pathToFile = "../../../../tests/assets/"
    let networkCompiledPath = pathToFile + "model.compiled"
    let srsPath = pathToFile + "kzg"
    let witnessPath = pathToFile + "witness.json"
    let settingsPath = pathToFile + "settings.json"

    // Load necessary files
    guard let compiledCircuit = loadFileAsBytes(from: networkCompiledPath) else {
        fatalError("Failed to load network compiled file")
    }
    guard let srs = loadFileAsBytes(from: srsPath) else {
        fatalError("Failed to load SRS file")
    }
    guard let witness = loadFileAsBytes(from: witnessPath) else {
        fatalError("Failed to load witness file")
    }
    guard let settings = loadFileAsBytes(from: settingsPath) else {
        fatalError("Failed to load settings file")
    }

    // Generate the vk (Verifying Key)
    let vk = try genVk(
        compiledCircuit: compiledCircuit,
        srs: srs,
        compressSelectors: true // Corresponds to the `true` boolean in the Rust code
    )

    // Generate the pk (Proving Key)
    let pk = try genPk(
        vk: vk,
        compiledCircuit: compiledCircuit,
        srs: srs
    )

    // Prove using the witness and proving key
    let proof = try prove(
        witness: witness,
        pk: pk,
        compiledCircuit: compiledCircuit,
        srs: srs
    )

    // Ensure that the proof is not empty
    assert(proof.count > 0, "Proof generation failed, proof is empty")

    // Verify the proof
    let value = try verify(
        proof: proof,
        vk: vk,
        settings: settings,
        srs: srs
    )

    // Ensure that the verification passed
    assert(value == true, "Verification failed")

} catch {
    fatalError("Test failed with error: \(error)")
}
@@ -1,71 +0,0 @@
// Swift version of verify_encode_verifier_calldata test
import ezkl
import Foundation

func loadFileAsBytes(from path: String) -> Data? {
    let url = URL(fileURLWithPath: path)
    return try? Data(contentsOf: url)
}

do {
    let pathToFile = "../../../../tests/assets/"
    let proofPath = pathToFile + "proof.json"

    guard let proof = loadFileAsBytes(from: proofPath) else {
        fatalError("Failed to load proof file")
    }

    // Test without vk address
    let calldataNoVk = try encodeVerifierCalldata(
        proof: proof,
        vkAddress: nil
    )

    // Deserialize the proof data
    struct Snark: Decodable {
        let proof: Data
        let instances: Data
    }

    let snark = try JSONDecoder().decode(Snark.self, from: proof)

    let flattenedInstances = snark.instances.flatMap { $0 }
    let referenceCalldataNoVk = try encodeCalldata(
        vk: nil,
        proof: snark.proof,
        instances: flattenedInstances
    )

    // Check if the encoded calldata matches the reference
    assert(calldataNoVk == referenceCalldataNoVk, "Calldata without vk does not match")

    // Test with vk address
    let vkAddressString = "0000000000000000000000000000000000000000"
    let vkAddressData = Data(hexString: vkAddressString)

    guard vkAddressData.count == 20 else {
        fatalError("Invalid VK address length")
    }

    let vkAddressArray = [UInt8](vkAddressData)

    // Serialize vkAddress to match JSON serialization in Rust
    let serializedVkAddress = try JSONEncoder().encode(vkAddressArray)

    let calldataWithVk = try encodeVerifierCalldata(
        proof: proof,
        vk: serializedVkAddress
    )

    let referenceCalldataWithVk = try encodeCalldata(
        vk: vkAddressArray,
        proof: snark.proof,
        instances: flattenedInstances
    )

    // Check if the encoded calldata matches the reference
    assert(calldataWithVk == referenceCalldataWithVk, "Calldata with vk does not match")

} catch {
    fatalError("Test failed with error: \(error)")
}
@@ -1,45 +0,0 @@
// Swift version of verify_gen_witness test
import ezkl
import Foundation

func loadFileAsBytes(from path: String) -> Data? {
    let url = URL(fileURLWithPath: path)
    return try? Data(contentsOf: url)
}

do {
    let pathToFile = "../../../../tests/assets/"
    let networkCompiledPath = pathToFile + "model.compiled"
    let inputPath = pathToFile + "input.json"
    let witnessPath = pathToFile + "witness.json"

    // Load necessary files
    guard let networkCompiled = loadFileAsBytes(from: networkCompiledPath) else {
        fatalError("Failed to load network compiled file")
    }
    guard let input = loadFileAsBytes(from: inputPath) else {
        fatalError("Failed to load input file")
    }
    guard let referenceWitnessData = loadFileAsBytes(from: witnessPath) else {
        fatalError("Failed to load witness file")
    }

    // Generate witness using genWitness function
    let witnessData = try genWitness(
        compiledCircuit: networkCompiled,
        input: input
    )

    // Deserialize the witness
    struct GraphWitness: Decodable, Equatable {}
    let witness = try JSONDecoder().decode(GraphWitness.self, from: witnessData)

    // Deserialize the reference witness
    let referenceWitness = try JSONDecoder().decode(GraphWitness.self, from: referenceWitnessData)

    // Check if the witness matches the reference witness
    assert(witness == referenceWitness, "Witnesses do not match")

} catch {
    fatalError("Test failed with error: \(error)")
}
@@ -1,64 +0,0 @@
// Swift version of verify_kzg_commit test
import ezkl
import Foundation

func loadFileAsBytes(from path: String) -> Data? {
    let url = URL(fileURLWithPath: path)
    return try? Data(contentsOf: url)
}

do {
    let pathToFile = "../../../../tests/assets/"
    let vkPath = pathToFile + "vk.key"
    let srsPath = pathToFile + "kzg"
    let settingsPath = pathToFile + "settings.json"

    guard let vk = loadFileAsBytes(from: vkPath) else {
        fatalError("Failed to load vk file")
    }
    guard let srs = loadFileAsBytes(from: srsPath) else {
        fatalError("Failed to load srs file")
    }
    guard let settings = loadFileAsBytes(from: settingsPath) else {
        fatalError("Failed to load settings file")
    }

    // Create a vector of field elements
    var message: [UInt64] = []
    for i in 0..<32 {
        message.append(UInt64(i))
    }

    // Serialize the message array
    let messageData = try JSONEncoder().encode(message)

    // Deserialize settings
    struct GraphSettings: Decodable {}
    let settingsDecoded = try JSONDecoder().decode(GraphSettings.self, from: settings)

    // Generate commitment
    let commitmentData = try kzgCommit(
        message: messageData,
        vk: vk,
        settings: settings,
        srs: srs
    )

    // Deserialize the resulting commitment
    struct G1Affine: Decodable {}
    let commitment = try JSONDecoder().decode([G1Affine].self, from: commitmentData)

    // Reference commitment using params and vk
    // For Swift, you'd need to implement or link the corresponding methods like in Rust
    let referenceCommitment = try polyCommit(
        message: message,
        vk: vk,
        srs: srs
    )

    // Check if the commitment matches the reference
    assert(commitment == referenceCommitment, "Commitments do not match")

} catch {
    fatalError("Test failed with error: \(error)")
}
@@ -1,103 +0,0 @@
// Swift version of verify_validations test
import ezkl
import Foundation

func loadFileAsBytes(from path: String) -> Data? {
    let url = URL(fileURLWithPath: path)
    return try? Data(contentsOf: url)
}

do {
    let pathToFile = "../../../../tests/assets/"
    let compiledCircuitPath = pathToFile + "model.compiled"
    let networkPath = pathToFile + "network.onnx"
    let witnessPath = pathToFile + "witness.json"
    let inputPath = pathToFile + "input.json"
    let proofPath = pathToFile + "proof.json"
    let vkPath = pathToFile + "vk.key"
    let pkPath = pathToFile + "pk.key"
    let settingsPath = pathToFile + "settings.json"
    let srsPath = pathToFile + "kzg"

    // Load necessary files
    guard let compiledCircuit = loadFileAsBytes(from: compiledCircuitPath) else {
        fatalError("Failed to load network compiled file")
    }
    guard let network = loadFileAsBytes(from: networkPath) else {
        fatalError("Failed to load network file")
    }
    guard let witness = loadFileAsBytes(from: witnessPath) else {
        fatalError("Failed to load witness file")
    }
    guard let input = loadFileAsBytes(from: inputPath) else {
        fatalError("Failed to load input file")
    }
    guard let proof = loadFileAsBytes(from: proofPath) else {
        fatalError("Failed to load proof file")
    }
    guard let vk = loadFileAsBytes(from: vkPath) else {
        fatalError("Failed to load vk file")
    }
    guard let pk = loadFileAsBytes(from: pkPath) else {
        fatalError("Failed to load pk file")
    }
    guard let settings = loadFileAsBytes(from: settingsPath) else {
        fatalError("Failed to load settings file")
    }
    guard let srs = loadFileAsBytes(from: srsPath) else {
        fatalError("Failed to load srs file")
    }

    // Witness validation (should fail for network compiled)
    let witnessValidationResult1 = try? witnessValidation(witness:compiledCircuit)
    assert(witnessValidationResult1 == nil, "Witness validation should fail for network compiled")

    // Witness validation (should pass for witness)
    let witnessValidationResult2 = try? witnessValidation(witness:witness)
    assert(witnessValidationResult2 != nil, "Witness validation should pass for witness")

    // Compiled circuit validation (should fail for onnx network)
    let circuitValidationResult1 = try? compiledCircuitValidation(compiledCircuit:network)
    assert(circuitValidationResult1 == nil, "Compiled circuit validation should fail for onnx network")

    // Compiled circuit validation (should pass for compiled network)
    let circuitValidationResult2 = try? compiledCircuitValidation(compiledCircuit:compiledCircuit)
    assert(circuitValidationResult2 != nil, "Compiled circuit validation should pass for compiled network")

    // Input validation (should fail for witness)
    let inputValidationResult1 = try? inputValidation(input:witness)
    assert(inputValidationResult1 == nil, "Input validation should fail for witness")

    // Input validation (should pass for input)
    let inputValidationResult2 = try? inputValidation(input:input)
    assert(inputValidationResult2 != nil, "Input validation should pass for input")

    // Proof validation (should fail for witness)
    let proofValidationResult1 = try? proofValidation(proof:witness)
    assert(proofValidationResult1 == nil, "Proof validation should fail for witness")

    // Proof validation (should pass for proof)
    let proofValidationResult2 = try? proofValidation(proof:proof)
    assert(proofValidationResult2 != nil, "Proof validation should pass for proof")

    // Verifying key (vk) validation (should pass)
    let vkValidationResult = try? vkValidation(vk:vk, settings:settings)
    assert(vkValidationResult != nil, "VK validation should pass for vk")

    // Proving key (pk) validation (should pass)
    let pkValidationResult = try? pkValidation(pk:pk, settings:settings)
    assert(pkValidationResult != nil, "PK validation should pass for pk")

    // Settings validation (should fail for proof)
    let settingsValidationResult1 = try? settingsValidation(settings:proof)
    assert(settingsValidationResult1 == nil, "Settings validation should fail for proof")

    // Settings validation (should pass for settings)
    let settingsValidationResult2 = try? settingsValidation(settings:settings)
    assert(settingsValidationResult2 != nil, "Settings validation should pass for settings")

    // SRS validation (should pass)
    let srsValidationResult = try? srsValidation(srs:srs)
    assert(srsValidationResult != nil, "SRS validation should pass for srs")

}
@@ -1,11 +0,0 @@
#[cfg(feature = "ios-bindings-test")]
uniffi::build_foreign_language_testcases!(
    "tests/ios/can_verify_aggr.swift",
    "tests/ios/verify_gen_witness.swift",
    "tests/ios/gen_pk_test.swift",
    "tests/ios/gen_vk_test.swift",
    "tests/ios/pk_is_valid_test.swift",
    "tests/ios/verify_validations.swift",
    // "tests/ios/verify_encode_verifier_calldata.swift", // TODO - the function requires rust dependencies to test
    // "tests/ios/verify_kzg_commit.swift", // TODO - the function is not exported and requires rust dependencies to test
);
@@ -146,7 +146,7 @@ mod py_tests {
    }
}

const TESTS: [&str; 31] = [
const TESTS: [&str; 30] = [
    "mnist_gan.ipynb", // 0
    "ezkl_demo_batch.ipynb", // 1
    "proof_splitting.ipynb", // 2
@@ -156,28 +156,27 @@ mod py_tests {
    "hashed_vis.ipynb", // 6
    "simple_demo_all_public.ipynb", // 7
    "little_transformer.ipynb", // 8
    "simple_demo_aggregated_proofs.ipynb", // 9
    "ezkl_demo.ipynb", // 10
    "lstm.ipynb", // 11
    "set_membership.ipynb", // 12
    "decision_tree.ipynb", // 13
    "random_forest.ipynb", // 14
    "gradient_boosted_trees.ipynb", // 15
    "xgboost.ipynb", // 16
    "lightgbm.ipynb", // 17
    "svm.ipynb", // 18
    "simple_demo_public_input_output.ipynb", // 19
    "simple_demo_public_network_output.ipynb", // 20
    "gcn.ipynb", // 21
    "linear_regression.ipynb", // 22
    "stacked_regression.ipynb", // 23
    "kzg_vis.ipynb", // 24
    "kmeans.ipynb", // 25
    "solvency.ipynb", // 26
    "sklearn_mlp.ipynb", // 27
    "generalized_inverse.ipynb", // 28
    "mnist_classifier.ipynb", // 29
    "logistic_regression.ipynb", // 30
    "ezkl_demo.ipynb", // 9
    "lstm.ipynb", // 10
    "set_membership.ipynb", // 11
    "decision_tree.ipynb", // 12
    "random_forest.ipynb", // 13
    "gradient_boosted_trees.ipynb", // 14
    "xgboost.ipynb", // 15
    "lightgbm.ipynb", // 16
    "svm.ipynb", // 17
    "simple_demo_public_input_output.ipynb", // 18
    "simple_demo_public_network_output.ipynb", // 19
    "gcn.ipynb", // 20
    "linear_regression.ipynb", // 21
    "stacked_regression.ipynb", // 22
    "kzg_vis.ipynb", // 23
    "kmeans.ipynb", // 24
    "solvency.ipynb", // 25
    "sklearn_mlp.ipynb", // 26
    "generalized_inverse.ipynb", // 27
    "mnist_classifier.ipynb", // 28
    "logistic_regression.ipynb", // 29
];
|
||||
|
||||
macro_rules! test_func {
|
||||
@@ -190,7 +189,7 @@ mod py_tests {
|
||||
use super::*;
|
||||
|
||||
|
||||
seq!(N in 0..=30 {
|
||||
seq!(N in 0..=29 {
|
||||
|
||||
#(#[test_case(TESTS[N])])*
|
||||
fn run_notebook_(test: &str) {
|
||||
|
||||
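// Bookkeeping note: `seq!` plus `test_case` expands to one `run_notebook_` test per
// index, so the `0..=N` bound above must track the TESTS array length; dropping a
// notebook means decrementing both in lockstep.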
@@ -8,22 +8,22 @@ import time
 folder_path = os.path.abspath(
     os.path.join(
         os.path.dirname(__file__),
-        '.',
+        ".",
     )
 )

 examples_path = os.path.abspath(
     os.path.join(
         folder_path,
-        '..',
-        '..',
-        'examples',
+        "..",
+        "..",
+        "examples",
     )
 )

-srs_path = os.path.join(folder_path, 'kzg_test.params')
-params_k17_path = os.path.join(folder_path, 'kzg_test_k17.params')
-params_k21_path = os.path.join(folder_path, 'kzg_test_k21.params')
+srs_path = os.path.join(folder_path, "kzg_test.params")
+params_k17_path = os.path.join(folder_path, "kzg_test_k17.params")
+params_k21_path = os.path.join(folder_path, "kzg_test_k21.params")
 anvil_url = "http://localhost:3030"

@@ -36,8 +36,7 @@ def setup_module(module):


 def teardown_module(module):
-    """teardown anvil.
-    """
+    """teardown anvil."""
     proc.terminate()

@@ -57,9 +56,10 @@ def test_poseidon_hash():
     message = [1.0, 2.0, 3.0, 4.0]
     message = [ezkl.float_to_felt(x, 7) for x in message]
     res = ezkl.poseidon_hash(message)
-    assert ezkl.felt_to_big_endian(
-        res[0]) == "0x2369898875588bf49b6539376b09705ea69aee318a58e6fcc1e68fc3e7ad81ab"
+    assert (
+        ezkl.felt_to_big_endian(res[0])
+        == "0x2369898875588bf49b6539376b09705ea69aee318a58e6fcc1e68fc3e7ad81ab"
+    )


 def test_field_serialization():
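The hash test above doubles as a recipe for the felt helpers. A minimal standalone sketch, assuming only that the `ezkl` Python package is installed (the scale of 7 mirrors the test):

import ezkl

# Quantize floats into field elements at scale 7 (i.e. scaled by 2^7), as in the test.
message = [ezkl.float_to_felt(x, 7) for x in [1.0, 2.0, 3.0, 4.0]]

# Poseidon-hash the field elements; the result is a list of output felts.
res = ezkl.poseidon_hash(message)

# Render the first output felt as a big-endian hex string.
print(ezkl.felt_to_big_endian(res[0]))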
@@ -84,15 +84,18 @@ def test_buffer_to_felts():
     """
     Test buffer_to_felt
     """
-    buffer = bytearray("a sample string!", 'utf-8')
+    buffer = bytearray("a sample string!", "utf-8")
     felts = ezkl.buffer_to_felts(buffer)
     ref_felt_1 = "0x0000000000000000000000000000000021676e6972747320656c706d61732061"
     assert ezkl.felt_to_big_endian(felts[0]) == ref_felt_1

-    buffer = bytearray("a sample string!"+"high", 'utf-8')
+    buffer = bytearray("a sample string!" + "high", "utf-8")
     felts = ezkl.buffer_to_felts(buffer)
     ref_felt_2 = "0x0000000000000000000000000000000000000000000000000000000068676968"
-    assert [ezkl.felt_to_big_endian(felts[0]), ezkl.felt_to_big_endian(felts[1])] == [ref_felt_1, ref_felt_2]
+    assert [ezkl.felt_to_big_endian(felts[0]), ezkl.felt_to_big_endian(felts[1])] == [
+        ref_felt_1,
+        ref_felt_2,
+    ]


 def test_gen_srs():
@@ -107,92 +110,51 @@ def test_gen_srs():
     assert os.path.isfile(params_k21_path)


 async def test_calibrate_over_user_range():
-    data_path = os.path.join(
-        examples_path,
-        'onnx',
-        '1l_relu',
-        'input.json'
-    )
-    model_path = os.path.join(
-        examples_path,
-        'onnx',
-        '1l_relu',
-        'network.onnx'
-    )
-    output_path = os.path.join(
-        folder_path,
-        'settings.json'
-    )
+    data_path = os.path.join(examples_path, "onnx", "1l_relu", "input.json")
+    model_path = os.path.join(examples_path, "onnx", "1l_relu", "network.onnx")
+    output_path = os.path.join(folder_path, "settings.json")

     run_args = ezkl.PyRunArgs()
     run_args.input_visibility = "hashed"
     run_args.output_visibility = "hashed"

     # TODO: Dictionary outputs
-    res = ezkl.gen_settings(
-        model_path, output_path, py_run_args=run_args)
+    res = ezkl.gen_settings(model_path, output_path, py_run_args=run_args)
     assert res == True

     res = ezkl.calibrate_settings(
-        data_path, model_path, output_path, "resources", 1, [0, 1, 2])
+        data_path, model_path, output_path, "resources", 1, [0, 1, 2]
+    )
     assert res == True
     assert os.path.isfile(output_path)

 async def test_calibrate():
-    data_path = os.path.join(
-        examples_path,
-        'onnx',
-        '1l_relu',
-        'input.json'
-    )
-    model_path = os.path.join(
-        examples_path,
-        'onnx',
-        '1l_relu',
-        'network.onnx'
-    )
-    output_path = os.path.join(
-        folder_path,
-        'settings.json'
-    )
+    data_path = os.path.join(examples_path, "onnx", "1l_relu", "input.json")
+    model_path = os.path.join(examples_path, "onnx", "1l_relu", "network.onnx")
+    output_path = os.path.join(folder_path, "settings.json")

     run_args = ezkl.PyRunArgs()
     run_args.input_visibility = "hashed"
     run_args.output_visibility = "hashed"

     # TODO: Dictionary outputs
-    res = ezkl.gen_settings(
-        model_path, output_path, py_run_args=run_args)
+    res = ezkl.gen_settings(model_path, output_path, py_run_args=run_args)
     assert res == True

-    res = ezkl.calibrate_settings(
-        data_path, model_path, output_path, "resources")
+    res = ezkl.calibrate_settings(data_path, model_path, output_path, "resources")
     assert res == True
     assert os.path.isfile(output_path)

 def test_model_compile():
     """
     Test for model compilation/serialization
     """
-    model_path = os.path.join(
-        examples_path,
-        'onnx',
-        '1l_relu',
-        'network.onnx'
-    )
-    compiled_model_path = os.path.join(
-        folder_path,
-        'model.compiled'
-    )
-    settings_path = os.path.join(
-        folder_path,
-        'settings.json'
-    )
+    model_path = os.path.join(examples_path, "onnx", "1l_relu", "network.onnx")
+    compiled_model_path = os.path.join(folder_path, "model.compiled")
+    settings_path = os.path.join(folder_path, "settings.json")
     res = ezkl.compile_circuit(model_path, compiled_model_path, settings_path)
     assert res == True

@@ -201,20 +163,9 @@ async def test_forward():
     """
     Test for vanilla forward pass
     """
-    data_path = os.path.join(
-        examples_path,
-        'onnx',
-        '1l_relu',
-        'input.json'
-    )
-    model_path = os.path.join(
-        folder_path,
-        'model.compiled'
-    )
-    output_path = os.path.join(
-        folder_path,
-        'witness.json'
-    )
+    data_path = os.path.join(examples_path, "onnx", "1l_relu", "input.json")
+    model_path = os.path.join(folder_path, "model.compiled")
+    output_path = os.path.join(folder_path, "witness.json")

     res = ezkl.gen_witness(data_path, model_path, output_path)

@@ -224,15 +175,21 @@ async def test_forward():
     assert data["inputs"] == res["inputs"]
     assert data["outputs"] == res["outputs"]

-    assert data["processed_inputs"]["poseidon_hash"] == res["processed_inputs"]["poseidon_hash"]
-    assert data["processed_outputs"]["poseidon_hash"] == res["processed_outputs"]["poseidon_hash"]
+    assert (
+        data["processed_inputs"]["poseidon_hash"]
+        == res["processed_inputs"]["poseidon_hash"]
+    )
+    assert (
+        data["processed_outputs"]["poseidon_hash"]
+        == res["processed_outputs"]["poseidon_hash"]
+    )


 async def test_get_srs():
     """
     Test for get_srs
     """
-    settings_path = os.path.join(folder_path, 'settings.json')
+    settings_path = os.path.join(folder_path, "settings.json")
     res = await ezkl.get_srs(settings_path, srs_path=srs_path)

     assert res == True
@@ -241,7 +198,10 @@ async def test_get_srs():

     another_srs_path = os.path.join(folder_path, "kzg_test_k8.params")

-    res = await ezkl.get_srs(logrows=8, srs_path=another_srs_path, commitment=ezkl.PyCommitments.KZG)
+    res = await ezkl.get_srs(
+        logrows=8,
+        srs_path=another_srs_path,
+    )

     assert os.path.isfile(another_srs_path)

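Because `get_srs` is a coroutine in the Python bindings, driving it outside a pytest-asyncio test needs an event loop. A minimal sketch (the output path is illustrative):

import asyncio
import ezkl

async def fetch_srs():
    # Fetch an SRS sized for 2^8 rows into a local file, as in the test above.
    ok = await ezkl.get_srs(logrows=8, srs_path="kzg_test_k8.params")
    assert ok

asyncio.run(fetch_srs())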
@@ -251,17 +211,11 @@ def test_mock():
     Test for mock
     """

-    data_path = os.path.join(
-        folder_path,
-        'witness.json'
-    )
+    data_path = os.path.join(folder_path, "witness.json")

-    model_path = os.path.join(
-        folder_path,
-        'model.compiled'
-    )
+    model_path = os.path.join(folder_path, "model.compiled")

-    settings_path = os.path.join(folder_path, 'settings.json')
+    settings_path = os.path.join(folder_path, "settings.json")

     res = ezkl.mock(data_path, model_path)
     assert res == True
@@ -272,19 +226,13 @@ def test_setup():
     Test for setup
     """

-    data_path = os.path.join(
-        folder_path,
-        'witness.json'
-    )
+    data_path = os.path.join(folder_path, "witness.json")

-    model_path = os.path.join(
-        folder_path,
-        'model.compiled'
-    )
+    model_path = os.path.join(folder_path, "model.compiled")

-    pk_path = os.path.join(folder_path, 'test.pk')
-    vk_path = os.path.join(folder_path, 'test.vk')
-    settings_path = os.path.join(folder_path, 'settings.json')
+    pk_path = os.path.join(folder_path, "test.pk")
+    vk_path = os.path.join(folder_path, "test.vk")
+    settings_path = os.path.join(folder_path, "settings.json")

     res = ezkl.setup(
         model_path,
@@ -307,13 +255,10 @@ def test_setup_evm():
     Test for setup
     """

-    model_path = os.path.join(
-        folder_path,
-        'model.compiled'
-    )
+    model_path = os.path.join(folder_path, "model.compiled")

-    pk_path = os.path.join(folder_path, 'test_evm.pk')
-    vk_path = os.path.join(folder_path, 'test_evm.vk')
+    pk_path = os.path.join(folder_path, "test_evm.pk")
+    vk_path = os.path.join(folder_path, "test_evm.vk")

     res = ezkl.setup(
         model_path,
@@ -331,34 +276,25 @@ def test_prove_and_verify():
     Test for prove and verify
     """

-    data_path = os.path.join(
-        folder_path,
-        'witness.json'
-    )
+    data_path = os.path.join(folder_path, "witness.json")

-    model_path = os.path.join(
-        folder_path,
-        'model.compiled'
-    )
+    model_path = os.path.join(folder_path, "model.compiled")

-    pk_path = os.path.join(folder_path, 'test.pk')
-    proof_path = os.path.join(folder_path, 'test.pf')
+    pk_path = os.path.join(folder_path, "test.pk")
+    proof_path = os.path.join(folder_path, "test.pf")

     res = ezkl.prove(
         data_path,
         model_path,
         pk_path,
         proof_path,
         "for-aggr",
         srs_path=srs_path,
     )
     assert res['transcript_type'] == 'poseidon'
     assert os.path.isfile(proof_path)

-    settings_path = os.path.join(folder_path, 'settings.json')
-    vk_path = os.path.join(folder_path, 'test.vk')
-    res = ezkl.verify(proof_path, settings_path,
-                      vk_path, srs_path)
+    settings_path = os.path.join(folder_path, "settings.json")
+    vk_path = os.path.join(folder_path, "test.vk")
+    res = ezkl.verify(proof_path, settings_path, vk_path, srs_path)
     assert res == True
     assert os.path.isfile(vk_path)

@@ -368,27 +304,19 @@ def test_prove_evm():
     Test for prove using evm transcript
     """

-    data_path = os.path.join(
-        folder_path,
-        'witness.json'
-    )
+    data_path = os.path.join(folder_path, "witness.json")

-    model_path = os.path.join(
-        folder_path,
-        'model.compiled'
-    )
+    model_path = os.path.join(folder_path, "model.compiled")

-    pk_path = os.path.join(folder_path, 'test_evm.pk')
-    proof_path = os.path.join(folder_path, 'test_evm.pf')
-    res = ezkl.prove(
+    pk_path = os.path.join(folder_path, "test_evm.pk")
+    proof_path = os.path.join(folder_path, "test_evm.pf")
+    ezkl.prove(
         data_path,
         model_path,
         pk_path,
         proof_path,
         "single",
         srs_path=srs_path,
     )
-    assert res['transcript_type'] == 'evm'
     assert os.path.isfile(proof_path)

@@ -397,12 +325,12 @@ async def test_create_evm_verifier():
     Create EVM verifier with solidity code
     In order to run this test you will need to install solc in your environment
     """
-    vk_path = os.path.join(folder_path, 'test_evm.vk')
-    settings_path = os.path.join(folder_path, 'settings.json')
-    sol_code_path = os.path.join(folder_path, 'test.sol')
-    abi_path = os.path.join(folder_path, 'test.abi')
-    proof_path = os.path.join(folder_path, 'test_evm.pf')
-    calldata_path = os.path.join(folder_path, 'calldata.bytes')
+    vk_path = os.path.join(folder_path, "test_evm.vk")
+    settings_path = os.path.join(folder_path, "settings.json")
+    sol_code_path = os.path.join(folder_path, "test.sol")
+    abi_path = os.path.join(folder_path, "test.abi")
+    proof_path = os.path.join(folder_path, "test_evm.pf")
+    calldata_path = os.path.join(folder_path, "calldata.bytes")

     # res is now a vector of bytes
     res = ezkl.encode_evm_calldata(proof_path, calldata_path)
@@ -410,7 +338,6 @@ async def test_create_evm_verifier():
     assert os.path.isfile(calldata_path)
     assert len(res) > 0

-
     res = await ezkl.create_evm_verifier(
         vk_path,
         settings_path,
@@ -422,19 +349,20 @@ async def test_create_evm_verifier():
     assert res == True
     assert os.path.isfile(sol_code_path)


 async def test_create_evm_verifier_separate_vk():
     """
     Create EVM a verifier with solidity code and separate vk
     In order to run this test you will need to install solc in your environment
     """
-    vk_path = os.path.join(folder_path, 'test_evm.vk')
-    settings_path = os.path.join(folder_path, 'settings.json')
-    sol_code_path = os.path.join(folder_path, 'test_separate.sol')
-    vka_path = os.path.join(folder_path, 'vka.calldata')
-    abi_path = os.path.join(folder_path, 'test_separate.abi')
-    abi_vk_path = os.path.join(folder_path, 'test_vk_separate.abi')
-    proof_path = os.path.join(folder_path, 'test_evm.pf')
-    calldata_path = os.path.join(folder_path, 'calldata.bytes')
+    vk_path = os.path.join(folder_path, "test_evm.vk")
+    settings_path = os.path.join(folder_path, "settings.json")
+    sol_code_path = os.path.join(folder_path, "test_separate.sol")
+    vka_path = os.path.join(folder_path, "vka.calldata")
+    abi_path = os.path.join(folder_path, "test_separate.abi")
+    abi_vk_path = os.path.join(folder_path, "test_vk_separate.abi")
+    proof_path = os.path.join(folder_path, "test_evm.pf")
+    calldata_path = os.path.join(folder_path, "calldata.bytes")

     # # res is now a vector of bytes
     # res = ezkl.encode_evm_calldata(proof_path, calldata_path)
@@ -442,22 +370,16 @@ async def test_create_evm_verifier_separate_vk():
     # assert os.path.isfile(calldata_path)
     # assert len(res) > 0

-
     res = await ezkl.create_evm_verifier(
         vk_path,
         settings_path,
         sol_code_path,
         abi_path,
         srs_path=srs_path,
-        reusable=True
+        reusable=True,
     )

-    res = await ezkl.create_evm_vka(
-        vk_path,
-        settings_path,
-        vka_path,
-        srs_path=srs_path
-    )
+    res = await ezkl.create_evm_vka(vk_path, settings_path, vka_path, srs_path=srs_path)

     assert res == True
     assert os.path.isfile(sol_code_path)
@@ -468,10 +390,10 @@ async def test_deploy_evm_reusable_and_vka():
     Test deployment of the reusable verifier smart contract + vka
     In order to run this you will need to install solc in your environment
     """
-    addr_path_verifier = os.path.join(folder_path, 'address_separate.json')
-    addr_path_vk = os.path.join(folder_path, 'address_vk.json')
-    sol_code_path = os.path.join(folder_path, 'test_separate.sol')
-    vka_path = os.path.join(folder_path, 'vka.calldata')
+    addr_path_verifier = os.path.join(folder_path, "address_separate.json")
+    addr_path_vk = os.path.join(folder_path, "address_vk.json")
+    sol_code_path = os.path.join(folder_path, "test_separate.sol")
+    vka_path = os.path.join(folder_path, "vka.calldata")

     # TODO: without optimization there will be out of gas errors
     # sol_code_path = os.path.join(folder_path, 'test.sol')
@@ -483,7 +405,7 @@ async def test_deploy_evm_reusable_and_vka():
         "verifier/reusable",
     )

-    with open(addr_path_verifier, 'r') as file:
+    with open(addr_path_verifier, "r") as file:
         addr_verifier = file.read().rstrip()

     # TODO fix: we need to call register vka instead of deploy evm
@@ -495,13 +417,14 @@ async def test_deploy_evm_reusable_and_vka():

     assert res == True

 async def test_deploy_evm():
     """
     Test deployment of the verifier smart contract
     In order to run this you will need to install solc in your environment
     """
-    addr_path = os.path.join(folder_path, 'address.json')
-    sol_code_path = os.path.join(folder_path, 'test.sol')
+    addr_path = os.path.join(folder_path, "address.json")
+    sol_code_path = os.path.join(folder_path, "test.sol")

     # TODO: without optimization there will be out of gas errors
     # sol_code_path = os.path.join(folder_path, 'test.sol')
@@ -520,31 +443,32 @@ async def test_deploy_evm_with_private_key():
     Test deployment of the verifier smart contract using a custom private key
     In order to run this you will need to install solc in your environment
     """
-    addr_path = os.path.join(folder_path, 'address.json')
-    sol_code_path = os.path.join(folder_path, 'test.sol')
+    addr_path = os.path.join(folder_path, "address.json")
+    sol_code_path = os.path.join(folder_path, "test.sol")

     # TODO: without optimization there will be out of gas errors
     # sol_code_path = os.path.join(folder_path, 'test.sol')

-    anvil_default_private_key = "ac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80"
+    anvil_default_private_key = (
+        "ac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80"
+    )

     res = await ezkl.deploy_evm(
-        addr_path,
-        anvil_url,
-        sol_code_path,
-        private_key=anvil_default_private_key
+        addr_path, anvil_url, sol_code_path, private_key=anvil_default_private_key
     )

     assert res == True

-    custom_zero_balance_private_key = "ff9dfe0b6d31e93ba13460a4d6f63b5e31dd9532b1304f1cbccea7092a042aa4"
+    custom_zero_balance_private_key = (
+        "ff9dfe0b6d31e93ba13460a4d6f63b5e31dd9532b1304f1cbccea7092a042aa4"
+    )

     with pytest.raises(RuntimeError, match="Failed to run deploy_evm"):
         res = await ezkl.deploy_evm(
             addr_path,
             anvil_url,
             sol_code_path,
-            private_key=custom_zero_balance_private_key
+            private_key=custom_zero_balance_private_key,
         )

@@ -553,10 +477,10 @@ async def test_verify_evm():
     Verifies an evm proof
     In order to run this you will need to install solc in your environment
     """
-    proof_path = os.path.join(folder_path, 'test_evm.pf')
-    addr_path = os.path.join(folder_path, 'address.json')
+    proof_path = os.path.join(folder_path, "test_evm.pf")
+    addr_path = os.path.join(folder_path, "address.json")

-    with open(addr_path, 'r') as file:
+    with open(addr_path, "r") as file:
         addr = file.read().rstrip()

     print(addr)
@@ -574,18 +498,19 @@ async def test_verify_evm():

     assert res == True

 async def test_verify_evm_separate_vk():
     """
     Verifies an evm proof
     In order to run this you will need to install solc in your environment
     """
-    proof_path = os.path.join(folder_path, 'test_evm.pf')
-    addr_path_verifier = os.path.join(folder_path, 'address_separate.json')
-    vka_path = os.path.join(folder_path, 'vka.calldata')
-    proof_path = os.path.join(folder_path, 'test_evm.pf')
-    calldata_path = os.path.join(folder_path, 'calldata_separate.bytes')
+    proof_path = os.path.join(folder_path, "test_evm.pf")
+    addr_path_verifier = os.path.join(folder_path, "address_separate.json")
+    vka_path = os.path.join(folder_path, "vka.calldata")
+    proof_path = os.path.join(folder_path, "test_evm.pf")
+    calldata_path = os.path.join(folder_path, "calldata_separate.bytes")

-    with open(addr_path_verifier, 'r') as file:
+    with open(addr_path_verifier, "r") as file:
         addr_verifier = file.read().rstrip()

     print(addr_verifier)
@@ -611,262 +536,20 @@ async def test_verify_evm_separate_vk():
     assert res == True

-async def test_aggregate_and_verify_aggr():
-    data_path = os.path.join(
-        examples_path,
-        'onnx',
-        '1l_relu',
-        'input.json'
-    )
-
-    model_path = os.path.join(
-        examples_path,
-        'onnx',
-        '1l_relu',
-        'network.onnx'
-    )
-
-    compiled_model_path = os.path.join(
-        folder_path,
-        'compiled_relu.onnx'
-    )
-
-    pk_path = os.path.join(folder_path, '1l_relu.pk')
-    vk_path = os.path.join(folder_path, '1l_relu.vk')
-    settings_path = os.path.join(
-        folder_path, '1l_relu_aggr_settings.json')
-
-    # TODO: Dictionary outputs
-    res = ezkl.gen_settings(model_path, settings_path)
-    assert res == True
-
-    res = ezkl.calibrate_settings(
-        data_path, model_path, settings_path, "resources")
-    assert res == True
-    assert os.path.isfile(settings_path)
-
-    res = ezkl.compile_circuit(model_path, compiled_model_path, settings_path)
-    assert res == True
-
-    ezkl.setup(
-        compiled_model_path,
-        vk_path,
-        pk_path,
-        srs_path=srs_path,
-    )
-
-    proof_path = os.path.join(folder_path, '1l_relu.pf')
-
-    output_path = os.path.join(
-        folder_path,
-        '1l_relu_aggr_witness.json'
-    )
-
-    res = ezkl.gen_witness(data_path, compiled_model_path,
-                           output_path)
-
-    ezkl.prove(
-        output_path,
-        compiled_model_path,
-        pk_path,
-        proof_path,
-        "for-aggr",
-        srs_path=srs_path,
-    )
-
-    # mock aggregate
-    res = ezkl.mock_aggregate([proof_path], 21)
-    assert res == True
-
-    aggregate_proof_path = os.path.join(folder_path, 'aggr_1l_relu.pf')
-    aggregate_vk_path = os.path.join(folder_path, 'aggr_1l_relu.vk')
-    aggregate_pk_path = os.path.join(folder_path, 'aggr_1l_relu.pk')
-
-    res = ezkl.setup_aggregate(
-        [proof_path],
-        aggregate_vk_path,
-        aggregate_pk_path,
-        21,
-        srs_path=params_k21_path,
-    )
-
-    res = ezkl.gen_vk_from_pk_aggr(aggregate_pk_path, aggregate_vk_path)
-    assert res == True
-    assert os.path.isfile(vk_path)
-
-    res = ezkl.aggregate(
-        [proof_path],
-        aggregate_proof_path,
-        aggregate_pk_path,
-        "poseidon",
-        21,
-        "unsafe",
-        srs_path=params_k21_path,
-    )
-
-    assert res == True
-    assert os.path.isfile(aggregate_proof_path)
-    assert os.path.isfile(aggregate_vk_path)
-
-    res = ezkl.verify_aggr(
-        aggregate_proof_path,
-        aggregate_vk_path,
-        21,
-        srs_path=params_k21_path,
-    )
-    assert res == True
-
-
-async def test_evm_aggregate_and_verify_aggr():
-    data_path = os.path.join(
-        examples_path,
-        'onnx',
-        '1l_relu',
-        'input.json'
-    )
-
-    model_path = os.path.join(
-        examples_path,
-        'onnx',
-        '1l_relu',
-        'network.onnx'
-    )
-
-    pk_path = os.path.join(folder_path, '1l_relu.pk')
-    vk_path = os.path.join(folder_path, '1l_relu.vk')
-    settings_path = os.path.join(
-        folder_path, '1l_relu_evm_aggr_settings.json')
-
-    ezkl.gen_settings(
-        model_path,
-        settings_path,
-    )
-
-    ezkl.calibrate_settings(
-        data_path,
-        model_path,
-        settings_path,
-        "resources",
-    )
-
-    compiled_model_path = os.path.join(
-        folder_path,
-        'compiled_relu.onnx'
-    )
-
-    res = ezkl.compile_circuit(model_path, compiled_model_path, settings_path)
-    assert res == True
-
-    ezkl.setup(
-        compiled_model_path,
-        vk_path,
-        pk_path,
-        srs_path=srs_path,
-    )
-
-    proof_path = os.path.join(folder_path, '1l_relu.pf')
-
-    output_path = os.path.join(
-        folder_path,
-        '1l_relu_aggr_evm_witness.json'
-    )
-
-    res = ezkl.gen_witness(data_path, compiled_model_path,
-                           output_path)
-
-    ezkl.prove(
-        output_path,
-        compiled_model_path,
-        pk_path,
-        proof_path,
-        "for-aggr",
-        srs_path=srs_path,
-    )
-
-    aggregate_proof_path = os.path.join(folder_path, 'aggr_evm_1l_relu.pf')
-    aggregate_vk_path = os.path.join(folder_path, 'aggr_evm_1l_relu.vk')
-    aggregate_pk_path = os.path.join(folder_path, 'aggr_evm_1l_relu.pk')
-
-    res = ezkl.setup_aggregate(
-        [proof_path],
-        aggregate_vk_path,
-        aggregate_pk_path,
-        21,
-        srs_path=params_k21_path,
-    )
-
-    res = ezkl.aggregate(
-        [proof_path],
-        aggregate_proof_path,
-        aggregate_pk_path,
-        "evm",
-        21,
-        "unsafe",
-        srs_path=params_k21_path,
-    )
-
-    assert res == True
-    assert os.path.isfile(aggregate_proof_path)
-    assert os.path.isfile(aggregate_vk_path)
-
-    sol_code_path = os.path.join(folder_path, 'aggr_evm_1l_relu.sol')
-    abi_path = os.path.join(folder_path, 'aggr_evm_1l_relu.abi')
-
-    res = await ezkl.create_evm_verifier_aggr(
-        [settings_path],
-        aggregate_vk_path,
-        sol_code_path,
-        abi_path,
-        logrows=21,
-        srs_path=params_k21_path,
-    )
-
-    assert res == True
-    assert os.path.isfile(sol_code_path)
-
-    addr_path = os.path.join(folder_path, 'address_aggr.json')
-
-    res = await ezkl.deploy_evm(
-        addr_path,
-        anvil_url,
-        sol_code_path,
-    )
-
-    # as a sanity check
-    res = ezkl.verify_aggr(
-        aggregate_proof_path,
-        aggregate_vk_path,
-        21,
-        srs_path=params_k21_path,
-    )
-    assert res == True
-
-    # with open(addr_path, 'r') as file:
-    #     addr_aggr = file.read().rstrip()
-
-    # res = await ezkl.verify_evm(
-    #     aggregate_proof_path,
-    #     addr_aggr,
-    #     rpc_url=anvil_url,
-    # )
-
-    # assert res == True

 def get_examples():
     EXAMPLES_OMIT = [
         # these are too large
-        'mobilenet_large',
-        'mobilenet',
-        'doodles',
-        'nanoGPT',
+        "mobilenet_large",
+        "mobilenet",
+        "doodles",
+        "nanoGPT",
         "self_attention",
-        'multihead_attention',
-        'large_op_graph',
-        '1l_instance_norm',
-        'variable_cnn',
-        'accuracy',
-        'linear_regression',
+        "multihead_attention",
+        "large_op_graph",
+        "1l_instance_norm",
+        "variable_cnn",
+        "accuracy",
+        "linear_regression",
+        "mnist_gan",
+        "smallworm",
+        "fr_age",
@@ -874,14 +557,16 @@ def get_examples():
     ]
     examples = []
     for subdir, _, _ in os.walk(os.path.join(examples_path, "onnx")):
-        name = subdir.split('/')[-1]
+        name = subdir.split("/")[-1]
         if name in EXAMPLES_OMIT or name == "onnx":
             continue
         else:
-            examples.append((
-                os.path.join(subdir, "network.onnx"),
-                os.path.join(subdir, "input.json"),
-            ))
+            examples.append(
+                (
+                    os.path.join(subdir, "network.onnx"),
+                    os.path.join(subdir, "input.json"),
+                )
+            )
     return examples

@@ -890,11 +575,11 @@ async def test_all_examples(model_file, input_file):
     """Tests all examples in the examples folder"""
     # gen settings
     settings_path = os.path.join(folder_path, "settings.json")
-    compiled_model_path = os.path.join(folder_path, 'network.ezkl')
-    pk_path = os.path.join(folder_path, 'test.pk')
-    vk_path = os.path.join(folder_path, 'test.vk')
-    witness_path = os.path.join(folder_path, 'witness.json')
-    proof_path = os.path.join(folder_path, 'proof.json')
+    compiled_model_path = os.path.join(folder_path, "network.ezkl")
+    pk_path = os.path.join(folder_path, "test.pk")
+    vk_path = os.path.join(folder_path, "test.vk")
+    witness_path = os.path.join(folder_path, "witness.json")
+    proof_path = os.path.join(folder_path, "proof.json")

     print("Testing example: ", model_file)

@@ -905,15 +590,14 @@ async def test_all_examples(model_file, input_file):
     res = ezkl.gen_settings(model_file, settings_path, py_run_args=run_args)
     assert res

-    res = ezkl.calibrate_settings(
-        input_file, model_file, settings_path, "resources")
+    res = ezkl.calibrate_settings(input_file, model_file, settings_path, "resources")
     assert res

     print("Compiling example: ", model_file)
     res = ezkl.compile_circuit(model_file, compiled_model_path, settings_path)
     assert res

-    with open(settings_path, 'r') as f:
+    with open(settings_path, "r") as f:
         data = json.load(f)

     logrows = data["run_args"]["logrows"]
@@ -925,12 +609,7 @@ async def test_all_examples(model_file, input_file):
     ezkl.gen_srs(os.path.join(folder_path, srs_path), logrows)

     print("Setting up example: ", model_file)
-    res = ezkl.setup(
-        compiled_model_path,
-        vk_path,
-        pk_path,
-        srs_path
-    )
+    res = ezkl.setup(compiled_model_path, vk_path, pk_path, srs_path)
     assert res == True
     assert os.path.isfile(vk_path)
     assert os.path.isfile(pk_path)
@@ -945,7 +624,6 @@ async def test_all_examples(model_file, input_file):
         compiled_model_path,
         pk_path,
         proof_path,
         "single",
-        srs_path=srs_path,
     )

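Taken together, the hunks above trace ezkl's whole Python pipeline. A condensed sketch of the happy path, with illustrative file names (only `get_srs` is awaited):

import asyncio
import ezkl

async def end_to_end(model="network.onnx", data="input.json"):
    # 1. Derive and calibrate circuit settings from the ONNX model.
    assert ezkl.gen_settings(model, "settings.json")
    assert ezkl.calibrate_settings(data, model, "settings.json", "resources")

    # 2. Compile the model into a circuit and fetch a matching SRS.
    assert ezkl.compile_circuit(model, "model.compiled", "settings.json")
    assert await ezkl.get_srs("settings.json", srs_path="kzg.srs")

    # 3. Witness generation, keygen, proving, verifying.
    ezkl.gen_witness(data, "model.compiled", "witness.json")
    assert ezkl.setup("model.compiled", "test.vk", "test.pk", srs_path="kzg.srs")
    ezkl.prove("witness.json", "model.compiled", "test.pk", "proof.pf",
               "single", srs_path="kzg.srs")
    assert ezkl.verify("proof.pf", "settings.json", "test.vk", "kzg.srs")

asyncio.run(end_to_end())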
tests/wasm.rs (394 lines)
@@ -1,394 +0,0 @@
#[cfg(all(target_arch = "wasm32", target_os = "unknown"))]
#[cfg(test)]
mod wasm32 {
    use ezkl::bindings::wasm::{
        bufferToVecOfFelt, compiledCircuitValidation, encodeVerifierCalldata, feltToBigEndian,
        feltToFloat, feltToInt, feltToLittleEndian, genPk, genVk, genWitness, inputValidation,
        kzgCommit, pkValidation, poseidonHash, proofValidation, prove, settingsValidation,
        srsValidation, u8_array_to_u128_le, verify, verifyAggr, vkValidation, witnessValidation,
    };
    use ezkl::circuit::modules::polycommit::PolyCommitChip;
    use ezkl::circuit::modules::poseidon::spec::{PoseidonSpec, POSEIDON_RATE, POSEIDON_WIDTH};
    use ezkl::circuit::modules::poseidon::PoseidonChip;
    use ezkl::circuit::modules::Module;
    use ezkl::graph::GraphCircuit;
    use ezkl::graph::{GraphSettings, GraphWitness};
    use ezkl::pfsys;
    use ezkl::pfsys::encode_calldata;
    use halo2_proofs::plonk::VerifyingKey;
    use halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme;
    use halo2_proofs::poly::kzg::commitment::ParamsKZG;
    use halo2curves::bn256::Bn256;
    use halo2curves::bn256::{Fr, G1Affine};
    use snark_verifier::util::arithmetic::PrimeField;
    #[cfg(feature = "web")]
    pub use wasm_bindgen_rayon::init_thread_pool;
    use wasm_bindgen_test::*;

    wasm_bindgen_test::wasm_bindgen_test_configure!(run_in_browser);

    pub const WITNESS: &[u8] = include_bytes!("assets/witness.json");
    pub const NETWORK_COMPILED: &[u8] = include_bytes!("assets/model.compiled");
    pub const NETWORK: &[u8] = include_bytes!("assets/network.onnx");
    pub const INPUT: &[u8] = include_bytes!("assets/input.json");
    pub const PROOF: &[u8] = include_bytes!("assets/proof.json");
    pub const PROOF_AGGR: &[u8] = include_bytes!("assets/proof_aggr.json");
    pub const SETTINGS: &[u8] = include_bytes!("assets/settings.json");
    pub const PK: &[u8] = include_bytes!("assets/pk.key");
    pub const VK: &[u8] = include_bytes!("assets/vk.key");
    pub const VK_AGGR: &[u8] = include_bytes!("assets/vk_aggr.key");
    pub const SRS: &[u8] = include_bytes!("assets/kzg");
    pub const SRS1: &[u8] = include_bytes!("assets/kzg1.srs");
    pub const VERIFIER_BYTECODE: &[u8] = include_bytes!("assets/wasm.code");

    // #[wasm_bindgen_test]
    // async fn can_verify_aggr() {
    //     let value = verifyAggr(
    //         wasm_bindgen::Clamped(PROOF_AGGR.to_vec()),
    //         wasm_bindgen::Clamped(VK_AGGR.to_vec()),
    //         21,
    //         wasm_bindgen::Clamped(SRS1.to_vec()),
    //         "kzg",
    //     )
    //     .map_err(|_| "failed")
    //     .unwrap();

    //     // should not fail
    //     assert!(value);
    // }

    #[wasm_bindgen_test]
    async fn verify_encode_verifier_calldata() {
        let ser_proof = wasm_bindgen::Clamped(PROOF.to_vec());

        // with no vk address
        let calldata = encodeVerifierCalldata(ser_proof.clone(), None)
            .map_err(|_| "failed")
            .unwrap();

        let snark: pfsys::Snark<Fr, G1Affine> = serde_json::from_slice(&PROOF).unwrap();
        let flattened_instances = snark.instances.into_iter().flatten();
        let reference_calldata = encode_calldata(
            None,
            &snark.proof,
            &flattened_instances.clone().collect::<Vec<_>>(),
        );
        assert_eq!(calldata, reference_calldata);
        // with vk address
        let dummy_32_byte_word = [0u8; 32];

        // define and initialize a variable of type: &[[u8; 32]] named "vka"
        let vka: &[[u8; 32]] = &[dummy_32_byte_word.into()];

        let serialized = serde_json::to_vec(vka).unwrap();

        let calldata = encodeVerifierCalldata(ser_proof, Some(serialized))
            .map_err(|_| "failed")
            .unwrap();
        let reference_calldata = encode_calldata(
            Some(vka),
            &snark.proof,
            &flattened_instances.collect::<Vec<_>>(),
        );
        assert_eq!(calldata, reference_calldata);
    }

    #[wasm_bindgen_test]
    fn verify_kzg_commit() {
        // create a vector of field elements Vec<Fr> and assign it to the message variable
        let mut message: Vec<Fr> = vec![];
        for i in 0..32 {
            message.push(Fr::from(i as u64));
        }
        let message_ser = serde_json::to_vec(&message).unwrap();

        let settings: GraphSettings = serde_json::from_slice(&SETTINGS).unwrap();
        let mut reader = std::io::BufReader::new(SRS);
        let params: ParamsKZG<Bn256> =
            halo2_proofs::poly::commitment::Params::<'_, G1Affine>::read(&mut reader).unwrap();
        let mut reader = std::io::BufReader::new(VK);
        let vk = VerifyingKey::<G1Affine>::read::<_, GraphCircuit>(
            &mut reader,
            halo2_proofs::SerdeFormat::RawBytes,
            settings.clone(),
        )
        .unwrap();
        let commitment_ser = kzgCommit(
            wasm_bindgen::Clamped(message_ser),
            wasm_bindgen::Clamped(VK.to_vec()),
            wasm_bindgen::Clamped(SETTINGS.to_vec()),
            wasm_bindgen::Clamped(SRS.to_vec()),
        )
        .map_err(|_| "failed")
        .unwrap();
        let commitment: Vec<halo2curves::bn256::G1Affine> =
            serde_json::from_slice(&commitment_ser[..]).unwrap();
        let reference_commitment = PolyCommitChip::commit::<KZGCommitmentScheme<Bn256>>(
            message,
            (vk.cs().blinding_factors() + 1) as u32,
            &params,
        );

        assert_eq!(commitment, reference_commitment);
    }

    #[wasm_bindgen_test]
    async fn verify_field_serialization_roundtrip() {
        for i in 0..32 {
            let field_element = Fr::from(i);
            let serialized = serde_json::to_vec(&field_element).unwrap();

            let clamped = wasm_bindgen::Clamped(serialized);
            let scale = 2;
            let floating_point = feltToFloat(clamped.clone(), scale)
                .map_err(|_| "failed")
                .unwrap();
            assert_eq!(floating_point, (i as f64) / 4.0);

            let integer: i64 =
                serde_json::from_slice(&feltToInt(clamped.clone()).map_err(|_| "failed").unwrap())
                    .unwrap();
            assert_eq!(integer, i as i64);

            let hex_string = format!("{:?}", field_element.clone());
            let returned_string: String = feltToBigEndian(clamped.clone())
                .map_err(|_| "failed")
                .unwrap();
            assert_eq!(hex_string, returned_string);
            let repr = serde_json::to_string(&field_element).unwrap();
            let little_endian_string: String = serde_json::from_str(&repr).unwrap();
            let returned_string: String =
                feltToLittleEndian(clamped).map_err(|_| "failed").unwrap();
            assert_eq!(little_endian_string, returned_string);
        }
    }

    #[wasm_bindgen_test]
    async fn verify_buffer_to_field_elements() {
        let string_high = String::from("high");
        let mut buffer = string_high.clone().into_bytes();
        let clamped = wasm_bindgen::Clamped(buffer.clone());

        let field_elements_ser = bufferToVecOfFelt(clamped).map_err(|_| "failed").unwrap();

        let field_elements: Vec<Fr> = serde_json::from_slice(&field_elements_ser[..]).unwrap();

        buffer.resize(16, 0);

        let reference_int = u8_array_to_u128_le(buffer.try_into().unwrap());

        let reference_field_element_high = PrimeField::from_u128(reference_int);

        assert_eq!(field_elements[0], reference_field_element_high);

        // length 16 string (divisible by 16 so doesn't need padding)
        let string_sample = String::from("a sample string!");
        let buffer = string_sample.clone().into_bytes();
        let clamped = wasm_bindgen::Clamped(buffer.clone());

        let field_elements_ser = bufferToVecOfFelt(clamped).map_err(|_| "failed").unwrap();

        let field_elements: Vec<Fr> = serde_json::from_slice(&field_elements_ser[..]).unwrap();

        let reference_int = u8_array_to_u128_le(buffer.try_into().unwrap());

        let reference_field_element_sample = PrimeField::from_u128(reference_int);

        assert_eq!(field_elements[0], reference_field_element_sample);

        let string_concat = string_sample + &string_high;

        let buffer = string_concat.into_bytes();
        let clamped = wasm_bindgen::Clamped(buffer.clone());

        let field_elements_ser = bufferToVecOfFelt(clamped).map_err(|_| "failed").unwrap();

        let field_elements: Vec<Fr> = serde_json::from_slice(&field_elements_ser[..]).unwrap();

        assert_eq!(field_elements[0], reference_field_element_sample);
        assert_eq!(field_elements[1], reference_field_element_high);
    }

    #[wasm_bindgen_test]
    async fn verify_hash() {
        let mut message: Vec<Fr> = vec![];
        for i in 0..32 {
            message.push(Fr::from(i as u64));
        }

        let message_ser = serde_json::to_vec(&message).unwrap();

        let hash = poseidonHash(wasm_bindgen::Clamped(message_ser))
            .map_err(|_| "failed")
            .unwrap();
        let hash: Vec<Vec<Fr>> = serde_json::from_slice(&hash[..]).unwrap();

        let reference_hash =
            PoseidonChip::<PoseidonSpec, POSEIDON_WIDTH, POSEIDON_RATE>::run(message.clone())
                .map_err(|_| "failed")
                .unwrap();

        assert_eq!(hash, reference_hash)
    }

    #[wasm_bindgen_test]
    async fn verify_gen_witness() {
        let witness = genWitness(
            wasm_bindgen::Clamped(NETWORK_COMPILED.to_vec()),
            wasm_bindgen::Clamped(INPUT.to_vec()),
        )
        .map_err(|_| "failed")
        .unwrap();

        let witness: GraphWitness = serde_json::from_slice(&witness[..]).unwrap();

        let reference_witness: GraphWitness = serde_json::from_slice(&WITNESS).unwrap();
        // should not fail
        assert_eq!(witness, reference_witness);
    }

    #[wasm_bindgen_test]
    async fn gen_pk_test() {
        let vk = genVk(
            wasm_bindgen::Clamped(NETWORK_COMPILED.to_vec()),
            wasm_bindgen::Clamped(SRS.to_vec()),
            true,
        )
        .map_err(|_| "failed")
        .unwrap();

        let pk = genPk(
            wasm_bindgen::Clamped(vk),
            wasm_bindgen::Clamped(NETWORK_COMPILED.to_vec()),
            wasm_bindgen::Clamped(SRS.to_vec()),
        )
        .map_err(|_| "failed")
        .unwrap();

        assert!(pk.len() > 0);
    }

    #[wasm_bindgen_test]
    async fn gen_vk_test() {
        let vk = genVk(
            wasm_bindgen::Clamped(NETWORK_COMPILED.to_vec()),
            wasm_bindgen::Clamped(SRS.to_vec()),
            true,
        )
        .map_err(|_| "failed")
        .unwrap();

        assert!(vk.len() > 0);
    }

    #[wasm_bindgen_test]
    async fn pk_is_valid_test() {
        let vk = genVk(
            wasm_bindgen::Clamped(NETWORK_COMPILED.to_vec()),
            wasm_bindgen::Clamped(SRS.to_vec()),
            true,
        )
        .map_err(|_| "failed")
        .unwrap();

        let pk = genPk(
            wasm_bindgen::Clamped(vk.clone()),
            wasm_bindgen::Clamped(NETWORK_COMPILED.to_vec()),
            wasm_bindgen::Clamped(SRS.to_vec()),
        )
        .map_err(|_| "failed")
        .unwrap();

        // prove
        let proof = prove(
            wasm_bindgen::Clamped(WITNESS.to_vec()),
            wasm_bindgen::Clamped(pk.clone()),
            wasm_bindgen::Clamped(NETWORK_COMPILED.to_vec()),
            wasm_bindgen::Clamped(SRS.to_vec()),
        )
        .map_err(|_| "failed")
        .unwrap();

        assert!(proof.len() > 0);

        let value = verify(
            wasm_bindgen::Clamped(proof.to_vec()),
            wasm_bindgen::Clamped(vk),
            wasm_bindgen::Clamped(SETTINGS.to_vec()),
            wasm_bindgen::Clamped(SRS.to_vec()),
        )
        .map_err(|_| "failed")
        .unwrap();

        // should not fail
        assert!(value);
    }

    #[wasm_bindgen_test]
    async fn verify_validations() {
        // Run witness validation on network (should fail)
        let witness = witnessValidation(wasm_bindgen::Clamped(NETWORK_COMPILED.to_vec()));
        assert!(witness.is_err());
        // Run witness validation on witness (should pass)
        let witness = witnessValidation(wasm_bindgen::Clamped(WITNESS.to_vec()));
        assert!(witness.is_ok());
        // Run compiled circuit validation on onnx network (should fail)
        let circuit = compiledCircuitValidation(wasm_bindgen::Clamped(NETWORK.to_vec()));
        assert!(circuit.is_err());
        // Run compiled circuit validation on compiled network (should pass)
        let circuit = compiledCircuitValidation(wasm_bindgen::Clamped(NETWORK_COMPILED.to_vec()));
        assert!(circuit.is_ok());
        // Run input validation on witness (should fail)
        let input = inputValidation(wasm_bindgen::Clamped(WITNESS.to_vec()));
        assert!(input.is_err());
        // Run input validation on input (should pass)
        let input = inputValidation(wasm_bindgen::Clamped(INPUT.to_vec()));
        assert!(input.is_ok());
        // Run proof validation on witness (should fail)
        let proof = proofValidation(wasm_bindgen::Clamped(WITNESS.to_vec()));
        assert!(proof.is_err());
        // Run proof validation on proof (should pass)
        let proof = proofValidation(wasm_bindgen::Clamped(PROOF.to_vec()));
        assert!(proof.is_ok());
        // // Run vk validation on SRS (should fail)
        // let vk = vkValidation(
        //     wasm_bindgen::Clamped(SRS.to_vec()),
        //     wasm_bindgen::Clamped(SETTINGS.to_vec())
        // );
        // assert!(vk.is_err());

        // Run vk validation on vk (should pass)
        let vk = vkValidation(
            wasm_bindgen::Clamped(VK.to_vec()),
            wasm_bindgen::Clamped(SETTINGS.to_vec()),
        );
        assert!(vk.is_ok());
        // // Run pk validation on vk (should fail)
        // let pk = pkValidation(
        //     wasm_bindgen::Clamped(VK.to_vec()),
        //     wasm_bindgen::Clamped(SETTINGS.to_vec())
        // );
        // assert!(pk.is_err());
        // Run pk validation on pk (should pass)
        let pk = pkValidation(
            wasm_bindgen::Clamped(PK.to_vec()),
            wasm_bindgen::Clamped(SETTINGS.to_vec()),
        );

        assert!(pk.is_ok());
        // Run settings validation on proof (should fail)
        let settings = settingsValidation(wasm_bindgen::Clamped(PROOF.to_vec()));
        assert!(settings.is_err());
        // Run settings validation on settings (should pass)
        let settings = settingsValidation(wasm_bindgen::Clamped(SETTINGS.to_vec()));
        assert!(settings.is_ok());
        // // Run srs validation on vk (should fail)
        // let srs = srsValidation(
        //     wasm_bindgen::Clamped(VK.to_vec())
        // );
        // assert!(srs.is_err());
        // Run srs validation on srs (should pass)
        let srs = srsValidation(wasm_bindgen::Clamped(SRS.to_vec()));
        assert!(srs.is_ok());
    }
}
@@ -1,80 +0,0 @@
import {
  serialize,
  deserialize
} from './utils';
import * as wasmFunctions from './nodejs/ezkl'
import { compileContracts } from './utils'
import * as fs from 'fs'

exports.EXAMPLE = require("minimist")(process.argv.slice(2))["example"];
exports.PATH = require("minimist")(process.argv.slice(2))["dir"];
exports.VK = require("minimist")(process.argv.slice(2))["vk"];

describe('localEVMVerify', () => {

  let bytecode_verifier_buffer: Uint8Array

  let bytecode_vk_buffer: Uint8Array | undefined = undefined

  let proof: any

  const example = exports.EXAMPLE || "1l_mlp"
  const path = exports.PATH || "../ezkl/examples/onnx"
  const vk = exports.VK || false

  beforeEach(() => {
    const solcOutput = compileContracts(path, example, 'kzg')

    let bytecode_verifier =
      solcOutput.contracts['artifacts/Verifier.sol']['Halo2Verifier'].evm.bytecode
        .object
    bytecode_verifier_buffer = new TextEncoder().encode(bytecode_verifier)

    if (vk) {
      const solcOutput_vk = compileContracts(path, example, 'vk')

      let bytecode_vk =
        solcOutput_vk.contracts['artifacts/Verifier.sol']['Halo2VerifyingKey'].evm.bytecode
          .object
      bytecode_vk_buffer = new TextEncoder().encode(bytecode_vk)

      console.log('size of verifier bytecode', bytecode_verifier.length)
    }
    console.log('verifier bytecode', bytecode_verifier)
  })

  it('should return true when verification succeeds', async () => {
    const proofFileBuffer = fs.readFileSync(`${path}/${example}/proof.pf`)
    const proofSer = new Uint8ClampedArray(proofFileBuffer.buffer)

    proof = deserialize(proofSer)

    const result = wasmFunctions.verifyEVM(proofSer, bytecode_verifier_buffer, bytecode_vk_buffer)

    console.log('result', result)

    expect(result).toBe(true)
  })

  it('should fail to verify faulty proofs', async () => {
    let result: boolean = true
    console.log(proof.proof)
    try {
      let index = Math.round((Math.random() * (proof.proof.length))) % proof.proof.length
      console.log('index', index)
      console.log('index', proof.proof[index])
      let number = (proof.proof[index] + 1) % 256
      console.log('index', index)
      console.log('new number', number)
      proof.proof[index] = number
      console.log('index post', proof.proof[index])
      const proofModified = serialize(proof)
      result = wasmFunctions.verifyEVM(proofModified, bytecode_verifier_buffer, bytecode_vk_buffer)
    } catch (error) {
      result = false
    }
    expect(result).toBe(false)
  })
})
@@ -1,75 +0,0 @@
import * as wasmFunctions from './nodejs/ezkl'
import {
  readEzklArtifactsFile,
  readEzklSrsFile,
  serialize,
  deserialize
} from './utils';
import fs from 'fs';
exports.USER_NAME = require("minimist")(process.argv.slice(2))["example"];
exports.PATH = require("minimist")(process.argv.slice(2))["dir"];

const timingData: {
  example: string,
  proveTime: number,
  verifyTime: number,
  verifyResult: boolean | undefined
}[] = [];


describe('Generate witness, prove and verify', () => {

  let proof_ser: Uint8ClampedArray
  let proof_ser_ref: Uint8ClampedArray
  let circuit_settings_ser: Uint8ClampedArray;
  let params_ser: Uint8ClampedArray;

  let proveTime = 0;
  let verifyTime = 0;
  let verifyResult: boolean | undefined = false;

  const example = exports.USER_NAME || "1l_mlp"
  const path = exports.PATH || "../ezkl/examples/onnx"

  it('prove', async () => {
    let result
    let witness = await readEzklArtifactsFile(path, example, 'witness.json');
    let pk = await readEzklArtifactsFile(path, example, 'key.pk');
    let circuit_ser = await readEzklArtifactsFile(path, example, 'network.compiled');
    circuit_settings_ser = await readEzklArtifactsFile(path, example, 'settings.json');
    // get the log rows from the circuit settings
    const circuit_settings = deserialize(circuit_settings_ser) as any;
    const logrows = circuit_settings.run_args.logrows as string;
    params_ser = await readEzklSrsFile(logrows);
    const startTimeProve = Date.now();
    result = wasmFunctions.prove(witness, pk, circuit_ser, params_ser);
    const endTimeProve = Date.now();
    proof_ser = new Uint8ClampedArray(result.buffer);
    // test serialization/deserialization methods
    const proof = deserialize(proof_ser);
    proof_ser_ref = serialize(proof);
    proveTime = endTimeProve - startTimeProve;
    expect(result).toBeInstanceOf(Uint8Array);
  });

  it('verify', async () => {
    let result
    const vk = await readEzklArtifactsFile(path, example, 'key.vk');
    const startTimeVerify = Date.now();
    params_ser = await readEzklSrsFile("1");
    result = wasmFunctions.verify(proof_ser, vk, circuit_settings_ser, params_ser);
    const result_ref = wasmFunctions.verify(proof_ser_ref, vk, circuit_settings_ser, params_ser);
    const endTimeVerify = Date.now();
    verifyTime = endTimeVerify - startTimeVerify;
    verifyResult = result;
    // test serialization/deserialization methods
    expect(typeof result).toBe('boolean');
    expect(result).toBe(true);
    expect(result_ref).toBe(true);
  });

  afterAll(() => {
    fs.writeFileSync('timingData.json', JSON.stringify(timingData, null, 2));
  });
});
@@ -1,28 +0,0 @@
{
  "compilerOptions": {
    "target": "es5",
    "lib": ["dom", "dom.iterable", "esnext"],
    "allowJs": true,
    "skipLibCheck": true,
    "strict": true,
    "forceConsistentCasingInFileNames": true,
    "noEmit": true,
    "esModuleInterop": true,
    "module": "esnext",
    "moduleResolution": "bundler",
    "resolveJsonModule": true,
    "isolatedModules": true,
    "jsx": "preserve",
    "incremental": true,
    "plugins": [
      {
        "name": "next"
      }
    ],
    "paths": {
      "@/*": ["./*"]
    }
  },
  "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts", "jest.config.js"],
  "exclude": ["node_modules"]
}
@@ -1,94 +0,0 @@
import * as fs from 'fs/promises';
import * as fsSync from 'fs'
import JSONBig from 'json-bigint';
const solc = require('solc');

// import os module
const os = require('os');

// resolve the user's home directory (used below to locate the local SRS cache)
const userHomeDir = os.homedir();

export async function readEzklArtifactsFile(path: string, example: string, filename: string): Promise<Uint8ClampedArray> {
  //const filePath = path.join(__dirname, '..', '..', 'ezkl', 'examples', 'onnx', example, filename);
  const filePath = `${path}/${example}/${filename}`
  const buffer = await fs.readFile(filePath);
  return new Uint8ClampedArray(buffer.buffer);
}

export async function readEzklSrsFile(logrows: string): Promise<Uint8ClampedArray> {
  const filePath = `${userHomeDir}/.ezkl/srs/kzg${logrows}.srs`
  const buffer = await fs.readFile(filePath);
  return new Uint8ClampedArray(buffer.buffer);
}

export function deserialize(buffer: Uint8Array | Uint8ClampedArray): object { // buffer is a Uint8ClampedArray | Uint8Array // return a JSON object
  if (buffer instanceof Uint8ClampedArray) {
    buffer = new Uint8Array(buffer.buffer);
  }
  const string = new TextDecoder().decode(buffer);
  const jsonObject = JSONBig.parse(string);
  return jsonObject;
}

export function serialize(data: object | string): Uint8ClampedArray { // data is an object // return a Uint8ClampedArray
  // Step 1: Stringify the Object with BigInt support
  if (typeof data === "object") {
    data = JSONBig.stringify(data);
  }
  // Step 2: Encode the JSON String
  const uint8Array = new TextEncoder().encode(data as string);

  // Step 3: Convert to Uint8ClampedArray
  return new Uint8ClampedArray(uint8Array.buffer);
}

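// Example round-trip (illustrative usage note, not part of the original file):
//   const proof = deserialize(proofSer);   // Uint8ClampedArray -> JS object (json-bigint aware)
//   const proofSer2 = serialize(proof);    // JS object -> Uint8ClampedArray
// This is exactly how the prove/verify spec above exercises the pair.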
export function getSolcInput(path: string, example: string, name: string) {
  return {
    language: 'Solidity',
    sources: {
      'artifacts/Verifier.sol': {
        content: fsSync.readFileSync(`${path}/${example}/${name}.sol`, 'utf-8'),
      },
      // If more contracts were to be compiled, they should have their own entries here
    },
    settings: {
      optimizer: {
        enabled: true,
        runs: 1,
      },
      evmVersion: 'shanghai',
      outputSelection: {
        '*': {
          '*': ['abi', 'evm.bytecode'],
        },
      },
    },
  }
}

export function compileContracts(path: string, example: string, name: string) {
  const input = getSolcInput(path, example, name)
  const output = JSON.parse(solc.compile(JSON.stringify(input)))

  let compilationFailed = false

  if (output.errors) {
    for (const error of output.errors) {
      if (error.severity === 'error') {
        console.error(error.formattedMessage)
        compilationFailed = true
      } else {
        console.warn(error.formattedMessage)
      }
    }
  }

  if (compilationFailed) {
    return undefined
  }

  return output
}

@@ -1,28 +0,0 @@
{
  "compilerOptions": {
    "target": "es5",
    "lib": ["dom", "dom.iterable", "esnext"],
    "allowJs": true,
    "skipLibCheck": true,
    "strict": true,
    "forceConsistentCasingInFileNames": true,
    "noEmit": true,
    "esModuleInterop": true,
    "module": "esnext",
    "moduleResolution": "bundler",
    "resolveJsonModule": true,
    "isolatedModules": true,
    "jsx": "preserve",
    "incremental": true,
    "plugins": [
      {
        "name": "next"
      }
    ],
    "paths": {
      "@/*": ["./*"]
    }
  },
  "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts", "jest.config.js"],
  "exclude": ["node_modules"]
}