Mirror of https://github.com/zkonduit/ezkl.git (synced 2026-01-13 08:17:57 -05:00)

Compare commits
21 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | 3bfde7ac98 |  |
|  | 33d41c7e49 |  |
|  | e04c959662 |  |
|  | 27b1f2e9d4 |  |
|  | 4a172877af |  |
|  | 5a8498894d |  |
|  | 095c0ca5b4 |  |
|  | 3fa482c9ef |  |
|  | 6be3b1d663 |  |
|  | d5a1d1439c |  |
|  | ff8fd01f86 |  |
|  | e9020f942e |  |
|  | e7f54cb6ac |  |
|  | ed65e8c090 |  |
|  | d9f2adad99 |  |
|  | 5125aaa090 |  |
|  | f1950e6cd0 |  |
|  | 998ca22c2a |  |
|  | 5c574adc31 |  |
|  | 749e0ba652 |  |
|  | d464ddf6b6 |  |
55  .github/workflows/engine.yml (vendored)

@@ -178,3 +178,58 @@ jobs:
          npm publish
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

  in-browser-evm-ver-publish:
    name: publish-in-browser-evm-verifier-package
    needs: [publish-wasm-bindings]
    runs-on: ubuntu-latest
    if: startsWith(github.ref, 'refs/tags/')
    steps:
      - uses: actions/checkout@v4
      - name: Update version in package.json
        shell: bash
        env:
          RELEASE_TAG: ${{ github.ref_name }}
        run: |
          sed -i "s|\"version\": \".*\"|\"version\": \"${{ github.ref_name }}\"|" in-browser-evm-verifier/package.json
      - name: Prepare tag and fetch package integrity
        run: |
          CLEANED_TAG=${{ github.ref_name }} # Get the tag from ref_name
          CLEANED_TAG="${CLEANED_TAG#v}" # Remove leading 'v'
          echo "CLEANED_TAG=${CLEANED_TAG}" >> $GITHUB_ENV # Set it as an environment variable for later steps
          ENGINE_INTEGRITY=$(npm view @ezkljs/engine@$CLEANED_TAG dist.integrity)
          echo "ENGINE_INTEGRITY=$ENGINE_INTEGRITY" >> $GITHUB_ENV
      - name: Update @ezkljs/engine version in package.json
        shell: bash
        env:
          RELEASE_TAG: ${{ github.ref_name }}
        run: |
          sed -i "s|\"@ezkljs/engine\": \".*\"|\"@ezkljs/engine\": \"$CLEANED_TAG\"|" in-browser-evm-verifier/package.json
      - name: Update the engine import in in-browser-evm-verifier to use @ezkljs/engine package instead of the local one;
        run: |
          sed -i "s|import { encodeVerifierCalldata } from '../nodejs/ezkl';|import { encodeVerifierCalldata } from '@ezkljs/engine';|" in-browser-evm-verifier/src/index.ts
      - name: Update pnpm-lock.yaml versions and integrity
        run: |
          awk -v integrity="$ENGINE_INTEGRITY" -v tag="$CLEANED_TAG" '
          NR==30{$0=" specifier: \"" tag "\""}
          NR==31{$0=" version: \"" tag "\""}
          NR==400{$0=" /@ezkljs/engine@" tag ":"}
          NR==401{$0=" resolution: {integrity: \"" integrity "\"}"} 1' in-browser-evm-verifier/pnpm-lock.yaml > temp.yaml && mv temp.yaml in-browser-evm-verifier/pnpm-lock.yaml
      - name: Use pnpm 8
        uses: pnpm/action-setup@v2
        with:
          version: 8
      - name: Set up Node.js
        uses: actions/setup-node@v3
        with:
          node-version: "18.12.1"
          registry-url: "https://registry.npmjs.org"
      - name: Publish to npm
        run: |
          cd in-browser-evm-verifier
          pnpm install --frozen-lockfile
          pnpm run build
          pnpm publish --no-git-checks
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
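For reference, a minimal Python sketch (not part of the workflow) of what the tag-cleanup and lockfile-pinning shell steps above do. The tag value and the integrity string are hypothetical placeholders; in CI they come from github.ref_name and from `npm view @ezkljs/engine@<tag> dist.integrity`.

    from pathlib import Path

    ref_name = "v11.2.3"                         # hypothetical stand-in for ${{ github.ref_name }}
    cleaned_tag = ref_name.removeprefix("v")     # CLEANED_TAG="${CLEANED_TAG#v}"
    integrity = "sha512-placeholder"             # fetched via `npm view ... dist.integrity` in CI

    # The awk step pins the @ezkljs/engine entry by rewriting fixed line numbers
    # (30, 31, 400, 401) of the committed pnpm-lock.yaml.
    lock = Path("in-browser-evm-verifier/pnpm-lock.yaml")
    lines = lock.read_text().splitlines()
    lines[29] = f' specifier: "{cleaned_tag}"'
    lines[30] = f' version: "{cleaned_tag}"'
    lines[399] = f' /@ezkljs/engine@{cleaned_tag}:'
    lines[400] = f' resolution: {{integrity: "{integrity}"}}'
    lock.write_text("\n".join(lines) + "\n")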
7  .github/workflows/pypi.yml (vendored)

@@ -40,7 +40,7 @@ jobs:

      - uses: actions-rs/toolchain@v1
        with:
          toolchain: nightly-2023-06-27
          toolchain: nightly-2024-02-06
          override: true
          components: rustfmt, clippy

@@ -50,6 +50,7 @@ jobs:
          target: ${{ matrix.target }}
          args: --release --out dist --features python-bindings
      - name: Install built wheel
        if: matrix.target == 'universal2-apple-darwin'
        run: |
          pip install ezkl --no-index --find-links dist --force-reinstall
          python -c "import ezkl"

@@ -85,7 +86,7 @@ jobs:

      - uses: actions-rs/toolchain@v1
        with:
          toolchain: nightly-2023-06-27
          toolchain: nightly-2024-02-06
          override: true
          components: rustfmt, clippy

@@ -110,7 +111,7 @@ jobs:
    if: startsWith(github.ref, 'refs/tags/')
    strategy:
      matrix:
        target: [x86_64, i686]
        target: [x86_64]
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
18  .github/workflows/release.yml (vendored)

@@ -102,28 +102,32 @@ jobs:
      PCRE2_SYS_STATIC: 1
    strategy:
      matrix:
        build: [windows-msvc, macos, macos-aarch64, linux-musl, linux-gnu]
        build: [windows-msvc, macos, macos-aarch64, linux-musl, linux-gnu, linux-aarch64]
        include:
          - build: windows-msvc
            os: windows-latest
            rust: nightly-2023-06-27
            rust: nightly-2024-02-06
            target: x86_64-pc-windows-msvc
          - build: macos
            os: macos-13
            rust: nightly-2023-06-27
            rust: nightly-2024-02-06
            target: x86_64-apple-darwin
          - build: macos-aarch64
            os: macos-13
            rust: nightly-2023-06-27
            rust: nightly-2024-02-06
            target: aarch64-apple-darwin
          - build: linux-musl
            os: ubuntu-22.04
            rust: nightly-2023-06-27
            rust: nightly-2024-02-06
            target: x86_64-unknown-linux-musl
          - build: linux-gnu
            os: ubuntu-22.04
            rust: nightly-2023-06-27
            rust: nightly-2024-02-06
            target: x86_64-unknown-linux-gnu
          - build: linux-aarch64
            os: ubuntu-22.04
            rust: nightly-2024-02-06
            target: aarch64-unknown-linux-gnu

    steps:
      - name: Checkout repo

@@ -181,7 +185,7 @@ jobs:
        run: ${{ env.CARGO }} build --release ${{ env.TARGET_FLAGS }} -Z sparse-registry

      - name: Strip release binary
        if: matrix.build != 'windows-msvc'
        if: matrix.build != 'windows-msvc' && matrix.build != 'linux-aarch64'
        run: strip "target/${{ matrix.target }}/release/ezkl"

      - name: Strip release binary (Windows)
2  .github/workflows/rust.yml (vendored)

@@ -574,7 +574,7 @@ jobs:
      - name: Build python ezkl
        run: source .env/bin/activate; unset CONDA_PREFIX; maturin develop --features python-bindings --release
      - name: Run pytest
        run: source .env/bin/activate; pytest -vv
        run: source .env/bin/activate; pip install pytest-asyncio; pytest -vv

  accuracy-measurement-tests:
    runs-on: ubuntu-latest-32-cores
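The added `pip install pytest-asyncio` reflects that several of the ezkl Python bindings exercised by the test suite are now awaitable (see the notebook changes further down in this comparison). A hypothetical test in that style, assuming a network.onnx model and an input.json already exist on disk, might look like:

    import ezkl
    import pytest

    @pytest.mark.asyncio
    async def test_calibrate_settings():
        # gen_settings stays synchronous; calibrate_settings and get_srs are awaited,
        # matching the updated notebooks in this changeset.
        assert ezkl.gen_settings("network.onnx", "settings.json") == True
        assert await ezkl.calibrate_settings("input.json", "network.onnx", "settings.json", "resources") == True
        await ezkl.get_srs("settings.json")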
65  .github/workflows/verify.yml (vendored)

@@ -1,65 +0,0 @@
name: Build and Publish EZKL npm packages (wasm bindings and in-browser evm verifier)

on:
  workflow_dispatch:
    inputs:
      tag:
        description: "The tag to release"
        required: true
  push:
    tags:
      - "*"

defaults:
  run:
    working-directory: .
jobs:
  in-browser-evm-ver-publish:
    name: publish-in-browser-evm-verifier-package
    runs-on: ubuntu-latest
    if: startsWith(github.ref, 'refs/tags/')
    steps:
      - uses: actions/checkout@v4
      - name: Update version in package.json
        shell: bash
        env:
          RELEASE_TAG: ${{ github.ref_name }}
        run: |
          sed -i "s|\"version\": \".*\"|\"version\": \"${{ github.ref_name }}\"|" in-browser-evm-verifier/package.json
      - name: Update @ezkljs/engine version in package.json
        shell: bash
        env:
          RELEASE_TAG: ${{ github.ref_name }}
        run: |
          sed -i "s|\"@ezkljs/engine\": \".*\"|\"@ezkljs/engine\": \"${{ github.ref_name#v }}\"|" in-browser-evm-verifier/package.json
      - name: Update the engine import in in-browser-evm-verifier to use @ezkljs/engine package instead of the local one;
        run: |
          sed -i "s|import { encodeVerifierCalldata } from '../nodejs/ezkl';|import { encodeVerifierCalldata } from '@ezkljs/engine';|" in-browser-evm-verifier/src/index.ts
      - name: Fetch integrity
        run: |
          ENGINE_INTEGRITY=$(npm view @ezkljs/engine@${{ github.ref_name#v }} dist.integrity)
          echo "ENGINE_INTEGRITY=$ENGINE_INTEGRITY" >> $GITHUB_ENV
      - name: Update pnpm-lock.yaml versions and integrity
        run: |
          awk -v integrity="$ENGINE_INTEGRITY" -v tag="${{ github.ref_name#v }}" '
          NR==30{$0=" specifier: \"" tag "\""}
          NR==31{$0=" version: \"" tag "\""}
          NR==400{$0=" /@ezkljs/engine@" tag ":"}
          NR==401{$0=" resolution: {integrity: \"" integrity "\"}"} 1' in-browser-evm-verifier/pnpm-lock.yaml > temp.yaml && mv temp.yaml in-browser-evm-verifier/pnpm-lock.yaml
      - name: Use pnpm 8
        uses: pnpm/action-setup@v2
        with:
          version: 8
      - name: Set up Node.js
        uses: actions/setup-node@v3
        with:
          node-version: "18.12.1"
          registry-url: "https://registry.npmjs.org"
      - name: Publish to npm
        run: |
          cd in-browser-evm-verifier
          pnpm install --frozen-lockfile
          pnpm run build
          pnpm publish
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
1809  Cargo.lock (generated)
File diff suppressed because it is too large.

25  Cargo.toml
@@ -23,6 +23,7 @@ halo2curves = { git = "https://github.com/privacy-scaling-explorations/halo2curv
rand = { version = "0.8", default_features = false }
itertools = { version = "0.10.3", default_features = false }
clap = { version = "4.5.3", features = ["derive"] }
clap_complete = "4.5.2"
serde = { version = "1.0.126", features = ["derive"], optional = true }
serde_json = { version = "1.0.97", default_features = false, features = [
"float_roundtrip",

@@ -45,42 +46,42 @@ portable-atomic = "1.6.0"
tosubcommand = { git = "https://github.com/zkonduit/enum_to_subcommand", package = "tosubcommand" }
metal = { git = "https://github.com/gfx-rs/metal-rs", optional = true }


# evm related deps
[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
ethers = { version = "2.0.11", default_features = false, features = [
"ethers-solc",
] }
alloy = { git = "https://github.com/alloy-rs/alloy", version = "0.1.0", rev="5fbf57bac99edef9d8475190109a7ea9fb7e5e83", features = ["provider-http", "signers", "contract", "rpc-types-eth", "signer-wallet", "node-bindings"] }
foundry-compilers = {version = "0.4.1", features = ["svm-solc"]}
ethabi = "18"
indicatif = { version = "0.17.5", features = ["rayon"] }
gag = { version = "1.0.0", default_features = false }
instant = { version = "0.1" }
reqwest = { version = "0.11.14", default-features = false, features = [
reqwest = { version = "0.12.4", default-features = false, features = [
"default-tls",
"multipart",
"stream",
] }
openssl = { version = "0.10.55", features = ["vendored"] }
postgres = "0.19.5"
tokio-postgres = "0.7.10"
pg_bigdecimal = "0.1.5"
futures-util = "0.3.30"
lazy_static = "1.4.0"
colored_json = { version = "3.0.1", default_features = false, optional = true }
plotters = { version = "0.3.0", default_features = false, optional = true }
regex = { version = "1", default_features = false }
tokio = { version = "1.26.0", default_features = false, features = [
tokio = { version = "1.35", default_features = false, features = [
"macros",
"rt",
"rt-multi-thread"
] }
tokio-util = { version = "0.7.9", features = ["codec"] }
pyo3 = { version = "0.20.2", features = [
pyo3 = { version = "0.21.2", features = [
"extension-module",
"abi3-py37",
"macros",
], default_features = false, optional = true }
pyo3-asyncio = { version = "0.20.0", features = [
pyo3-asyncio = { git = "https://github.com/jopemachine/pyo3-asyncio/", branch="migration-pyo3-0.21", features = [
"attributes",
"tokio-runtime",
], default_features = false, optional = true }
pyo3-log = { version = "0.9.0", default_features = false, optional = true }
pyo3-log = { version = "0.10.0", default_features = false, optional = true }
tract-onnx = { git = "https://github.com/sonos/tract/", rev = "05ebf550aa9922b221af4635c21a67a8d2af12a9", default_features = false, optional = true }
tabled = { version = "0.12.0", optional = true }

@@ -178,7 +179,7 @@ required-features = ["ezkl"]

[features]
web = ["wasm-bindgen-rayon"]
default = ["ezkl", "mv-lookup"]
default = ["ezkl", "mv-lookup", "no-banner"]
onnx = ["dep:tract-onnx"]
python-bindings = ["pyo3", "pyo3-log", "pyo3-asyncio"]
ezkl = [
@@ -91,9 +91,9 @@ You can install the library from source
cargo install --locked --path .
```

You will need a functioning installation of `solc` in order to run `ezkl` properly.
[solc-select](https://github.com/crytic/solc-select) is recommended.
Follow the instructions on [solc-select](https://github.com/crytic/solc-select) to activate `solc` in your environment.
`ezkl` now auto-manages solc installation for you.


#### building python bindings
@@ -1,6 +1,97 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.20;
import './LoadInstances.sol';
contract LoadInstances {
    /**
     * @dev Parse the instances array from the Halo2Verifier encoded calldata.
     * @notice must pass encoded bytes from memory
     * @param encoded - verifier calldata
     */
    function getInstancesMemory(
        bytes memory encoded
    ) internal pure returns (uint256[] memory instances) {
        bytes4 funcSig;
        uint256 instances_offset;
        uint256 instances_length;
        assembly {
            // fetch function sig. Either `verifyProof(bytes,uint256[])` or `verifyProof(address,bytes,uint256[])`
            funcSig := mload(add(encoded, 0x20))

            // Fetch instances offset which is 4 + 32 + 32 bytes away from
            // start of encoded for `verifyProof(bytes,uint256[])`,
            // and 4 + 32 + 32 +32 away for `verifyProof(address,bytes,uint256[])`

            instances_offset := mload(
                add(encoded, add(0x44, mul(0x20, eq(funcSig, 0xaf83a18d))))
            )

            instances_length := mload(add(add(encoded, 0x24), instances_offset))
        }
        instances = new uint256[](instances_length); // Allocate memory for the instances array.
        assembly {
            // Now instances points to the start of the array data
            // (right after the length field).
            for {
                let i := 0x20
            } lt(i, add(mul(instances_length, 0x20), 0x20)) {
                i := add(i, 0x20)
            } {
                mstore(
                    add(instances, i),
                    mload(add(add(encoded, add(i, 0x24)), instances_offset))
                )
            }
        }
    }
    /**
     * @dev Parse the instances array from the Halo2Verifier encoded calldata.
     * @notice must pass encoded bytes from calldata
     * @param encoded - verifier calldata
     */
    function getInstancesCalldata(
        bytes calldata encoded
    ) internal pure returns (uint256[] memory instances) {
        bytes4 funcSig;
        uint256 instances_offset;
        uint256 instances_length;
        assembly {
            // fetch function sig. Either `verifyProof(bytes,uint256[])` or `verifyProof(address,bytes,uint256[])`
            funcSig := calldataload(encoded.offset)

            // Fetch instances offset which is 4 + 32 + 32 bytes away from
            // start of encoded for `verifyProof(bytes,uint256[])`,
            // and 4 + 32 + 32 +32 away for `verifyProof(address,bytes,uint256[])`

            instances_offset := calldataload(
                add(
                    encoded.offset,
                    add(0x24, mul(0x20, eq(funcSig, 0xaf83a18d)))
                )
            )

            instances_length := calldataload(
                add(add(encoded.offset, 0x04), instances_offset)
            )
        }
        instances = new uint256[](instances_length); // Allocate memory for the instances array.
        assembly {
            // Now instances points to the start of the array data
            // (right after the length field).

            for {
                let i := 0x20
            } lt(i, add(mul(instances_length, 0x20), 0x20)) {
                i := add(i, 0x20)
            } {
                mstore(
                    add(instances, i),
                    calldataload(
                        add(add(encoded.offset, add(i, 0x04)), instances_offset)
                    )
                )
            }
        }
    }
}

// This contract serves as a Data Attestation Verifier for the EZKL model.
// It is designed to read and attest to instances of proofs generated from a specified circuit.

@@ -34,11 +125,14 @@ contract DataAttestation is LoadInstances {
    address public admin;

    /**
     * @notice EZKL P value
     * @notice EZKL P value
     * @dev In order to prevent the verifier from accepting two version of the same pubInput, n and the quantity (n + P), where n + P <= 2^256, we require that all instances are stricly less than P. a
     * @dev The reason for this is that the assmebly code of the verifier performs all arithmetic operations modulo P and as a consequence can't distinguish between n and n + P.
     */
    uint256 constant ORDER = uint256(0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000001);
    uint256 constant ORDER =
        uint256(
            0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000001
        );

    uint256 constant INPUT_CALLS = 0;

@@ -69,7 +163,7 @@ contract DataAttestation is LoadInstances {
    function updateAdmin(address _admin) external {
        require(msg.sender == admin, "Only admin can update admin");
        if(_admin == address(0)) {
        if (_admin == address(0)) {
            revert();
        }
        admin = _admin;

@@ -80,7 +174,7 @@ contract DataAttestation is LoadInstances {
        bytes[][] memory _callData,
        uint256[][] memory _decimals
    ) external {
        require(msg.sender == admin, "Only admin can update instanceOffset");
        require(msg.sender == admin, "Only admin can update account calls");
        populateAccountCalls(_contractAddresses, _callData, _decimals);
    }

@@ -111,7 +205,10 @@ contract DataAttestation is LoadInstances {
            // count the total number of storage reads across all of the accounts
            counter += _callData[i].length;
        }
        require(counter == INPUT_CALLS + OUTPUT_CALLS, "Invalid number of calls");
        require(
            counter == INPUT_CALLS + OUTPUT_CALLS,
            "Invalid number of calls"
        );
    }

    function mulDiv(

@@ -167,7 +264,7 @@ contract DataAttestation is LoadInstances {
     * @dev Quantize the data returned from the account calls to the scale used by the EZKL model.
     * @param data - The data returned from the account calls.
     * @param decimals - The number of decimals the data returned from the account calls has (for floating point representation).
     * @param scale - The scale used to convert the floating point value into a fixed point value.
     * @param scale - The scale used to convert the floating point value into a fixed point value.
     */
    function quantizeData(
        bytes memory data,

@@ -181,7 +278,7 @@ contract DataAttestation is LoadInstances {
        if (mulmod(uint256(x), scale, decimals) * 2 >= decimals) {
            output += 1;
        }
        quantized_data = neg ? -int256(output): int256(output);
        quantized_data = neg ? -int256(output) : int256(output);
    }
    /**
     * @dev Make a static call to the account to fetch the data that EZKL reads from.

@@ -211,7 +308,9 @@ contract DataAttestation is LoadInstances {
     * @param x - The quantized data.
     * @return field_element - The field element.
     */
    function toFieldElement(int256 x) internal pure returns (uint256 field_element) {
    function toFieldElement(
        int256 x
    ) internal pure returns (uint256 field_element) {
        // The casting down to uint256 is safe because the order is about 2^254, and the value
        // of x ranges of -2^127 to 2^127, so x + int(ORDER) is always positive.
        return uint256(x + int(ORDER)) % ORDER;

@@ -251,12 +350,11 @@ contract DataAttestation is LoadInstances {
        }
    }


    function verifyWithDataAttestation(
        address verifier,
        bytes calldata encoded
    ) public view returns (bool) {
        require(verifier.code.length > 0,"Address: call to non-contract");
        require(verifier.code.length > 0, "Address: call to non-contract");
        attestData(getInstancesCalldata(encoded));
        // static call the verifier contract to verify the proof
        (bool success, bytes memory returndata) = verifier.staticcall(encoded);
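The quantizeData/toFieldElement pair above implements fixed-point quantization followed by a lift into the proving field. A rough Python sketch of the same arithmetic (mirroring the mulDiv result plus the mulmod-based round-half-up); the example values at the end are made up for illustration:

    # Field modulus used by the contract (the BN254 scalar field order).
    ORDER = 0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000001

    def quantize(x: int, decimals: int, scale: int) -> int:
        """Scale x by 2**scale / 10**decimals, rounding half up, preserving sign."""
        neg = x < 0
        if neg:
            x = -x
        denom = 10 ** decimals
        num = x * (1 << scale)
        output = num // denom                 # mulDiv(x, 1 << scale, denom)
        if (num % denom) * 2 >= denom:        # the mulmod rounding check in the contract
            output += 1
        return -output if neg else output

    def to_field_element(q: int) -> int:
        # Shift a signed quantized value into [0, ORDER); matches uint256(x + int(ORDER)) % ORDER.
        return (q + ORDER) % ORDER

    # e.g. an on-chain value of 1.5 stored with 18 decimals, quantized at scale 7: 1.5 * 2**7 = 192
    assert quantize(1_500_000_000_000_000_000, 18, 7) == 192
    assert to_field_element(-1) == ORDER - 1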
|
||||
@@ -1,92 +0,0 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.20;
contract LoadInstances {
    /**
     * @dev Parse the instances array from the Halo2Verifier encoded calldata.
     * @notice must pass encoded bytes from memory
     * @param encoded - verifier calldata
     */
    function getInstancesMemory(
        bytes memory encoded
    ) internal pure returns (uint256[] memory instances) {
        bytes4 funcSig;
        uint256 instances_offset;
        uint256 instances_length;
        assembly {
            // fetch function sig. Either `verifyProof(bytes,uint256[])` or `verifyProof(address,bytes,uint256[])`
            funcSig := mload(add(encoded, 0x20))

            // Fetch instances offset which is 4 + 32 + 32 bytes away from
            // start of encoded for `verifyProof(bytes,uint256[])`,
            // and 4 + 32 + 32 +32 away for `verifyProof(address,bytes,uint256[])`

            instances_offset := mload(
                add(encoded, add(0x44, mul(0x20, eq(funcSig, 0xaf83a18d))))
            )

            instances_length := mload(add(add(encoded, 0x24), instances_offset))
        }
        instances = new uint256[](instances_length); // Allocate memory for the instances array.
        assembly {
            // Now instances points to the start of the array data
            // (right after the length field).
            for {
                let i := 0x20
            } lt(i, add(mul(instances_length, 0x20), 0x20)) {
                i := add(i, 0x20)
            } {
                mstore(
                    add(instances, i),
                    mload(add(add(encoded, add(i, 0x24)), instances_offset))
                )
            }
        }
    }
    /**
     * @dev Parse the instances array from the Halo2Verifier encoded calldata.
     * @notice must pass encoded bytes from calldata
     * @param encoded - verifier calldata
     */
    function getInstancesCalldata(
        bytes calldata encoded
    ) internal pure returns (uint256[] memory instances) {
        bytes4 funcSig;
        uint256 instances_offset;
        uint256 instances_length;
        assembly {
            // fetch function sig. Either `verifyProof(bytes,uint256[])` or `verifyProof(address,bytes,uint256[])`
            funcSig := calldataload(encoded.offset)

            // Fetch instances offset which is 4 + 32 + 32 bytes away from
            // start of encoded for `verifyProof(bytes,uint256[])`,
            // and 4 + 32 + 32 +32 away for `verifyProof(address,bytes,uint256[])`

            instances_offset := calldataload(
                add(
                    encoded.offset,
                    add(0x24, mul(0x20, eq(funcSig, 0xaf83a18d)))
                )
            )

            instances_length := calldataload(add(add(encoded.offset, 0x04), instances_offset))
        }
        instances = new uint256[](instances_length); // Allocate memory for the instances array.
        assembly{
            // Now instances points to the start of the array data
            // (right after the length field).

            for {
                let i := 0x20
            } lt(i, add(mul(instances_length, 0x20), 0x20)) {
                i := add(i, 0x20)
            } {
                mstore(
                    add(instances, i),
                    calldataload(
                        add(add(encoded.offset, add(i, 0x04)), instances_offset)
                    )
                )
            }
        }
    }
}
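LoadInstances.sol is removed here because the same parsing now lives inline in the data attestation contract in the previous diff. The offset arithmetic it performs is plain ABI decoding; below is a small Python sketch of the layout it walks for `verifyProof(bytes,uint256[])`, assuming the third-party eth-abi (v4) and eth-utils packages are available; the proof and instance values are arbitrary.

    from eth_abi import encode
    from eth_utils import keccak

    proof = b"\x01" * 64
    instances = [7, 11, 13]
    selector = keccak(text="verifyProof(bytes,uint256[])")[:4]
    encoded = selector + encode(["bytes", "uint256[]"], [proof, instances])

    # The second head word (bytes 0x24..0x43 of the calldata) holds the offset of the
    # instances array, relative to the start of the arguments (just after the selector).
    instances_offset = int.from_bytes(encoded[0x24:0x44], "big")
    length_pos = 4 + instances_offset
    instances_length = int.from_bytes(encoded[length_pos:length_pos + 32], "big")
    assert instances_length == len(instances)
    # For the verifyProof(address,bytes,uint256[]) variant the head is one word longer,
    # which is what the eq(funcSig, 0xaf83a18d) term in the assembly accounts for.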
||||
@@ -1,135 +0,0 @@
// SPDX-License-Identifier: GPL-3.0

pragma solidity ^0.8.17;

contract QuantizeData {
    /**
     * @notice EZKL P value
     * @dev In order to prevent the verifier from accepting two version of the same instance, n and the quantity (n + P), where n + P <= 2^256, we require that all instances are stricly less than P. a
     * @dev The reason for this is that the assmebly code of the verifier performs all arithmetic operations modulo P and as a consequence can't distinguish between n and n + P.
     */
    uint256 constant ORDER =
        uint256(
            0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000001
        );

    /**
     * @notice Calculates floor(x * y / denominator) with full precision. Throws if result overflows a uint256 or denominator == 0
     * @dev Original credit to Remco Bloemen under MIT license (https://xn--2-umb.com/21/muldiv)
     * with further edits by Uniswap Labs also under MIT license.
     */
    function mulDiv(
        uint256 x,
        uint256 y,
        uint256 denominator
    ) internal pure returns (uint256 result) {
        unchecked {
            // 512-bit multiply [prod1 prod0] = x * y. Compute the product mod 2^256 and mod 2^256 - 1, then use
            // use the Chinese Remainder Theorem to reconstruct the 512 bit result. The result is stored in two 256
            // variables such that product = prod1 * 2^256 + prod0.
            uint256 prod0; // Least significant 256 bits of the product
            uint256 prod1; // Most significant 256 bits of the product
            assembly {
                let mm := mulmod(x, y, not(0))
                prod0 := mul(x, y)
                prod1 := sub(sub(mm, prod0), lt(mm, prod0))
            }

            // Handle non-overflow cases, 256 by 256 division.
            if (prod1 == 0) {
                // Solidity will revert if denominator == 0, unlike the div opcode on its own.
                // The surrounding unchecked block does not change this fact.
                // See https://docs.soliditylang.org/en/latest/control-structures.html#checked-or-unchecked-arithmetic.
                return prod0 / denominator;
            }

            // Make sure the result is less than 2^256. Also prevents denominator == 0.
            require(denominator > prod1, "Math: mulDiv overflow");

            ///////////////////////////////////////////////
            // 512 by 256 division.
            ///////////////////////////////////////////////

            // Make division exact by subtracting the remainder from [prod1 prod0].
            uint256 remainder;
            assembly {
                // Compute remainder using mulmod.
                remainder := mulmod(x, y, denominator)

                // Subtract 256 bit number from 512 bit number.
                prod1 := sub(prod1, gt(remainder, prod0))
                prod0 := sub(prod0, remainder)
            }

            // Factor powers of two out of denominator and compute largest power of two divisor of denominator. Always >= 1.
            // See https://cs.stackexchange.com/q/138556/92363.

            // Does not overflow because the denominator cannot be zero at this stage in the function.
            uint256 twos = denominator & (~denominator + 1);
            assembly {
                // Divide denominator by twos.
                denominator := div(denominator, twos)

                // Divide [prod1 prod0] by twos.
                prod0 := div(prod0, twos)

                // Flip twos such that it is 2^256 / twos. If twos is zero, then it becomes one.
                twos := add(div(sub(0, twos), twos), 1)
            }

            // Shift in bits from prod1 into prod0.
            prod0 |= prod1 * twos;

            // Invert denominator mod 2^256. Now that denominator is an odd number, it has an inverse modulo 2^256 such
            // that denominator * inv = 1 mod 2^256. Compute the inverse by starting with a seed that is correct for
            // four bits. That is, denominator * inv = 1 mod 2^4.
            uint256 inverse = (3 * denominator) ^ 2;

            // Use the Newton-Raphson iteration to improve the precision. Thanks to Hensel's lifting lemma, this also works
            // in modular arithmetic, doubling the correct bits in each step.
            inverse *= 2 - denominator * inverse; // inverse mod 2^8
            inverse *= 2 - denominator * inverse; // inverse mod 2^16
            inverse *= 2 - denominator * inverse; // inverse mod 2^32
            inverse *= 2 - denominator * inverse; // inverse mod 2^64
            inverse *= 2 - denominator * inverse; // inverse mod 2^128
            inverse *= 2 - denominator * inverse; // inverse mod 2^256

            // Because the division is now exact we can divide by multiplying with the modular inverse of denominator.
            // This will give us the correct result modulo 2^256. Since the preconditions guarantee that the outcome is
            // less than 2^256, this is the final result. We don't need to compute the high bits of the result and prod1
            // is no longer required.
            result = prod0 * inverse;
            return result;
        }
    }

    function quantize_data(
        bytes[] memory data,
        uint256[] memory decimals,
        uint256[] memory scales
    ) external pure returns (int256[] memory quantized_data) {
        quantized_data = new int256[](data.length);
        for (uint i; i < data.length; i++) {
            int x = abi.decode(data[i], (int256));
            bool neg = x < 0;
            if (neg) x = -x;
            uint denom = 10 ** decimals[i];
            uint scale = 1 << scales[i];
            uint output = mulDiv(uint256(x), scale, denom);
            if (mulmod(uint256(x), scale, denom) * 2 >= denom) {
                output += 1;
            }

            quantized_data[i] = neg ? -int256(output) : int256(output);
        }
    }

    function to_field_element(
        int64[] memory quantized_data
    ) public pure returns (uint256[] memory output) {
        output = new uint256[](quantized_data.length);
        for (uint i; i < quantized_data.length; i++) {
            output[i] = uint256(quantized_data[i] + int(ORDER)) % ORDER;
        }
    }
}
||||
@@ -1,12 +0,0 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity ^0.8.17;

contract TestReads {
    int[] public arr;

    constructor(int256[] memory _numbers) {
        for (uint256 i = 0; i < _numbers.length; i++) {
            arr.push(_numbers[i]);
        }
    }
}
||||
@@ -1,4 +1,4 @@
ezkl==0.0.0
ezkl==11.2.3
sphinx
sphinx-rtd-theme
sphinxcontrib-napoleon

@@ -1,7 +1,7 @@
import ezkl

project = 'ezkl'
release = '0.0.0'
release = '11.2.3'
version = release
|
||||
|
||||
|
||||
@@ -251,7 +251,7 @@
|
||||
"with open(cal_path, \"w\") as f:\n",
|
||||
" json.dump(cal_data, f)\n",
|
||||
"\n",
|
||||
"res = ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
|
||||
"res = await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -307,7 +307,7 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"ezkl.setup_test_evm_witness(\n",
|
||||
"await ezkl.setup_test_evm_witness(\n",
|
||||
" data_path,\n",
|
||||
" compiled_model_path,\n",
|
||||
" # we write the call data to the same file as the input data\n",
|
||||
@@ -333,7 +333,7 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"res = ezkl.get_srs( settings_path)\n"
|
||||
"res = await ezkl.get_srs( settings_path)\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -354,7 +354,7 @@
|
||||
"\n",
|
||||
"witness_path = \"witness.json\"\n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)"
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -462,7 +462,7 @@
|
||||
"abi_path = 'test.abi'\n",
|
||||
"sol_code_path = 'test.sol'\n",
|
||||
"\n",
|
||||
"res = ezkl.create_evm_verifier(\n",
|
||||
"res = await ezkl.create_evm_verifier(\n",
|
||||
" vk_path,\n",
|
||||
" \n",
|
||||
" settings_path,\n",
|
||||
@@ -482,7 +482,7 @@
|
||||
"\n",
|
||||
"addr_path_verifier = \"addr_verifier.txt\"\n",
|
||||
"\n",
|
||||
"res = ezkl.deploy_evm(\n",
|
||||
"res = await ezkl.deploy_evm(\n",
|
||||
" addr_path_verifier,\n",
|
||||
" sol_code_path,\n",
|
||||
" 'http://127.0.0.1:3030'\n",
|
||||
@@ -510,7 +510,7 @@
|
||||
"sol_code_path = 'test.sol'\n",
|
||||
"input_path = 'input.json'\n",
|
||||
"\n",
|
||||
"res = ezkl.create_evm_data_attestation(\n",
|
||||
"res = await ezkl.create_evm_data_attestation(\n",
|
||||
" input_path,\n",
|
||||
" settings_path,\n",
|
||||
" sol_code_path,\n",
|
||||
@@ -535,7 +535,7 @@
|
||||
"source": [
|
||||
"addr_path_da = \"addr_da.txt\"\n",
|
||||
"\n",
|
||||
"res = ezkl.deploy_da_evm(\n",
|
||||
"res = await ezkl.deploy_da_evm(\n",
|
||||
" addr_path_da,\n",
|
||||
" input_path,\n",
|
||||
" settings_path,\n",
|
||||
@@ -567,7 +567,7 @@
|
||||
"with open(addr_path_da, 'r') as f:\n",
|
||||
" addr_da = f.read()\n",
|
||||
"\n",
|
||||
"res = ezkl.verify_evm(\n",
|
||||
"res = await ezkl.verify_evm(\n",
|
||||
" addr,\n",
|
||||
" proof_path,\n",
|
||||
" RPC_URL,\n",
|
||||
@@ -592,7 +592,7 @@
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.9.15"
|
||||
"version": "3.12.2"
|
||||
},
|
||||
"orig_nbformat": 4
|
||||
},
|
||||
|
||||
@@ -249,7 +249,7 @@
|
||||
"with open(cal_path, \"w\") as f:\n",
|
||||
" json.dump(cal_data, f)\n",
|
||||
"\n",
|
||||
"res = ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
|
||||
"res = await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -278,7 +278,7 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"res = ezkl.get_srs( settings_path)\n"
|
||||
"res = await ezkl.get_srs( settings_path)\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -299,7 +299,7 @@
|
||||
"\n",
|
||||
"witness_path = \"witness.json\"\n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"\n"
|
||||
]
|
||||
},
|
||||
@@ -518,7 +518,7 @@
|
||||
"abi_path = 'test.abi'\n",
|
||||
"sol_code_path = 'test.sol'\n",
|
||||
"\n",
|
||||
"res = ezkl.create_evm_verifier(\n",
|
||||
"res = await ezkl.create_evm_verifier(\n",
|
||||
" vk_path,\n",
|
||||
" \n",
|
||||
" settings_path,\n",
|
||||
@@ -538,7 +538,7 @@
|
||||
"\n",
|
||||
"addr_path_verifier = \"addr_verifier.txt\"\n",
|
||||
"\n",
|
||||
"res = ezkl.deploy_evm(\n",
|
||||
"res = await ezkl.deploy_evm(\n",
|
||||
" addr_path_verifier,\n",
|
||||
" sol_code_path,\n",
|
||||
" 'http://127.0.0.1:3030'\n",
|
||||
@@ -566,7 +566,7 @@
|
||||
"sol_code_path = 'test.sol'\n",
|
||||
"input_path = 'input.json'\n",
|
||||
"\n",
|
||||
"res = ezkl.create_evm_data_attestation(\n",
|
||||
"res = await ezkl.create_evm_data_attestation(\n",
|
||||
" input_path,\n",
|
||||
" settings_path,\n",
|
||||
" sol_code_path,\n",
|
||||
@@ -591,7 +591,7 @@
|
||||
"source": [
|
||||
"addr_path_da = \"addr_da.txt\"\n",
|
||||
"\n",
|
||||
"res = ezkl.deploy_da_evm(\n",
|
||||
"res = await ezkl.deploy_da_evm(\n",
|
||||
" addr_path_da,\n",
|
||||
" input_path,\n",
|
||||
" settings_path,\n",
|
||||
@@ -623,7 +623,7 @@
|
||||
"with open(addr_path_da, 'r') as f:\n",
|
||||
" addr_da = f.read()\n",
|
||||
"\n",
|
||||
"res = ezkl.verify_evm(\n",
|
||||
"res = await ezkl.verify_evm(\n",
|
||||
" addr,\n",
|
||||
" proof_path,\n",
|
||||
" RPC_URL,\n",
|
||||
@@ -654,4 +654,4 @@
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
}
|
||||
@@ -150,7 +150,7 @@
|
||||
"res = ezkl.gen_settings(model_path, settings_path)\n",
|
||||
"assert res == True\n",
|
||||
"\n",
|
||||
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
|
||||
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
|
||||
"assert res == True"
|
||||
]
|
||||
},
|
||||
@@ -170,7 +170,7 @@
|
||||
"with open(cal_path, \"w\") as f:\n",
|
||||
" json.dump(cal_data, f)\n",
|
||||
"\n",
|
||||
"res = ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
|
||||
"res = await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -192,7 +192,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# srs path\n",
|
||||
"res = ezkl.get_srs( settings_path)"
|
||||
"res = await ezkl.get_srs( settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -204,7 +204,7 @@
|
||||
"source": [
|
||||
"# now generate the witness file \n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
@@ -303,4 +303,4 @@
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
}
|
||||
@@ -352,14 +352,8 @@
|
||||
"# Specify all the files we need\n",
|
||||
"\n",
|
||||
"model_path = os.path.join('network.onnx')\n",
|
||||
"compiled_model_path = os.path.join('network.ezkl')\n",
|
||||
"pk_path = os.path.join('test.pk')\n",
|
||||
"vk_path = os.path.join('test.vk')\n",
|
||||
"settings_path = os.path.join('settings.json')\n",
|
||||
"\n",
|
||||
"witness_path = os.path.join('witness.json')\n",
|
||||
"data_path = os.path.join('input.json')\n",
|
||||
"cal_data_path = os.path.join('cal_data.json')"
|
||||
"cal_data_path = os.path.join('calibration.json')"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -424,7 +418,7 @@
|
||||
"source": [
|
||||
"!RUST_LOG=trace\n",
|
||||
"# TODO: Dictionary outputs\n",
|
||||
"res = ezkl.gen_settings(model_path, settings_path)\n",
|
||||
"res = ezkl.gen_settings()\n",
|
||||
"assert res == True\n",
|
||||
"\n"
|
||||
]
|
||||
@@ -443,7 +437,7 @@
|
||||
"\n",
|
||||
"# Optimize for resources, we cap logrows at 12 to reduce setup and proving time, at the expense of accuracy\n",
|
||||
"# You may want to increase the max logrows if accuracy is a concern\n",
|
||||
"res = ezkl.calibrate_settings(cal_data_path, model_path, settings_path, \"resources\", max_logrows = 12, scales = [2])"
|
||||
"res = await ezkl.calibrate_settings(target = \"resources\", max_logrows = 12, scales = [2])"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -463,7 +457,7 @@
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"res = ezkl.compile_circuit(model_path, compiled_model_path, settings_path)\n",
|
||||
"res = ezkl.compile_circuit()\n",
|
||||
"assert res == True"
|
||||
]
|
||||
},
|
||||
@@ -484,7 +478,7 @@
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"res = ezkl.get_srs( settings_path)"
|
||||
"res = await ezkl.get_srs()"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -504,17 +498,10 @@
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"res = ezkl.setup(\n",
|
||||
" compiled_model_path,\n",
|
||||
" vk_path,\n",
|
||||
" pk_path,\n",
|
||||
" )\n",
|
||||
"res = ezkl.setup()\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"assert res == True\n",
|
||||
"assert os.path.isfile(vk_path)\n",
|
||||
"assert os.path.isfile(pk_path)\n",
|
||||
"assert os.path.isfile(settings_path)"
|
||||
"assert res == True"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -539,7 +526,7 @@
|
||||
"# now generate the witness file\n",
|
||||
"witness_path = os.path.join('witness.json')\n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"res = await ezkl.gen_witness()\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
@@ -559,13 +546,7 @@
|
||||
"\n",
|
||||
"proof_path = os.path.join('proof.json')\n",
|
||||
"\n",
|
||||
"proof = ezkl.prove(\n",
|
||||
" witness_path,\n",
|
||||
" compiled_model_path,\n",
|
||||
" pk_path,\n",
|
||||
" proof_path,\n",
|
||||
" \"single\",\n",
|
||||
" )\n",
|
||||
"proof = ezkl.prove(proof_type=\"single\", proof_path=proof_path)\n",
|
||||
"\n",
|
||||
"print(proof)\n",
|
||||
"assert os.path.isfile(proof_path)"
|
||||
@@ -585,11 +566,7 @@
|
||||
"source": [
|
||||
"# verify our proof\n",
|
||||
"\n",
|
||||
"res = ezkl.verify(\n",
|
||||
" proof_path,\n",
|
||||
" settings_path,\n",
|
||||
" vk_path,\n",
|
||||
" )\n",
|
||||
"res = ezkl.verify()\n",
|
||||
"\n",
|
||||
"assert res == True\n",
|
||||
"print(\"verified\")"
|
||||
@@ -664,12 +641,9 @@
|
||||
"sol_code_path = os.path.join('Verifier.sol')\n",
|
||||
"abi_path = os.path.join('Verifier.abi')\n",
|
||||
"\n",
|
||||
"res = ezkl.create_evm_verifier(\n",
|
||||
" vk_path,\n",
|
||||
" \n",
|
||||
" settings_path,\n",
|
||||
" sol_code_path,\n",
|
||||
" abi_path\n",
|
||||
"res = await ezkl.create_evm_verifier(\n",
|
||||
" sol_code_path=sol_code_path,\n",
|
||||
" abi_path=abi_path, \n",
|
||||
" )\n",
|
||||
"\n",
|
||||
"assert res == True\n",
|
||||
@@ -757,7 +731,7 @@
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.9.15"
|
||||
"version": "3.12.2"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
|
||||
@@ -467,7 +467,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"\n",
|
||||
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
|
||||
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
|
||||
"assert res == True"
|
||||
]
|
||||
},
|
||||
@@ -494,7 +494,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# srs path\n",
|
||||
"res = ezkl.get_srs( settings_path)"
|
||||
"res = await ezkl.get_srs( settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -508,7 +508,7 @@
|
||||
"source": [
|
||||
"# now generate the witness file\n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
@@ -625,4 +625,4 @@
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
}
|
||||
@@ -195,7 +195,7 @@
|
||||
"json.dump(data, open(cal_path, 'w'))\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
|
||||
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
|
||||
"assert res == True\n"
|
||||
]
|
||||
},
|
||||
@@ -222,7 +222,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# srs path\n",
|
||||
"res = ezkl.get_srs( settings_path)"
|
||||
"res = await ezkl.get_srs( settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -236,7 +236,7 @@
|
||||
"source": [
|
||||
"# now generate the witness file\n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
|
||||
@@ -179,7 +179,7 @@
|
||||
"json.dump(data, open(cal_path, 'w'))\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
|
||||
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
|
||||
"assert res == True\n"
|
||||
]
|
||||
},
|
||||
@@ -202,7 +202,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# srs path\n",
|
||||
"res = ezkl.get_srs( settings_path)"
|
||||
"res = await ezkl.get_srs( settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -214,7 +214,7 @@
|
||||
"source": [
|
||||
"# now generate the witness file \n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
@@ -313,4 +313,4 @@
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
}
|
||||
@@ -241,7 +241,7 @@
|
||||
"with open(cal_path, \"w\") as f:\n",
|
||||
" json.dump(cal_data, f)\n",
|
||||
"\n",
|
||||
"res = ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
|
||||
"res = await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -270,7 +270,7 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"res = ezkl.get_srs( settings_path)\n"
|
||||
"res = await ezkl.get_srs( settings_path)\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -291,7 +291,7 @@
|
||||
"\n",
|
||||
"witness_path = \"witness.json\"\n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)"
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -420,7 +420,7 @@
|
||||
"abi_path = 'test.abi'\n",
|
||||
"sol_code_path = 'test.sol'\n",
|
||||
"\n",
|
||||
"res = ezkl.create_evm_verifier(\n",
|
||||
"res = await ezkl.create_evm_verifier(\n",
|
||||
" vk_path,\n",
|
||||
" \n",
|
||||
" settings_path,\n",
|
||||
@@ -451,7 +451,7 @@
|
||||
"\n",
|
||||
"address_path = os.path.join(\"address.json\")\n",
|
||||
"\n",
|
||||
"res = ezkl.deploy_evm(\n",
|
||||
"res = await ezkl.deploy_evm(\n",
|
||||
" address_path,\n",
|
||||
" sol_code_path,\n",
|
||||
" 'http://127.0.0.1:3030'\n",
|
||||
@@ -472,7 +472,7 @@
|
||||
"# make sure anvil is running locally\n",
|
||||
"# $ anvil -p 3030\n",
|
||||
"\n",
|
||||
"res = ezkl.verify_evm(\n",
|
||||
"res = await ezkl.verify_evm(\n",
|
||||
" addr,\n",
|
||||
" proof_path,\n",
|
||||
" \"http://127.0.0.1:3030\"\n",
|
||||
|
||||
@@ -152,7 +152,7 @@
|
||||
"json.dump(data, open(cal_path, 'w'))\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
|
||||
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
|
||||
"assert res == True\n"
|
||||
]
|
||||
},
|
||||
@@ -175,7 +175,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# srs path\n",
|
||||
"res = ezkl.get_srs(settings_path = settings_path)"
|
||||
"res = await ezkl.get_srs(settings_path = settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -188,7 +188,7 @@
|
||||
"# now generate the witness file \n",
|
||||
"witness_path = \"witness.json\"\n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
@@ -284,4 +284,4 @@
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
}
|
||||
@@ -155,7 +155,7 @@
|
||||
"json.dump(data, open(cal_path, 'w'))\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
|
||||
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
|
||||
"assert res == True\n"
|
||||
]
|
||||
},
|
||||
@@ -178,7 +178,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# srs path\n",
|
||||
"res = ezkl.get_srs( settings_path)"
|
||||
"res = await ezkl.get_srs( settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -190,7 +190,7 @@
|
||||
"source": [
|
||||
"# now generate the witness file \n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
@@ -289,4 +289,4 @@
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
}
|
||||
@@ -233,7 +233,7 @@
|
||||
"with open(cal_path, \"w\") as f:\n",
|
||||
" json.dump(cal_data, f)\n",
|
||||
"\n",
|
||||
"res = ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
|
||||
"res = await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -262,7 +262,7 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"res = ezkl.get_srs( settings_path)\n"
|
||||
"res = await ezkl.get_srs( settings_path)\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -315,7 +315,7 @@
|
||||
"\n",
|
||||
"witness_path = \"witness.json\"\n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path, vk_path)\n"
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path, vk_path)\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -429,7 +429,7 @@
|
||||
"abi_path = 'test.abi'\n",
|
||||
"sol_code_path = 'test.sol'\n",
|
||||
"\n",
|
||||
"res = ezkl.create_evm_verifier(\n",
|
||||
"res = await ezkl.create_evm_verifier(\n",
|
||||
" vk_path,\n",
|
||||
" \n",
|
||||
" settings_path,\n",
|
||||
@@ -460,7 +460,7 @@
|
||||
"\n",
|
||||
"address_path = os.path.join(\"address.json\")\n",
|
||||
"\n",
|
||||
"res = ezkl.deploy_evm(\n",
|
||||
"res = await ezkl.deploy_evm(\n",
|
||||
" address_path,\n",
|
||||
" sol_code_path,\n",
|
||||
" 'http://127.0.0.1:3030'\n",
|
||||
@@ -481,7 +481,7 @@
|
||||
"# make sure anvil is running locally\n",
|
||||
"# $ anvil -p 3030\n",
|
||||
"\n",
|
||||
"res = ezkl.verify_evm(\n",
|
||||
"res = await ezkl.verify_evm(\n",
|
||||
" addr,\n",
|
||||
" proof_path,\n",
|
||||
" \"http://127.0.0.1:3030\"\n",
|
||||
|
||||
@@ -193,7 +193,7 @@
|
||||
"json.dump(data, open(cal_path, 'w'))\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
|
||||
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
|
||||
"assert res == True\n"
|
||||
]
|
||||
},
|
||||
@@ -216,7 +216,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# srs path\n",
|
||||
"res = ezkl.get_srs( settings_path)"
|
||||
"res = await ezkl.get_srs( settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -228,7 +228,7 @@
|
||||
"source": [
|
||||
"# now generate the witness file \n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
@@ -347,4 +347,4 @@
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
}
|
||||
@@ -142,7 +142,7 @@
|
||||
"# Serialize data into file:\n",
|
||||
"json.dump(data, open(cal_path, 'w'))\n",
|
||||
"\n",
|
||||
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
|
||||
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
|
||||
"assert res == True\n"
|
||||
]
|
||||
},
|
||||
@@ -165,7 +165,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# srs path\n",
|
||||
"res = ezkl.get_srs( settings_path)"
|
||||
"res = await ezkl.get_srs( settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -177,7 +177,7 @@
|
||||
"source": [
|
||||
"# now generate the witness file \n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
@@ -276,4 +276,4 @@
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
}
|
||||
@@ -347,7 +347,7 @@
|
||||
"# Serialize data into file:\n",
|
||||
"json.dump(data, open(cal_path, 'w'))\n",
|
||||
"\n",
|
||||
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
|
||||
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
|
||||
"assert res == True\n"
|
||||
]
|
||||
},
|
||||
@@ -370,7 +370,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# srs path\n",
|
||||
"res = ezkl.get_srs( settings_path)"
|
||||
"res = await ezkl.get_srs( settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -383,7 +383,7 @@
|
||||
"# now generate the witness file \n",
|
||||
"witness_path = \"gan_witness.json\"\n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
@@ -490,4 +490,4 @@
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
}
|
||||
279  examples/notebooks/logistic_regression.ipynb (new file)
@@ -0,0 +1,279 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "cf69bb3f-94e6-4dba-92cd-ce08df117d67",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Logistic Regression\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"Sklearn based models are slightly finicky to get into a suitable onnx format. \n",
|
||||
"This notebook showcases how to do so using the `hummingbird-ml` python package for a Logistic Regression model. "
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "95613ee9",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# check if notebook is in colab\n",
|
||||
"try:\n",
|
||||
" # install ezkl\n",
|
||||
" import google.colab\n",
|
||||
" import subprocess\n",
|
||||
" import sys\n",
|
||||
" subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"ezkl\"])\n",
|
||||
" subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"onnx\"])\n",
|
||||
" subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"hummingbird-ml\"])\n",
|
||||
"\n",
|
||||
"# rely on local installation of ezkl if the notebook is not in colab\n",
|
||||
"except:\n",
|
||||
" pass\n",
|
||||
"\n",
|
||||
"import os\n",
|
||||
"import torch\n",
|
||||
"import ezkl\n",
|
||||
"import json\n",
|
||||
"from hummingbird.ml import convert\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"# here we create and (potentially train a model)\n",
|
||||
"\n",
|
||||
"# make sure you have the dependencies required here already installed\n",
|
||||
"import numpy as np\n",
|
||||
"from sklearn.linear_model import LogisticRegression\n",
|
||||
"X = np.array([[1, 1], [1, 2], [2, 2], [2, 3]])\n",
|
||||
"# y = 1 * x_0 + 2 * x_1 + 3\n",
|
||||
"y = np.dot(X, np.array([1, 2])) + 3\n",
|
||||
"reg = LogisticRegression().fit(X, y)\n",
|
||||
"reg.score(X, y)\n",
|
||||
"\n",
|
||||
"circuit = convert(reg, \"torch\", X[:1]).model\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "b37637c4",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"model_path = os.path.join('network.onnx')\n",
|
||||
"compiled_model_path = os.path.join('network.compiled')\n",
|
||||
"pk_path = os.path.join('test.pk')\n",
|
||||
"vk_path = os.path.join('test.vk')\n",
|
||||
"settings_path = os.path.join('settings.json')\n",
|
||||
"\n",
|
||||
"witness_path = os.path.join('witness.json')\n",
|
||||
"data_path = os.path.join('input.json')"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "82db373a",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"\n",
|
||||
"\n",
|
||||
"# export to onnx format\n",
|
||||
"# !!!!!!!!!!!!!!!!! This will flash a warning but it is fine !!!!!!!!!!!!!!!!!!!!!\n",
|
||||
"\n",
|
||||
"# Input to the model\n",
|
||||
"shape = X.shape[1:]\n",
|
||||
"x = torch.rand(1, *shape, requires_grad=True)\n",
|
||||
"torch_out = circuit(x)\n",
|
||||
"# Export the model\n",
|
||||
"torch.onnx.export(circuit, # model being run\n",
|
||||
" # model input (or a tuple for multiple inputs)\n",
|
||||
" x,\n",
|
||||
" # where to save the model (can be a file or file-like object)\n",
|
||||
" \"network.onnx\",\n",
|
||||
" export_params=True, # store the trained parameter weights inside the model file\n",
|
||||
" opset_version=10, # the ONNX version to export the model to\n",
|
||||
" do_constant_folding=True, # whether to execute constant folding for optimization\n",
|
||||
" input_names=['input'], # the model's input names\n",
|
||||
" output_names=['output'], # the model's output names\n",
|
||||
" dynamic_axes={'input': {0: 'batch_size'}, # variable length axes\n",
|
||||
" 'output': {0: 'batch_size'}})\n",
|
||||
"\n",
|
||||
"d = ((x).detach().numpy()).reshape([-1]).tolist()\n",
|
||||
"\n",
|
||||
"data = dict(input_shapes=[shape],\n",
|
||||
" input_data=[d],\n",
|
||||
" output_data=[((o).detach().numpy()).reshape([-1]).tolist() for o in torch_out])\n",
|
||||
"\n",
|
||||
"# Serialize data into file:\n",
|
||||
"json.dump(data, open(\"input.json\", 'w'))\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "d5e374a2",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"!RUST_LOG=trace\n",
|
||||
"# TODO: Dictionary outputs\n",
|
||||
"res = ezkl.gen_settings(model_path, settings_path)\n",
|
||||
"assert res == True\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"cal_path = os.path.join(\"calibration.json\")\n",
|
||||
"\n",
|
||||
"data_array = (torch.randn(20, *shape).detach().numpy()).reshape([-1]).tolist()\n",
|
||||
"\n",
|
||||
"data = dict(input_data = [data_array])\n",
|
||||
"\n",
|
||||
"# Serialize data into file:\n",
|
||||
"json.dump(data, open(cal_path, 'w'))\n",
|
||||
"\n",
|
||||
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
|
||||
"assert res == True\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "3aa4f090",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"res = ezkl.compile_circuit(model_path, compiled_model_path, settings_path)\n",
|
||||
"assert res == True"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "8b74dcee",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# srs path\n",
|
||||
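"# fetch (or reuse) a structured reference string sized for the circuit described by the settings file\n",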
"res = await ezkl.get_srs( settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "18c8b7c7",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# now generate the witness file \n",
|
||||
"\n",
|
||||
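"# the witness contains the quantized inputs and outputs used when generating the proof\n",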
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "b1c561a8",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"\n",
|
||||
"# HERE WE SETUP THE CIRCUIT PARAMS\n",
|
||||
"# WE GOT KEYS\n",
|
||||
"# WE GOT CIRCUIT PARAMETERS\n",
|
||||
"# EVERYTHING ANYONE HAS EVER NEEDED FOR ZK\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"\n",
|
||||
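"# setup derives the proving key and verification key from the compiled circuit and the SRS\n",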
"res = ezkl.setup(\n",
|
||||
" compiled_model_path,\n",
|
||||
" vk_path,\n",
|
||||
" pk_path,\n",
|
||||
" \n",
|
||||
" )\n",
|
||||
"\n",
|
||||
"assert res == True\n",
|
||||
"assert os.path.isfile(vk_path)\n",
|
||||
"assert os.path.isfile(pk_path)\n",
|
||||
"assert os.path.isfile(settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "c384cbc8",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# GENERATE A PROOF\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"proof_path = os.path.join('test.pf')\n",
|
||||
"\n",
|
||||
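"# \"single\" produces a standalone proof (as opposed to one intended for later aggregation)\n",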
"res = ezkl.prove(\n",
|
||||
" witness_path,\n",
|
||||
" compiled_model_path,\n",
|
||||
" pk_path,\n",
|
||||
" proof_path,\n",
|
||||
" \n",
|
||||
" \"single\",\n",
|
||||
" )\n",
|
||||
"\n",
|
||||
"print(res)\n",
|
||||
"assert os.path.isfile(proof_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "76f00d41",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# VERIFY IT\n",
|
||||
"\n",
|
||||
"res = ezkl.verify(\n",
|
||||
" proof_path,\n",
|
||||
" settings_path,\n",
|
||||
" vk_path,\n",
|
||||
" \n",
|
||||
" )\n",
|
||||
"\n",
|
||||
"assert res == True\n",
|
||||
"print(\"verified\")"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3 (ipykernel)",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.12.2"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
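Taken together, the cells above walk through the full ezkl pipeline for this model. Condensed into a single script for reference, the flow is roughly the following; this is a minimal sketch, assuming the same file names as the cells above and an ezkl build in which calibrate_settings, get_srs and gen_witness are async coroutines, as they are in the cells above:

import asyncio
import os
import ezkl

# same artifact names as the notebook cells above
model_path = os.path.join("network.onnx")
compiled_model_path = os.path.join("network.compiled")
settings_path = os.path.join("settings.json")
cal_path = os.path.join("calibration.json")
data_path = os.path.join("input.json")
witness_path = os.path.join("witness.json")
pk_path, vk_path, proof_path = "test.pk", "test.vk", "test.pf"

async def full_flow():
    # settings -> calibration -> compiled circuit
    assert ezkl.gen_settings(model_path, settings_path)
    assert await ezkl.calibrate_settings(cal_path, model_path, settings_path, "resources")
    assert ezkl.compile_circuit(model_path, compiled_model_path, settings_path)

    # fetch a correctly sized SRS, then produce the witness for the input data
    await ezkl.get_srs(settings_path)
    await ezkl.gen_witness(data_path, compiled_model_path, witness_path)

    # keys, proof, verification
    assert ezkl.setup(compiled_model_path, vk_path, pk_path)
    ezkl.prove(witness_path, compiled_model_path, pk_path, proof_path, "single")
    assert ezkl.verify(proof_path, settings_path, vk_path)

asyncio.run(full_flow())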
|
||||
@@ -139,7 +139,7 @@
|
||||
"res = ezkl.gen_settings(model_path, settings_path, py_run_args=run_args)\n",
|
||||
"assert res == True\n",
|
||||
"\n",
|
||||
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
|
||||
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
|
||||
"assert res == True\n"
|
||||
]
|
||||
},
|
||||
@@ -180,7 +180,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# srs path\n",
|
||||
"res = ezkl.get_srs( settings_path)"
|
||||
"res = await ezkl.get_srs( settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -193,7 +193,7 @@
|
||||
"# now generate the witness file \n",
|
||||
"witness_path = \"lstmwitness.json\"\n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
@@ -300,4 +300,4 @@
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
}
|
||||
@@ -91,11 +91,14 @@
|
||||
"os.system(\"echo shovel is now installed. starting:\")\n",
|
||||
"\n",
|
||||
"command = [\"./shovel\", \"-config\", \"config.json\"]\n",
|
||||
"subprocess.Popen(command)\n",
|
||||
"proc = subprocess.Popen(command)\n",
|
||||
"\n",
|
||||
"os.system(\"echo shovel started.\")\n",
|
||||
"\n",
|
||||
"time.sleep(5)\n",
|
||||
"time.sleep(10)\n",
|
||||
"\n",
|
||||
"# after we've fetched some data -- kill the process\n",
|
||||
"proc.terminate()\n",
|
||||
"\n"
|
||||
]
|
||||
},
|
||||
@@ -310,7 +313,7 @@
|
||||
"\n",
|
||||
"assert res == True\n",
|
||||
"\n",
|
||||
"res = ezkl.calibrate_settings(input_filename, onnx_filename, settings_filename, \"resources\")\n",
|
||||
"res = await ezkl.calibrate_settings(input_filename, onnx_filename, settings_filename, \"resources\")\n",
|
||||
"\n",
|
||||
"assert res == True"
|
||||
]
|
||||
@@ -387,7 +390,7 @@
|
||||
"witness_path = \"witness.json\"\n",
|
||||
"\n",
|
||||
"# generate the witness\n",
|
||||
"res = ezkl.gen_witness(\n",
|
||||
"res = await ezkl.gen_witness(\n",
|
||||
" input_filename,\n",
|
||||
" compiled_filename,\n",
|
||||
" witness_path\n",
|
||||
@@ -425,16 +428,6 @@
|
||||
"assert os.path.isfile(proof_path)\n",
|
||||
"\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# kill all shovel process \n",
|
||||
"os.system(\"pkill -f shovel\")"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
|
||||
@@ -323,7 +323,7 @@
|
||||
"res = ezkl.gen_settings(model_path, settings_path, py_run_args=run_args)\n",
|
||||
"assert res == True\n",
|
||||
"\n",
|
||||
"res = ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\", scales=[2,7])\n",
|
||||
"res = await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\", scales=[2,7])\n",
|
||||
"assert res == True"
|
||||
]
|
||||
},
|
||||
@@ -348,7 +348,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# srs path\n",
|
||||
"res = ezkl.get_srs(settings_path)"
|
||||
"res = await ezkl.get_srs(settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -362,7 +362,7 @@
|
||||
"# now generate the witness file\n",
|
||||
"witness_path = \"witness.json\"\n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
@@ -469,7 +469,7 @@
|
||||
"abi_path = 'test.abi'\n",
|
||||
"sol_code_path = 'test_1.sol'\n",
|
||||
"\n",
|
||||
"res = ezkl.create_evm_verifier(\n",
|
||||
"res = await ezkl.create_evm_verifier(\n",
|
||||
" vk_path,\n",
|
||||
" settings_path,\n",
|
||||
" sol_code_path,\n",
|
||||
@@ -502,7 +502,7 @@
|
||||
"\n",
|
||||
"address_path = os.path.join(\"address.json\")\n",
|
||||
"\n",
|
||||
"res = ezkl.deploy_evm(\n",
|
||||
"res = await ezkl.deploy_evm(\n",
|
||||
" address_path,\n",
|
||||
" sol_code_path,\n",
|
||||
" 'http://127.0.0.1:3030'\n",
|
||||
@@ -525,7 +525,7 @@
|
||||
"# make sure anvil is running locally\n",
|
||||
"# $ anvil -p 3030\n",
|
||||
"\n",
|
||||
"res = ezkl.verify_evm(\n",
|
||||
"res = await ezkl.verify_evm(\n",
|
||||
" addr,\n",
|
||||
" proof_path,\n",
|
||||
" \"http://127.0.0.1:3030\"\n",
|
||||
@@ -558,4 +558,4 @@
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 0
|
||||
}
|
||||
}
|
||||
@@ -289,7 +289,7 @@
|
||||
"json.dump(data, open(cal_path, 'w'))\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\", scales=[0,6])"
|
||||
"await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\", scales=[0,6])"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -309,7 +309,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# srs path\n",
|
||||
"res = ezkl.get_srs( settings_path)"
|
||||
"res = await ezkl.get_srs( settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -321,7 +321,7 @@
|
||||
"# now generate the witness file \n",
|
||||
"witness_path = \"gan_witness.json\"\n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -215,7 +215,7 @@
|
||||
"json.dump(data, open(cal_path, 'w'))\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
|
||||
"await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -235,7 +235,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# srs path\n",
|
||||
"res = ezkl.get_srs( settings_path)"
|
||||
"res = await ezkl.get_srs( settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -247,7 +247,7 @@
|
||||
"# now generate the witness file\n",
|
||||
"witness_path = \"ae_witness.json\"\n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
@@ -451,7 +451,7 @@
|
||||
"res = ezkl.gen_settings(model_path, settings_path)\n",
|
||||
"assert res == True\n",
|
||||
"\n",
|
||||
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
|
||||
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
|
||||
"assert res == True\n",
|
||||
"print(\"verified\")"
|
||||
]
|
||||
@@ -473,7 +473,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# srs path\n",
|
||||
"res = ezkl.get_srs( settings_path)"
|
||||
"res = await ezkl.get_srs( settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -485,7 +485,7 @@
|
||||
"# now generate the witness file \n",
|
||||
"witness_path = \"vae_witness.json\"\n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
|
||||
@@ -845,7 +845,7 @@
|
||||
"res = ezkl.gen_settings(model_path, settings_path)\n",
|
||||
"assert res == True\n",
|
||||
"\n",
|
||||
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\", max_logrows = 20, scales = [3])\n",
|
||||
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\", max_logrows = 20, scales = [3])\n",
|
||||
"assert res == True"
|
||||
]
|
||||
},
|
||||
@@ -870,7 +870,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# srs path\n",
|
||||
"res = ezkl.get_srs( settings_path)"
|
||||
"res = await ezkl.get_srs( settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -881,7 +881,7 @@
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
@@ -993,4 +993,4 @@
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 0
|
||||
}
|
||||
}
|
||||
@@ -261,7 +261,7 @@
|
||||
"source": [
|
||||
"# iterate over each submodel gen-settings, compile circuit and setup zkSNARK\n",
|
||||
"\n",
|
||||
"def setup(i):\n",
|
||||
"async def setup(i):\n",
|
||||
" # file names\n",
|
||||
" model_path = os.path.join('network_split_'+str(i)+'.onnx')\n",
|
||||
" settings_path = os.path.join('settings_split_'+str(i)+'.json')\n",
|
||||
@@ -282,7 +282,7 @@
|
||||
"\n",
|
||||
" # generate settings for the current model\n",
|
||||
" res = ezkl.gen_settings(model_path, settings_path, py_run_args=run_args)\n",
|
||||
" res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\", scales=[run_args.input_scale], max_logrows=run_args.logrows)\n",
|
||||
" res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\", scales=[run_args.input_scale], max_logrows=run_args.logrows)\n",
|
||||
" assert res == True\n",
|
||||
"\n",
|
||||
" # load settings and print them to the console\n",
|
||||
@@ -303,11 +303,11 @@
|
||||
" assert os.path.isfile(vk_path)\n",
|
||||
" assert os.path.isfile(pk_path)\n",
|
||||
"\n",
|
||||
" res = ezkl.gen_witness(data_path, compiled_model_path, witness_path, vk_path)\n",
|
||||
" res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path, vk_path)\n",
|
||||
" run_args.input_scale = settings[\"model_output_scales\"][0]\n",
|
||||
"\n",
|
||||
"for i in range(2):\n",
|
||||
" setup(i)\n"
|
||||
" await setup(i)\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -414,7 +414,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"for i in range(2):\n",
|
||||
" setup(i)"
|
||||
" await setup(i)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -466,7 +466,7 @@
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.9.15"
|
||||
"version": "3.12.2"
|
||||
},
|
||||
"orig_nbformat": 4
|
||||
},
|
||||
|
||||
@@ -174,7 +174,7 @@
|
||||
"json.dump(data, open(cal_path, 'w'))\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
|
||||
"await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -196,7 +196,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# srs path\n",
|
||||
"res = ezkl.get_srs( settings_path)"
|
||||
"res = await ezkl.get_srs( settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -208,7 +208,7 @@
|
||||
"source": [
|
||||
"# now generate the witness file \n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
|
||||
@@ -215,7 +215,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# srs path\n",
|
||||
"res = ezkl.get_srs( settings_path)"
|
||||
"res = await ezkl.get_srs( settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -229,7 +229,7 @@
|
||||
"source": [
|
||||
"# now generate the witness file\n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
@@ -265,7 +265,7 @@
|
||||
" # Serialize data into file:\n",
|
||||
"json.dump( data, open(data_path_faulty, 'w' ))\n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path_faulty, compiled_model_path, witness_path_faulty)\n",
|
||||
"res = await ezkl.gen_witness(data_path_faulty, compiled_model_path, witness_path_faulty)\n",
|
||||
"assert os.path.isfile(witness_path_faulty)"
|
||||
]
|
||||
},
|
||||
@@ -310,7 +310,7 @@
|
||||
"# Serialize data into file:\n",
|
||||
"json.dump( data, open(data_path_truthy, 'w' ))\n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path_truthy, compiled_model_path, witness_path_truthy)\n",
|
||||
"res = await ezkl.gen_witness(data_path_truthy, compiled_model_path, witness_path_truthy)\n",
|
||||
"assert os.path.isfile(witness_path_truthy)"
|
||||
]
|
||||
},
|
||||
@@ -519,4 +519,4 @@
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
}
|
||||
@@ -193,7 +193,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# srs path\n",
|
||||
"res = ezkl.get_srs( settings_path)"
|
||||
"res = await ezkl.get_srs( settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -205,7 +205,7 @@
|
||||
"source": [
|
||||
"# now generate the witness file \n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
@@ -290,7 +290,7 @@
|
||||
"source": [
|
||||
"# Generate a larger SRS. This is needed for the aggregated proof\n",
|
||||
"\n",
|
||||
"res = ezkl.get_srs(settings_path=None, logrows=21, commitment=ezkl.PyCommitments.KZG)"
|
||||
"res = await ezkl.get_srs(settings_path=None, logrows=21, commitment=ezkl.PyCommitments.KZG)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -374,7 +374,7 @@
|
||||
"sol_code_path = os.path.join(\"Verifier.sol\")\n",
|
||||
"abi_path = os.path.join(\"Verifier_ABI.json\")\n",
|
||||
"\n",
|
||||
"res = ezkl.create_evm_verifier_aggr(\n",
|
||||
"res = await ezkl.create_evm_verifier_aggr(\n",
|
||||
" [settings_path],\n",
|
||||
" aggregate_vk_path,\n",
|
||||
" sol_code_path,\n",
|
||||
@@ -404,4 +404,4 @@
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
}
|
||||
@@ -157,6 +157,7 @@
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "b78d3cbf",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
@@ -170,7 +171,7 @@
|
||||
"json.dump(data, open(cal_path, 'w'))\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
|
||||
"await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -204,7 +205,7 @@
|
||||
"source": [
|
||||
"# now generate the witness file \n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
@@ -298,7 +299,7 @@
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.9.15"
|
||||
"version": "3.12.3"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
|
||||
@@ -169,7 +169,7 @@
|
||||
"json.dump(data, open(cal_path, 'w'))\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
|
||||
"await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -191,7 +191,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# srs path\n",
|
||||
"res = ezkl.get_srs( settings_path)"
|
||||
"res = await ezkl.get_srs( settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -203,7 +203,7 @@
|
||||
"source": [
|
||||
"# now generate the witness file \n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
|
||||
@@ -170,7 +170,7 @@
|
||||
"json.dump(data, open(cal_path, 'w'))\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
|
||||
"await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -192,7 +192,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# srs path\n",
|
||||
"res = ezkl.get_srs( settings_path)"
|
||||
"res = await ezkl.get_srs( settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -204,7 +204,7 @@
|
||||
"source": [
|
||||
"# now generate the witness file \n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
|
||||
@@ -149,7 +149,7 @@
|
||||
"json.dump(data, open(cal_path, 'w'))\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
|
||||
"await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -171,7 +171,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# srs path\n",
|
||||
"res = ezkl.get_srs( settings_path)"
|
||||
"res = await ezkl.get_srs( settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -183,7 +183,7 @@
|
||||
"source": [
|
||||
"# now generate the witness file \n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
@@ -282,4 +282,4 @@
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
}
|
||||
@@ -250,7 +250,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# srs path\n",
|
||||
"res = ezkl.get_srs( settings_path)"
|
||||
"res = await ezkl.get_srs( settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -297,7 +297,7 @@
|
||||
"\n",
|
||||
"witness_path = \"witness.json\"\n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path, vk_path)\n",
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path, vk_path)\n",
|
||||
"assert os.path.isfile(witness_path)\n",
|
||||
"\n",
|
||||
"# we force the output to be 1 this corresponds to the solvency test being true -- and we set this to a fixed vis output\n",
|
||||
@@ -411,7 +411,7 @@
|
||||
"source": [
|
||||
"# now generate the witness file\n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path_faulty, compiled_model_path, witness_path, vk_path)\n",
|
||||
"res = await ezkl.gen_witness(data_path_faulty, compiled_model_path, witness_path, vk_path)\n",
|
||||
"assert os.path.isfile(witness_path)\n",
|
||||
"\n",
|
||||
"# we force the output to be 1 this corresponds to the solvency test being true -- and we set this to a fixed vis output\n",
|
||||
|
||||
@@ -167,7 +167,7 @@
|
||||
"res = ezkl.gen_settings(model_path, settings_path)\n",
|
||||
"assert res == True\n",
|
||||
"\n",
|
||||
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
|
||||
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
|
||||
"assert res == True"
|
||||
]
|
||||
},
|
||||
@@ -187,7 +187,7 @@
|
||||
"json.dump(data, open(cal_path, 'w'))\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
|
||||
"await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -209,7 +209,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# srs path\n",
|
||||
"res = ezkl.get_srs( settings_path)"
|
||||
"res = await ezkl.get_srs( settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -221,7 +221,7 @@
|
||||
"source": [
|
||||
"# now generate the witness file \n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
@@ -320,4 +320,4 @@
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
}
|
||||
@@ -180,7 +180,7 @@
|
||||
"json.dump(data, open(cal_path, 'w'))\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
|
||||
"await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -202,7 +202,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# srs path\n",
|
||||
"res = ezkl.get_srs( settings_path)"
|
||||
"res = await ezkl.get_srs( settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -214,7 +214,7 @@
|
||||
"source": [
|
||||
"# now generate the witness file \n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
@@ -420,7 +420,7 @@
|
||||
"res = ezkl.gen_settings(model_path, settings_path)\n",
|
||||
"assert res == True\n",
|
||||
"\n",
|
||||
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
|
||||
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
|
||||
"assert res == True"
|
||||
]
|
||||
}
|
||||
@@ -446,4 +446,4 @@
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
}
|
||||
@@ -13,7 +13,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
@@ -57,7 +57,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
@@ -119,7 +119,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 3,
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
@@ -163,7 +163,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
@@ -217,7 +217,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 6,
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
@@ -637,7 +637,7 @@
|
||||
"json.dump(data, open(cal_path, 'w'))\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\", scales = [11])"
|
||||
"await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\", scales = [11])"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -646,7 +646,7 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"ezkl.get_srs( settings_path)"
|
||||
"await ezkl.get_srs( settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -683,7 +683,7 @@
|
||||
" data = json.load(f)\n",
|
||||
" print(len(data['input_data'][0]))\n",
|
||||
"\n",
|
||||
"ezkl.gen_witness(data_path, compiled_model_path, witness_path)"
|
||||
"await ezkl.gen_witness(data_path, compiled_model_path, witness_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -525,7 +525,7 @@
|
||||
"json.dump(data, open(cal_path, 'w'))\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\", scales = [4])"
|
||||
"await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\", scales = [4])"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -572,7 +572,7 @@
|
||||
" data = json.load(f)\n",
|
||||
" print(len(data['input_data'][0]))\n",
|
||||
"\n",
|
||||
"ezkl.gen_witness(data_path, compiled_model_path, witness_path)"
|
||||
"await ezkl.gen_witness(data_path, compiled_model_path, witness_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
|
||||
@@ -24,7 +24,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"execution_count": 1,
|
||||
"metadata": {
|
||||
"id": "9Byiv2Nc2MsK"
|
||||
},
|
||||
@@ -49,7 +49,11 @@
|
||||
"import pandas as pd\n",
|
||||
"import requests\n",
|
||||
"import json\n",
|
||||
"import os"
|
||||
"import os\n",
|
||||
"\n",
|
||||
"import logging\n",
|
||||
"\n",
|
||||
"logging.basicConfig(level=logging.INFO)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -63,7 +67,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"execution_count": 2,
|
||||
"metadata": {
|
||||
"colab": {
|
||||
"base_uri": "https://localhost:8080/"
|
||||
@@ -71,7 +75,15 @@
|
||||
"id": "x1vl9ZXF3EEW",
|
||||
"outputId": "bda21d02-fe5f-4fb2-8106-f51a8e2e67aa"
|
||||
},
|
||||
"outputs": [],
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"cpu\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"from torch import nn\n",
|
||||
"import torch\n",
|
||||
@@ -133,7 +145,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"execution_count": 3,
|
||||
"metadata": {
|
||||
"colab": {
|
||||
"base_uri": "https://localhost:8080/"
|
||||
@@ -141,7 +153,18 @@
|
||||
"id": "6RAMplxk5xPk",
|
||||
"outputId": "bd2158fe-0c00-44fd-e632-6a3f70cdb7c9"
|
||||
},
|
||||
"outputs": [],
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"1715422870\n",
|
||||
"1714818070\n",
|
||||
"https://api.coingecko.com/api/v3/coins/ethereum/market_chart/range?vs_currency=usd&from=1714818070&to=1715422870\n",
|
||||
"<Response [200]>\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"\n",
|
||||
"def get_url(coin, currency, start, end):\n",
|
||||
@@ -174,7 +197,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"execution_count": 4,
|
||||
"metadata": {
|
||||
"colab": {
|
||||
"base_uri": "https://localhost:8080/",
|
||||
@@ -183,7 +206,115 @@
|
||||
"id": "WSj1Uxln65vf",
|
||||
"outputId": "51422d71-9680-4b51-c4df-e400d20f988b"
|
||||
},
|
||||
"outputs": [],
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/html": [
|
||||
"<div>\n",
|
||||
"<style scoped>\n",
|
||||
" .dataframe tbody tr th:only-of-type {\n",
|
||||
" vertical-align: middle;\n",
|
||||
" }\n",
|
||||
"\n",
|
||||
" .dataframe tbody tr th {\n",
|
||||
" vertical-align: top;\n",
|
||||
" }\n",
|
||||
"\n",
|
||||
" .dataframe thead th {\n",
|
||||
" text-align: right;\n",
|
||||
" }\n",
|
||||
"</style>\n",
|
||||
"<table border=\"1\" class=\"dataframe\">\n",
|
||||
" <thead>\n",
|
||||
" <tr style=\"text-align: right;\">\n",
|
||||
" <th></th>\n",
|
||||
" <th>time</th>\n",
|
||||
" <th>prices</th>\n",
|
||||
" </tr>\n",
|
||||
" </thead>\n",
|
||||
" <tbody>\n",
|
||||
" <tr>\n",
|
||||
" <th>0</th>\n",
|
||||
" <td>1714820485367</td>\n",
|
||||
" <td>3146.785806</td>\n",
|
||||
" </tr>\n",
|
||||
" <tr>\n",
|
||||
" <th>1</th>\n",
|
||||
" <td>1714824033868</td>\n",
|
||||
" <td>3127.968728</td>\n",
|
||||
" </tr>\n",
|
||||
" <tr>\n",
|
||||
" <th>2</th>\n",
|
||||
" <td>1714828058243</td>\n",
|
||||
" <td>3156.141681</td>\n",
|
||||
" </tr>\n",
|
||||
" <tr>\n",
|
||||
" <th>3</th>\n",
|
||||
" <td>1714831650751</td>\n",
|
||||
" <td>3124.834064</td>\n",
|
||||
" </tr>\n",
|
||||
" <tr>\n",
|
||||
" <th>4</th>\n",
|
||||
" <td>1714834972229</td>\n",
|
||||
" <td>3133.115333</td>\n",
|
||||
" </tr>\n",
|
||||
" <tr>\n",
|
||||
" <th>...</th>\n",
|
||||
" <td>...</td>\n",
|
||||
" <td>...</td>\n",
|
||||
" </tr>\n",
|
||||
" <tr>\n",
|
||||
" <th>163</th>\n",
|
||||
" <td>1715407579346</td>\n",
|
||||
" <td>2918.049749</td>\n",
|
||||
" </tr>\n",
|
||||
" <tr>\n",
|
||||
" <th>164</th>\n",
|
||||
" <td>1715411090715</td>\n",
|
||||
" <td>2920.330834</td>\n",
|
||||
" </tr>\n",
|
||||
" <tr>\n",
|
||||
" <th>165</th>\n",
|
||||
" <td>1715414554830</td>\n",
|
||||
" <td>2923.986611</td>\n",
|
||||
" </tr>\n",
|
||||
" <tr>\n",
|
||||
" <th>166</th>\n",
|
||||
" <td>1715418419843</td>\n",
|
||||
" <td>2910.537671</td>\n",
|
||||
" </tr>\n",
|
||||
" <tr>\n",
|
||||
" <th>167</th>\n",
|
||||
" <td>1715421675338</td>\n",
|
||||
" <td>2907.702307</td>\n",
|
||||
" </tr>\n",
|
||||
" </tbody>\n",
|
||||
"</table>\n",
|
||||
"<p>168 rows × 2 columns</p>\n",
|
||||
"</div>"
|
||||
],
|
||||
"text/plain": [
|
||||
" time prices\n",
|
||||
"0 1714820485367 3146.785806\n",
|
||||
"1 1714824033868 3127.968728\n",
|
||||
"2 1714828058243 3156.141681\n",
|
||||
"3 1714831650751 3124.834064\n",
|
||||
"4 1714834972229 3133.115333\n",
|
||||
".. ... ...\n",
|
||||
"163 1715407579346 2918.049749\n",
|
||||
"164 1715411090715 2920.330834\n",
|
||||
"165 1715414554830 2923.986611\n",
|
||||
"166 1715418419843 2910.537671\n",
|
||||
"167 1715421675338 2907.702307\n",
|
||||
"\n",
|
||||
"[168 rows x 2 columns]"
|
||||
]
|
||||
},
|
||||
"execution_count": 4,
|
||||
"metadata": {},
|
||||
"output_type": "execute_result"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"df = pd.DataFrame(new_data)\n",
|
||||
"df\n"
|
||||
@@ -200,7 +331,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"execution_count": 5,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
@@ -217,7 +348,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"execution_count": 6,
|
||||
"metadata": {
|
||||
"colab": {
|
||||
"base_uri": "https://localhost:8080/"
|
||||
@@ -225,7 +356,98 @@
|
||||
"id": "4MmE9SX66_Il",
|
||||
"outputId": "16403639-66a4-4280-ac7f-6966b75de5a3"
|
||||
},
|
||||
"outputs": [],
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"INFO:ezkl.execute:SRS already exists at that path\n",
|
||||
"INFO:ezkl.execute:num calibration batches: 1\n",
|
||||
"INFO:ezkl.execute:read 16777476 bytes from file (vector of len = 16777476)\n",
|
||||
"WARNING:ezkl.execute:\n",
|
||||
"\n",
|
||||
" <------------- Numerical Fidelity Report (input_scale: 4, param_scale: 4, scale_input_multiplier: 10) ------------->\n",
|
||||
"\n",
|
||||
"+------------+--------------+-----------+-----------+----------------+------------------+---------------+---------------+--------------------+--------------------+------------------------+\n",
|
||||
"| mean_error | median_error | max_error | min_error | mean_abs_error | median_abs_error | max_abs_error | min_abs_error | mean_squared_error | mean_percent_error | mean_abs_percent_error |\n",
|
||||
"+------------+--------------+-----------+-----------+----------------+------------------+---------------+---------------+--------------------+--------------------+------------------------+\n",
|
||||
"| -727.9929 | -727.9929 | -727.9929 | -727.9929 | 727.9929 | 727.9929 | 727.9929 | 727.9929 | 529973.7 | -0.24999964 | 0.24999964 |\n",
|
||||
"+------------+--------------+-----------+-----------+----------------+------------------+---------------+---------------+--------------------+--------------------+------------------------+\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"INFO:ezkl.execute:file hash: 41509f380362a8d14401c5ae92073154922fe23e45459ce6f696f58607655db7\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"{\n",
|
||||
" \"run_args\": {\n",
|
||||
" \"tolerance\": {\n",
|
||||
" \"val\": 0.0,\n",
|
||||
" \"scale\": 1.0\n",
|
||||
" },\n",
|
||||
" \"input_scale\": 4,\n",
|
||||
" \"param_scale\": 4,\n",
|
||||
" \"scale_rebase_multiplier\": 10,\n",
|
||||
" \"lookup_range\": [\n",
|
||||
" 0,\n",
|
||||
" 0\n",
|
||||
" ],\n",
|
||||
" \"logrows\": 6,\n",
|
||||
" \"num_inner_cols\": 2,\n",
|
||||
" \"variables\": [\n",
|
||||
" [\n",
|
||||
" \"batch_size\",\n",
|
||||
" 1\n",
|
||||
" ]\n",
|
||||
" ],\n",
|
||||
" \"input_visibility\": \"Private\",\n",
|
||||
" \"output_visibility\": \"Public\",\n",
|
||||
" \"param_visibility\": \"Private\",\n",
|
||||
" \"div_rebasing\": false,\n",
|
||||
" \"rebase_frac_zero_constants\": false,\n",
|
||||
" \"check_mode\": \"UNSAFE\",\n",
|
||||
" \"commitment\": \"KZG\"\n",
|
||||
" },\n",
|
||||
" \"num_rows\": 21,\n",
|
||||
" \"total_assignments\": 42,\n",
|
||||
" \"total_const_size\": 0,\n",
|
||||
" \"total_dynamic_col_size\": 0,\n",
|
||||
" \"num_dynamic_lookups\": 0,\n",
|
||||
" \"num_shuffles\": 0,\n",
|
||||
" \"total_shuffle_col_size\": 0,\n",
|
||||
" \"model_instance_shapes\": [\n",
|
||||
" [\n",
|
||||
" 1\n",
|
||||
" ]\n",
|
||||
" ],\n",
|
||||
" \"model_output_scales\": [\n",
|
||||
" 8\n",
|
||||
" ],\n",
|
||||
" \"model_input_scales\": [\n",
|
||||
" 4\n",
|
||||
" ],\n",
|
||||
" \"module_sizes\": {\n",
|
||||
" \"polycommit\": [],\n",
|
||||
" \"poseidon\": [\n",
|
||||
" 0,\n",
|
||||
" [\n",
|
||||
" 0\n",
|
||||
" ]\n",
|
||||
" ]\n",
|
||||
" },\n",
|
||||
" \"required_lookups\": [],\n",
|
||||
" \"required_range_checks\": [],\n",
|
||||
" \"check_mode\": \"UNSAFE\",\n",
|
||||
" \"version\": \"0.0.0\",\n",
|
||||
" \"num_blinding_factors\": null,\n",
|
||||
" \"timestamp\": 1715422871248\n",
|
||||
"}\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"# generate settings\n",
|
||||
"onnx_filename = os.path.join('lol.onnx')\n",
|
||||
@@ -236,9 +458,9 @@
|
||||
"\n",
|
||||
"\n",
|
||||
"ezkl.gen_settings(onnx_filename, settings_filename)\n",
|
||||
"ezkl.calibrate_settings(\n",
|
||||
"await ezkl.calibrate_settings(\n",
|
||||
" input_filename, onnx_filename, settings_filename, \"resources\", scales = [4])\n",
|
||||
"res = ezkl.get_srs(settings_filename)\n",
|
||||
"res = await ezkl.get_srs(settings_filename)\n",
|
||||
"ezkl.compile_circuit(onnx_filename, compiled_filename, settings_filename)\n",
|
||||
"\n",
|
||||
"# show the settings.json\n",
|
||||
@@ -259,7 +481,24 @@
|
||||
"metadata": {
|
||||
"id": "fULvvnK7_CMb"
|
||||
},
|
||||
"outputs": [],
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"INFO:ezkl.pfsys.srs:loading srs from \"/Users/dante/.ezkl/srs/kzg6.srs\"\n",
|
||||
"INFO:ezkl.execute:downsizing params to 6 logrows\n",
|
||||
"INFO:ezkl.graph.model:model layout...\n",
|
||||
"INFO:ezkl.pfsys:VK took 0.8\n",
|
||||
"INFO:ezkl.graph.model:model layout...\n",
|
||||
"INFO:ezkl.pfsys:PK took 0.2\n",
|
||||
"INFO:ezkl.pfsys:saving verification key 💾\n",
|
||||
"INFO:ezkl.pfsys:done saving verification key ✅\n",
|
||||
"INFO:ezkl.pfsys:saving proving key 💾\n",
|
||||
"INFO:ezkl.pfsys:done saving proving key ✅\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"pk_path = os.path.join('test.pk')\n",
|
||||
"vk_path = os.path.join('test.vk')\n",
|
||||
@@ -281,20 +520,20 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"execution_count": 8,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"\n",
|
||||
"witness_path = \"witness.json\"\n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(input_filename, compiled_filename, witness_path)\n",
|
||||
"res = await ezkl.gen_witness(input_filename, compiled_filename, witness_path)\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"execution_count": 9,
|
||||
"metadata": {
|
||||
"colab": {
|
||||
"base_uri": "https://localhost:8080/"
|
||||
@@ -302,7 +541,34 @@
|
||||
"id": "Oog3j6Kd-Wed",
|
||||
"outputId": "5839d0c1-5b43-476e-c2f8-6707de562260"
|
||||
},
|
||||
"outputs": [],
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"INFO:ezkl.pfsys:loading proving key from \"test.pk\"\n",
|
||||
"INFO:ezkl.pfsys:done loading proving key ✅\n",
|
||||
"INFO:ezkl.pfsys.srs:loading srs from \"/Users/dante/.ezkl/srs/kzg6.srs\"\n",
|
||||
"INFO:ezkl.execute:downsizing params to 6 logrows\n",
|
||||
"INFO:ezkl.pfsys:proof started...\n",
|
||||
"INFO:ezkl.graph.model:model layout...\n",
|
||||
"INFO:ezkl.pfsys:proof took 0.15\n",
|
||||
"INFO:ezkl.pfsys.srs:loading srs from \"/Users/dante/.ezkl/srs/kzg6.srs\"\n",
|
||||
"INFO:ezkl.execute:downsizing params to 6 logrows\n",
|
||||
"INFO:ezkl.pfsys:loading verification key from \"test.vk\"\n",
|
||||
"INFO:ezkl.pfsys:done loading verification key ✅\n",
|
||||
"INFO:ezkl.execute:verify took 0.2\n",
|
||||
"INFO:ezkl.execute:verified: true\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"verified\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"# prove the zk circuit\n",
|
||||
"# GENERATE A PROOF\n",
|
||||
@@ -351,7 +617,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"execution_count": 10,
|
||||
"metadata": {
|
||||
"colab": {
|
||||
"base_uri": "https://localhost:8080/",
|
||||
@@ -360,7 +626,26 @@
|
||||
"id": "fodkNgwS70FM",
|
||||
"outputId": "827b5efd-f74f-44de-c114-861b3a86daf2"
|
||||
},
|
||||
"outputs": [],
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"INFO:ezkl.pfsys.srs:loading srs from \"/Users/dante/.ezkl/srs/kzg6.srs\"\n",
|
||||
"INFO:ezkl.execute:downsizing params to 6 logrows\n",
|
||||
"INFO:ezkl.pfsys:loading verification key from \"test.vk\"\n",
|
||||
"INFO:ezkl.pfsys:done loading verification key ✅\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"test.vk\n",
|
||||
"settings.json\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"# first we need to create evm verifier\n",
|
||||
"print(vk_path)\n",
|
||||
@@ -370,7 +655,7 @@
|
||||
"abi_path = 'test.abi'\n",
|
||||
"sol_code_path = 'test.sol'\n",
|
||||
"\n",
|
||||
"res = ezkl.create_evm_verifier(\n",
|
||||
"res = await ezkl.create_evm_verifier(\n",
|
||||
" vk_path,\n",
|
||||
" settings_filename,\n",
|
||||
" sol_code_path,\n",
|
||||
@@ -381,9 +666,18 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"execution_count": 11,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"INFO:ezkl.eth:using chain 31337\n",
|
||||
"INFO:ezkl.execute:Contract deployed at: 0x998abeb3e57409262ae5b751f60747921b33613e\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"# Make sure anvil is running locally first\n",
|
||||
"# run with $ anvil -p 3030\n",
|
||||
@@ -391,8 +685,9 @@
|
||||
"import json\n",
|
||||
"\n",
|
||||
"address_path = os.path.join(\"address.json\")\n",
|
||||
"\n",
|
||||
"res = ezkl.deploy_evm(\n",
|
||||
"sol_code_path = 'test.sol'\n",
|
||||
"# await\n",
|
||||
"res = await ezkl.deploy_evm(\n",
|
||||
" address_path,\n",
|
||||
" sol_code_path,\n",
|
||||
" 'http://127.0.0.1:3030'\n",
|
||||
@@ -406,16 +701,26 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"execution_count": 12,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stderr",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"INFO:ezkl.eth:using chain 31337\n",
|
||||
"INFO:ezkl.eth:estimated verify gas cost: 399775\n",
|
||||
"INFO:ezkl.execute:Solidity verification result: true\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"# read the address from addr_path\n",
|
||||
"addr = None\n",
|
||||
"with open(address_path, 'r') as f:\n",
|
||||
" addr = f.read()\n",
|
||||
"\n",
|
||||
"res = ezkl.verify_evm(\n",
|
||||
"res = await ezkl.verify_evm(\n",
|
||||
" addr,\n",
|
||||
" proof_path,\n",
|
||||
" \"http://127.0.0.1:3030\"\n",
|
||||
@@ -451,7 +756,7 @@
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.9.15"
|
||||
"version": "3.12.2"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
|
||||
1
examples/notebooks/verifier_abi.json
Normal file
@@ -0,0 +1 @@
|
||||
[{"type":"function","name":"verifyProof","inputs":[{"name":"proof","type":"bytes","internalType":"bytes"},{"name":"instances","type":"uint256[]","internalType":"uint256[]"}],"outputs":[{"name":"","type":"bool","internalType":"bool"}],"stateMutability":"nonpayable"}]
|
||||
@@ -629,7 +629,7 @@
|
||||
"source": [
|
||||
"\n",
|
||||
"\n",
|
||||
"res = ezkl.calibrate_settings(val_data, model_path, settings_path, \"resources\", scales = [4])\n",
|
||||
"res = await ezkl.calibrate_settings(val_data, model_path, settings_path, \"resources\", scales = [4])\n",
|
||||
"assert res == True\n",
|
||||
"print(\"verified\")\n"
|
||||
]
|
||||
@@ -660,7 +660,7 @@
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"res = ezkl.get_srs(settings_path)"
|
||||
"res = await ezkl.get_srs(settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -680,7 +680,7 @@
|
||||
"\n",
|
||||
"witness_path = \"witness.json\"\n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
@@ -807,7 +807,7 @@
|
||||
"settings_path = os.path.join('settings.json')\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"res = ezkl.create_evm_verifier(\n",
|
||||
"res = await ezkl.create_evm_verifier(\n",
|
||||
" vk_path,\n",
|
||||
" \n",
|
||||
" settings_path,\n",
|
||||
@@ -847,7 +847,7 @@
|
||||
"\n",
|
||||
"address_path = os.path.join(\"address.json\")\n",
|
||||
"\n",
|
||||
"res = ezkl.deploy_evm(\n",
|
||||
"res = await ezkl.deploy_evm(\n",
|
||||
" address_path,\n",
|
||||
" sol_code_path,\n",
|
||||
" 'http://127.0.0.1:3030'\n",
|
||||
@@ -868,7 +868,7 @@
|
||||
"# make sure anvil is running locally\n",
|
||||
"# $ anvil -p 3030\n",
|
||||
"\n",
|
||||
"res = ezkl.verify_evm(\n",
|
||||
"res = await ezkl.verify_evm(\n",
|
||||
" addr,\n",
|
||||
" proof_path,\n",
|
||||
" \"http://127.0.0.1:3030\"\n",
|
||||
@@ -905,4 +905,4 @@
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
}
|
||||
@@ -242,6 +242,7 @@
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "2007dc77",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
@@ -257,6 +258,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "ab993958",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"As we use Halo2 with KZG-commitments we need an SRS string from (preferably) a multi-party trusted setup ceremony. For an overview of the procedures for such a ceremony check out [this page](https://blog.ethereum.org/2023/01/16/announcing-kzg-ceremony). The `get_srs` command retrieves a correctly sized SRS given the calibrated settings file from [here](https://github.com/han0110/halo2-kzg-srs). \n",
|
||||
@@ -272,7 +274,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# srs path\n",
|
||||
"res = ezkl.get_srs( settings_path)"
|
||||
"res = await ezkl.get_srs( settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -284,12 +286,13 @@
|
||||
"source": [
|
||||
"# now generate the witness file \n",
|
||||
"\n",
|
||||
"witness = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"witness = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "ad58432e",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Here we setup verifying and proving keys for the circuit. As the name suggests the proving key is needed for ... proving and the verifying key is needed for ... verifying. "
|
||||
@@ -317,6 +320,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "1746c8d1",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"We can now create an EVM verifier contract from our circuit. This contract will be deployed to the chain we are using. In this case we are using a local anvil instance."
|
||||
@@ -325,15 +329,15 @@
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "d1920c0f",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"abi_path = 'test.abi'\n",
|
||||
"sol_code_path = 'test.sol'\n",
|
||||
"\n",
|
||||
"res = ezkl.create_evm_verifier(\n",
|
||||
"res = await ezkl.create_evm_verifier(\n",
|
||||
" vk_path,\n",
|
||||
" \n",
|
||||
" settings_path,\n",
|
||||
" sol_code_path,\n",
|
||||
" abi_path,\n",
|
||||
@@ -344,6 +348,7 @@
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "0fd7f22b",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
@@ -351,7 +356,7 @@
|
||||
"\n",
|
||||
"addr_path_verifier = \"addr_verifier.txt\"\n",
|
||||
"\n",
|
||||
"res = ezkl.deploy_evm(\n",
|
||||
"res = await ezkl.deploy_evm(\n",
|
||||
" addr_path_verifier,\n",
|
||||
" sol_code_path,\n",
|
||||
" 'http://127.0.0.1:3030'\n",
|
||||
@@ -362,6 +367,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "9c0dffab",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"With the vanilla verifier deployed, we can now create the data attestation contract, which will read in the instances from the calldata to the verifier, attest to them, call the verifier and then return the result. \n",
|
||||
@@ -371,6 +377,7 @@
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "cc888848",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
@@ -378,6 +385,7 @@
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "c2db14d7",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
@@ -385,7 +393,7 @@
|
||||
"sol_code_path = 'test.sol'\n",
|
||||
"input_path = 'input.json'\n",
|
||||
"\n",
|
||||
"res = ezkl.create_evm_data_attestation(\n",
|
||||
"res = await ezkl.create_evm_data_attestation(\n",
|
||||
" input_path,\n",
|
||||
" settings_path,\n",
|
||||
" sol_code_path,\n",
|
||||
@@ -396,12 +404,13 @@
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"id": "5a018ba6",
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"addr_path_da = \"addr_da.txt\"\n",
|
||||
"\n",
|
||||
"res = ezkl.deploy_da_evm(\n",
|
||||
"res = await ezkl.deploy_da_evm(\n",
|
||||
" addr_path_da,\n",
|
||||
" input_path,\n",
|
||||
" settings_path,\n",
|
||||
@@ -412,6 +421,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "2adad845",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Now we can pull in the data from the contract and calculate a new set of coordinates. We then rotate the world by 1 transform and submit the proof to the contract. The contract could then update the world rotation (logic not inserted here). For demo purposes we do this repeatedly, rotating the world by 1 transform. "
|
||||
@@ -444,6 +454,7 @@
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"id": "90eda56e",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"Call the view only verify method on the contract to verify the proof. Since it is a view function this is safe to use in production since you don't have to pass your private key."
|
||||
@@ -528,7 +539,7 @@
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.9.15"
|
||||
"version": "3.12.2"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
|
||||
@@ -193,7 +193,7 @@
|
||||
"with open(cal_path, \"w\") as f:\n",
|
||||
" json.dump(cal_data, f)\n",
|
||||
"\n",
|
||||
"res = ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
|
||||
"res = await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -215,7 +215,7 @@
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# srs path\n",
|
||||
"res = ezkl.get_srs( settings_path)"
|
||||
"res = await ezkl.get_srs( settings_path)"
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -227,7 +227,7 @@
|
||||
"source": [
|
||||
"# now generate the witness file \n",
|
||||
"\n",
|
||||
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
|
||||
"assert os.path.isfile(witness_path)"
|
||||
]
|
||||
},
|
||||
@@ -346,4 +346,4 @@
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 5
|
||||
}
|
||||
}
|
||||
9
examples/onnx/lstm_medium/input.json
Normal file
@@ -0,0 +1,9 @@
|
||||
{
|
||||
"input_data": [
|
||||
[
|
||||
1.514470100402832, 1.519423007965088, 1.5182757377624512,
|
||||
1.5262789726257324, 1.5298409461975098
|
||||
]
|
||||
],
|
||||
"output_data": [[-0.1862019]]
|
||||
}
|
||||
BIN
examples/onnx/lstm_medium/network.onnx
Normal file
Binary file not shown.
@@ -104,5 +104,5 @@ json.dump(data, open("input.json", 'w'))
|
||||
# ezkl.gen_settings("network.onnx", "settings.json")
|
||||
|
||||
# !RUST_LOG = full
|
||||
# res = ezkl.calibrate_settings(
|
||||
# res = await ezkl.calibrate_settings(
|
||||
# "input.json", "network.onnx", "settings.json", "resources")
|
||||
|
||||
@@ -126,7 +126,6 @@ elif [ "$PLATFORM" == "macos" ]; then
|
||||
|
||||
echo "Cleaning up"
|
||||
rm "$EZKL_DIR/build-artifacts.ezkl-macos-aarch64.tar.gz"
|
||||
|
||||
else
|
||||
JSON_RESPONSE=$(curl -s "$RELEASE_URL")
|
||||
FILE_URL=$(echo "$JSON_RESPONSE" | grep -o 'https://github.com[^"]*' | grep "build-artifacts.ezkl-macos.tar.gz")
|
||||
@@ -143,7 +142,7 @@ elif [ "$PLATFORM" == "macos" ]; then
|
||||
fi
|
||||
|
||||
elif [ "$PLATFORM" == "linux" ]; then
|
||||
if [ "${ARCHITECTURE}" = "amd64" ]; then
|
||||
if [ "$ARCHITECTURE" == "amd64" ]; then
|
||||
JSON_RESPONSE=$(curl -s "$RELEASE_URL")
|
||||
FILE_URL=$(echo "$JSON_RESPONSE" | grep -o 'https://github.com[^"]*' | grep "build-artifacts.ezkl-linux-gnu.tar.gz")
|
||||
|
||||
@@ -155,9 +154,20 @@ elif [ "$PLATFORM" == "linux" ]; then
|
||||
|
||||
echo "Cleaning up"
|
||||
rm "$EZKL_DIR/build-artifacts.ezkl-linux-gnu.tar.gz"
|
||||
elif [ "$ARCHITECTURE" == "aarch64" ]; then
|
||||
JSON_RESPONSE=$(curl -s "$RELEASE_URL")
|
||||
FILE_URL=$(echo "$JSON_RESPONSE" | grep -o 'https://github.com[^"]*' | grep "build-artifacts.ezkl-linux-aarch64.tar.gz")
|
||||
|
||||
echo "Downloading package"
|
||||
curl -L "$FILE_URL" -o "$EZKL_DIR/build-artifacts.ezkl-linux-aarch64.tar.gz"
|
||||
|
||||
echo "Unpacking package"
|
||||
tar -xzf "$EZKL_DIR/build-artifacts.ezkl-linux-aarch64.tar.gz" -C "$EZKL_DIR"
|
||||
|
||||
echo "Cleaning up"
|
||||
rm "$EZKL_DIR/build-artifacts.ezkl-linux-aarch64.tar.gz"
|
||||
else
|
||||
echo "ARM architectures are not supported for Linux at the moment. If you would need support for the ARM architectures on linux please submit an issue https://github.com/zkonduit/ezkl/issues/new/choose"
|
||||
echo "Non aarch ARM architectures are not supported for Linux at the moment. If you would need support for the ARM architectures on linux please submit an issue https://github.com/zkonduit/ezkl/issues/new/choose"
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
|
||||
@@ -8,6 +8,7 @@ addopts = "-rfEX -p pytester --strict-markers"
|
||||
testpaths = [
|
||||
"tests/python/*_tests.py",
|
||||
]
|
||||
asyncio_mode = "auto"
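The asyncio_mode = "auto" setting makes pytest-asyncio collect and run plain async def tests without an explicit @pytest.mark.asyncio marker, which is what lets the Python test-suite await the now-asynchronous ezkl calls. A minimal sketch of such a test (the file name and paths here are illustrative, not taken from the actual suite):

# tests/python/example_async_test.py -- illustrative only
import ezkl

async def test_calibrate_settings(tmp_path):
    settings = str(tmp_path / "settings.json")
    assert ezkl.gen_settings("network.onnx", settings)
    # picked up by pytest-asyncio's auto mode; no marker or explicit event loop needed
    assert await ezkl.calibrate_settings("input.json", "network.onnx", settings, "resources")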
|
||||
|
||||
[project]
|
||||
name = "ezkl"
|
||||
|
||||
@@ -1,7 +1,7 @@
// ignore file if compiling for wasm

#[cfg(not(target_arch = "wasm32"))]
use clap::Parser;
use clap::{CommandFactory, Parser};
#[cfg(not(target_arch = "wasm32"))]
use colored_json::ToColoredJson;
#[cfg(not(target_arch = "wasm32"))]
@@ -11,7 +11,7 @@ use ezkl::execute::run;
#[cfg(not(target_arch = "wasm32"))]
use ezkl::logger::init_logger;
#[cfg(not(target_arch = "wasm32"))]
use log::{debug, error, info};
use log::{error, info};
#[cfg(not(any(target_arch = "wasm32", feature = "no-banner")))]
use rand::prelude::SliceRandom;
#[cfg(not(target_arch = "wasm32"))]
@@ -24,22 +24,30 @@ use std::error::Error;
#[cfg(not(target_arch = "wasm32"))]
pub async fn main() -> Result<(), Box<dyn Error>> {
let args = Cli::parse();
init_logger();
#[cfg(not(any(target_arch = "wasm32", feature = "no-banner")))]
banner();
#[cfg(feature = "icicle")]
if env::var("ENABLE_ICICLE_GPU").is_ok() {
info!("Running with ICICLE GPU");

if let Some(generator) = args.generator {
ezkl::commands::print_completions(generator, &mut Cli::command());
Ok(())
} else if let Some(command) = args.command {
init_logger();
#[cfg(not(any(target_arch = "wasm32", feature = "no-banner")))]
banner();
#[cfg(feature = "icicle")]
if env::var("ENABLE_ICICLE_GPU").is_ok() {
info!("Running with ICICLE GPU");
} else {
info!("Running with CPU");
}
info!("command: \n {}", &command.as_json().to_colored_json_auto()?);
let res = run(command).await;
match &res {
Ok(_) => info!("succeeded"),
Err(e) => error!("failed: {}", e),
};
res.map(|_| ())
} else {
info!("Running with CPU");
Err("No command provided".into())
}
debug!("command: \n {}", &args.as_json()?.to_colored_json_auto()?);
let res = run(args.command).await;
match &res {
Ok(_) => info!("succeeded"),
Err(e) => error!("failed: {}", e),
};
res.map(|_| ())
}

#[cfg(target_arch = "wasm32")]
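A minimal, self-contained sketch of the shell-completion path this hunk adds, using the same clap_complete::generate call; DemoCli is a stand-in and not ezkl's Cli struct.

use clap::{CommandFactory, Parser};
use clap_complete::{generate, Generator, Shell};

/// Minimal stand-in for a CLI that can emit shell completions.
#[derive(Parser)]
struct DemoCli {
    /// If provided, outputs the completion file for the given shell.
    #[clap(long = "generate", value_parser)]
    generator: Option<Shell>,
}

// Same shape as ezkl::commands::print_completions in the hunk above.
fn print_completions<G: Generator>(gen: G, cmd: &mut clap::Command) {
    generate(gen, cmd, cmd.get_name().to_string(), &mut std::io::stdout());
}

fn main() {
    let args = DemoCli::parse();
    if let Some(gen) = args.generator {
        // e.g. `demo --generate bash > demo.bash`
        print_completions(gen, &mut DemoCli::command());
    } else {
        eprintln!("no --generate flag given; normal command dispatch would run here");
    }
}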
@@ -80,14 +80,18 @@ impl ToFlags for CheckMode {

impl From<String> for CheckMode {
fn from(value: String) -> Self {
match value.to_lowercase().as_str() {
"safe" => CheckMode::SAFE,
"unsafe" => CheckMode::UNSAFE,
_ => {
log::error!("Invalid value for CheckMode");
log::warn!("defaulting to SAFE");
CheckMode::SAFE
}
Self::from_str(value.as_str()).unwrap()
}
}

impl FromStr for CheckMode {
type Err = String;

fn from_str(s: &str) -> Result<Self, Self::Err> {
match s.to_lowercase().as_str() {
"safe" => Ok(CheckMode::SAFE),
"unsafe" => Ok(CheckMode::UNSAFE),
_ => Err("Invalid value for CheckMode".to_string()),
}
}
}

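The hunk above moves the string parsing into a fallible FromStr impl and leaves From<String> as a thin wrapper that unwraps it. A stand-alone sketch of that pattern on a placeholder enum:

use std::str::FromStr;

// Stand-in enum mirroring the SAFE/UNSAFE parsing pattern above.
#[derive(Debug, PartialEq)]
enum Mode {
    Safe,
    Unsafe,
}

impl FromStr for Mode {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s.to_lowercase().as_str() {
            "safe" => Ok(Mode::Safe),
            "unsafe" => Ok(Mode::Unsafe),
            _ => Err(format!("invalid mode: {s}")),
        }
    }
}

impl From<String> for Mode {
    // Panics on bad input, matching the unwrap() in the diff; callers that
    // need graceful errors should call from_str (or .parse()) directly.
    fn from(value: String) -> Self {
        Self::from_str(value.as_str()).unwrap()
    }
}

fn main() {
    assert_eq!("SAFE".parse::<Mode>(), Ok(Mode::Safe));
    assert_eq!(Mode::from("unsafe".to_string()), Mode::Unsafe);
}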
@@ -3910,7 +3910,7 @@ pub(crate) fn range_check<F: PrimeField + TensorType + PartialOrd + std::hash::H
let int_values = w.get_int_evals()?;
for v in int_values.iter() {
if v < &range.0 || v > &range.1 {
log::error!("Value ({:?}) out of range: {:?}", v, range);
log::warn!("Value ({:?}) out of range: {:?}", v, range);
return Err(Box::new(TensorError::TableLookupError));
}
}

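A plain-integer sketch of the range check above, assuming an inclusive (low, high) range; it is illustrative only and does not use ezkl's tensor types.

// The diff keeps the hard failure but logs it at warn level, since the caller
// already surfaces the returned error.
fn range_check(values: &[i64], range: (i64, i64)) -> Result<(), String> {
    for v in values {
        if *v < range.0 || *v > range.1 {
            eprintln!("warn: value {v} out of range {range:?}");
            return Err("table lookup error".to_string());
        }
    }
    Ok(())
}

fn main() {
    assert!(range_check(&[1, 2, 3], (0, 4)).is_ok());
    assert!(range_check(&[5], (0, 4)).is_err());
}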
532
src/commands.rs
532
src/commands.rs
@@ -1,6 +1,7 @@
use clap::{Parser, Subcommand};
#[cfg(not(target_arch = "wasm32"))]
use ethers::types::H160;
use alloy::primitives::Address as H160;
use clap::{Command, Parser, Subcommand};
use clap_complete::{generate, Generator, Shell};
#[cfg(feature = "python-bindings")]
use pyo3::{
conversion::{FromPyObject, PyTryFrom},
@@ -10,7 +11,7 @@ use pyo3::{
};
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use std::{error::Error, str::FromStr};
use std::str::FromStr;
use tosubcommand::{ToFlags, ToSubcommand};

use crate::{pfsys::ProofType, Commitments, RunArgs};
@@ -52,6 +53,8 @@ pub const DEFAULT_VERIFIER_AGGREGATED_ABI: &str = "verifier_aggr_abi.json";
pub const DEFAULT_VERIFIER_DA_ABI: &str = "verifier_da_abi.json";
/// Default solidity code
pub const DEFAULT_SOL_CODE: &str = "evm_deploy.sol";
/// Default calldata path
pub const DEFAULT_CALLDATA: &str = "calldata.bytes";
/// Default solidity code for aggregated proofs
pub const DEFAULT_SOL_CODE_AGGREGATED: &str = "evm_deploy_aggr.sol";
/// Default solidity code for data attestation
@@ -78,7 +81,7 @@ pub const DEFAULT_CALIBRATION_FILE: &str = "calibration.json";
pub const DEFAULT_LOOKUP_SAFETY_MARGIN: &str = "2";
/// Default Compress selectors
pub const DEFAULT_DISABLE_SELECTOR_COMPRESSION: &str = "false";
/// Default render vk seperately
/// Default render vk separately
pub const DEFAULT_RENDER_VK_SEPERATELY: &str = "false";
/// Default VK sol path
pub const DEFAULT_VK_SOL: &str = "vk.sol";
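The import hunk above swaps ethers' H160 for alloy's Address (aliased to H160). A minimal parsing sketch, assuming the alloy crate with its primitives module is available; the address literal is an arbitrary placeholder.

use alloy::primitives::Address;
use std::str::FromStr;

fn main() {
    // Any 20-byte, 0x-prefixed hex string works here.
    let addr = Address::from_str("0x000000000000000000000000000000000000dead")
        .expect("valid 20-byte hex address");
    println!("parsed address: {addr}");
}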
@@ -253,33 +256,66 @@ lazy_static! {
|
||||
};
|
||||
}
|
||||
|
||||
#[allow(missing_docs)]
|
||||
#[derive(Parser, Debug, Clone, Deserialize, Serialize)]
|
||||
#[command(author, about, long_about = None)]
|
||||
#[clap(version = *VERSION)]
|
||||
pub struct Cli {
|
||||
#[command(subcommand)]
|
||||
#[allow(missing_docs)]
|
||||
pub command: Commands,
|
||||
/// Get the styles for the CLI
|
||||
pub fn get_styles() -> clap::builder::Styles {
|
||||
clap::builder::Styles::styled()
|
||||
.usage(
|
||||
clap::builder::styling::Style::new()
|
||||
.bold()
|
||||
.underline()
|
||||
.fg_color(Some(clap::builder::styling::Color::Ansi(clap::builder::styling::AnsiColor::Cyan))),
|
||||
)
|
||||
.header(
|
||||
clap::builder::styling::Style::new()
|
||||
.bold()
|
||||
.underline()
|
||||
.fg_color(Some(clap::builder::styling::Color::Ansi(clap::builder::styling::AnsiColor::Cyan))),
|
||||
)
|
||||
.literal(
|
||||
clap::builder::styling::Style::new().fg_color(Some(clap::builder::styling::Color::Ansi(clap::builder::styling::AnsiColor::Magenta))),
|
||||
)
|
||||
.invalid(
|
||||
clap::builder::styling::Style::new()
|
||||
.bold()
|
||||
.fg_color(Some(clap::builder::styling::Color::Ansi(clap::builder::styling::AnsiColor::Red))),
|
||||
)
|
||||
.error(
|
||||
clap::builder::styling::Style::new()
|
||||
.bold()
|
||||
.fg_color(Some(clap::builder::styling::Color::Ansi(clap::builder::styling::AnsiColor::Red))),
|
||||
)
|
||||
.valid(
|
||||
clap::builder::styling::Style::new()
|
||||
.bold()
|
||||
.underline()
|
||||
.fg_color(Some(clap::builder::styling::Color::Ansi(clap::builder::styling::AnsiColor::Green))),
|
||||
)
|
||||
.placeholder(
|
||||
clap::builder::styling::Style::new().fg_color(Some(clap::builder::styling::Color::Ansi(clap::builder::styling::AnsiColor::White))),
|
||||
)
|
||||
}
|
||||
|
||||
impl Cli {
|
||||
/// Export the ezkl configuration as json
|
||||
pub fn as_json(&self) -> Result<String, Box<dyn Error>> {
|
||||
let serialized = match serde_json::to_string(&self) {
|
||||
Ok(s) => s,
|
||||
Err(e) => {
|
||||
return Err(Box::new(e));
|
||||
}
|
||||
};
|
||||
Ok(serialized)
|
||||
}
|
||||
/// Parse an ezkl configuration from a json
|
||||
pub fn from_json(arg_json: &str) -> Result<Self, serde_json::Error> {
|
||||
serde_json::from_str(arg_json)
|
||||
}
|
||||
|
||||
/// Print completions for the given generator
|
||||
pub fn print_completions<G: Generator>(gen: G, cmd: &mut Command) {
|
||||
generate(gen, cmd, cmd.get_name().to_string(), &mut std::io::stdout());
|
||||
}
|
||||
|
||||
|
||||
#[allow(missing_docs)]
|
||||
#[derive(Parser, Debug, Clone)]
|
||||
#[command(author, about, long_about = None)]
|
||||
#[clap(version = *VERSION, styles = get_styles(), trailing_var_arg = true)]
|
||||
pub struct Cli {
|
||||
/// If provided, outputs the completion file for given shell
|
||||
#[clap(long = "generate", value_parser)]
|
||||
pub generator: Option<Shell>,
|
||||
#[command(subcommand)]
|
||||
#[allow(missing_docs)]
|
||||
pub command: Option<Commands>,
|
||||
}
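A pared-down sketch of attaching clap builder Styles to a derive-based CLI, as the new get_styles() and Cli definition above do; the concrete colors and the Demo struct are illustrative, and it assumes a clap 4 build with styling support enabled.

use clap::builder::styling::{AnsiColor, Color, Style};
use clap::builder::Styles;
use clap::Parser;

// Trimmed-down analogue of get_styles() above.
fn demo_styles() -> Styles {
    Styles::styled()
        .header(Style::new().bold().fg_color(Some(Color::Ansi(AnsiColor::Cyan))))
        .literal(Style::new().fg_color(Some(Color::Ansi(AnsiColor::Magenta))))
        .error(Style::new().bold().fg_color(Some(Color::Ansi(AnsiColor::Red))))
}

#[derive(Parser)]
#[command(styles = demo_styles())]
struct Demo {
    /// A single positional argument.
    input: String,
}

fn main() {
    let demo = Demo::parse();
    println!("{}", demo.input);
}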
|
||||
|
||||
|
||||
#[allow(missing_docs)]
|
||||
#[derive(Debug, Subcommand, Clone, Deserialize, Serialize, PartialEq, PartialOrd, ToSubcommand)]
|
||||
pub enum Commands {
|
||||
@@ -289,8 +325,8 @@ pub enum Commands {
|
||||
/// Loads model and prints model table
|
||||
Table {
|
||||
/// The path to the .onnx model file
|
||||
#[arg(short = 'M', long, default_value = DEFAULT_MODEL)]
|
||||
model: PathBuf,
|
||||
#[arg(short = 'M', long, default_value = DEFAULT_MODEL, value_hint = clap::ValueHint::FilePath)]
|
||||
model: Option<PathBuf>,
|
||||
/// proving arguments
|
||||
#[clap(flatten)]
|
||||
args: RunArgs,
|
||||
@@ -299,30 +335,30 @@ pub enum Commands {
|
||||
/// Generates the witness from an input file.
|
||||
GenWitness {
|
||||
/// The path to the .json data file
|
||||
#[arg(short = 'D', long, default_value = DEFAULT_DATA)]
|
||||
data: PathBuf,
|
||||
#[arg(short = 'D', long, default_value = DEFAULT_DATA, value_hint = clap::ValueHint::FilePath)]
|
||||
data: Option<PathBuf>,
|
||||
/// The path to the compiled model file (generated using the compile-circuit command)
|
||||
#[arg(short = 'M', long, default_value = DEFAULT_COMPILED_CIRCUIT)]
|
||||
compiled_circuit: PathBuf,
|
||||
#[arg(short = 'M', long, default_value = DEFAULT_COMPILED_CIRCUIT, value_hint = clap::ValueHint::FilePath)]
|
||||
compiled_circuit: Option<PathBuf>,
|
||||
/// Path to output the witness .json file
|
||||
#[arg(short = 'O', long, default_value = DEFAULT_WITNESS)]
|
||||
output: PathBuf,
|
||||
#[arg(short = 'O', long, default_value = DEFAULT_WITNESS, value_hint = clap::ValueHint::FilePath)]
|
||||
output: Option<PathBuf>,
|
||||
/// Path to the verification key file (optional - solely used to generate kzg commits)
|
||||
#[arg(short = 'V', long)]
|
||||
#[arg(short = 'V', long, value_hint = clap::ValueHint::FilePath)]
|
||||
vk_path: Option<PathBuf>,
|
||||
/// Path to the srs file (optional - solely used to generate kzg commits)
|
||||
#[arg(short = 'P', long)]
|
||||
#[arg(short = 'P', long, value_hint = clap::ValueHint::FilePath)]
|
||||
srs_path: Option<PathBuf>,
|
||||
},
|
||||
|
||||
/// Produces the proving hyperparameters, from run-args
|
||||
GenSettings {
|
||||
/// The path to the .onnx model file
|
||||
#[arg(short = 'M', long, default_value = DEFAULT_MODEL)]
|
||||
model: PathBuf,
|
||||
#[arg(short = 'M', long, default_value = DEFAULT_MODEL, value_hint = clap::ValueHint::FilePath)]
|
||||
model: Option<PathBuf>,
|
||||
/// The path to generate the circuit settings .json file to
|
||||
#[arg(short = 'O', long, default_value = DEFAULT_SETTINGS)]
|
||||
settings_path: PathBuf,
|
||||
#[arg(short = 'O', long, default_value = DEFAULT_SETTINGS, value_hint = clap::ValueHint::FilePath)]
|
||||
settings_path: Option<PathBuf>,
|
||||
/// proving arguments
|
||||
#[clap(flatten)]
|
||||
args: RunArgs,
|
||||
@@ -332,51 +368,52 @@ pub enum Commands {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
CalibrateSettings {
|
||||
/// The path to the .json calibration data file.
|
||||
#[arg(short = 'D', long, default_value = DEFAULT_CALIBRATION_FILE)]
|
||||
data: PathBuf,
|
||||
#[arg(short = 'D', long, default_value = DEFAULT_CALIBRATION_FILE, value_hint = clap::ValueHint::FilePath)]
|
||||
data: Option<PathBuf>,
|
||||
/// The path to the .onnx model file
|
||||
#[arg(short = 'M', long, default_value = DEFAULT_MODEL)]
|
||||
model: PathBuf,
|
||||
#[arg(short = 'M', long, default_value = DEFAULT_MODEL, value_hint = clap::ValueHint::FilePath)]
|
||||
model: Option<PathBuf>,
|
||||
/// The path to load circuit settings .json file AND overwrite (generated using the gen-settings command).
|
||||
#[arg(short = 'O', long, default_value = DEFAULT_SETTINGS)]
|
||||
settings_path: PathBuf,
|
||||
#[arg(long = "target", default_value = DEFAULT_CALIBRATION_TARGET)]
|
||||
#[arg(short = 'O', long, default_value = DEFAULT_SETTINGS, value_hint = clap::ValueHint::FilePath)]
|
||||
settings_path: Option<PathBuf>,
|
||||
#[arg(long = "target", default_value = DEFAULT_CALIBRATION_TARGET, value_hint = clap::ValueHint::Other)]
|
||||
/// Target for calibration. Set to "resources" to optimize for computational resource. Otherwise, set to "accuracy" to optimize for accuracy.
|
||||
target: CalibrationTarget,
|
||||
/// the lookup safety margin to use for calibration. if the max lookup is 2^k, then the max lookup will be 2^k * lookup_safety_margin. larger = safer but slower
|
||||
#[arg(long, default_value = DEFAULT_LOOKUP_SAFETY_MARGIN)]
|
||||
#[arg(long, default_value = DEFAULT_LOOKUP_SAFETY_MARGIN, value_hint = clap::ValueHint::Other)]
|
||||
lookup_safety_margin: i64,
|
||||
/// Optional scales to specifically try for calibration. Example, --scales 0,4
|
||||
#[arg(long, value_delimiter = ',', allow_hyphen_values = true)]
|
||||
#[arg(long, value_delimiter = ',', allow_hyphen_values = true, value_hint = clap::ValueHint::Other)]
|
||||
scales: Option<Vec<crate::Scale>>,
|
||||
/// Optional scale rebase multipliers to specifically try for calibration. This is the multiplier at which we divide to return to the input scale. Example, --scale-rebase-multipliers 0,4
|
||||
#[arg(
|
||||
long,
|
||||
value_delimiter = ',',
|
||||
allow_hyphen_values = true,
|
||||
default_value = DEFAULT_SCALE_REBASE_MULTIPLIERS
|
||||
default_value = DEFAULT_SCALE_REBASE_MULTIPLIERS,
|
||||
value_hint = clap::ValueHint::Other
|
||||
)]
|
||||
scale_rebase_multiplier: Vec<u32>,
|
||||
/// max logrows to use for calibration, 26 is the max public SRS size
|
||||
#[arg(long)]
|
||||
#[arg(long, value_hint = clap::ValueHint::Other)]
|
||||
max_logrows: Option<u32>,
|
||||
// whether to only range check rebases (instead of trying both range check and lookup)
|
||||
#[arg(long, default_value = DEFAULT_ONLY_RANGE_CHECK_REBASE)]
|
||||
only_range_check_rebase: bool,
|
||||
#[arg(long, default_value = DEFAULT_ONLY_RANGE_CHECK_REBASE, action = clap::ArgAction::SetTrue)]
|
||||
only_range_check_rebase: Option<bool>,
|
||||
},
|
||||
|
||||
/// Generates a dummy SRS
|
||||
#[command(name = "gen-srs", arg_required_else_help = true)]
|
||||
GenSrs {
|
||||
/// The path to output the generated SRS
|
||||
#[arg(long)]
|
||||
#[arg(long, value_hint = clap::ValueHint::FilePath)]
|
||||
srs_path: PathBuf,
|
||||
/// number of logrows to use for srs
|
||||
#[arg(long)]
|
||||
#[arg(long, value_hint = clap::ValueHint::Other)]
|
||||
logrows: usize,
|
||||
/// commitment used
|
||||
#[arg(long, default_value = DEFAULT_COMMITMENT)]
|
||||
commitment: Commitments,
|
||||
#[arg(long, default_value = DEFAULT_COMMITMENT, value_hint = clap::ValueHint::Other)]
|
||||
commitment: Option<Commitments>,
|
||||
},
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
@@ -384,79 +421,79 @@ pub enum Commands {
|
||||
#[command(name = "get-srs")]
|
||||
GetSrs {
|
||||
/// The path to output the desired srs file, if set to None will save to $EZKL_REPO_PATH/srs
|
||||
#[arg(long)]
|
||||
#[arg(long, default_value = None, value_hint = clap::ValueHint::FilePath)]
|
||||
srs_path: Option<PathBuf>,
|
||||
/// Path to the circuit settings .json file to read in logrows from. Overriden by logrows if specified.
|
||||
#[arg(short = 'S', long, default_value = DEFAULT_SETTINGS)]
|
||||
#[arg(short = 'S', long, default_value = DEFAULT_SETTINGS, value_hint = clap::ValueHint::FilePath)]
|
||||
settings_path: Option<PathBuf>,
|
||||
/// Number of logrows to use for srs. Overrides settings_path if specified.
|
||||
#[arg(long, default_value = None)]
|
||||
#[arg(long, default_value = None, value_hint = clap::ValueHint::Other)]
|
||||
logrows: Option<u32>,
|
||||
/// Commitment used
|
||||
#[arg(long, default_value = None)]
|
||||
#[arg(long, default_value = None, value_hint = clap::ValueHint::Other)]
|
||||
commitment: Option<Commitments>,
|
||||
},
|
||||
/// Loads model and input and runs mock prover (for testing)
|
||||
Mock {
|
||||
/// The path to the .json witness file (generated using the gen-witness command)
|
||||
#[arg(short = 'W', long, default_value = DEFAULT_WITNESS)]
|
||||
witness: PathBuf,
|
||||
#[arg(short = 'W', long, default_value = DEFAULT_WITNESS, value_hint = clap::ValueHint::FilePath)]
|
||||
witness: Option<PathBuf>,
|
||||
/// The path to the compiled model file (generated using the compile-circuit command)
|
||||
#[arg(short = 'M', long, default_value = DEFAULT_COMPILED_CIRCUIT)]
|
||||
model: PathBuf,
|
||||
#[arg(short = 'M', long, default_value = DEFAULT_COMPILED_CIRCUIT, value_hint = clap::ValueHint::FilePath)]
|
||||
model: Option<PathBuf>,
|
||||
},
|
||||
|
||||
/// Mock aggregate proofs
|
||||
MockAggregate {
|
||||
/// The path to the snarks to aggregate over (generated using the prove command with the --proof-type=for-aggr flag)
|
||||
#[arg(long, default_value = DEFAULT_PROOF, value_delimiter = ',', allow_hyphen_values = true)]
|
||||
#[arg(long, default_value = DEFAULT_PROOF, value_delimiter = ',', allow_hyphen_values = true, value_hint = clap::ValueHint::FilePath)]
|
||||
aggregation_snarks: Vec<PathBuf>,
|
||||
/// logrows used for aggregation circuit
|
||||
#[arg(long, default_value = DEFAULT_AGGREGATED_LOGROWS)]
|
||||
logrows: u32,
|
||||
#[arg(long, default_value = DEFAULT_AGGREGATED_LOGROWS, value_hint = clap::ValueHint::Other)]
|
||||
logrows: Option<u32>,
|
||||
/// whether the accumulated are segments of a larger proof
|
||||
#[arg(long, default_value = DEFAULT_SPLIT)]
|
||||
split_proofs: bool,
|
||||
#[arg(long, default_value = DEFAULT_SPLIT, action = clap::ArgAction::SetTrue)]
|
||||
split_proofs: Option<bool>,
|
||||
},
|
||||
|
||||
/// setup aggregation circuit :)
|
||||
/// Setup aggregation circuit and generate pk and vk
|
||||
SetupAggregate {
|
||||
/// The path to samples of snarks that will be aggregated over (generated using the prove command with the --proof-type=for-aggr flag)
|
||||
#[arg(long, default_value = DEFAULT_PROOF, value_delimiter = ',', allow_hyphen_values = true)]
|
||||
#[arg(long, default_value = DEFAULT_PROOF, value_delimiter = ',', allow_hyphen_values = true, value_hint = clap::ValueHint::FilePath)]
|
||||
sample_snarks: Vec<PathBuf>,
|
||||
/// The path to save the desired verification key file to
|
||||
#[arg(long, default_value = DEFAULT_VK_AGGREGATED)]
|
||||
vk_path: PathBuf,
|
||||
#[arg(long, default_value = DEFAULT_VK_AGGREGATED, value_hint = clap::ValueHint::FilePath)]
|
||||
vk_path: Option<PathBuf>,
|
||||
/// The path to save the proving key to
|
||||
#[arg(long, default_value = DEFAULT_PK_AGGREGATED)]
|
||||
pk_path: PathBuf,
|
||||
#[arg(long, default_value = DEFAULT_PK_AGGREGATED, value_hint = clap::ValueHint::FilePath)]
|
||||
pk_path: Option<PathBuf>,
|
||||
/// The path to SRS, if None will use $EZKL_REPO_PATH/srs/kzg{logrows}.srs
|
||||
#[arg(long)]
|
||||
#[arg(long, value_hint = clap::ValueHint::FilePath)]
|
||||
srs_path: Option<PathBuf>,
|
||||
/// logrows used for aggregation circuit
|
||||
#[arg(long, default_value = DEFAULT_AGGREGATED_LOGROWS)]
|
||||
logrows: u32,
|
||||
#[arg(long, default_value = DEFAULT_AGGREGATED_LOGROWS, value_hint = clap::ValueHint::Other)]
|
||||
logrows: Option<u32>,
|
||||
/// whether the accumulated are segments of a larger proof
|
||||
#[arg(long, default_value = DEFAULT_SPLIT)]
|
||||
split_proofs: bool,
|
||||
#[arg(long, default_value = DEFAULT_SPLIT, action = clap::ArgAction::SetTrue)]
|
||||
split_proofs: Option<bool>,
|
||||
/// compress selectors
|
||||
#[arg(long, default_value = DEFAULT_DISABLE_SELECTOR_COMPRESSION)]
|
||||
disable_selector_compression: bool,
|
||||
#[arg(long, default_value = DEFAULT_DISABLE_SELECTOR_COMPRESSION, action = clap::ArgAction::SetTrue)]
|
||||
disable_selector_compression: Option<bool>,
|
||||
/// commitment used
|
||||
#[arg(long, default_value = DEFAULT_COMMITMENT)]
|
||||
#[arg(long, default_value = DEFAULT_COMMITMENT, value_hint = clap::ValueHint::Other)]
|
||||
commitment: Option<Commitments>,
|
||||
},
|
||||
/// Aggregates proofs :)
|
||||
/// Aggregates proofs
|
||||
Aggregate {
|
||||
/// The path to the snarks to aggregate over (generated using the prove command with the --proof-type=for-aggr flag)
|
||||
#[arg(long, default_value = DEFAULT_PROOF, value_delimiter = ',', allow_hyphen_values = true)]
|
||||
#[arg(long, default_value = DEFAULT_PROOF, value_delimiter = ',', allow_hyphen_values = true, value_hint = clap::ValueHint::FilePath)]
|
||||
aggregation_snarks: Vec<PathBuf>,
|
||||
/// The path to load the desired proving key file (generated using the setup-aggregate command)
|
||||
#[arg(long, default_value = DEFAULT_PK_AGGREGATED)]
|
||||
pk_path: PathBuf,
|
||||
#[arg(long, default_value = DEFAULT_PK_AGGREGATED, value_hint = clap::ValueHint::FilePath)]
|
||||
pk_path: Option<PathBuf>,
|
||||
/// The path to output the proof file to
|
||||
#[arg(long, default_value = DEFAULT_PROOF_AGGREGATED)]
|
||||
proof_path: PathBuf,
|
||||
#[arg(long, default_value = DEFAULT_PROOF_AGGREGATED, value_hint = clap::ValueHint::FilePath)]
|
||||
proof_path: Option<PathBuf>,
|
||||
/// The path to SRS, if None will use $EZKL_REPO_PATH/srs/kzg{logrows}.srs
|
||||
#[arg(long)]
|
||||
srs_path: Option<PathBuf>,
|
||||
@@ -465,78 +502,79 @@ pub enum Commands {
|
||||
require_equals = true,
|
||||
num_args = 0..=1,
|
||||
default_value_t = TranscriptType::default(),
|
||||
value_enum
|
||||
value_enum,
|
||||
value_hint = clap::ValueHint::Other
|
||||
)]
|
||||
transcript: TranscriptType,
|
||||
/// logrows used for aggregation circuit
|
||||
#[arg(long, default_value = DEFAULT_AGGREGATED_LOGROWS)]
|
||||
logrows: u32,
|
||||
#[arg(long, default_value = DEFAULT_AGGREGATED_LOGROWS, value_hint = clap::ValueHint::Other)]
|
||||
logrows: Option<u32>,
|
||||
/// run sanity checks during calculations (safe or unsafe)
|
||||
#[arg(long, default_value = DEFAULT_CHECKMODE)]
|
||||
check_mode: CheckMode,
|
||||
#[arg(long, default_value = DEFAULT_CHECKMODE, value_hint = clap::ValueHint::Other)]
|
||||
check_mode: Option<CheckMode>,
|
||||
/// whether the accumulated proofs are segments of a larger circuit
|
||||
#[arg(long, default_value = DEFAULT_SPLIT)]
|
||||
split_proofs: bool,
|
||||
#[arg(long, default_value = DEFAULT_SPLIT, action = clap::ArgAction::SetTrue)]
|
||||
split_proofs: Option<bool>,
|
||||
/// commitment used
|
||||
#[arg(long, default_value = DEFAULT_COMMITMENT)]
|
||||
#[arg(long, default_value = DEFAULT_COMMITMENT, value_hint = clap::ValueHint::Other)]
|
||||
commitment: Option<Commitments>,
|
||||
},
|
||||
/// Compiles a circuit from onnx to a simplified graph (einsum + other ops) and parameters as sets of field elements
|
||||
CompileCircuit {
|
||||
/// The path to the .onnx model file
|
||||
#[arg(short = 'M', long, default_value = DEFAULT_MODEL)]
|
||||
model: PathBuf,
|
||||
#[arg(short = 'M', long, default_value = DEFAULT_MODEL, value_hint = clap::ValueHint::FilePath)]
|
||||
model: Option<PathBuf>,
|
||||
/// The path to the compiled model file (generated using the compile-circuit command)
|
||||
#[arg(long, default_value = DEFAULT_COMPILED_CIRCUIT)]
|
||||
compiled_circuit: PathBuf,
|
||||
#[arg(long, default_value = DEFAULT_COMPILED_CIRCUIT, value_hint = clap::ValueHint::FilePath)]
|
||||
compiled_circuit: Option<PathBuf>,
|
||||
/// The path to load circuit settings .json file from (generated using the gen-settings command)
|
||||
#[arg(short = 'S', long, default_value = DEFAULT_SETTINGS)]
|
||||
settings_path: PathBuf,
|
||||
#[arg(short = 'S', long, default_value = DEFAULT_SETTINGS, value_hint = clap::ValueHint::FilePath)]
|
||||
settings_path: Option<PathBuf>,
|
||||
},
|
||||
/// Creates pk and vk
|
||||
Setup {
|
||||
/// The path to the compiled model file (generated using the compile-circuit command)
|
||||
#[arg(short = 'M', long, default_value = DEFAULT_COMPILED_CIRCUIT)]
|
||||
compiled_circuit: PathBuf,
|
||||
#[arg(short = 'M', long, default_value = DEFAULT_COMPILED_CIRCUIT, value_hint = clap::ValueHint::FilePath)]
|
||||
compiled_circuit: Option<PathBuf>,
|
||||
/// The path to SRS, if None will use $EZKL_REPO_PATH/srs/kzg{logrows}.srs
|
||||
#[arg(long)]
|
||||
#[arg(long, value_hint = clap::ValueHint::FilePath)]
|
||||
srs_path: Option<PathBuf>,
|
||||
/// The path to output the verification key file to
|
||||
#[arg(long, default_value = DEFAULT_VK)]
|
||||
vk_path: PathBuf,
|
||||
#[arg(long, default_value = DEFAULT_VK, value_hint = clap::ValueHint::FilePath)]
|
||||
vk_path: Option<PathBuf>,
|
||||
/// The path to output the proving key file to
|
||||
#[arg(long, default_value = DEFAULT_PK)]
|
||||
pk_path: PathBuf,
|
||||
#[arg(long, default_value = DEFAULT_PK, value_hint = clap::ValueHint::FilePath)]
|
||||
pk_path: Option<PathBuf>,
|
||||
/// The graph witness (optional - used to override fixed values in the circuit)
|
||||
#[arg(short = 'W', long)]
|
||||
#[arg(short = 'W', long, value_hint = clap::ValueHint::FilePath)]
|
||||
witness: Option<PathBuf>,
|
||||
/// compress selectors
|
||||
#[arg(long, default_value = DEFAULT_DISABLE_SELECTOR_COMPRESSION)]
|
||||
disable_selector_compression: bool,
|
||||
#[arg(long, default_value = DEFAULT_DISABLE_SELECTOR_COMPRESSION, action = clap::ArgAction::SetTrue)]
|
||||
disable_selector_compression: Option<bool>,
|
||||
},
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
/// Deploys a test contact that the data attester reads from and creates a data attestation formatted input.json file that contains call data information
|
||||
#[command(arg_required_else_help = true)]
|
||||
SetupTestEvmData {
|
||||
/// The path to the .json data file, which should include both the network input (possibly private) and the network output (public input to the proof)
|
||||
#[arg(short = 'D', long)]
|
||||
data: PathBuf,
|
||||
#[arg(short = 'D', long, value_hint = clap::ValueHint::FilePath)]
|
||||
data: Option<PathBuf>,
|
||||
/// The path to the compiled model file (generated using the compile-circuit command)
|
||||
#[arg(short = 'M', long)]
|
||||
compiled_circuit: PathBuf,
|
||||
#[arg(short = 'M', long, value_hint = clap::ValueHint::FilePath)]
|
||||
compiled_circuit: Option<PathBuf>,
|
||||
/// For testing purposes only. The optional path to the .json data file that will be generated that contains the OnChain data storage information
|
||||
/// derived from the file information in the data .json file.
|
||||
/// Should include both the network input (possibly private) and the network output (public input to the proof)
|
||||
#[arg(short = 'T', long)]
|
||||
#[arg(short = 'T', long, value_hint = clap::ValueHint::FilePath)]
|
||||
test_data: PathBuf,
|
||||
/// RPC URL for an Ethereum node, if None will use Anvil but WON'T persist state
|
||||
#[arg(short = 'U', long)]
|
||||
#[arg(short = 'U', long, value_hint = clap::ValueHint::Url)]
|
||||
rpc_url: Option<String>,
|
||||
/// where the input data come from
|
||||
#[arg(long, default_value = "on-chain")]
|
||||
#[arg(long, default_value = "on-chain", value_hint = clap::ValueHint::Other)]
|
||||
input_source: TestDataSource,
|
||||
/// where the output data come from
|
||||
#[arg(long, default_value = "on-chain")]
|
||||
#[arg(long, default_value = "on-chain", value_hint = clap::ValueHint::Other)]
|
||||
output_source: TestDataSource,
|
||||
},
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
@@ -544,121 +582,136 @@ pub enum Commands {
|
||||
#[command(arg_required_else_help = true)]
|
||||
TestUpdateAccountCalls {
|
||||
/// The path to the verifier contract's address
|
||||
#[arg(long)]
|
||||
#[arg(long, value_hint = clap::ValueHint::Other)]
|
||||
addr: H160Flag,
|
||||
/// The path to the .json data file.
|
||||
#[arg(short = 'D', long)]
|
||||
data: PathBuf,
|
||||
#[arg(short = 'D', long, value_hint = clap::ValueHint::FilePath)]
|
||||
data: Option<PathBuf>,
|
||||
/// RPC URL for an Ethereum node, if None will use Anvil but WON'T persist state
|
||||
#[arg(short = 'U', long)]
|
||||
#[arg(short = 'U', long, value_hint = clap::ValueHint::Url)]
|
||||
rpc_url: Option<String>,
|
||||
},
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
/// Swaps the positions in the transcript that correspond to commitments
|
||||
SwapProofCommitments {
|
||||
/// The path to the proof file
|
||||
#[arg(short = 'P', long, default_value = DEFAULT_PROOF)]
|
||||
proof_path: PathBuf,
|
||||
#[arg(short = 'P', long, default_value = DEFAULT_PROOF, value_hint = clap::ValueHint::FilePath)]
|
||||
proof_path: Option<PathBuf>,
|
||||
/// The path to the witness file
|
||||
#[arg(short = 'W', long, default_value = DEFAULT_WITNESS)]
|
||||
witness_path: PathBuf,
|
||||
#[arg(short = 'W', long, default_value = DEFAULT_WITNESS, value_hint = clap::ValueHint::FilePath)]
|
||||
witness_path: Option<PathBuf>,
|
||||
},
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
/// Loads model, data, and creates proof
|
||||
Prove {
|
||||
/// The path to the .json witness file (generated using the gen-witness command)
|
||||
#[arg(short = 'W', long, default_value = DEFAULT_WITNESS)]
|
||||
witness: PathBuf,
|
||||
#[arg(short = 'W', long, default_value = DEFAULT_WITNESS, value_hint = clap::ValueHint::FilePath)]
|
||||
witness: Option<PathBuf>,
|
||||
/// The path to the compiled model file (generated using the compile-circuit command)
|
||||
#[arg(short = 'M', long, default_value = DEFAULT_COMPILED_CIRCUIT)]
|
||||
compiled_circuit: PathBuf,
|
||||
#[arg(short = 'M', long, default_value = DEFAULT_COMPILED_CIRCUIT, value_hint = clap::ValueHint::FilePath)]
|
||||
compiled_circuit: Option<PathBuf>,
|
||||
/// The path to load the desired proving key file (generated using the setup command)
|
||||
#[arg(long, default_value = DEFAULT_PK)]
|
||||
pk_path: PathBuf,
|
||||
#[arg(long, default_value = DEFAULT_PK, value_hint = clap::ValueHint::FilePath)]
|
||||
pk_path: Option<PathBuf>,
|
||||
/// The path to output the proof file to
|
||||
#[arg(long, default_value = DEFAULT_PROOF)]
|
||||
proof_path: PathBuf,
|
||||
#[arg(long, default_value = DEFAULT_PROOF, value_hint = clap::ValueHint::FilePath)]
|
||||
proof_path: Option<PathBuf>,
|
||||
/// The path to SRS, if None will use $EZKL_REPO_PATH/srs/kzg{logrows}.srs
|
||||
#[arg(long)]
|
||||
#[arg(long, value_hint = clap::ValueHint::FilePath)]
|
||||
srs_path: Option<PathBuf>,
|
||||
#[arg(
|
||||
long,
|
||||
require_equals = true,
|
||||
num_args = 0..=1,
|
||||
default_value_t = ProofType::Single,
|
||||
value_enum
|
||||
value_enum,
|
||||
value_hint = clap::ValueHint::Other
|
||||
)]
|
||||
proof_type: ProofType,
|
||||
/// run sanity checks during calculations (safe or unsafe)
|
||||
#[arg(long, default_value = DEFAULT_CHECKMODE)]
|
||||
check_mode: CheckMode,
|
||||
#[arg(long, default_value = DEFAULT_CHECKMODE, value_hint = clap::ValueHint::Other)]
|
||||
check_mode: Option<CheckMode>,
|
||||
},
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
/// Encodes a proof into evm calldata
|
||||
#[command(name = "encode-evm-calldata")]
|
||||
EncodeEvmCalldata {
|
||||
/// The path to the proof file (generated using the prove command)
|
||||
#[arg(long, default_value = DEFAULT_PROOF, value_hint = clap::ValueHint::FilePath)]
|
||||
proof_path: Option<PathBuf>,
|
||||
/// The path to save the calldata to
|
||||
#[arg(long, default_value = DEFAULT_CALLDATA, value_hint = clap::ValueHint::FilePath)]
|
||||
calldata_path: Option<PathBuf>,
|
||||
/// The path to the verification key address (only used if the vk is rendered as a separate contract)
|
||||
#[arg(long, value_hint = clap::ValueHint::Other)]
|
||||
addr_vk: Option<H160Flag>,
|
||||
},
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
/// Creates an Evm verifier for a single proof
|
||||
#[command(name = "create-evm-verifier")]
|
||||
CreateEvmVerifier {
|
||||
/// The path to SRS, if None will use $EZKL_REPO_PATH/srs/kzg{logrows}.srs
|
||||
#[arg(long)]
|
||||
#[arg(long, value_hint = clap::ValueHint::FilePath)]
|
||||
srs_path: Option<PathBuf>,
|
||||
/// The path to load circuit settings .json file from (generated using the gen-settings command)
|
||||
#[arg(short = 'S', long, default_value = DEFAULT_SETTINGS)]
|
||||
settings_path: PathBuf,
|
||||
#[arg(short = 'S', long, default_value = DEFAULT_SETTINGS, value_hint = clap::ValueHint::FilePath)]
|
||||
settings_path: Option<PathBuf>,
|
||||
/// The path to load the desired verification key file
|
||||
#[arg(long, default_value = DEFAULT_VK)]
|
||||
vk_path: PathBuf,
|
||||
#[arg(long, default_value = DEFAULT_VK, value_hint = clap::ValueHint::FilePath)]
|
||||
vk_path: Option<PathBuf>,
|
||||
/// The path to output the Solidity code
|
||||
#[arg(long, default_value = DEFAULT_SOL_CODE)]
|
||||
sol_code_path: PathBuf,
|
||||
#[arg(long, default_value = DEFAULT_SOL_CODE, value_hint = clap::ValueHint::FilePath)]
|
||||
sol_code_path: Option<PathBuf>,
|
||||
/// The path to output the Solidity verifier ABI
|
||||
#[arg(long, default_value = DEFAULT_VERIFIER_ABI)]
|
||||
abi_path: PathBuf,
|
||||
#[arg(long, default_value = DEFAULT_VERIFIER_ABI, value_hint = clap::ValueHint::FilePath)]
|
||||
abi_path: Option<PathBuf>,
|
||||
/// Whether the verifier key should be rendered as a separate contract.
|
||||
/// We recommend disabling selector compression if this is enabled.
|
||||
/// To save the verifier key as a separate contract, set this to true and then call the create-evm-vk command.
|
||||
#[arg(long, default_value = DEFAULT_RENDER_VK_SEPERATELY)]
|
||||
render_vk_seperately: bool,
|
||||
#[arg(long, default_value = DEFAULT_RENDER_VK_SEPERATELY, action = clap::ArgAction::SetTrue)]
|
||||
render_vk_seperately: Option<bool>,
|
||||
},
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
/// Creates an Evm verifier for a single proof
|
||||
#[command(name = "create-evm-vk")]
|
||||
CreateEvmVK {
|
||||
/// The path to SRS, if None will use $EZKL_REPO_PATH/srs/kzg{logrows}.srs
|
||||
#[arg(long)]
|
||||
#[arg(long, value_hint = clap::ValueHint::FilePath)]
|
||||
srs_path: Option<PathBuf>,
|
||||
/// The path to load circuit settings .json file from (generated using the gen-settings command)
|
||||
#[arg(short = 'S', long, default_value = DEFAULT_SETTINGS)]
|
||||
settings_path: PathBuf,
|
||||
#[arg(short = 'S', long, default_value = DEFAULT_SETTINGS, value_hint = clap::ValueHint::FilePath)]
|
||||
settings_path: Option<PathBuf>,
|
||||
/// The path to load the desired verification key file
|
||||
#[arg(long, default_value = DEFAULT_VK)]
|
||||
vk_path: PathBuf,
|
||||
#[arg(long, default_value = DEFAULT_VK, value_hint = clap::ValueHint::FilePath)]
|
||||
vk_path: Option<PathBuf>,
|
||||
/// The path to output the Solidity code
|
||||
#[arg(long, default_value = DEFAULT_VK_SOL)]
|
||||
sol_code_path: PathBuf,
|
||||
#[arg(long, default_value = DEFAULT_VK_SOL, value_hint = clap::ValueHint::FilePath)]
|
||||
sol_code_path: Option<PathBuf>,
|
||||
/// The path to output the Solidity verifier ABI
|
||||
#[arg(long, default_value = DEFAULT_VK_ABI)]
|
||||
abi_path: PathBuf,
|
||||
#[arg(long, default_value = DEFAULT_VK_ABI, value_hint = clap::ValueHint::FilePath)]
|
||||
abi_path: Option<PathBuf>,
|
||||
},
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
/// Creates an Evm verifier that attests to on-chain inputs for a single proof
|
||||
#[command(name = "create-evm-da")]
|
||||
CreateEvmDataAttestation {
|
||||
/// The path to load circuit settings .json file from (generated using the gen-settings command)
|
||||
#[arg(short = 'S', long, default_value = DEFAULT_SETTINGS)]
|
||||
settings_path: PathBuf,
|
||||
#[arg(short = 'S', long, default_value = DEFAULT_SETTINGS, value_hint = clap::ValueHint::FilePath)]
|
||||
settings_path: Option<PathBuf>,
|
||||
/// The path to output the Solidity code
|
||||
#[arg(long, default_value = DEFAULT_SOL_CODE_DA)]
|
||||
sol_code_path: PathBuf,
|
||||
#[arg(long, default_value = DEFAULT_SOL_CODE_DA, value_hint = clap::ValueHint::FilePath)]
|
||||
sol_code_path: Option<PathBuf>,
|
||||
/// The path to output the Solidity verifier ABI
|
||||
#[arg(long, default_value = DEFAULT_VERIFIER_DA_ABI)]
|
||||
abi_path: PathBuf,
|
||||
#[arg(long, default_value = DEFAULT_VERIFIER_DA_ABI, value_hint = clap::ValueHint::FilePath)]
|
||||
abi_path: Option<PathBuf>,
|
||||
/// The path to the .json data file, which should
|
||||
/// contain the necessary calldata and account addresses
|
||||
/// needed to read from all the on-chain
|
||||
/// view functions that return the data that the network
|
||||
/// ingests as inputs.
|
||||
#[arg(short = 'D', long, default_value = DEFAULT_DATA)]
|
||||
data: PathBuf,
|
||||
#[arg(short = 'D', long, default_value = DEFAULT_DATA, value_hint = clap::ValueHint::FilePath)]
|
||||
data: Option<PathBuf>,
|
||||
},
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
@@ -666,104 +719,104 @@ pub enum Commands {
|
||||
#[command(name = "create-evm-verifier-aggr")]
|
||||
CreateEvmVerifierAggr {
|
||||
/// The path to SRS, if None will use $EZKL_REPO_PATH/srs/kzg{logrows}.srs
|
||||
#[arg(long)]
|
||||
#[arg(long, value_hint = clap::ValueHint::FilePath)]
|
||||
srs_path: Option<PathBuf>,
|
||||
/// The path to load the desired verification key file
|
||||
#[arg(long, default_value = DEFAULT_VK_AGGREGATED)]
|
||||
vk_path: PathBuf,
|
||||
#[arg(long, default_value = DEFAULT_VK_AGGREGATED, value_hint = clap::ValueHint::FilePath)]
|
||||
vk_path: Option<PathBuf>,
|
||||
/// The path to the Solidity code
|
||||
#[arg(long, default_value = DEFAULT_SOL_CODE_AGGREGATED)]
|
||||
sol_code_path: PathBuf,
|
||||
#[arg(long, default_value = DEFAULT_SOL_CODE_AGGREGATED, value_hint = clap::ValueHint::FilePath)]
|
||||
sol_code_path: Option<PathBuf>,
|
||||
/// The path to output the Solidity verifier ABI
|
||||
#[arg(long, default_value = DEFAULT_VERIFIER_AGGREGATED_ABI)]
|
||||
abi_path: PathBuf,
|
||||
#[arg(long, default_value = DEFAULT_VERIFIER_AGGREGATED_ABI, value_hint = clap::ValueHint::FilePath)]
|
||||
abi_path: Option<PathBuf>,
|
||||
// aggregated circuit settings paths, used to calculate the number of instances in the aggregate proof
|
||||
#[arg(long, default_value = DEFAULT_SETTINGS, value_delimiter = ',', allow_hyphen_values = true)]
|
||||
#[arg(long, default_value = DEFAULT_SETTINGS, value_delimiter = ',', allow_hyphen_values = true, value_hint = clap::ValueHint::FilePath)]
|
||||
aggregation_settings: Vec<PathBuf>,
|
||||
// logrows used for aggregation circuit
|
||||
#[arg(long, default_value = DEFAULT_AGGREGATED_LOGROWS)]
|
||||
logrows: u32,
|
||||
#[arg(long, default_value = DEFAULT_AGGREGATED_LOGROWS, value_hint = clap::ValueHint::Other)]
|
||||
logrows: Option<u32>,
|
||||
/// Whether the verifier key should be rendered as a separate contract.
|
||||
/// We recommend disabling selector compression if this is enabled.
|
||||
/// To save the verifier key as a separate contract, set this to true and then call the create-evm-vk command.
|
||||
#[arg(long, default_value = DEFAULT_RENDER_VK_SEPERATELY)]
|
||||
render_vk_seperately: bool,
|
||||
#[arg(long, default_value = DEFAULT_RENDER_VK_SEPERATELY, action = clap::ArgAction::SetTrue)]
|
||||
render_vk_seperately: Option<bool>,
|
||||
},
|
||||
/// Verifies a proof, returning accept or reject
|
||||
Verify {
|
||||
/// The path to load circuit settings .json file from (generated using the gen-settings command)
|
||||
#[arg(short = 'S', long, default_value = DEFAULT_SETTINGS)]
|
||||
settings_path: PathBuf,
|
||||
#[arg(short = 'S', long, default_value = DEFAULT_SETTINGS, value_hint = clap::ValueHint::FilePath)]
|
||||
settings_path: Option<PathBuf>,
|
||||
/// The path to the proof file (generated using the prove command)
|
||||
#[arg(long, default_value = DEFAULT_PROOF)]
|
||||
proof_path: PathBuf,
|
||||
#[arg(long, default_value = DEFAULT_PROOF, value_hint = clap::ValueHint::FilePath)]
|
||||
proof_path: Option<PathBuf>,
|
||||
/// The path to the verification key file (generated using the setup command)
|
||||
#[arg(long, default_value = DEFAULT_VK)]
|
||||
vk_path: PathBuf,
|
||||
#[arg(long, default_value = DEFAULT_VK, value_hint = clap::ValueHint::FilePath)]
|
||||
vk_path: Option<PathBuf>,
|
||||
/// The path to SRS, if None will use $EZKL_REPO_PATH/srs/kzg{logrows}.srs
|
||||
#[arg(long)]
|
||||
#[arg(long, value_hint = clap::ValueHint::FilePath)]
|
||||
srs_path: Option<PathBuf>,
|
||||
/// Reduce SRS logrows to the number of instances rather than the number of logrows used for proofs (only works if the srs were generated in the same ceremony)
|
||||
#[arg(long, default_value = DEFAULT_USE_REDUCED_SRS_FOR_VERIFICATION)]
|
||||
reduced_srs: bool,
|
||||
#[arg(long, default_value = DEFAULT_USE_REDUCED_SRS_FOR_VERIFICATION, action = clap::ArgAction::SetTrue)]
|
||||
reduced_srs: Option<bool>,
|
||||
},
|
||||
/// Verifies an aggregate proof, returning accept or reject
|
||||
VerifyAggr {
|
||||
/// The path to the proof file (generated using the prove command)
|
||||
#[arg(long, default_value = DEFAULT_PROOF_AGGREGATED)]
|
||||
proof_path: PathBuf,
|
||||
#[arg(long, default_value = DEFAULT_PROOF_AGGREGATED, value_hint = clap::ValueHint::FilePath)]
|
||||
proof_path: Option<PathBuf>,
|
||||
/// The path to the verification key file (generated using the setup-aggregate command)
|
||||
#[arg(long, default_value = DEFAULT_VK_AGGREGATED)]
|
||||
vk_path: PathBuf,
|
||||
#[arg(long, default_value = DEFAULT_VK_AGGREGATED, value_hint = clap::ValueHint::FilePath)]
|
||||
vk_path: Option<PathBuf>,
|
||||
/// reduced srs
|
||||
#[arg(long, default_value = DEFAULT_USE_REDUCED_SRS_FOR_VERIFICATION)]
|
||||
reduced_srs: bool,
|
||||
#[arg(long, default_value = DEFAULT_USE_REDUCED_SRS_FOR_VERIFICATION, action = clap::ArgAction::SetTrue)]
|
||||
reduced_srs: Option<bool>,
|
||||
/// The path to SRS, if None will use $EZKL_REPO_PATH/srs/kzg{logrows}.srs
|
||||
#[arg(long)]
|
||||
#[arg(long, value_hint = clap::ValueHint::FilePath)]
|
||||
srs_path: Option<PathBuf>,
|
||||
/// logrows used for aggregation circuit
|
||||
#[arg(long, default_value = DEFAULT_AGGREGATED_LOGROWS)]
|
||||
logrows: u32,
|
||||
#[arg(long, default_value = DEFAULT_AGGREGATED_LOGROWS, value_hint = clap::ValueHint::Other)]
|
||||
logrows: Option<u32>,
|
||||
/// commitment
|
||||
#[arg(long, default_value = DEFAULT_COMMITMENT)]
|
||||
#[arg(long, default_value = DEFAULT_COMMITMENT, value_hint = clap::ValueHint::Other)]
|
||||
commitment: Option<Commitments>,
|
||||
},
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
/// Deploys an evm verifier that is generated by ezkl
|
||||
DeployEvmVerifier {
|
||||
/// The path to the Solidity code (generated using the create-evm-verifier command)
|
||||
#[arg(long, default_value = DEFAULT_SOL_CODE)]
|
||||
sol_code_path: PathBuf,
|
||||
#[arg(long, default_value = DEFAULT_SOL_CODE, value_hint = clap::ValueHint::FilePath)]
|
||||
sol_code_path: Option<PathBuf>,
|
||||
/// RPC URL for an Ethereum node, if None will use Anvil but WON'T persist state
|
||||
#[arg(short = 'U', long)]
|
||||
#[arg(short = 'U', long, value_hint = clap::ValueHint::Url)]
|
||||
rpc_url: Option<String>,
|
||||
#[arg(long, default_value = DEFAULT_CONTRACT_ADDRESS)]
|
||||
#[arg(long, default_value = DEFAULT_CONTRACT_ADDRESS, value_hint = clap::ValueHint::Other)]
|
||||
/// The path to output the contract address
|
||||
addr_path: PathBuf,
|
||||
addr_path: Option<PathBuf>,
|
||||
/// The optimizer runs to set on the verifier. Lower values optimize for deployment cost, while higher values optimize for gas cost.
|
||||
#[arg(long, default_value = DEFAULT_OPTIMIZER_RUNS)]
|
||||
#[arg(long, default_value = DEFAULT_OPTIMIZER_RUNS, value_hint = clap::ValueHint::Other)]
|
||||
optimizer_runs: usize,
|
||||
/// Private secp256K1 key in hex format, 64 chars, no 0x prefix, of the account signing transactions. If None the private key will be generated by Anvil
|
||||
#[arg(short = 'P', long)]
|
||||
#[arg(short = 'P', long, value_hint = clap::ValueHint::Other)]
|
||||
private_key: Option<String>,
|
||||
},
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
/// Deploys an evm verifier that is generated by ezkl
|
||||
DeployEvmVK {
|
||||
/// The path to the Solidity code (generated using the create-evm-verifier command)
|
||||
#[arg(long, default_value = DEFAULT_VK_SOL)]
|
||||
sol_code_path: PathBuf,
|
||||
#[arg(long, default_value = DEFAULT_VK_SOL, value_hint = clap::ValueHint::FilePath)]
|
||||
sol_code_path: Option<PathBuf>,
|
||||
/// RPC URL for an Ethereum node, if None will use Anvil but WON'T persist state
|
||||
#[arg(short = 'U', long)]
|
||||
#[arg(short = 'U', long, value_hint = clap::ValueHint::Url)]
|
||||
rpc_url: Option<String>,
|
||||
#[arg(long, default_value = DEFAULT_CONTRACT_ADDRESS_VK)]
|
||||
#[arg(long, default_value = DEFAULT_CONTRACT_ADDRESS_VK, value_hint = clap::ValueHint::Other)]
|
||||
/// The path to output the contract address
|
||||
addr_path: PathBuf,
|
||||
addr_path: Option<PathBuf>,
|
||||
/// The optimizer runs to set on the verifier. Lower values optimize for deployment cost, while higher values optimize for gas cost.
|
||||
#[arg(long, default_value = DEFAULT_OPTIMIZER_RUNS)]
|
||||
#[arg(long, default_value = DEFAULT_OPTIMIZER_RUNS, value_hint = clap::ValueHint::Other)]
|
||||
optimizer_runs: usize,
|
||||
/// Private secp256K1 key in hex format, 64 chars, no 0x prefix, of the account signing transactions. If None the private key will be generated by Anvil
|
||||
#[arg(short = 'P', long)]
|
||||
#[arg(short = 'P', long, value_hint = clap::ValueHint::Other)]
|
||||
private_key: Option<String>,
|
||||
},
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
@@ -771,25 +824,25 @@ pub enum Commands {
|
||||
#[command(name = "deploy-evm-da")]
|
||||
DeployEvmDataAttestation {
|
||||
/// The path to the .json data file, which should include both the network input (possibly private) and the network output (public input to the proof)
|
||||
#[arg(short = 'D', long, default_value = DEFAULT_DATA)]
|
||||
data: PathBuf,
|
||||
#[arg(short = 'D', long, default_value = DEFAULT_DATA, value_hint = clap::ValueHint::FilePath)]
|
||||
data: Option<PathBuf>,
|
||||
/// The path to load circuit settings .json file from (generated using the gen-settings command)
|
||||
#[arg(long, default_value = DEFAULT_SETTINGS)]
|
||||
settings_path: PathBuf,
|
||||
#[arg(long, default_value = DEFAULT_SETTINGS, value_hint = clap::ValueHint::FilePath)]
|
||||
settings_path: Option<PathBuf>,
|
||||
/// The path to the Solidity code
|
||||
#[arg(long, default_value = DEFAULT_SOL_CODE_DA)]
|
||||
sol_code_path: PathBuf,
|
||||
#[arg(long, default_value = DEFAULT_SOL_CODE_DA, value_hint = clap::ValueHint::FilePath)]
|
||||
sol_code_path: Option<PathBuf>,
|
||||
/// RPC URL for an Ethereum node, if None will use Anvil but WON'T persist state
|
||||
#[arg(short = 'U', long)]
|
||||
#[arg(short = 'U', long, value_hint = clap::ValueHint::Url)]
|
||||
rpc_url: Option<String>,
|
||||
#[arg(long, default_value = DEFAULT_CONTRACT_ADDRESS_DA)]
|
||||
#[arg(long, default_value = DEFAULT_CONTRACT_ADDRESS_DA, value_hint = clap::ValueHint::FilePath)]
|
||||
/// The path to output the contract address
|
||||
addr_path: PathBuf,
|
||||
addr_path: Option<PathBuf>,
|
||||
/// The optimizer runs to set on the verifier. (Lower values optimize for deployment, while higher values optimize for execution)
|
||||
#[arg(long, default_value = DEFAULT_OPTIMIZER_RUNS)]
|
||||
#[arg(long, default_value = DEFAULT_OPTIMIZER_RUNS, value_hint = clap::ValueHint::Other)]
|
||||
optimizer_runs: usize,
|
||||
/// Private secp256K1 key in hex format, 64 chars, no 0x prefix, of the account signing transactions. If None the private key will be generated by Anvil
|
||||
#[arg(short = 'P', long)]
|
||||
#[arg(short = 'P', long, value_hint = clap::ValueHint::Other)]
|
||||
private_key: Option<String>,
|
||||
},
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
@@ -797,19 +850,32 @@ pub enum Commands {
|
||||
#[command(name = "verify-evm")]
|
||||
VerifyEvm {
|
||||
/// The path to the proof file (generated using the prove command)
|
||||
#[arg(long, default_value = DEFAULT_PROOF)]
|
||||
proof_path: PathBuf,
|
||||
#[arg(long, default_value = DEFAULT_PROOF, value_hint = clap::ValueHint::FilePath)]
|
||||
proof_path: Option<PathBuf>,
|
||||
/// The path to verifier contract's address
|
||||
#[arg(long, default_value = DEFAULT_CONTRACT_ADDRESS)]
|
||||
#[arg(long, default_value = DEFAULT_CONTRACT_ADDRESS, value_hint = clap::ValueHint::Other)]
|
||||
addr_verifier: H160Flag,
|
||||
/// RPC URL for an Ethereum node, if None will use Anvil but WON'T persist state
|
||||
#[arg(short = 'U', long)]
|
||||
#[arg(short = 'U', long, value_hint = clap::ValueHint::Url)]
|
||||
rpc_url: Option<String>,
|
||||
/// does the verifier use data attestation ?
|
||||
#[arg(long)]
|
||||
#[arg(long, value_hint = clap::ValueHint::Other)]
|
||||
addr_da: Option<H160Flag>,
|
||||
// is the vk rendered seperately, if so specify an address
|
||||
#[arg(long)]
|
||||
#[arg(long, value_hint = clap::ValueHint::Other)]
|
||||
addr_vk: Option<H160Flag>,
|
||||
},
|
||||
}
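Across the enum above, arguments that previously carried hard defaults become Option fields with default_value and value_hint attributes, and the defaults are re-applied later with unwrap_or in execute.rs. A minimal stand-alone sketch of that pattern; the constant values here are placeholders, not ezkl's actual defaults.

use clap::Parser;
use std::error::Error;
use std::path::PathBuf;

// String-typed default constants, in the same style as commands.rs.
const DEFAULT_MODEL: &str = "network.onnx";
const DEFAULT_AGG_LOGROWS: &str = "23";

#[derive(Parser, Debug)]
struct Demo {
    /// The path to the .onnx model file
    #[arg(short = 'M', long, default_value = DEFAULT_MODEL, value_hint = clap::ValueHint::FilePath)]
    model: Option<PathBuf>,
    /// logrows used for aggregation
    #[arg(long, default_value = DEFAULT_AGG_LOGROWS, value_hint = clap::ValueHint::Other)]
    logrows: Option<u32>,
}

fn main() -> Result<(), Box<dyn Error>> {
    let args = Demo::parse();
    // Defaults re-applied at dispatch time, mirroring the unwrap_or(...) calls in execute.rs.
    let model = args.model.unwrap_or(DEFAULT_MODEL.into());
    let logrows: u32 = args.logrows.unwrap_or(DEFAULT_AGG_LOGROWS.parse()?);
    println!("model = {}, logrows = {}", model.display(), logrows);
    Ok(())
}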
|
||||
|
||||
|
||||
impl Commands {
|
||||
/// Converts the commands to a json string
|
||||
pub fn as_json(&self) -> String {
|
||||
serde_json::to_string(self).unwrap()
|
||||
}
|
||||
|
||||
/// Converts a json string to a Commands struct
|
||||
pub fn from_json(json: &str) -> Self {
|
||||
serde_json::from_str(json).unwrap()
|
||||
}
|
||||
}
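A stand-alone sketch of the serde round-trip behind Commands::as_json and Commands::from_json above; DemoCommands is a placeholder enum with illustrative fields, and it uses expect where the impl above uses unwrap (assumes serde with derive plus serde_json).

use serde::{Deserialize, Serialize};

// Stand-in for the serialisable command enum; variants and fields are illustrative.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
enum DemoCommands {
    Table { model: Option<std::path::PathBuf> },
    GenSrs { logrows: usize },
}

fn main() {
    let cmd = DemoCommands::GenSrs { logrows: 17 };
    // Mirrors as_json / from_json above.
    let json = serde_json::to_string(&cmd).expect("serialisable");
    let back: DemoCommands = serde_json::from_str(&json).expect("round-trips");
    assert_eq!(cmd, back);
    println!("{json}");
}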
|
||||
687
src/eth.rs
687
src/eth.rs
File diff suppressed because one or more lines are too long
328
src/execute.rs
328
src/execute.rs
@@ -1,9 +1,7 @@
|
||||
use crate::circuit::CheckMode;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use crate::commands::CalibrationTarget;
|
||||
use crate::commands::Commands;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use crate::commands::H160Flag;
|
||||
use crate::commands::*;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use crate::eth::{deploy_contract_via_solidity, deploy_da_verifier_via_solidity};
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
@@ -23,6 +21,8 @@ use crate::pfsys::{
|
||||
};
|
||||
use crate::pfsys::{save_vk, srs::*};
|
||||
use crate::tensor::TensorError;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use crate::EZKL_BUF_CAPACITY;
|
||||
use crate::{Commitments, RunArgs};
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use colored::Colorize;
|
||||
@@ -66,48 +66,16 @@ use snark_verifier::system::halo2::Config;
|
||||
use std::error::Error;
|
||||
use std::fs::File;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use std::io::BufWriter;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use std::io::{Cursor, Write};
|
||||
use std::path::Path;
|
||||
use std::path::PathBuf;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use std::process::Command;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use std::sync::OnceLock;
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use crate::EZKL_BUF_CAPACITY;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use std::io::BufWriter;
|
||||
use std::str::FromStr;
|
||||
use std::time::Duration;
|
||||
use tabled::Tabled;
|
||||
use thiserror::Error;
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
static _SOLC_REQUIREMENT: OnceLock<bool> = OnceLock::new();
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
fn check_solc_requirement() {
|
||||
info!("checking solc installation..");
|
||||
_SOLC_REQUIREMENT.get_or_init(|| match Command::new("solc").arg("--version").output() {
|
||||
Ok(output) => {
|
||||
debug!("solc output: {:#?}", output);
|
||||
debug!("solc output success: {:#?}", output.status.success());
|
||||
if !output.status.success() {
|
||||
log::error!(
|
||||
"`solc` check failed: {}",
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
);
|
||||
return false;
|
||||
}
|
||||
debug!("solc check passed, proceeding");
|
||||
true
|
||||
}
|
||||
Err(_) => {
|
||||
log::error!("`solc` check failed: solc not found");
|
||||
false
|
||||
}
|
||||
});
|
||||
}
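A trimmed, self-contained version of the solc presence check shown above: run `solc --version` once, cache the outcome in a OnceLock, and report availability; logging and error detail are stripped for brevity.

use std::process::Command;
use std::sync::OnceLock;

static SOLC_OK: OnceLock<bool> = OnceLock::new();

// Same shape as check_solc_requirement above, minus the log output.
fn solc_available() -> bool {
    *SOLC_OK.get_or_init(|| match Command::new("solc").arg("--version").output() {
        Ok(output) => output.status.success(),
        Err(_) => false,
    })
}

fn main() {
    println!("solc available: {}", solc_available());
}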
|
||||
|
||||
use lazy_static::lazy_static;
|
||||
|
||||
lazy_static! {
|
||||
@@ -152,7 +120,11 @@ pub async fn run(command: Commands) -> Result<String, Box<dyn Error>> {
|
||||
srs_path,
|
||||
logrows,
|
||||
commitment,
|
||||
} => gen_srs_cmd(srs_path, logrows as u32, commitment),
|
||||
} => gen_srs_cmd(
|
||||
srs_path,
|
||||
logrows as u32,
|
||||
commitment.unwrap_or(Commitments::from_str(DEFAULT_COMMITMENT)?),
|
||||
),
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
Commands::GetSrs {
|
||||
srs_path,
|
||||
@@ -160,12 +132,16 @@ pub async fn run(command: Commands) -> Result<String, Box<dyn Error>> {
|
||||
logrows,
|
||||
commitment,
|
||||
} => get_srs_cmd(srs_path, settings_path, logrows, commitment).await,
|
||||
Commands::Table { model, args } => table(model, args),
|
||||
Commands::Table { model, args } => table(model.unwrap_or(DEFAULT_MODEL.into()), args),
|
||||
Commands::GenSettings {
|
||||
model,
|
||||
settings_path,
|
||||
args,
|
||||
} => gen_circuit_settings(model, settings_path, args),
|
||||
} => gen_circuit_settings(
|
||||
model.unwrap_or(DEFAULT_MODEL.into()),
|
||||
settings_path.unwrap_or(DEFAULT_SETTINGS.into()),
|
||||
args,
|
||||
),
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
Commands::CalibrateSettings {
|
||||
model,
|
||||
@@ -178,16 +154,17 @@ pub async fn run(command: Commands) -> Result<String, Box<dyn Error>> {
|
||||
max_logrows,
|
||||
only_range_check_rebase,
|
||||
} => calibrate(
|
||||
model,
|
||||
data,
|
||||
settings_path,
|
||||
model.unwrap_or(DEFAULT_MODEL.into()),
|
||||
data.unwrap_or(DEFAULT_DATA.into()),
|
||||
settings_path.unwrap_or(DEFAULT_SETTINGS.into()),
|
||||
target,
|
||||
lookup_safety_margin,
|
||||
scales,
|
||||
scale_rebase_multiplier,
|
||||
only_range_check_rebase,
|
||||
only_range_check_rebase.unwrap_or(DEFAULT_ONLY_RANGE_CHECK_REBASE.parse()?),
|
||||
max_logrows,
|
||||
)
|
||||
.await
|
||||
.map(|e| serde_json::to_string(&e).unwrap()),
|
||||
Commands::GenWitness {
|
||||
data,
|
||||
@@ -195,9 +172,19 @@ pub async fn run(command: Commands) -> Result<String, Box<dyn Error>> {
|
||||
output,
|
||||
vk_path,
|
||||
srs_path,
|
||||
} => gen_witness(compiled_circuit, data, Some(output), vk_path, srs_path)
|
||||
.map(|e| serde_json::to_string(&e).unwrap()),
|
||||
Commands::Mock { model, witness } => mock(model, witness),
|
||||
} => gen_witness(
|
||||
compiled_circuit.unwrap_or(DEFAULT_COMPILED_CIRCUIT.into()),
|
||||
data.unwrap_or(DEFAULT_DATA.into()),
|
||||
Some(output.unwrap_or(DEFAULT_WITNESS.into())),
|
||||
vk_path,
|
||||
srs_path,
|
||||
)
|
||||
.await
|
||||
.map(|e| serde_json::to_string(&e).unwrap()),
|
||||
Commands::Mock { model, witness } => mock(
|
||||
model.unwrap_or(DEFAULT_MODEL.into()),
|
||||
witness.unwrap_or(DEFAULT_WITNESS.into()),
|
||||
),
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
Commands::CreateEvmVerifier {
|
||||
vk_path,
|
||||
@@ -206,28 +193,60 @@ pub async fn run(command: Commands) -> Result<String, Box<dyn Error>> {
|
||||
sol_code_path,
|
||||
abi_path,
|
||||
render_vk_seperately,
|
||||
} => create_evm_verifier(
|
||||
vk_path,
|
||||
srs_path,
|
||||
settings_path,
|
||||
sol_code_path,
|
||||
abi_path,
|
||||
render_vk_seperately,
|
||||
),
|
||||
} => {
|
||||
create_evm_verifier(
|
||||
vk_path.unwrap_or(DEFAULT_VK.into()),
|
||||
srs_path,
|
||||
settings_path.unwrap_or(DEFAULT_SETTINGS.into()),
|
||||
sol_code_path.unwrap_or(DEFAULT_SOL_CODE.into()),
|
||||
abi_path.unwrap_or(DEFAULT_VERIFIER_ABI.into()),
|
||||
render_vk_seperately.unwrap_or(DEFAULT_RENDER_VK_SEPERATELY.parse()?),
|
||||
)
|
||||
.await
|
||||
}
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
Commands::EncodeEvmCalldata {
|
||||
proof_path,
|
||||
calldata_path,
|
||||
addr_vk,
|
||||
} => encode_evm_calldata(
|
||||
proof_path.unwrap_or(DEFAULT_PROOF.into()),
|
||||
calldata_path.unwrap_or(DEFAULT_CALLDATA.into()),
|
||||
addr_vk,
|
||||
)
|
||||
.map(|e| serde_json::to_string(&e).unwrap()),
|
||||
|
||||
Commands::CreateEvmVK {
|
||||
vk_path,
|
||||
srs_path,
|
||||
settings_path,
|
||||
sol_code_path,
|
||||
abi_path,
|
||||
} => create_evm_vk(vk_path, srs_path, settings_path, sol_code_path, abi_path),
|
||||
} => {
|
||||
create_evm_vk(
|
||||
vk_path.unwrap_or(DEFAULT_VK.into()),
|
||||
srs_path,
|
||||
settings_path.unwrap_or(DEFAULT_SETTINGS.into()),
|
||||
sol_code_path.unwrap_or(DEFAULT_VK_SOL.into()),
|
||||
abi_path.unwrap_or(DEFAULT_VK_ABI.into()),
|
||||
)
|
||||
.await
|
||||
}
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
Commands::CreateEvmDataAttestation {
|
||||
settings_path,
|
||||
sol_code_path,
|
||||
abi_path,
|
||||
data,
|
||||
} => create_evm_data_attestation(settings_path, sol_code_path, abi_path, data),
|
||||
} => {
|
||||
create_evm_data_attestation(
|
||||
settings_path.unwrap_or(DEFAULT_SETTINGS.into()),
|
||||
sol_code_path.unwrap_or(DEFAULT_SOL_CODE_DA.into()),
|
||||
abi_path.unwrap_or(DEFAULT_VERIFIER_DA_ABI.into()),
|
||||
data.unwrap_or(DEFAULT_DATA.into()),
|
||||
)
|
||||
.await
|
||||
}
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
Commands::CreateEvmVerifierAggr {
|
||||
vk_path,
|
||||
@@ -237,20 +256,27 @@ pub async fn run(command: Commands) -> Result<String, Box<dyn Error>> {
|
||||
aggregation_settings,
|
||||
logrows,
|
||||
render_vk_seperately,
|
||||
} => create_evm_aggregate_verifier(
|
||||
vk_path,
|
||||
srs_path,
|
||||
sol_code_path,
|
||||
abi_path,
|
||||
aggregation_settings,
|
||||
logrows,
|
||||
render_vk_seperately,
|
||||
),
|
||||
} => {
|
||||
create_evm_aggregate_verifier(
|
||||
vk_path.unwrap_or(DEFAULT_VK.into()),
|
||||
srs_path,
|
||||
sol_code_path.unwrap_or(DEFAULT_SOL_CODE_AGGREGATED.into()),
|
||||
abi_path.unwrap_or(DEFAULT_VERIFIER_AGGREGATED_ABI.into()),
|
||||
aggregation_settings,
|
||||
logrows.unwrap_or(DEFAULT_AGGREGATED_LOGROWS.parse()?),
|
||||
render_vk_seperately.unwrap_or(DEFAULT_RENDER_VK_SEPERATELY.parse()?),
|
||||
)
|
||||
.await
|
||||
}
|
||||
Commands::CompileCircuit {
|
||||
model,
|
||||
compiled_circuit,
|
||||
settings_path,
|
||||
} => compile_circuit(model, compiled_circuit, settings_path),
|
||||
} => compile_circuit(
|
||||
model.unwrap_or(DEFAULT_MODEL.into()),
|
||||
compiled_circuit.unwrap_or(DEFAULT_COMPILED_CIRCUIT.into()),
|
||||
settings_path.unwrap_or(DEFAULT_SETTINGS.into()),
|
||||
),
|
||||
Commands::Setup {
|
||||
compiled_circuit,
|
||||
srs_path,
|
||||
@@ -259,12 +285,12 @@ pub async fn run(command: Commands) -> Result<String, Box<dyn Error>> {
|
||||
witness,
|
||||
disable_selector_compression,
|
||||
} => setup(
|
||||
compiled_circuit,
|
||||
compiled_circuit.unwrap_or(DEFAULT_COMPILED_CIRCUIT.into()),
|
||||
srs_path,
|
||||
vk_path,
|
||||
pk_path,
|
||||
vk_path.unwrap_or(DEFAULT_VK.into()),
|
||||
pk_path.unwrap_or(DEFAULT_PK.into()),
|
||||
witness,
|
||||
disable_selector_compression,
|
||||
disable_selector_compression.unwrap_or(DEFAULT_DISABLE_SELECTOR_COMPRESSION.parse()?),
|
||||
),
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
Commands::SetupTestEvmData {
|
||||
@@ -276,8 +302,8 @@ pub async fn run(command: Commands) -> Result<String, Box<dyn Error>> {
|
||||
output_source,
|
||||
} => {
|
||||
setup_test_evm_witness(
|
||||
data,
|
||||
compiled_circuit,
|
||||
data.unwrap_or(DEFAULT_DATA.into()),
|
||||
compiled_circuit.unwrap_or(DEFAULT_COMPILED_CIRCUIT.into()),
|
||||
test_data,
|
||||
rpc_url,
|
||||
input_source,
|
||||
@@ -290,13 +316,17 @@ pub async fn run(command: Commands) -> Result<String, Box<dyn Error>> {
|
||||
addr,
|
||||
data,
|
||||
rpc_url,
|
||||
} => test_update_account_calls(addr, data, rpc_url).await,
|
||||
} => test_update_account_calls(addr, data.unwrap_or(DEFAULT_DATA.into()), rpc_url).await,
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
Commands::SwapProofCommitments {
|
||||
proof_path,
|
||||
witness_path,
|
||||
} => swap_proof_commitments_cmd(proof_path, witness_path)
|
||||
.map(|e| serde_json::to_string(&e).unwrap()),
|
||||
} => swap_proof_commitments_cmd(
|
||||
proof_path.unwrap_or(DEFAULT_PROOF.into()),
|
||||
witness_path.unwrap_or(DEFAULT_WITNESS.into()),
|
||||
)
|
||||
.map(|e| serde_json::to_string(&e).unwrap()),
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
Commands::Prove {
|
||||
witness,
|
||||
@@ -307,20 +337,24 @@ pub async fn run(command: Commands) -> Result<String, Box<dyn Error>> {
|
||||
proof_type,
|
||||
check_mode,
|
||||
} => prove(
|
||||
witness,
|
||||
compiled_circuit,
|
||||
pk_path,
|
||||
Some(proof_path),
|
||||
witness.unwrap_or(DEFAULT_WITNESS.into()),
|
||||
compiled_circuit.unwrap_or(DEFAULT_COMPILED_CIRCUIT.into()),
|
||||
pk_path.unwrap_or(DEFAULT_PK.into()),
|
||||
Some(proof_path.unwrap_or(DEFAULT_PROOF.into())),
|
||||
srs_path,
|
||||
proof_type,
|
||||
check_mode,
|
||||
check_mode.unwrap_or(DEFAULT_CHECKMODE.parse()?),
|
||||
)
|
||||
.map(|e| serde_json::to_string(&e).unwrap()),
|
||||
Commands::MockAggregate {
|
||||
aggregation_snarks,
|
||||
logrows,
|
||||
split_proofs,
|
||||
} => mock_aggregate(aggregation_snarks, logrows, split_proofs),
|
||||
} => mock_aggregate(
|
||||
aggregation_snarks,
|
||||
logrows.unwrap_or(DEFAULT_AGGREGATED_LOGROWS.parse()?),
|
||||
split_proofs.unwrap_or(DEFAULT_SPLIT.parse()?),
|
||||
),
|
||||
Commands::SetupAggregate {
|
||||
sample_snarks,
|
||||
vk_path,
|
||||
@@ -332,12 +366,12 @@ pub async fn run(command: Commands) -> Result<String, Box<dyn Error>> {
|
||||
commitment,
|
||||
} => setup_aggregate(
|
||||
sample_snarks,
|
||||
vk_path,
|
||||
pk_path,
|
||||
vk_path.unwrap_or(DEFAULT_VK_AGGREGATED.into()),
|
||||
pk_path.unwrap_or(DEFAULT_PK_AGGREGATED.into()),
|
||||
srs_path,
|
||||
logrows,
|
||||
split_proofs,
|
||||
disable_selector_compression,
|
||||
logrows.unwrap_or(DEFAULT_AGGREGATED_LOGROWS.parse()?),
|
||||
split_proofs.unwrap_or(DEFAULT_SPLIT.parse()?),
|
||||
disable_selector_compression.unwrap_or(DEFAULT_DISABLE_SELECTOR_COMPRESSION.parse()?),
|
||||
commitment.into(),
|
||||
),
|
||||
Commands::Aggregate {
|
||||
@@ -351,14 +385,14 @@ pub async fn run(command: Commands) -> Result<String, Box<dyn Error>> {
|
||||
split_proofs,
|
||||
commitment,
|
||||
} => aggregate(
|
||||
proof_path,
|
||||
proof_path.unwrap_or(DEFAULT_PROOF_AGGREGATED.into()),
|
||||
aggregation_snarks,
|
||||
pk_path,
|
||||
pk_path.unwrap_or(DEFAULT_PK_AGGREGATED.into()),
|
||||
srs_path,
|
||||
transcript,
|
||||
logrows,
|
||||
check_mode,
|
||||
split_proofs,
|
||||
logrows.unwrap_or(DEFAULT_AGGREGATED_LOGROWS.parse()?),
|
||||
check_mode.unwrap_or(DEFAULT_CHECKMODE.parse()?),
|
||||
split_proofs.unwrap_or(DEFAULT_SPLIT.parse()?),
|
||||
commitment.into(),
|
||||
)
|
||||
.map(|e| serde_json::to_string(&e).unwrap()),
|
||||
@@ -368,8 +402,14 @@ pub async fn run(command: Commands) -> Result<String, Box<dyn Error>> {
|
||||
vk_path,
|
||||
srs_path,
|
||||
reduced_srs,
|
||||
} => verify(proof_path, settings_path, vk_path, srs_path, reduced_srs)
|
||||
.map(|e| serde_json::to_string(&e).unwrap()),
|
||||
} => verify(
|
||||
proof_path.unwrap_or(DEFAULT_PROOF.into()),
|
||||
settings_path.unwrap_or(DEFAULT_SETTINGS.into()),
|
||||
vk_path.unwrap_or(DEFAULT_VK.into()),
|
||||
srs_path,
|
||||
reduced_srs.unwrap_or(DEFAULT_USE_REDUCED_SRS_FOR_VERIFICATION.parse()?),
|
||||
)
|
||||
.map(|e| serde_json::to_string(&e).unwrap()),
|
||||
Commands::VerifyAggr {
|
||||
proof_path,
|
||||
vk_path,
|
||||
@@ -378,11 +418,11 @@ pub async fn run(command: Commands) -> Result<String, Box<dyn Error>> {
|
||||
logrows,
|
||||
commitment,
|
||||
} => verify_aggr(
|
||||
proof_path,
|
||||
vk_path,
|
||||
proof_path.unwrap_or(DEFAULT_PROOF_AGGREGATED.into()),
|
||||
vk_path.unwrap_or(DEFAULT_VK_AGGREGATED.into()),
|
||||
srs_path,
|
||||
logrows,
|
||||
reduced_srs,
|
||||
logrows.unwrap_or(DEFAULT_AGGREGATED_LOGROWS.parse()?),
|
||||
reduced_srs.unwrap_or(DEFAULT_USE_REDUCED_SRS_FOR_VERIFICATION.parse()?),
|
||||
commitment.into(),
|
||||
)
|
||||
.map(|e| serde_json::to_string(&e).unwrap()),
|
||||
@@ -395,9 +435,9 @@ pub async fn run(command: Commands) -> Result<String, Box<dyn Error>> {
|
||||
private_key,
|
||||
} => {
|
||||
deploy_evm(
|
||||
sol_code_path,
|
||||
sol_code_path.unwrap_or(DEFAULT_SOL_CODE.into()),
|
||||
rpc_url,
|
||||
addr_path,
|
||||
addr_path.unwrap_or(DEFAULT_CONTRACT_ADDRESS.into()),
|
||||
optimizer_runs,
|
||||
private_key,
|
||||
"Halo2Verifier",
|
||||
@@ -413,9 +453,9 @@ pub async fn run(command: Commands) -> Result<String, Box<dyn Error>> {
|
||||
private_key,
|
||||
} => {
|
||||
deploy_evm(
|
||||
sol_code_path,
|
||||
sol_code_path.unwrap_or(DEFAULT_VK_SOL.into()),
|
||||
rpc_url,
|
||||
addr_path,
|
||||
addr_path.unwrap_or(DEFAULT_CONTRACT_ADDRESS_VK.into()),
|
||||
optimizer_runs,
|
||||
private_key,
|
||||
"Halo2VerifyingKey",
|
||||
@@ -433,11 +473,11 @@ pub async fn run(command: Commands) -> Result<String, Box<dyn Error>> {
|
||||
private_key,
|
||||
} => {
|
||||
deploy_da_evm(
|
||||
data,
|
||||
settings_path,
|
||||
sol_code_path,
|
||||
data.unwrap_or(DEFAULT_DATA.into()),
|
||||
settings_path.unwrap_or(DEFAULT_SETTINGS.into()),
|
||||
sol_code_path.unwrap_or(DEFAULT_SOL_CODE_DA.into()),
|
||||
rpc_url,
|
||||
addr_path,
|
||||
addr_path.unwrap_or(DEFAULT_CONTRACT_ADDRESS_DA.into()),
|
||||
optimizer_runs,
|
||||
private_key,
|
||||
)
|
||||
@@ -450,7 +490,16 @@ pub async fn run(command: Commands) -> Result<String, Box<dyn Error>> {
|
||||
rpc_url,
|
||||
addr_da,
|
||||
addr_vk,
|
||||
} => verify_evm(proof_path, addr_verifier, rpc_url, addr_da, addr_vk).await,
|
||||
} => {
|
||||
verify_evm(
|
||||
proof_path.unwrap_or(DEFAULT_PROOF.into()),
|
||||
addr_verifier,
|
||||
rpc_url,
|
||||
addr_da,
|
||||
addr_vk,
|
||||
)
|
||||
.await
|
||||
}
|
||||
}
|
||||
}
|
||||
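Note on the dispatcher hunks above: every CLI argument becomes optional and falls back to a crate-level default, path-valued arguments via `unwrap_or(DEFAULT_*.into())` and typed arguments by parsing their string default. A condensed sketch of the two fall-back forms, reusing constant names that appear in the hunks (not a complete match arm):

    // Path-valued args fall back to a default path constant.
    let proof_path = proof_path.unwrap_or(DEFAULT_PROOF.into());
    // Typed args (u32, bool, CheckMode, ...) parse their string default.
    let logrows: u32 = logrows.unwrap_or(DEFAULT_AGGREGATED_LOGROWS.parse()?);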
|
||||
@@ -636,7 +685,7 @@ pub(crate) fn table(model: PathBuf, run_args: RunArgs) -> Result<String, Box<dyn
|
||||
Ok(String::new())
|
||||
}
|
||||
|
||||
pub(crate) fn gen_witness(
|
||||
pub(crate) async fn gen_witness(
|
||||
compiled_circuit_path: PathBuf,
|
||||
data: PathBuf,
|
||||
output: Option<PathBuf>,
|
||||
@@ -659,7 +708,7 @@ pub(crate) fn gen_witness(
|
||||
};
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
let mut input = circuit.load_graph_input(&data)?;
|
||||
let mut input = circuit.load_graph_input(&data).await?;
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
let mut input = circuit.load_graph_input(&data)?;
|
||||
|
||||
@@ -890,7 +939,7 @@ impl AccuracyResults {
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[allow(trivial_casts)]
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub(crate) fn calibrate(
|
||||
pub(crate) async fn calibrate(
|
||||
model_path: PathBuf,
|
||||
data: PathBuf,
|
||||
settings_path: PathBuf,
|
||||
@@ -913,7 +962,9 @@ pub(crate) fn calibrate(
|
||||
|
||||
let model = Model::from_run_args(&settings.run_args, &model_path)?;
|
||||
|
||||
let chunks = data.split_into_batches(model.graph.input_shapes()?)?;
|
||||
let input_shapes = model.graph.input_shapes()?;
|
||||
|
||||
let chunks = data.split_into_batches(input_shapes).await?;
|
||||
info!("num calibration batches: {}", chunks.len());
|
||||
|
||||
debug!("running onnx predictions...");
|
||||
@@ -1139,7 +1190,6 @@ pub(crate) fn calibrate(
|
||||
);
|
||||
num_passed += 1;
|
||||
} else {
|
||||
error!("calibration failed {}", res.err().unwrap());
|
||||
num_failed += 1;
|
||||
}
|
||||
|
||||
@@ -1296,7 +1346,7 @@ pub(crate) fn mock(
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub(crate) fn create_evm_verifier(
|
||||
pub(crate) async fn create_evm_verifier(
|
||||
vk_path: PathBuf,
|
||||
srs_path: Option<PathBuf>,
|
||||
settings_path: PathBuf,
|
||||
@@ -1304,8 +1354,6 @@ pub(crate) fn create_evm_verifier(
|
||||
abi_path: PathBuf,
|
||||
render_vk_seperately: bool,
|
||||
) -> Result<String, Box<dyn Error>> {
|
||||
check_solc_requirement();
|
||||
|
||||
let settings = GraphSettings::load(&settings_path)?;
|
||||
let commitment: Commitments = settings.run_args.commitment.into();
|
||||
let params = load_params_verifier::<KZGCommitmentScheme<Bn256>>(
|
||||
@@ -1335,7 +1383,7 @@ pub(crate) fn create_evm_verifier(
|
||||
File::create(sol_code_path.clone())?.write_all(verifier_solidity.as_bytes())?;
|
||||
|
||||
// fetch abi of the contract
|
||||
let (abi, _, _) = get_contract_artifacts(sol_code_path, "Halo2Verifier", 0)?;
|
||||
let (abi, _, _) = get_contract_artifacts(sol_code_path, "Halo2Verifier", 0).await?;
|
||||
// save abi to file
|
||||
serde_json::to_writer(std::fs::File::create(abi_path)?, &abi)?;
|
||||
|
||||
@@ -1343,14 +1391,13 @@ pub(crate) fn create_evm_verifier(
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub(crate) fn create_evm_vk(
|
||||
pub(crate) async fn create_evm_vk(
|
||||
vk_path: PathBuf,
|
||||
srs_path: Option<PathBuf>,
|
||||
settings_path: PathBuf,
|
||||
sol_code_path: PathBuf,
|
||||
abi_path: PathBuf,
|
||||
) -> Result<String, Box<dyn Error>> {
|
||||
check_solc_requirement();
|
||||
let settings = GraphSettings::load(&settings_path)?;
|
||||
let commitment: Commitments = settings.run_args.commitment.into();
|
||||
let params = load_params_verifier::<KZGCommitmentScheme<Bn256>>(
|
||||
@@ -1377,7 +1424,7 @@ pub(crate) fn create_evm_vk(
|
||||
File::create(sol_code_path.clone())?.write_all(vk_solidity.as_bytes())?;
|
||||
|
||||
// fetch abi of the contract
|
||||
let (abi, _, _) = get_contract_artifacts(sol_code_path, "Halo2VerifyingKey", 0)?;
|
||||
let (abi, _, _) = get_contract_artifacts(sol_code_path, "Halo2VerifyingKey", 0).await?;
|
||||
// save abi to file
|
||||
serde_json::to_writer(std::fs::File::create(abi_path)?, &abi)?;
|
||||
|
||||
@@ -1385,7 +1432,7 @@ pub(crate) fn create_evm_vk(
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub(crate) fn create_evm_data_attestation(
|
||||
pub(crate) async fn create_evm_data_attestation(
|
||||
settings_path: PathBuf,
|
||||
_sol_code_path: PathBuf,
|
||||
_abi_path: PathBuf,
|
||||
@@ -1393,7 +1440,6 @@ pub(crate) fn create_evm_data_attestation(
|
||||
) -> Result<String, Box<dyn Error>> {
|
||||
#[allow(unused_imports)]
|
||||
use crate::graph::{DataSource, VarVisibility};
|
||||
check_solc_requirement();
|
||||
|
||||
let settings = GraphSettings::load(&settings_path)?;
|
||||
|
||||
@@ -1433,7 +1479,7 @@ pub(crate) fn create_evm_data_attestation(
|
||||
let mut f = File::create(_sol_code_path.clone())?;
|
||||
let _ = f.write(output.as_bytes());
|
||||
// fetch abi of the contract
|
||||
let (abi, _, _) = get_contract_artifacts(_sol_code_path, "DataAttestation", 0)?;
|
||||
let (abi, _, _) = get_contract_artifacts(_sol_code_path, "DataAttestation", 0).await?;
|
||||
// save abi to file
|
||||
serde_json::to_writer(std::fs::File::create(_abi_path)?, &abi)?;
|
||||
} else {
|
||||
@@ -1454,7 +1500,6 @@ pub(crate) async fn deploy_da_evm(
|
||||
runs: usize,
|
||||
private_key: Option<String>,
|
||||
) -> Result<String, Box<dyn Error>> {
|
||||
check_solc_requirement();
|
||||
let contract_address = deploy_da_verifier_via_solidity(
|
||||
settings_path,
|
||||
data,
|
||||
@@ -1481,7 +1526,6 @@ pub(crate) async fn deploy_evm(
|
||||
private_key: Option<String>,
|
||||
contract_name: &str,
|
||||
) -> Result<String, Box<dyn Error>> {
|
||||
check_solc_requirement();
|
||||
let contract_address = deploy_contract_via_solidity(
|
||||
sol_code_path,
|
||||
rpc_url.as_deref(),
|
||||
@@ -1498,6 +1542,32 @@ pub(crate) async fn deploy_evm(
|
||||
Ok(String::new())
|
||||
}
|
||||
|
||||
/// Encodes the calldata for the EVM verifier (both aggregated and single proof)
pub(crate) fn encode_evm_calldata(
    proof_path: PathBuf,
    calldata_path: PathBuf,
    addr_vk: Option<H160Flag>,
) -> Result<Vec<u8>, Box<dyn Error>> {
    let snark = Snark::load::<IPACommitmentScheme<G1Affine>>(&proof_path)?;

    let flattened_instances = snark.instances.into_iter().flatten();

    let encoded = halo2_solidity_verifier::encode_calldata(
        addr_vk
            .as_ref()
            .map(|x| alloy::primitives::Address::from(*x).0)
            .map(|x| x.0),
        &snark.proof,
        &flattened_instances.collect::<Vec<_>>(),
    );

    log::debug!("Encoded calldata: {:?}", encoded);

    File::create(calldata_path)?.write_all(encoded.as_slice())?;

    Ok(encoded)
}
|
||||
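For orientation, a hedged in-crate usage sketch of the function above; the file paths and the zero address are placeholders, and the call is not runnable outside this crate:

    // Hypothetical example: write calldata for a proof whose VK is rendered at a separate contract address.
    let calldata = encode_evm_calldata(
        PathBuf::from("proof.json"),      // placeholder proof path
        PathBuf::from("calldata.bytes"),  // placeholder output path
        Some(H160Flag::from("0x0000000000000000000000000000000000000000")), // placeholder VK address
    )?;
    assert!(!calldata.is_empty());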
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub(crate) async fn verify_evm(
|
||||
proof_path: PathBuf,
|
||||
@@ -1507,7 +1577,6 @@ pub(crate) async fn verify_evm(
|
||||
addr_vk: Option<H160Flag>,
|
||||
) -> Result<String, Box<dyn Error>> {
|
||||
use crate::eth::verify_proof_with_data_attestation;
|
||||
check_solc_requirement();
|
||||
|
||||
let proof = Snark::load::<KZGCommitmentScheme<Bn256>>(&proof_path)?;
|
||||
|
||||
@@ -1540,7 +1609,7 @@ pub(crate) async fn verify_evm(
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub(crate) fn create_evm_aggregate_verifier(
|
||||
pub(crate) async fn create_evm_aggregate_verifier(
|
||||
vk_path: PathBuf,
|
||||
srs_path: Option<PathBuf>,
|
||||
sol_code_path: PathBuf,
|
||||
@@ -1549,7 +1618,6 @@ pub(crate) fn create_evm_aggregate_verifier(
|
||||
logrows: u32,
|
||||
render_vk_seperately: bool,
|
||||
) -> Result<String, Box<dyn Error>> {
|
||||
check_solc_requirement();
|
||||
let srs_path = get_srs_path(logrows, srs_path, Commitments::KZG);
|
||||
let params: ParamsKZG<Bn256> = load_srs_verifier::<KZGCommitmentScheme<Bn256>>(srs_path)?;
|
||||
|
||||
@@ -1595,7 +1663,7 @@ pub(crate) fn create_evm_aggregate_verifier(
|
||||
File::create(sol_code_path.clone())?.write_all(verifier_solidity.as_bytes())?;
|
||||
|
||||
// fetch abi of the contract
|
||||
let (abi, _, _) = get_contract_artifacts(sol_code_path, "Halo2Verifier", 0)?;
|
||||
let (abi, _, _) = get_contract_artifacts(sol_code_path, "Halo2Verifier", 0).await?;
|
||||
// save abi to file
|
||||
serde_json::to_writer(std::fs::File::create(abi_path)?, &abi)?;
|
||||
|
||||
@@ -1675,7 +1743,6 @@ pub(crate) async fn setup_test_evm_witness(
|
||||
) -> Result<String, Box<dyn Error>> {
|
||||
use crate::graph::TestOnChainData;
|
||||
|
||||
info!("run this command in background to keep the instance running for testing");
|
||||
let mut data = GraphData::from_path(data_path)?;
|
||||
let mut circuit = GraphCircuit::load(compiled_circuit_path)?;
|
||||
|
||||
@@ -1711,7 +1778,6 @@ pub(crate) async fn test_update_account_calls(
|
||||
) -> Result<String, Box<dyn Error>> {
|
||||
use crate::eth::update_account_calls;
|
||||
|
||||
check_solc_requirement();
|
||||
update_account_calls(addr.into(), data, rpc_url.as_deref()).await?;
|
||||
|
||||
Ok(String::new())
|
||||
|
||||
@@ -3,11 +3,11 @@ use super::GraphError;
|
||||
use crate::circuit::InputType;
|
||||
use crate::fieldutils::i64_to_felt;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use crate::graph::postgres::Client;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use crate::tensor::Tensor;
|
||||
use crate::EZKL_BUF_CAPACITY;
|
||||
use halo2curves::bn256::Fr as Fp;
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use postgres::{Client, NoTls};
|
||||
#[cfg(feature = "python-bindings")]
|
||||
use pyo3::prelude::*;
|
||||
#[cfg(feature = "python-bindings")]
|
||||
@@ -211,7 +211,9 @@ impl PostgresSource {
|
||||
}
|
||||
|
||||
/// Fetch data from postgres
|
||||
pub fn fetch(&self) -> Result<Vec<Vec<pg_bigdecimal::PgNumeric>>, Box<dyn std::error::Error>> {
|
||||
pub async fn fetch(
|
||||
&self,
|
||||
) -> Result<Vec<Vec<pg_bigdecimal::PgNumeric>>, Box<dyn std::error::Error>> {
|
||||
// clone to move into thread
|
||||
let user = self.user.clone();
|
||||
let host = self.host.clone();
|
||||
@@ -232,10 +234,10 @@ impl PostgresSource {
|
||||
)
|
||||
};
|
||||
|
||||
let mut client = Client::connect(&config, NoTls)?;
|
||||
let mut client = Client::connect(&config).await?;
|
||||
let mut res: Vec<pg_bigdecimal::PgNumeric> = Vec::new();
|
||||
// extract rows from query
|
||||
for row in client.query(&query, &[])? {
|
||||
for row in client.query(&query, &[]).await? {
|
||||
// extract features from row
|
||||
for i in 0..row.len() {
|
||||
res.push(row.get(i));
|
||||
@@ -245,11 +247,12 @@ impl PostgresSource {
|
||||
}
|
||||
|
||||
/// Fetch data from postgres and format it as a FileSource
|
||||
pub fn fetch_and_format_as_file(
|
||||
pub async fn fetch_and_format_as_file(
|
||||
&self,
|
||||
) -> Result<Vec<Vec<FileSourceInner>>, Box<dyn std::error::Error>> {
|
||||
Ok(self
|
||||
.fetch()?
|
||||
.fetch()
|
||||
.await?
|
||||
.iter()
|
||||
.map(|d| {
|
||||
d.iter()
|
||||
@@ -277,13 +280,13 @@ impl OnChainSource {
|
||||
mut shapes: Vec<Vec<usize>>,
|
||||
rpc: Option<&str>,
|
||||
) -> Result<(Vec<Tensor<Fp>>, Self), Box<dyn std::error::Error>> {
|
||||
use crate::eth::{evm_quantize, read_on_chain_inputs, test_on_chain_data};
|
||||
use crate::eth::{
|
||||
evm_quantize, read_on_chain_inputs, test_on_chain_data, DEFAULT_ANVIL_ENDPOINT,
|
||||
};
|
||||
use log::debug;
|
||||
|
||||
// Set up local anvil instance for reading on-chain data
|
||||
let (anvil, client) = crate::eth::setup_eth_backend(rpc, None).await?;
|
||||
|
||||
let address = client.address();
|
||||
let (client, client_address) = crate::eth::setup_eth_backend(rpc, None).await?;
|
||||
|
||||
let mut scales = scales;
|
||||
// set scales to 1 where data is a field element
|
||||
@@ -296,7 +299,8 @@ impl OnChainSource {
|
||||
|
||||
let calls_to_accounts = test_on_chain_data(client.clone(), data).await?;
|
||||
debug!("Calls to accounts: {:?}", calls_to_accounts);
|
||||
let inputs = read_on_chain_inputs(client.clone(), address, &calls_to_accounts).await?;
|
||||
let inputs =
|
||||
read_on_chain_inputs(client.clone(), client_address, &calls_to_accounts).await?;
|
||||
debug!("Inputs: {:?}", inputs);
|
||||
|
||||
let mut quantized_evm_inputs = vec![];
|
||||
@@ -325,7 +329,7 @@ impl OnChainSource {
|
||||
inputs.push(t);
|
||||
}
|
||||
|
||||
let used_rpc = rpc.unwrap_or(&anvil.endpoint()).to_string();
|
||||
let used_rpc = rpc.unwrap_or(DEFAULT_ANVIL_ENDPOINT).to_string();
|
||||
|
||||
// Fill the input_data field of the GraphData struct
|
||||
Ok((
|
||||
@@ -502,7 +506,7 @@ impl GraphData {
|
||||
}
|
||||
|
||||
///
|
||||
pub fn split_into_batches(
|
||||
pub async fn split_into_batches(
|
||||
&self,
|
||||
input_shapes: Vec<Vec<usize>>,
|
||||
) -> Result<Vec<Self>, Box<dyn std::error::Error>> {
|
||||
@@ -527,7 +531,7 @@ impl GraphData {
|
||||
GraphData {
|
||||
input_data: DataSource::DB(data),
|
||||
output_data: _,
|
||||
} => data.fetch_and_format_as_file()?,
|
||||
} => data.fetch_and_format_as_file().await?,
|
||||
};
|
||||
|
||||
for (i, shape) in input_shapes.iter().enumerate() {
|
||||
|
||||
@@ -6,10 +6,14 @@ pub mod model;
|
||||
pub mod modules;
|
||||
/// Inner elements of a computational graph that represent a single operation / constraints.
|
||||
pub mod node;
|
||||
/// postgres helper functions
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub mod postgres;
|
||||
/// Helper functions
|
||||
pub mod utilities;
|
||||
/// Representations of a computational graph's variables.
|
||||
pub mod vars;
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
use colored_json::ToColoredJson;
|
||||
#[cfg(unix)]
|
||||
@@ -958,7 +962,7 @@ impl GraphCircuit {
|
||||
|
||||
///
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
pub fn load_graph_input(
|
||||
pub async fn load_graph_input(
|
||||
&mut self,
|
||||
data: &GraphData,
|
||||
) -> Result<Vec<Tensor<Fp>>, Box<dyn std::error::Error>> {
|
||||
@@ -968,6 +972,7 @@ impl GraphCircuit {
|
||||
debug!("input scales: {:?}", scales);
|
||||
|
||||
self.process_data_source(&data.input_data, shapes, scales, input_types)
|
||||
.await
|
||||
}
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
@@ -991,7 +996,7 @@ impl GraphCircuit {
|
||||
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
/// Process the data source for the model
|
||||
fn process_data_source(
|
||||
async fn process_data_source(
|
||||
&mut self,
|
||||
data: &DataSource,
|
||||
shapes: Vec<Vec<usize>>,
|
||||
@@ -1005,21 +1010,14 @@ impl GraphCircuit {
|
||||
per_item_scale.extend(vec![scales[i]; shape.iter().product::<usize>()]);
|
||||
}
|
||||
|
||||
// start runtime and fetch data
|
||||
let runtime = tokio::runtime::Builder::new_current_thread()
|
||||
.enable_all()
|
||||
.build()?;
|
||||
|
||||
runtime.block_on(async {
|
||||
self.load_on_chain_data(source.clone(), &shapes, per_item_scale)
|
||||
.await
|
||||
})
|
||||
self.load_on_chain_data(source.clone(), &shapes, per_item_scale)
|
||||
.await
|
||||
}
|
||||
DataSource::File(file_data) => {
|
||||
self.load_file_data(file_data, &shapes, scales, input_types)
|
||||
}
|
||||
DataSource::DB(pg) => {
|
||||
let data = pg.fetch_and_format_as_file()?;
|
||||
let data = pg.fetch_and_format_as_file().await?;
|
||||
self.load_file_data(&data, &shapes, scales, input_types)
|
||||
}
|
||||
}
|
||||
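The hunk above drops the nested current-thread runtime: `process_data_source` is itself async now, so the on-chain fetch is simply awaited instead of being driven through `block_on`, which tokio panics on when invoked from inside an already-running runtime. A generic sketch of the pattern, with hypothetical names (`Data`, `fetch_remote`):

    // Before (sketch): a sync method builds its own runtime to drive an async call.
    fn fetch_sync(&mut self) -> Result<Data, Box<dyn std::error::Error>> {
        let rt = tokio::runtime::Builder::new_current_thread()
            .enable_all()
            .build()?;
        // Panics if this thread is already inside a tokio runtime.
        rt.block_on(self.fetch_remote())
    }

    // After (sketch): make the method async and propagate the await to the caller.
    async fn fetch_async(&mut self) -> Result<Data, Box<dyn std::error::Error>> {
        self.fetch_remote().await
    }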
@@ -1034,8 +1032,8 @@ impl GraphCircuit {
|
||||
scales: Vec<crate::Scale>,
|
||||
) -> Result<Vec<Tensor<Fp>>, Box<dyn std::error::Error>> {
|
||||
use crate::eth::{evm_quantize, read_on_chain_inputs, setup_eth_backend};
|
||||
let (_, client) = setup_eth_backend(Some(&source.rpc), None).await?;
|
||||
let inputs = read_on_chain_inputs(client.clone(), client.address(), &source.calls).await?;
|
||||
let (client, client_address) = setup_eth_backend(Some(&source.rpc), None).await?;
|
||||
let inputs = read_on_chain_inputs(client.clone(), client_address, &source.calls).await?;
|
||||
// quantize the supplied data using the provided scale + QuantizeData.sol
|
||||
let quantized_evm_inputs = evm_quantize(client, scales, &inputs).await?;
|
||||
// on-chain data has already been quantized at this point. Just need to reshape it and push into tensor vector
|
||||
|
||||
@@ -15,7 +15,7 @@ use super::{VarVisibility, Visibility};
|
||||
|
||||
/// poseidon len to hash in tree
|
||||
pub const POSEIDON_LEN_GRAPH: usize = 32;
|
||||
/// Poseidon number of instancess
|
||||
/// Poseidon number of instances
|
||||
pub const POSEIDON_INSTANCES: usize = 1;
|
||||
|
||||
/// Poseidon module type
|
||||
|
||||
@@ -550,7 +550,7 @@ impl Node {
|
||||
.collect::<Result<Vec<_>, Box<dyn Error>>>()?;
|
||||
|
||||
let homogenous_inputs = opkind.requires_homogenous_input_scales();
|
||||
// autoamtically increases a constant's scale if it is only used once and
|
||||
// automatically increases a constant's scale if it is only used once and
|
||||
for input in homogenous_inputs
|
||||
.into_iter()
|
||||
.filter(|i| !deleted_indices.contains(i))
|
||||
|
||||
493
src/graph/postgres.rs
Normal file
@@ -0,0 +1,493 @@
|
||||
use log::{debug, error, info};
|
||||
use std::fmt::Debug;
|
||||
use std::net::IpAddr;
|
||||
#[cfg(unix)]
|
||||
use std::path::Path;
|
||||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
use std::{fmt, pin::Pin};
|
||||
use tokio::task::JoinHandle;
|
||||
#[doc(inline)]
|
||||
pub use tokio_postgres::config::{
|
||||
ChannelBinding, Host, LoadBalanceHosts, SslMode, TargetSessionAttrs,
|
||||
};
|
||||
use tokio_postgres::tls::NoTlsStream;
|
||||
use tokio_postgres::NoTls;
|
||||
use tokio_postgres::{error::DbError, types::ToSql, Error, Row, Socket, ToStatement};
|
||||
|
||||
/// Connection configuration.
|
||||
///
|
||||
/// Configuration can be parsed from libpq-style connection strings. These strings come in two formats:
|
||||
///
|
||||
///
|
||||
#[derive(Clone)]
|
||||
pub struct Config {
|
||||
config: tokio_postgres::Config,
|
||||
notice_callback: Arc<dyn Fn(DbError) + Send + Sync>,
|
||||
}
|
||||
|
||||
impl fmt::Debug for Config {
|
||||
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
fmt.debug_struct("Config")
|
||||
.field("config", &self.config)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Config {
|
||||
fn default() -> Config {
|
||||
Config::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl Config {
|
||||
/// Creates a new configuration.
|
||||
pub fn new() -> Config {
|
||||
tokio_postgres::Config::new().into()
|
||||
}
|
||||
|
||||
/// Sets the user to authenticate with.
|
||||
///
|
||||
/// If the user is not set, then this defaults to the user executing this process.
|
||||
pub fn user(&mut self, user: &str) -> &mut Config {
|
||||
self.config.user(user);
|
||||
self
|
||||
}
|
||||
|
||||
/// Gets the user to authenticate with, if one has been configured with
|
||||
/// the `user` method.
|
||||
pub fn get_user(&self) -> Option<&str> {
|
||||
self.config.get_user()
|
||||
}
|
||||
|
||||
/// Sets the password to authenticate with.
|
||||
pub fn password<T>(&mut self, password: T) -> &mut Config
|
||||
where
|
||||
T: AsRef<[u8]>,
|
||||
{
|
||||
self.config.password(password);
|
||||
self
|
||||
}
|
||||
|
||||
/// Gets the password to authenticate with, if one has been configured with
|
||||
/// the `password` method.
|
||||
pub fn get_password(&self) -> Option<&[u8]> {
|
||||
self.config.get_password()
|
||||
}
|
||||
|
||||
/// Sets the name of the database to connect to.
|
||||
///
|
||||
/// Defaults to the user.
|
||||
pub fn dbname(&mut self, dbname: &str) -> &mut Config {
|
||||
self.config.dbname(dbname);
|
||||
self
|
||||
}
|
||||
|
||||
/// Gets the name of the database to connect to, if one has been configured
|
||||
/// with the `dbname` method.
|
||||
pub fn get_dbname(&self) -> Option<&str> {
|
||||
self.config.get_dbname()
|
||||
}
|
||||
|
||||
/// Sets command line options used to configure the server.
|
||||
pub fn options(&mut self, options: &str) -> &mut Config {
|
||||
self.config.options(options);
|
||||
self
|
||||
}
|
||||
|
||||
/// Gets the command line options used to configure the server, if the
|
||||
/// options have been set with the `options` method.
|
||||
pub fn get_options(&self) -> Option<&str> {
|
||||
self.config.get_options()
|
||||
}
|
||||
|
||||
/// Sets the value of the `application_name` runtime parameter.
|
||||
pub fn application_name(&mut self, application_name: &str) -> &mut Config {
|
||||
self.config.application_name(application_name);
|
||||
self
|
||||
}
|
||||
|
||||
/// Gets the value of the `application_name` runtime parameter, if it has
|
||||
/// been set with the `application_name` method.
|
||||
pub fn get_application_name(&self) -> Option<&str> {
|
||||
self.config.get_application_name()
|
||||
}
|
||||
|
||||
/// Sets the SSL configuration.
|
||||
///
|
||||
/// Defaults to `prefer`.
|
||||
pub fn ssl_mode(&mut self, ssl_mode: SslMode) -> &mut Config {
|
||||
self.config.ssl_mode(ssl_mode);
|
||||
self
|
||||
}
|
||||
|
||||
/// Gets the SSL configuration.
|
||||
pub fn get_ssl_mode(&self) -> SslMode {
|
||||
self.config.get_ssl_mode()
|
||||
}
|
||||
|
||||
/// Adds a host to the configuration.
|
||||
///
|
||||
/// Multiple hosts can be specified by calling this method multiple times, and each will be tried in order. On Unix
|
||||
/// systems, a host starting with a `/` is interpreted as a path to a directory containing Unix domain sockets.
|
||||
/// There must be either no hosts, or the same number of hosts as hostaddrs.
|
||||
pub fn host(&mut self, host: &str) -> &mut Config {
|
||||
self.config.host(host);
|
||||
self
|
||||
}
|
||||
|
||||
/// Gets the hosts that have been added to the configuration with `host`.
|
||||
pub fn get_hosts(&self) -> &[Host] {
|
||||
self.config.get_hosts()
|
||||
}
|
||||
|
||||
/// Gets the hostaddrs that have been added to the configuration with `hostaddr`.
|
||||
pub fn get_hostaddrs(&self) -> &[IpAddr] {
|
||||
self.config.get_hostaddrs()
|
||||
}
|
||||
|
||||
/// Adds a Unix socket host to the configuration.
|
||||
///
|
||||
/// Unlike `host`, this method allows non-UTF8 paths.
|
||||
#[cfg(unix)]
|
||||
pub fn host_path<T>(&mut self, host: T) -> &mut Config
|
||||
where
|
||||
T: AsRef<Path>,
|
||||
{
|
||||
self.config.host_path(host);
|
||||
self
|
||||
}
|
||||
|
||||
/// Adds a hostaddr to the configuration.
|
||||
///
|
||||
/// Multiple hostaddrs can be specified by calling this method multiple times, and each will be tried in order.
|
||||
/// There must be either no hostaddrs, or the same number of hostaddrs as hosts.
|
||||
pub fn hostaddr(&mut self, hostaddr: IpAddr) -> &mut Config {
|
||||
self.config.hostaddr(hostaddr);
|
||||
self
|
||||
}
|
||||
|
||||
/// Adds a port to the configuration.
|
||||
///
|
||||
/// Multiple ports can be specified by calling this method multiple times. There must either be no ports, in which
|
||||
/// case the default of 5432 is used, a single port, in which case it is used for all hosts, or the same number of ports
|
||||
/// as hosts.
|
||||
pub fn port(&mut self, port: u16) -> &mut Config {
|
||||
self.config.port(port);
|
||||
self
|
||||
}
|
||||
|
||||
/// Gets the ports that have been added to the configuration with `port`.
|
||||
pub fn get_ports(&self) -> &[u16] {
|
||||
self.config.get_ports()
|
||||
}
|
||||
|
||||
/// Sets the timeout applied to socket-level connection attempts.
|
||||
///
|
||||
/// Note that hostnames can resolve to multiple IP addresses, and this timeout will apply to each address of each
|
||||
/// host separately. Defaults to no limit.
|
||||
pub fn connect_timeout(&mut self, connect_timeout: Duration) -> &mut Config {
|
||||
self.config.connect_timeout(connect_timeout);
|
||||
self
|
||||
}
|
||||
|
||||
/// Gets the connection timeout, if one has been set with the
|
||||
/// `connect_timeout` method.
|
||||
pub fn get_connect_timeout(&self) -> Option<&Duration> {
|
||||
self.config.get_connect_timeout()
|
||||
}
|
||||
|
||||
/// Sets the TCP user timeout.
|
||||
///
|
||||
/// This is ignored for Unix domain socket connections. It is only supported on systems where
|
||||
/// TCP_USER_TIMEOUT is available and will default to the system default if omitted or set to 0;
|
||||
/// on other systems, it has no effect.
|
||||
pub fn tcp_user_timeout(&mut self, tcp_user_timeout: Duration) -> &mut Config {
|
||||
self.config.tcp_user_timeout(tcp_user_timeout);
|
||||
self
|
||||
}
|
||||
|
||||
/// Gets the TCP user timeout, if one has been set with the
|
||||
/// `user_timeout` method.
|
||||
pub fn get_tcp_user_timeout(&self) -> Option<&Duration> {
|
||||
self.config.get_tcp_user_timeout()
|
||||
}
|
||||
|
||||
/// Controls the use of TCP keepalive.
|
||||
///
|
||||
/// This is ignored for Unix domain socket connections. Defaults to `true`.
|
||||
pub fn keepalives(&mut self, keepalives: bool) -> &mut Config {
|
||||
self.config.keepalives(keepalives);
|
||||
self
|
||||
}
|
||||
|
||||
/// Reports whether TCP keepalives will be used.
|
||||
pub fn get_keepalives(&self) -> bool {
|
||||
self.config.get_keepalives()
|
||||
}
|
||||
|
||||
/// Sets the amount of idle time before a keepalive packet is sent on the connection.
|
||||
///
|
||||
/// This is ignored for Unix domain sockets, or if the `keepalives` option is disabled. Defaults to 2 hours.
|
||||
pub fn keepalives_idle(&mut self, keepalives_idle: Duration) -> &mut Config {
|
||||
self.config.keepalives_idle(keepalives_idle);
|
||||
self
|
||||
}
|
||||
|
||||
/// Gets the configured amount of idle time before a keepalive packet will
|
||||
/// be sent on the connection.
|
||||
pub fn get_keepalives_idle(&self) -> Duration {
|
||||
self.config.get_keepalives_idle()
|
||||
}
|
||||
|
||||
/// Sets the time interval between TCP keepalive probes.
|
||||
/// On Windows, this sets the value of the tcp_keepalive struct’s keepaliveinterval field.
|
||||
///
|
||||
/// This is ignored for Unix domain sockets, or if the `keepalives` option is disabled.
|
||||
pub fn keepalives_interval(&mut self, keepalives_interval: Duration) -> &mut Config {
|
||||
self.config.keepalives_interval(keepalives_interval);
|
||||
self
|
||||
}
|
||||
|
||||
/// Gets the time interval between TCP keepalive probes.
|
||||
pub fn get_keepalives_interval(&self) -> Option<Duration> {
|
||||
self.config.get_keepalives_interval()
|
||||
}
|
||||
|
||||
/// Sets the maximum number of TCP keepalive probes that will be sent before dropping a connection.
|
||||
///
|
||||
/// This is ignored for Unix domain sockets, or if the `keepalives` option is disabled.
|
||||
pub fn keepalives_retries(&mut self, keepalives_retries: u32) -> &mut Config {
|
||||
self.config.keepalives_retries(keepalives_retries);
|
||||
self
|
||||
}
|
||||
|
||||
/// Gets the maximum number of TCP keepalive probes that will be sent before dropping a connection.
|
||||
pub fn get_keepalives_retries(&self) -> Option<u32> {
|
||||
self.config.get_keepalives_retries()
|
||||
}
|
||||
|
||||
/// Sets the requirements of the session.
|
||||
///
|
||||
/// This can be used to connect to the primary server in a clustered database rather than one of the read-only
|
||||
/// secondary servers. Defaults to `Any`.
|
||||
pub fn target_session_attrs(
|
||||
&mut self,
|
||||
target_session_attrs: TargetSessionAttrs,
|
||||
) -> &mut Config {
|
||||
self.config.target_session_attrs(target_session_attrs);
|
||||
self
|
||||
}
|
||||
|
||||
/// Gets the requirements of the session.
|
||||
pub fn get_target_session_attrs(&self) -> TargetSessionAttrs {
|
||||
self.config.get_target_session_attrs()
|
||||
}
|
||||
|
||||
/// Sets the channel binding behavior.
|
||||
///
|
||||
/// Defaults to `prefer`.
|
||||
pub fn channel_binding(&mut self, channel_binding: ChannelBinding) -> &mut Config {
|
||||
self.config.channel_binding(channel_binding);
|
||||
self
|
||||
}
|
||||
|
||||
/// Gets the channel binding behavior.
|
||||
pub fn get_channel_binding(&self) -> ChannelBinding {
|
||||
self.config.get_channel_binding()
|
||||
}
|
||||
|
||||
/// Sets the host load balancing behavior.
|
||||
///
|
||||
/// Defaults to `disable`.
|
||||
pub fn load_balance_hosts(&mut self, load_balance_hosts: LoadBalanceHosts) -> &mut Config {
|
||||
self.config.load_balance_hosts(load_balance_hosts);
|
||||
self
|
||||
}
|
||||
|
||||
/// Gets the host load balancing behavior.
|
||||
pub fn get_load_balance_hosts(&self) -> LoadBalanceHosts {
|
||||
self.config.get_load_balance_hosts()
|
||||
}
|
||||
|
||||
/// Sets the notice callback.
|
||||
///
|
||||
/// This callback will be invoked with the contents of every
|
||||
/// [`AsyncMessage::Notice`] that is received by the connection. Notices use
|
||||
/// the same structure as errors, but they are not "errors" per-se.
|
||||
///
|
||||
/// Notices are distinct from notifications, which are instead accessible
|
||||
/// via the [`Notifications`] API.
|
||||
///
|
||||
/// [`AsyncMessage::Notice`]: tokio_postgres::AsyncMessage::Notice
|
||||
/// [`Notifications`]: crate::Notifications
|
||||
pub fn notice_callback<F>(&mut self, f: F) -> &mut Config
|
||||
where
|
||||
F: Fn(DbError) + Send + Sync + 'static,
|
||||
{
|
||||
self.notice_callback = Arc::new(f);
|
||||
self
|
||||
}
|
||||
|
||||
/// Opens a connection to a PostgreSQL database.
|
||||
pub async fn connect(&self) -> Result<Client, Error> {
|
||||
let (client, connection) = self.config.connect(NoTls).await?;
|
||||
|
||||
let connection = Connection::new(connection);
|
||||
|
||||
Ok(Client::new(client, connection))
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for Config {
|
||||
type Err = Error;
|
||||
|
||||
fn from_str(s: &str) -> Result<Config, Error> {
|
||||
s.parse::<tokio_postgres::Config>().map(Config::from)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<tokio_postgres::Config> for Config {
|
||||
fn from(config: tokio_postgres::Config) -> Config {
|
||||
Config {
|
||||
config,
|
||||
notice_callback: Arc::new(|notice| {
|
||||
info!("{}: {}", notice.severity(), notice.message())
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(missing_debug_implementations, dead_code)]
|
||||
/// An asynchronous PostgreSQL connection. We use this to keep the connection alive / keep it pinned so that it doesn't
|
||||
/// get dropped.
|
||||
pub struct Connection {
|
||||
/// The underlying connection stream.
|
||||
connection: Pin<Box<tokio_postgres::Connection<Socket, NoTlsStream>>>,
|
||||
}
|
||||
|
||||
impl Connection {
|
||||
/// Creates a new connection.
|
||||
pub fn new(connection: tokio_postgres::Connection<Socket, NoTlsStream>) -> Self {
|
||||
Connection {
|
||||
connection: Box::pin(connection),
|
||||
}
|
||||
}
|
||||
|
||||
/// start the connection
|
||||
pub async fn start(self) {
|
||||
if let Err(e) = self.connection.await {
|
||||
error!("connection error: {}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(missing_debug_implementations, dead_code)]
|
||||
/// An asynchronous PostgreSQL client.
|
||||
pub struct Client {
|
||||
connection: JoinHandle<()>,
|
||||
client: tokio_postgres::Client,
|
||||
}
|
||||
|
||||
impl Drop for Client {
|
||||
fn drop(&mut self) {
|
||||
let _ = self.close_inner();
|
||||
}
|
||||
}
|
||||
|
||||
impl Client {
|
||||
pub(crate) fn new(client: tokio_postgres::Client, connection: Connection) -> Client {
|
||||
// The connection object performs the actual communication with the database,
|
||||
// so spawn it off to run on its own.
|
||||
let thread = tokio::spawn(async move {
|
||||
connection.start().await;
|
||||
});
|
||||
|
||||
Client {
|
||||
client,
|
||||
connection: thread,
|
||||
}
|
||||
}
|
||||
|
||||
/// A convenience function which parses a configuration string into a `Config` and then connects to the database.
|
||||
///
|
||||
/// See the documentation for [`Config`] for information about the connection syntax.
|
||||
///
|
||||
/// [`Config`]: config/struct.Config.html
|
||||
pub async fn connect(params: &str) -> Result<Client, Error> {
|
||||
debug!("Connecting to database with params: {}", params);
|
||||
params.parse::<Config>()?.connect().await
|
||||
}
|
||||
|
||||
/// Returns a new `Config` object which can be used to configure and connect to a database.
|
||||
pub fn configure() -> Config {
|
||||
Config::new()
|
||||
}
|
||||
|
||||
/// Executes a statement, returning the number of rows modified.
|
||||
///
|
||||
/// A statement may contain parameters, specified by `$n`, where `n` is the index of the parameter of the list
|
||||
/// provided, 1-indexed.
|
||||
///
|
||||
/// If the statement does not modify any rows (e.g. `SELECT`), 0 is returned.
|
||||
///
|
||||
/// The `query` argument can either be a `Statement`, or a raw query string. If the same statement will be
|
||||
/// repeatedly executed (perhaps with different query parameters), consider preparing the statement up front
|
||||
/// with the `prepare` method.
|
||||
///
|
||||
pub async fn execute<T>(
|
||||
&mut self,
|
||||
query: &T,
|
||||
params: &[&(dyn ToSql + Sync)],
|
||||
) -> Result<u64, Error>
|
||||
where
|
||||
T: ?Sized + ToStatement + Debug,
|
||||
{
|
||||
debug!("Executing query: {:?}", query);
|
||||
self.client.execute(query, params).await
|
||||
}
|
||||
|
||||
/// Executes a statement, returning the resulting rows.
|
||||
///
|
||||
/// A statement may contain parameters, specified by `$n`, where `n` is the index of the parameter of the list
|
||||
/// provided, 1-indexed.
|
||||
///
|
||||
/// The `query` argument can either be a `Statement`, or a raw query string. If the same statement will be
|
||||
/// repeatedly executed (perhaps with different query parameters), consider preparing the statement up front
|
||||
/// with the `prepare` method.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
pub async fn query<T>(
|
||||
&mut self,
|
||||
query: &T,
|
||||
params: &[&(dyn ToSql + Sync)],
|
||||
) -> Result<Vec<Row>, Error>
|
||||
where
|
||||
T: ?Sized + ToStatement + Debug,
|
||||
{
|
||||
debug!("Executing query: {:?}", query);
|
||||
self.client.query(query, params).await
|
||||
}
|
||||
|
||||
/// Determines if the client's connection has already closed.
|
||||
///
|
||||
/// If this returns `true`, the client is no longer usable.
|
||||
pub fn is_closed(&self) -> bool {
|
||||
self.client.is_closed()
|
||||
}
|
||||
|
||||
/// Closes the client's connection to the server.
|
||||
///
|
||||
/// This is equivalent to `Client`'s `Drop` implementation, except that it returns any error encountered to the
|
||||
/// caller.
|
||||
pub fn close(mut self) -> Result<(), Error> {
|
||||
self.close_inner()
|
||||
}
|
||||
|
||||
fn close_inner(&mut self) -> Result<(), Error> {
|
||||
self.client.__private_api_close();
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@@ -72,7 +72,7 @@ impl ToFlags for Visibility {
|
||||
impl<'a> From<&'a str> for Visibility {
|
||||
fn from(s: &'a str) -> Self {
|
||||
if s.contains("hashed/private") {
|
||||
// split on last occurence of '/'
|
||||
// split on last occurrence of '/'
|
||||
let (_, outlets) = s.split_at(s.rfind('/').unwrap());
|
||||
let outlets = outlets
|
||||
.trim_start_matches('/')
|
||||
|
||||
30
src/lib.rs
@@ -178,37 +178,37 @@ impl From<String> for Commitments {
|
||||
#[derive(Debug, Args, Deserialize, Serialize, Clone, PartialEq, PartialOrd, ToFlags)]
|
||||
pub struct RunArgs {
|
||||
/// The tolerance for error on model outputs
|
||||
#[arg(short = 'T', long, default_value = "0")]
|
||||
#[arg(short = 'T', long, default_value = "0", value_hint = clap::ValueHint::Other)]
|
||||
pub tolerance: Tolerance,
|
||||
/// The denominator in the fixed point representation used when quantizing inputs
|
||||
#[arg(short = 'S', long, default_value = "7", allow_hyphen_values = true)]
|
||||
#[arg(short = 'S', long, default_value = "7", value_hint = clap::ValueHint::Other)]
|
||||
pub input_scale: Scale,
|
||||
/// The denominator in the fixed point representation used when quantizing parameters
|
||||
#[arg(long, default_value = "7", allow_hyphen_values = true)]
|
||||
#[arg(long, default_value = "7", value_hint = clap::ValueHint::Other)]
|
||||
pub param_scale: Scale,
|
||||
/// if the scale is ever > scale_rebase_multiplier * input_scale then the scale is rebased to input_scale (this a more advanced parameter, use with caution)
|
||||
#[arg(long, default_value = "1")]
|
||||
#[arg(long, default_value = "1", value_hint = clap::ValueHint::Other)]
|
||||
pub scale_rebase_multiplier: u32,
|
||||
/// The min and max elements in the lookup table input column
|
||||
#[arg(short = 'B', long, value_parser = parse_key_val::<i64, i64>, default_value = "-32768->32768")]
|
||||
pub lookup_range: Range,
|
||||
/// The log_2 number of rows
|
||||
#[arg(short = 'K', long, default_value = "17")]
|
||||
#[arg(short = 'K', long, default_value = "17", value_hint = clap::ValueHint::Other)]
|
||||
pub logrows: u32,
|
||||
/// The log_2 number of rows
|
||||
#[arg(short = 'N', long, default_value = "2")]
|
||||
#[arg(short = 'N', long, default_value = "2", value_hint = clap::ValueHint::Other)]
|
||||
pub num_inner_cols: usize,
|
||||
/// Hand-written parser for graph variables, eg. batch_size=1
|
||||
#[arg(short = 'V', long, value_parser = parse_key_val::<String, usize>, default_value = "batch_size->1", value_delimiter = ',')]
|
||||
#[arg(short = 'V', long, value_parser = parse_key_val::<String, usize>, default_value = "batch_size->1", value_delimiter = ',', value_hint = clap::ValueHint::Other)]
|
||||
pub variables: Vec<(String, usize)>,
|
||||
/// Flags whether inputs are public, private, hashed, fixed, kzgcommit
|
||||
#[arg(long, default_value = "private")]
|
||||
/// Flags whether inputs are public, private, fixed, hashed, polycommit
|
||||
#[arg(long, default_value = "private", value_hint = clap::ValueHint::Other)]
|
||||
pub input_visibility: Visibility,
|
||||
/// Flags whether outputs are public, private, fixed, hashed, kzgcommit
|
||||
#[arg(long, default_value = "public")]
|
||||
/// Flags whether outputs are public, private, fixed, hashed, polycommit
|
||||
#[arg(long, default_value = "public", value_hint = clap::ValueHint::Other)]
|
||||
pub output_visibility: Visibility,
|
||||
/// Flags whether params are fixed, private, hashed, kzgcommit
|
||||
#[arg(long, default_value = "private")]
|
||||
/// Flags whether params are fixed, private, hashed, polycommit
|
||||
#[arg(long, default_value = "private", value_hint = clap::ValueHint::Other)]
|
||||
pub param_visibility: Visibility,
|
||||
#[arg(long, default_value = "false")]
|
||||
/// Rebase the scale using lookup table for division instead of using a range check
|
||||
@@ -217,10 +217,10 @@ pub struct RunArgs {
|
||||
#[arg(long, default_value = "false")]
|
||||
pub rebase_frac_zero_constants: bool,
|
||||
/// check mode (safe, unsafe, etc)
|
||||
#[arg(long, default_value = "unsafe")]
|
||||
#[arg(long, default_value = "unsafe", value_hint = clap::ValueHint::Other)]
|
||||
pub check_mode: CheckMode,
|
||||
/// commitment scheme
|
||||
#[arg(long, default_value = "kzg")]
|
||||
#[arg(long, default_value = "kzg", value_hint = clap::ValueHint::Other)]
|
||||
pub commitment: Option<Commitments>,
|
||||
}
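The flags above gain `value_hint = clap::ValueHint::Other`, presumably so generated shell completions stop offering filename completion for numeric and enum values. A standalone sketch of the attribute (struct and field names are illustrative):

    use clap::{Parser, ValueHint};

    #[derive(Parser)]
    struct DemoArgs {
        /// The log_2 number of rows
        #[arg(short = 'K', long, default_value = "17", value_hint = ValueHint::Other)]
        logrows: u32,
    }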
|
||||
|
||||
|
||||
@@ -21,7 +21,7 @@ pub enum EvmVerificationError {
|
||||
/// EVM verify errors
|
||||
#[error("evm deployment failed")]
|
||||
Deploy,
|
||||
/// Invalid Visibilit
|
||||
/// Invalid Visibility
|
||||
#[error("Invalid visibility")]
|
||||
InvalidVisibility,
|
||||
}
|
||||
|
||||
300
src/python.rs
@@ -29,7 +29,6 @@ use pyo3_log;
|
||||
use snark_verifier::util::arithmetic::PrimeField;
|
||||
use std::str::FromStr;
|
||||
use std::{fs::File, path::PathBuf};
|
||||
use tokio::runtime::Runtime;
|
||||
|
||||
type PyFelt = String;
|
||||
|
||||
@@ -467,7 +466,7 @@ fn buffer_to_felts(buffer: Vec<u8>) -> PyResult<Vec<String>> {
|
||||
/// Arguments
|
||||
/// -------
|
||||
/// message: list[str]
|
||||
/// List of field elements represnted as strings
|
||||
/// List of field elements represented as strings
|
||||
///
|
||||
/// Returns
|
||||
/// -------
|
||||
@@ -779,29 +778,27 @@ fn gen_srs(srs_path: PathBuf, logrows: usize) -> PyResult<()> {
|
||||
commitment=None,
|
||||
))]
|
||||
fn get_srs(
|
||||
py: Python,
|
||||
settings_path: Option<PathBuf>,
|
||||
logrows: Option<u32>,
|
||||
srs_path: Option<PathBuf>,
|
||||
commitment: Option<PyCommitments>,
|
||||
) -> PyResult<bool> {
|
||||
) -> PyResult<Bound<'_, PyAny>> {
|
||||
let commitment: Option<Commitments> = match commitment {
|
||||
Some(c) => Some(c.into()),
|
||||
None => None,
|
||||
};
|
||||
|
||||
Runtime::new()
|
||||
.unwrap()
|
||||
.block_on(crate::execute::get_srs_cmd(
|
||||
srs_path,
|
||||
settings_path,
|
||||
logrows,
|
||||
commitment,
|
||||
))
|
||||
.map_err(|e| {
|
||||
let err_str = format!("Failed to get srs: {}", e);
|
||||
PyRuntimeError::new_err(err_str)
|
||||
})?;
|
||||
Ok(true)
|
||||
pyo3_asyncio::tokio::future_into_py(py, async move {
|
||||
crate::execute::get_srs_cmd(srs_path, settings_path, logrows, commitment)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
let err_str = format!("Failed to get srs: {}", e);
|
||||
PyRuntimeError::new_err(err_str)
|
||||
})?;
|
||||
|
||||
Ok(true)
|
||||
})
|
||||
}
|
||||
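The binding above swaps the blocking `tokio::runtime::Runtime` for `pyo3_asyncio::tokio::future_into_py`, which hands the Rust future to Python as an awaitable; the same shape repeats for the other bindings below. A generic sketch of the pattern (the inner async call is hypothetical):

    #[pyfunction]
    fn do_work(py: Python) -> PyResult<Bound<'_, PyAny>> {
        pyo3_asyncio::tokio::future_into_py(py, async move {
            // some_async_task is a placeholder for any async call in the crate.
            some_async_task().await.map_err(|e| {
                PyRuntimeError::new_err(format!("failed: {}", e))
            })?;
            Ok(true)
        })
    }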
|
||||
/// Generates the circuit settings
|
||||
@@ -885,6 +882,7 @@ fn gen_settings(
|
||||
only_range_check_rebase = DEFAULT_ONLY_RANGE_CHECK_REBASE.parse().unwrap(),
|
||||
))]
|
||||
fn calibrate_settings(
|
||||
py: Python,
|
||||
data: PathBuf,
|
||||
model: PathBuf,
|
||||
settings: PathBuf,
|
||||
@@ -894,24 +892,27 @@ fn calibrate_settings(
|
||||
scale_rebase_multiplier: Vec<u32>,
|
||||
max_logrows: Option<u32>,
|
||||
only_range_check_rebase: bool,
|
||||
) -> Result<bool, PyErr> {
|
||||
crate::execute::calibrate(
|
||||
model,
|
||||
data,
|
||||
settings,
|
||||
target,
|
||||
lookup_safety_margin,
|
||||
scales,
|
||||
scale_rebase_multiplier,
|
||||
only_range_check_rebase,
|
||||
max_logrows,
|
||||
)
|
||||
.map_err(|e| {
|
||||
let err_str = format!("Failed to calibrate settings: {}", e);
|
||||
PyRuntimeError::new_err(err_str)
|
||||
})?;
|
||||
) -> PyResult<Bound<'_, PyAny>> {
|
||||
pyo3_asyncio::tokio::future_into_py(py, async move {
|
||||
crate::execute::calibrate(
|
||||
model,
|
||||
data,
|
||||
settings,
|
||||
target,
|
||||
lookup_safety_margin,
|
||||
scales,
|
||||
scale_rebase_multiplier,
|
||||
only_range_check_rebase,
|
||||
max_logrows,
|
||||
)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
let err_str = format!("Failed to calibrate settings: {}", e);
|
||||
PyRuntimeError::new_err(err_str)
|
||||
})?;
|
||||
|
||||
Ok(true)
|
||||
Ok(true)
|
||||
})
|
||||
}
|
||||
|
||||
/// Runs the forward pass operation to generate a witness
|
||||
@@ -946,18 +947,22 @@ fn calibrate_settings(
|
||||
srs_path=None,
|
||||
))]
|
||||
fn gen_witness(
|
||||
py: Python,
|
||||
data: PathBuf,
|
||||
model: PathBuf,
|
||||
output: Option<PathBuf>,
|
||||
vk_path: Option<PathBuf>,
|
||||
srs_path: Option<PathBuf>,
|
||||
) -> PyResult<PyObject> {
|
||||
let output =
|
||||
crate::execute::gen_witness(model, data, output, vk_path, srs_path).map_err(|e| {
|
||||
let err_str = format!("Failed to run generate witness: {}", e);
|
||||
PyRuntimeError::new_err(err_str)
|
||||
})?;
|
||||
Python::with_gil(|py| Ok(output.to_object(py)))
|
||||
) -> PyResult<Bound<'_, PyAny>> {
|
||||
pyo3_asyncio::tokio::future_into_py(py, async move {
|
||||
let output = crate::execute::gen_witness(model, data, output, vk_path, srs_path)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
let err_str = format!("Failed to generate witness: {}", e);
|
||||
PyRuntimeError::new_err(err_str)
|
||||
})?;
|
||||
Python::with_gil(|py| Ok(output.to_object(py)))
|
||||
})
|
||||
}
|
||||
|
||||
/// Mocks the prover
|
||||
@@ -1425,6 +1430,47 @@ fn verify_aggr(
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
/// Creates encoded evm calldata from a proof file
|
||||
///
|
||||
/// Arguments
|
||||
/// ---------
|
||||
/// proof: str
|
||||
/// Path to the proof file
|
||||
///
|
||||
/// calldata: str
|
||||
/// Path to the calldata file to save
|
||||
///
|
||||
/// addr_vk: str
|
||||
/// The address of the verification key contract (if the verifier key is to be rendered as a separate contract)
|
||||
///
|
||||
/// Returns
|
||||
/// -------
|
||||
/// vec[u8]
|
||||
/// The encoded calldata
|
||||
///
|
||||
#[pyfunction(signature = (
|
||||
proof=PathBuf::from(DEFAULT_PROOF),
|
||||
calldata=PathBuf::from(DEFAULT_CALLDATA),
|
||||
addr_vk=None,
|
||||
))]
|
||||
fn encode_evm_calldata<'a>(
|
||||
proof: PathBuf,
|
||||
calldata: PathBuf,
|
||||
addr_vk: Option<&'a str>,
|
||||
) -> Result<Vec<u8>, PyErr> {
|
||||
let addr_vk = if let Some(addr_vk) = addr_vk {
|
||||
let addr_vk = H160Flag::from(addr_vk);
|
||||
Some(addr_vk)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
crate::execute::encode_evm_calldata(proof, calldata, addr_vk).map_err(|e| {
|
||||
let err_str = format!("Failed to generate calldata: {}", e);
|
||||
PyRuntimeError::new_err(err_str)
|
||||
})
|
||||
}
|
||||
|
||||
/// Creates an EVM compatible verifier, you will need solc installed in your environment to run this
|
||||
///
|
||||
/// Arguments
|
||||
@@ -1460,27 +1506,31 @@ fn verify_aggr(
|
||||
render_vk_seperately = DEFAULT_RENDER_VK_SEPERATELY.parse().unwrap(),
|
||||
))]
|
||||
fn create_evm_verifier(
|
||||
py: Python,
|
||||
vk_path: PathBuf,
|
||||
settings_path: PathBuf,
|
||||
sol_code_path: PathBuf,
|
||||
abi_path: PathBuf,
|
||||
srs_path: Option<PathBuf>,
|
||||
render_vk_seperately: bool,
|
||||
) -> Result<bool, PyErr> {
|
||||
crate::execute::create_evm_verifier(
|
||||
vk_path,
|
||||
srs_path,
|
||||
settings_path,
|
||||
sol_code_path,
|
||||
abi_path,
|
||||
render_vk_seperately,
|
||||
)
|
||||
.map_err(|e| {
|
||||
let err_str = format!("Failed to run create_evm_verifier: {}", e);
|
||||
PyRuntimeError::new_err(err_str)
|
||||
})?;
|
||||
) -> PyResult<Bound<'_, PyAny>> {
|
||||
pyo3_asyncio::tokio::future_into_py(py, async move {
|
||||
crate::execute::create_evm_verifier(
|
||||
vk_path,
|
||||
srs_path,
|
||||
settings_path,
|
||||
sol_code_path,
|
||||
abi_path,
|
||||
render_vk_seperately,
|
||||
)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
let err_str = format!("Failed to run create_evm_verifier: {}", e);
|
||||
PyRuntimeError::new_err(err_str)
|
||||
})?;
|
||||
|
||||
Ok(true)
|
||||
Ok(true)
|
||||
})
|
||||
}
|
||||
|
||||
/// Creates an EVM compatible data attestation verifier, you will need solc installed in your environment to run this
|
||||
@@ -1510,18 +1560,27 @@ fn create_evm_verifier(
|
||||
abi_path=PathBuf::from(DEFAULT_VERIFIER_DA_ABI),
|
||||
))]
|
||||
fn create_evm_data_attestation(
|
||||
py: Python,
|
||||
input_data: PathBuf,
|
||||
settings_path: PathBuf,
|
||||
sol_code_path: PathBuf,
|
||||
abi_path: PathBuf,
|
||||
) -> Result<bool, PyErr> {
|
||||
crate::execute::create_evm_data_attestation(settings_path, sol_code_path, abi_path, input_data)
|
||||
) -> PyResult<Bound<'_, PyAny>> {
|
||||
pyo3_asyncio::tokio::future_into_py(py, async move {
|
||||
crate::execute::create_evm_data_attestation(
|
||||
settings_path,
|
||||
sol_code_path,
|
||||
abi_path,
|
||||
input_data,
|
||||
)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
let err_str = format!("Failed to run create_evm_data_attestation: {}", e);
|
||||
PyRuntimeError::new_err(err_str)
|
||||
})?;
|
||||
|
||||
Ok(true)
|
||||
Ok(true)
|
||||
})
|
||||
}
|
||||
|
||||
/// Setup test evm witness
@@ -1559,29 +1618,31 @@ fn create_evm_data_attestation(
    rpc_url=None,
))]
fn setup_test_evm_witness(
    py: Python,
    data_path: PathBuf,
    compiled_circuit_path: PathBuf,
    test_data: PathBuf,
    input_source: PyTestDataSource,
    output_source: PyTestDataSource,
    rpc_url: Option<String>,
) -> Result<bool, PyErr> {
    Runtime::new()
        .unwrap()
        .block_on(crate::execute::setup_test_evm_witness(
) -> PyResult<Bound<'_, PyAny>> {
    pyo3_asyncio::tokio::future_into_py(py, async move {
        crate::execute::setup_test_evm_witness(
            data_path,
            compiled_circuit_path,
            test_data,
            rpc_url,
            input_source.into(),
            output_source.into(),
        ))
        )
        .await
        .map_err(|e| {
            let err_str = format!("Failed to run setup_test_evm_witness: {}", e);
            PyRuntimeError::new_err(err_str)
        })?;

    Ok(true)
        Ok(true)
    })
}

/// deploys the solidity verifier
@@ -1593,28 +1654,30 @@ fn setup_test_evm_witness(
    private_key=None,
))]
fn deploy_evm(
    py: Python,
    addr_path: PathBuf,
    sol_code_path: PathBuf,
    rpc_url: Option<String>,
    optimizer_runs: usize,
    private_key: Option<String>,
) -> Result<bool, PyErr> {
    Runtime::new()
        .unwrap()
        .block_on(crate::execute::deploy_evm(
) -> PyResult<Bound<'_, PyAny>> {
    pyo3_asyncio::tokio::future_into_py(py, async move {
        crate::execute::deploy_evm(
            sol_code_path,
            rpc_url,
            addr_path,
            optimizer_runs,
            private_key,
            "Halo2Verifier",
        ))
        )
        .await
        .map_err(|e| {
            let err_str = format!("Failed to run deploy_evm: {}", e);
            PyRuntimeError::new_err(err_str)
        })?;

    Ok(true)
        Ok(true)
    })
}

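`deploy_evm` and the `deploy_vk_evm` variant below both delegate to `crate::execute::deploy_evm`, differing only in the contract name they pass. On the Python side the call is now awaited; a hedged sketch against a local node, where the RPC URL and paths are assumptions mirroring the anvil-based tests further down.

# Illustrative sketch only; the RPC URL and file paths are placeholder assumptions.
import asyncio
import ezkl

async def deploy(addr_path: str, sol_code_path: str) -> bool:
    return await ezkl.deploy_evm(
        addr_path,                        # file the deployed address is written to
        sol_code_path,                    # Solidity verifier from create_evm_verifier
        rpc_url="http://127.0.0.1:8545",  # e.g. a local anvil instance
    )

asyncio.run(deploy("addr.txt", "verifier.sol"))
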
/// deploys the solidity vk verifier
@@ -1626,28 +1689,30 @@ fn deploy_evm(
    private_key=None,
))]
fn deploy_vk_evm(
    py: Python,
    addr_path: PathBuf,
    sol_code_path: PathBuf,
    rpc_url: Option<String>,
    optimizer_runs: usize,
    private_key: Option<String>,
) -> Result<bool, PyErr> {
    Runtime::new()
        .unwrap()
        .block_on(crate::execute::deploy_evm(
) -> PyResult<Bound<'_, PyAny>> {
    pyo3_asyncio::tokio::future_into_py(py, async move {
        crate::execute::deploy_evm(
            sol_code_path,
            rpc_url,
            addr_path,
            optimizer_runs,
            private_key,
            "Halo2VerifyingKey",
        ))
        )
        .await
        .map_err(|e| {
            let err_str = format!("Failed to run deploy_evm: {}", e);
            PyRuntimeError::new_err(err_str)
        })?;

    Ok(true)
        Ok(true)
    })
}

/// deploys the solidity da verifier
@@ -1661,6 +1726,7 @@ fn deploy_vk_evm(
    private_key=None
))]
fn deploy_da_evm(
    py: Python,
    addr_path: PathBuf,
    input_data: PathBuf,
    settings_path: PathBuf,
@@ -1668,10 +1734,9 @@ fn deploy_da_evm(
    rpc_url: Option<String>,
    optimizer_runs: usize,
    private_key: Option<String>,
) -> Result<bool, PyErr> {
    Runtime::new()
        .unwrap()
        .block_on(crate::execute::deploy_da_evm(
) -> PyResult<Bound<'_, PyAny>> {
    pyo3_asyncio::tokio::future_into_py(py, async move {
        crate::execute::deploy_da_evm(
            input_data,
            settings_path,
            sol_code_path,
@@ -1679,13 +1744,15 @@ fn deploy_da_evm(
            addr_path,
            optimizer_runs,
            private_key,
        ))
        )
        .await
        .map_err(|e| {
            let err_str = format!("Failed to run deploy_da_evm: {}", e);
            PyRuntimeError::new_err(err_str)
        })?;

    Ok(true)
        Ok(true)
    })
}
/// verifies an evm compatible proof, you will need solc installed in your environment to run this
///
@@ -1716,13 +1783,14 @@ fn deploy_da_evm(
    addr_da = None,
    addr_vk = None,
))]
fn verify_evm(
    addr_verifier: &str,
fn verify_evm<'a>(
    py: Python<'a>,
    addr_verifier: &'a str,
    proof_path: PathBuf,
    rpc_url: Option<String>,
    addr_da: Option<&str>,
    addr_vk: Option<&str>,
) -> Result<bool, PyErr> {
    addr_da: Option<&'a str>,
    addr_vk: Option<&'a str>,
) -> PyResult<Bound<'a, PyAny>> {
    let addr_verifier = H160Flag::from(addr_verifier);
    let addr_da = if let Some(addr_da) = addr_da {
        let addr_da = H160Flag::from(addr_da);
@@ -1737,21 +1805,16 @@ fn verify_evm(
        None
    };

    Runtime::new()
        .unwrap()
        .block_on(crate::execute::verify_evm(
            proof_path,
            addr_verifier,
            rpc_url,
            addr_da,
            addr_vk,
        ))
        .map_err(|e| {
            let err_str = format!("Failed to run verify_evm: {}", e);
            PyRuntimeError::new_err(err_str)
        })?;
    pyo3_asyncio::tokio::future_into_py(py, async move {
        crate::execute::verify_evm(proof_path, addr_verifier, rpc_url, addr_da, addr_vk)
            .await
            .map_err(|e| {
                let err_str = format!("Failed to run verify_evm: {}", e);
                PyRuntimeError::new_err(err_str)
            })?;

    Ok(true)
        Ok(true)
    })
}

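`verify_evm` keeps its synchronous address parsing via `H160Flag::from` and only moves the on-chain call into the returned awaitable. A corresponding Python-side sketch, where the address, proof path, and RPC URL are placeholders.

# Illustrative sketch only; values are placeholders.
import asyncio
import ezkl

async def check(verifier_addr: str, proof_path: str) -> bool:
    return await ezkl.verify_evm(
        verifier_addr,                    # address of the deployed Halo2Verifier
        proof_path,                       # proof produced by ezkl.prove
        rpc_url="http://127.0.0.1:8545",  # e.g. a local anvil node
    )

# asyncio.run(check("0x...", "proof.pf"))  # fill in a real address and proof before running
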
/// Creates an evm compatible aggregate verifier, you will need solc installed in your environment to run this
@@ -1793,6 +1856,7 @@ fn verify_evm(
    render_vk_seperately = DEFAULT_RENDER_VK_SEPERATELY.parse().unwrap(),
))]
fn create_evm_verifier_aggr(
    py: Python,
    aggregation_settings: Vec<PathBuf>,
    vk_path: PathBuf,
    sol_code_path: PathBuf,
@@ -1800,21 +1864,25 @@ fn create_evm_verifier_aggr(
    logrows: u32,
    srs_path: Option<PathBuf>,
    render_vk_seperately: bool,
) -> Result<bool, PyErr> {
    crate::execute::create_evm_aggregate_verifier(
        vk_path,
        srs_path,
        sol_code_path,
        abi_path,
        aggregation_settings,
        logrows,
        render_vk_seperately,
    )
    .map_err(|e| {
        let err_str = format!("Failed to run create_evm_verifier_aggr: {}", e);
        PyRuntimeError::new_err(err_str)
    })?;
    Ok(true)
) -> PyResult<Bound<'_, PyAny>> {
    pyo3_asyncio::tokio::future_into_py(py, async move {
        crate::execute::create_evm_aggregate_verifier(
            vk_path,
            srs_path,
            sol_code_path,
            abi_path,
            aggregation_settings,
            logrows,
            render_vk_seperately,
        )
        .await
        .map_err(|e| {
            let err_str = format!("Failed to run create_evm_verifier_aggr: {}", e);
            PyRuntimeError::new_err(err_str)
        })?;

        Ok(true)
    })
}

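The aggregate variant takes a list of settings files plus the aggregation `logrows`; the updated test below awaits it with `[settings_path]` as the first argument. A sketch with placeholder paths; the position of `abi_path` and the keyword form of `logrows` are assumptions inferred from the binding above, not confirmed by this diff.

# Illustrative sketch only; paths and logrows are placeholder assumptions.
import asyncio
import ezkl

async def build_aggr_verifier():
    ok = await ezkl.create_evm_verifier_aggr(
        ["settings.json"],    # aggregation_settings: one entry per aggregated circuit
        "aggr_vk.key",        # vk_path for the aggregation circuit
        "aggr_verifier.sol",  # sol_code_path
        "aggr_verifier.abi",  # abi_path (assumed next positional argument)
        logrows=23,           # assumed value; use the logrows the aggregation was run with
    )
    assert ok

asyncio.run(build_aggr_verifier())
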
// Python Module
@@ -1861,6 +1929,6 @@ fn ezkl(_py: Python<'_>, m: &PyModule) -> PyResult<()> {
    m.add_function(wrap_pyfunction!(setup_test_evm_witness, m)?)?;
    m.add_function(wrap_pyfunction!(create_evm_verifier_aggr, m)?)?;
    m.add_function(wrap_pyfunction!(create_evm_data_attestation, m)?)?;

    m.add_function(wrap_pyfunction!(encode_evm_calldata, m)?)?;
    Ok(())
}

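Note that the newly registered `encode_evm_calldata` stays synchronous: it writes the encoded calldata to the given path and also returns it as bytes, which is how the updated pytest later in this diff uses it. A short sketch with placeholder paths:

# Illustrative sketch only; proof.pf and calldata.bytes are placeholder paths.
import ezkl

calldata = ezkl.encode_evm_calldata("proof.pf", "calldata.bytes")
assert len(calldata) > 0  # returns the encoded bytes and writes them to calldata.bytes
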
@@ -401,7 +401,7 @@ impl IntoI64 for () {
        0
    }
    fn from_i64(_: i64) -> Self {
        ()

    }
}

@@ -419,7 +419,7 @@ impl<F: PrimeField + IntoI64> IntoI64 for Value<F> {
        let mut res = vec![];
        self.map(|x| res.push(x.into_i64()));

        if res.len() == 0 {
        if res.is_empty() {
            0
        } else {
            res[0]

@@ -200,7 +200,7 @@ mod native_tests {
        "1l_tiny_div",
    ];

    const TESTS: [&str; 92] = [
    const TESTS: [&str; 93] = [
        "1l_mlp", //0
        "1l_slice",
        "1l_concat",
@@ -296,7 +296,8 @@ mod native_tests {
        "reducel1",
        "reducel2", // 89
        "1l_lppool",
        "lstm_large", // 91
        "lstm_large", // 91
        "lstm_medium", // 92
    ];

    const WASM_TESTS: [&str; 46] = [
@@ -535,7 +536,7 @@ mod native_tests {
            }
        });

    seq!(N in 0..=91 {
    seq!(N in 0..=92 {

        #(#[test_case(TESTS[N])])*
        #[ignore]
@@ -623,7 +624,7 @@ mod native_tests {
        #(#[test_case(TESTS[N])])*
        fn mock_large_batch_public_outputs_(test: &str) {
            // currently variable output rank is not supported in ONNX
            if test != "gather_nd" && test != "lstm_large" {
            if test != "gather_nd" && test != "lstm_large" && test != "lstm_medium" {
                crate::native_tests::init_binary();
                let test_dir = TempDir::new(test).unwrap();
                let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
@@ -1813,6 +1814,18 @@ mod native_tests {
        let settings_arg = format!("{}/{}/settings.json", test_dir, example_name);
        let private_key = format!("--private-key={}", *ANVIL_DEFAULT_PRIVATE_KEY);

        // create encoded calldata
        let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
            .args([
                "encode-evm-calldata",
                "--proof-path",
                &format!("{}/{}/aggr.pf", test_dir, example_name),
            ])
            .status()
            .expect("failed to execute process");

        assert!(status.success());

        let base_args = vec![
            "create-evm-verifier-aggr",
            "--vk-path",
@@ -2035,6 +2048,18 @@ mod native_tests {
        let addr_path_arg = format!("--addr-path={}/{}/addr.txt", test_dir, example_name);
        let settings_arg = format!("--settings-path={}", settings_path);

        // create encoded calldata
        let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
            .args([
                "encode-evm-calldata",
                "--proof-path",
                &format!("{}/{}/proof.pf", test_dir, example_name),
            ])
            .status()
            .expect("failed to execute process");

        assert!(status.success());

        // create the verifier
        let mut args = vec!["create-evm-verifier", "--vk-path", &vk_arg, &settings_arg];

@@ -2204,6 +2229,19 @@ mod native_tests {

        let deployed_addr_arg_vk = format!("--addr-vk={}", addr_vk);

        // create encoded calldata
        let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
            .args([
                "encode-evm-calldata",
                "--proof-path",
                &format!("{}/{}/proof.pf", test_dir, example_name),
                &deployed_addr_arg_vk,
            ])
            .status()
            .expect("failed to execute process");

        assert!(status.success());

        // now verify the proof
        let pf_arg = format!("{}/{}/proof.pf", test_dir, example_name);
        let mut args = vec![
@@ -2375,6 +2413,18 @@ mod native_tests {

        let settings_arg = format!("--settings-path={}", settings_path);

        // create encoded calldata
        let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
            .args([
                "encode-evm-calldata",
                "--proof-path",
                &format!("{}/{}/proof.pf", test_dir, example_name),
            ])
            .status()
            .expect("failed to execute process");

        assert!(status.success());

        // create the verifier
        let mut args = vec!["create-evm-verifier", "--vk-path", &vk_arg, &settings_arg];


@@ -123,7 +123,7 @@ mod py_tests {
        }
    }

    const TESTS: [&str; 32] = [
    const TESTS: [&str; 33] = [
        "proof_splitting.ipynb", // 0
        "variance.ipynb",
        "mnist_gan.ipynb",
@@ -157,6 +157,7 @@ mod py_tests {
        "generalized_inverse.ipynb",
        "mnist_classifier.ipynb", // 30
        "world_rotation.ipynb",
        "logistic_regression.ipynb",
    ];

    macro_rules! test_func {
@@ -169,7 +170,7 @@ mod py_tests {
        use super::*;


        seq!(N in 0..=31 {
        seq!(N in 0..=32 {

            #(#[test_case(TESTS[N])])*
            fn run_notebook_(test: &str) {
@@ -109,7 +109,7 @@ def test_gen_srs():



def test_calibrate_over_user_range():
async def test_calibrate_over_user_range():
    data_path = os.path.join(
        examples_path,
        'onnx',
@@ -136,14 +136,14 @@ def test_calibrate_over_user_range():
        model_path, output_path, py_run_args=run_args)
    assert res == True

    res = ezkl.calibrate_settings(
    res = await ezkl.calibrate_settings(
        data_path, model_path, output_path, "resources", 1, [0, 1, 2])
    assert res == True
    assert os.path.isfile(output_path)



def test_calibrate():
async def test_calibrate():
    data_path = os.path.join(
        examples_path,
        'onnx',
@@ -170,7 +170,7 @@ def test_calibrate():
        model_path, output_path, py_run_args=run_args)
    assert res == True

    res = ezkl.calibrate_settings(
    res = await ezkl.calibrate_settings(
        data_path, model_path, output_path, "resources")
    assert res == True
    assert os.path.isfile(output_path)
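The converted tests are plain `async def` functions; presumably the suite runs them under an async-aware pytest plugin such as pytest-asyncio (an assumption, since the test configuration is not part of this diff). A self-contained sketch of the pattern:

# Illustrative sketch; the pytest-asyncio marker and file names are assumptions.
import pytest
import ezkl

@pytest.mark.asyncio
async def test_calibrate_roundtrip(tmp_path):
    settings = str(tmp_path / "settings.json")
    assert ezkl.gen_settings("network.onnx", settings)        # still synchronous
    assert await ezkl.calibrate_settings(                     # now awaited
        "input.json", "network.onnx", settings, "resources")
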
@@ -198,7 +198,7 @@ def test_model_compile():
    assert res == True


def test_forward():
async def test_forward():
    """
    Test for vanilla forward pass
    """
@@ -217,7 +217,7 @@ def test_forward():
        'witness.json'
    )

    res = ezkl.gen_witness(data_path, model_path, output_path)
    res = await ezkl.gen_witness(data_path, model_path, output_path)

    with open(output_path, "r") as f:
        data = json.load(f)
@@ -229,12 +229,12 @@ def test_forward():
    assert data["processed_outputs"]["poseidon_hash"] == res["processed_outputs"]["poseidon_hash"]


def test_get_srs():
async def test_get_srs():
    """
    Test for get_srs
    """
    settings_path = os.path.join(folder_path, 'settings.json')
    res = ezkl.get_srs(settings_path, srs_path=srs_path)
    res = await ezkl.get_srs(settings_path, srs_path=srs_path)

    assert res == True

@@ -242,7 +242,7 @@ def test_get_srs():

    another_srs_path = os.path.join(folder_path, "kzg_test_k8.params")

    res = ezkl.get_srs(logrows=8, srs_path=another_srs_path, commitment=ezkl.PyCommitments.KZG)
    res = await ezkl.get_srs(logrows=8, srs_path=another_srs_path, commitment=ezkl.PyCommitments.KZG)

    assert os.path.isfile(another_srs_path)

@@ -393,9 +393,7 @@ def test_prove_evm():
    assert os.path.isfile(proof_path)




def test_create_evm_verifier():
async def test_create_evm_verifier():
    """
    Create EVM verifier with solidity code
    In order to run this test you will need to install solc in your environment
@@ -404,8 +402,17 @@ def test_create_evm_verifier():
    settings_path = os.path.join(folder_path, 'settings.json')
    sol_code_path = os.path.join(folder_path, 'test.sol')
    abi_path = os.path.join(folder_path, 'test.abi')
    proof_path = os.path.join(folder_path, 'test_evm.pf')
    calldata_path = os.path.join(folder_path, 'calldata.bytes')

    res = ezkl.create_evm_verifier(
    # res is now a vector of bytes
    res = ezkl.encode_evm_calldata(proof_path, calldata_path)

    assert os.path.isfile(calldata_path)
    assert len(res) > 0


    res = await ezkl.create_evm_verifier(
        vk_path,
        settings_path,
        sol_code_path,
@@ -417,7 +424,7 @@ def test_create_evm_verifier():
    assert os.path.isfile(sol_code_path)


def test_deploy_evm():
async def test_deploy_evm():
    """
    Test deployment of the verifier smart contract
    In order to run this you will need to install solc in your environment
@@ -428,7 +435,7 @@ def test_deploy_evm():
    # TODO: without optimization there will be out of gas errors
    # sol_code_path = os.path.join(folder_path, 'test.sol')

    res = ezkl.deploy_evm(
    res = await ezkl.deploy_evm(
        addr_path,
        sol_code_path,
        rpc_url=anvil_url,
@@ -437,7 +444,7 @@ def test_deploy_evm():
    assert res == True


def test_deploy_evm_with_private_key():
async def test_deploy_evm_with_private_key():
    """
    Test deployment of the verifier smart contract using a custom private key
    In order to run this you will need to install solc in your environment
@@ -450,7 +457,7 @@ def test_deploy_evm_with_private_key():

    anvil_default_private_key = "ac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80"

    res = ezkl.deploy_evm(
    res = await ezkl.deploy_evm(
        addr_path,
        sol_code_path,
        rpc_url=anvil_url,
@@ -462,7 +469,7 @@ def test_deploy_evm_with_private_key():
    custom_zero_balance_private_key = "ff9dfe0b6d31e93ba13460a4d6f63b5e31dd9532b1304f1cbccea7092a042aa4"

    with pytest.raises(RuntimeError, match="Failed to run deploy_evm"):
        res = ezkl.deploy_evm(
        res = await ezkl.deploy_evm(
            addr_path,
            sol_code_path,
            rpc_url=anvil_url,
@@ -470,7 +477,7 @@ def test_deploy_evm_with_private_key():
        )


def test_verify_evm():
async def test_verify_evm():
    """
    Verifies an evm proof
    In order to run this you will need to install solc in your environment
@@ -486,7 +493,7 @@ def test_verify_evm():
    # TODO: without optimization there will be out of gas errors
    # sol_code_path = os.path.join(folder_path, 'test.sol')

    res = ezkl.verify_evm(
    res = await ezkl.verify_evm(
        addr,
        proof_path,
        rpc_url=anvil_url,
@@ -497,7 +504,7 @@ def test_verify_evm():
    assert res == True


def test_aggregate_and_verify_aggr():
async def test_aggregate_and_verify_aggr():
    data_path = os.path.join(
        examples_path,
        'onnx',
@@ -526,7 +533,7 @@ def test_aggregate_and_verify_aggr():
    res = ezkl.gen_settings(model_path, settings_path)
    assert res == True

    res = ezkl.calibrate_settings(
    res = await ezkl.calibrate_settings(
        data_path, model_path, settings_path, "resources")
    assert res == True
    assert os.path.isfile(settings_path)
@@ -548,7 +555,7 @@ def test_aggregate_and_verify_aggr():
        '1l_relu_aggr_witness.json'
    )

    res = ezkl.gen_witness(data_path, compiled_model_path,
    res = await ezkl.gen_witness(data_path, compiled_model_path,
                           output_path)

    ezkl.prove(
@@ -603,7 +610,7 @@ def test_aggregate_and_verify_aggr():
    assert res == True


def test_evm_aggregate_and_verify_aggr():
async def test_evm_aggregate_and_verify_aggr():
    data_path = os.path.join(
        examples_path,
        'onnx',
@@ -628,7 +635,7 @@ def test_evm_aggregate_and_verify_aggr():
        settings_path,
    )

    ezkl.calibrate_settings(
    await ezkl.calibrate_settings(
        data_path,
        model_path,
        settings_path,
@@ -657,7 +664,7 @@ def test_evm_aggregate_and_verify_aggr():
        '1l_relu_aggr_evm_witness.json'
    )

    res = ezkl.gen_witness(data_path, compiled_model_path,
    res = await ezkl.gen_witness(data_path, compiled_model_path,
                           output_path)

    ezkl.prove(
@@ -698,7 +705,7 @@ def test_evm_aggregate_and_verify_aggr():
    sol_code_path = os.path.join(folder_path, 'aggr_evm_1l_relu.sol')
    abi_path = os.path.join(folder_path, 'aggr_evm_1l_relu.abi')

    res = ezkl.create_evm_verifier_aggr(
    res = await ezkl.create_evm_verifier_aggr(
        [settings_path],
        aggregate_vk_path,
        sol_code_path,
@@ -712,7 +719,7 @@ def test_evm_aggregate_and_verify_aggr():

    addr_path = os.path.join(folder_path, 'address_aggr.json')

    res = ezkl.deploy_evm(
    res = await ezkl.deploy_evm(
        addr_path,
        sol_code_path,
        rpc_url=anvil_url,
@@ -730,7 +737,7 @@ def test_evm_aggregate_and_verify_aggr():
    # with open(addr_path, 'r') as file:
    #     addr_aggr = file.read().rstrip()

    # res = ezkl.verify_evm(
    # res = await ezkl.verify_evm(
    #     aggregate_proof_path,
    #     addr_aggr,
    #     rpc_url=anvil_url,
@@ -769,7 +776,7 @@ def get_examples():


@pytest.mark.parametrize("model_file, input_file", get_examples())
def test_all_examples(model_file, input_file):
async def test_all_examples(model_file, input_file):
    """Tests all examples in the examples folder"""
    # gen settings
    settings_path = os.path.join(folder_path, "settings.json")
@@ -783,7 +790,7 @@ def test_all_examples(model_file, input_file):
    res = ezkl.gen_settings(model_file, settings_path)
    assert res

    res = ezkl.calibrate_settings(
    res = await ezkl.calibrate_settings(
        input_file, model_file, settings_path, "resources")
    assert res

@@ -814,7 +821,7 @@ def test_all_examples(model_file, input_file):
    assert os.path.isfile(pk_path)

    print("Generating witness for example: ", model_file)
    res = ezkl.gen_witness(input_file, compiled_model_path, witness_path)
    res = await ezkl.gen_witness(input_file, compiled_model_path, witness_path)
    assert os.path.isfile(witness_path)

    print("Proving example: ", model_file)