Compare commits

...

15 Commits

Author SHA1 Message Date
github-actions[bot]  a0c17e9863  ci: update version string in docs  2024-06-07 14:19:56 +00:00
Ethan Cemer  685487c853  feat: DA swap proof commitments (#807)  2024-06-07 10:19:38 -04:00
dante  33d41c7e49  fix: calldata cmd  2024-05-31 13:19:33 -04:00
dante  e04c959662  fix: sequential dependence between JS releases (#806)  2024-05-31 08:34:51 -04:00
dante  27b1f2e9d4  fix: move banner to command loop  2024-05-30 17:01:40 -04:00
dante  4a172877af  feat: add autocompletion tooling to cli (#805)  2024-05-30 16:44:24 -04:00
dante  5a8498894d  feat: create encode-evm-calldata in cli and python (#803)  2024-05-29 21:03:16 -04:00
Snoppy  095c0ca5b4  chore: fix typos (#802)  2024-05-29 11:32:11 -04:00
dante  3fa482c9ef  fix: remove error messages in calibration loop (confusing) (#800)  2024-05-22 09:32:52 +01:00
dante  6be3b1d663  chore: update alloy (#798)  2024-05-16 12:10:27 +09:00
dante  d5a1d1439c  fix: elif syntax in install script  2024-05-14 03:31:26 +09:00
Ethan Cemer  ff8fd01f86  fix: patch invalid ${{ github.ref_name#v }} syntax on verify release script (#791)  2024-05-14 02:25:12 +09:00
dante  e9020f942e  fix: python build matrix (#797)  2024-05-13 13:29:51 +09:00
dante  e7f54cb6ac  fix: windows build (#796)  2024-05-13 12:47:36 +09:00
dante  ed65e8c090  Revert "fix: revert maturin version (#795)" (This reverts commit d9f2adad99.)  2024-05-13 12:41:02 +09:00
28 changed files with 1334 additions and 341 deletions

View File

@@ -178,3 +178,58 @@ jobs:
npm publish
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
in-browser-evm-ver-publish:
name: publish-in-browser-evm-verifier-package
needs: [publish-wasm-bindings]
runs-on: ubuntu-latest
if: startsWith(github.ref, 'refs/tags/')
steps:
- uses: actions/checkout@v4
- name: Update version in package.json
shell: bash
env:
RELEASE_TAG: ${{ github.ref_name }}
run: |
sed -i "s|\"version\": \".*\"|\"version\": \"${{ github.ref_name }}\"|" in-browser-evm-verifier/package.json
- name: Prepare tag and fetch package integrity
run: |
CLEANED_TAG=${{ github.ref_name }} # Get the tag from ref_name
CLEANED_TAG="${CLEANED_TAG#v}" # Remove leading 'v'
echo "CLEANED_TAG=${CLEANED_TAG}" >> $GITHUB_ENV # Set it as an environment variable for later steps
ENGINE_INTEGRITY=$(npm view @ezkljs/engine@$CLEANED_TAG dist.integrity)
echo "ENGINE_INTEGRITY=$ENGINE_INTEGRITY" >> $GITHUB_ENV
- name: Update @ezkljs/engine version in package.json
shell: bash
env:
RELEASE_TAG: ${{ github.ref_name }}
run: |
sed -i "s|\"@ezkljs/engine\": \".*\"|\"@ezkljs/engine\": \"$CLEANED_TAG\"|" in-browser-evm-verifier/package.json
- name: Update the engine import in in-browser-evm-verifier to use @ezkljs/engine package instead of the local one;
run: |
sed -i "s|import { encodeVerifierCalldata } from '../nodejs/ezkl';|import { encodeVerifierCalldata } from '@ezkljs/engine';|" in-browser-evm-verifier/src/index.ts
- name: Update pnpm-lock.yaml versions and integrity
run: |
awk -v integrity="$ENGINE_INTEGRITY" -v tag="$CLEANED_TAG" '
NR==30{$0=" specifier: \"" tag "\""}
NR==31{$0=" version: \"" tag "\""}
NR==400{$0=" /@ezkljs/engine@" tag ":"}
NR==401{$0=" resolution: {integrity: \"" integrity "\"}"} 1' in-browser-evm-verifier/pnpm-lock.yaml > temp.yaml && mv temp.yaml in-browser-evm-verifier/pnpm-lock.yaml
- name: Use pnpm 8
uses: pnpm/action-setup@v2
with:
version: 8
- name: Set up Node.js
uses: actions/setup-node@v3
with:
node-version: "18.12.1"
registry-url: "https://registry.npmjs.org"
- name: Publish to npm
run: |
cd in-browser-evm-verifier
pnpm install --frozen-lockfile
pnpm run build
pnpm publish --no-git-checks
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

View File

@@ -50,6 +50,7 @@ jobs:
target: ${{ matrix.target }}
args: --release --out dist --features python-bindings
- name: Install built wheel
if: matrix.target == 'universal2-apple-darwin'
run: |
pip install ezkl --no-index --find-links dist --force-reinstall
python -c "import ezkl"
@@ -110,7 +111,7 @@ jobs:
if: startsWith(github.ref, 'refs/tags/')
strategy:
matrix:
target: [x86_64, i686]
target: [x86_64]
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4

View File

@@ -341,6 +341,10 @@ jobs:
run: cargo nextest run --release --verbose tests_evm::kzg_evm_on_chain_output_prove_and_verify --test-threads 1
- name: KZG prove and verify tests (EVM + on chain inputs & outputs)
run: cargo nextest run --release --verbose tests_evm::kzg_evm_on_chain_input_output_prove_and_verify --test-threads 1
- name: KZG prove and verify tests (EVM + on chain inputs & kzg outputs + params)
run: cargo nextest run --release --verbose tests_evm::kzg_evm_on_chain_input_kzg_output_kzg_params_prove_and_verify --test-threads 1
- name: KZG prove and verify tests (EVM + on chain outputs & kzg inputs + params)
run: cargo nextest run --release --verbose tests_evm::kzg_evm_on_chain_output_kzg_input_kzg_params_prove_and_verify --test-threads 1
- name: KZG prove and verify tests (EVM + on chain inputs & outputs hashes)
run: cargo nextest run --release --verbose tests_evm::kzg_evm_on_chain_input_output_hashed_prove_and_verify --test-threads 1
- name: KZG prove and verify tests (EVM)

View File

@@ -1,65 +0,0 @@
name: Build and Publish EZKL npm packages (wasm bindings and in-browser evm verifier)
on:
workflow_dispatch:
inputs:
tag:
description: "The tag to release"
required: true
push:
tags:
- "*"
defaults:
run:
working-directory: .
jobs:
in-browser-evm-ver-publish:
name: publish-in-browser-evm-verifier-package
runs-on: ubuntu-latest
if: startsWith(github.ref, 'refs/tags/')
steps:
- uses: actions/checkout@v4
- name: Update version in package.json
shell: bash
env:
RELEASE_TAG: ${{ github.ref_name }}
run: |
sed -i "s|\"version\": \".*\"|\"version\": \"${{ github.ref_name }}\"|" in-browser-evm-verifier/package.json
- name: Update @ezkljs/engine version in package.json
shell: bash
env:
RELEASE_TAG: ${{ github.ref_name }}
run: |
sed -i "s|\"@ezkljs/engine\": \".*\"|\"@ezkljs/engine\": \"${{ github.ref_name#v }}\"|" in-browser-evm-verifier/package.json
- name: Update the engine import in in-browser-evm-verifier to use @ezkljs/engine package instead of the local one;
run: |
sed -i "s|import { encodeVerifierCalldata } from '../nodejs/ezkl';|import { encodeVerifierCalldata } from '@ezkljs/engine';|" in-browser-evm-verifier/src/index.ts
- name: Fetch integrity
run: |
ENGINE_INTEGRITY=$(npm view @ezkljs/engine@${{ github.ref_name#v }} dist.integrity)
echo "ENGINE_INTEGRITY=$ENGINE_INTEGRITY" >> $GITHUB_ENV
- name: Update pnpm-lock.yaml versions and integrity
run: |
awk -v integrity="$ENGINE_INTEGRITY" -v tag="${{ github.ref_name#v }}" '
NR==30{$0=" specifier: \"" tag "\""}
NR==31{$0=" version: \"" tag "\""}
NR==400{$0=" /@ezkljs/engine@" tag ":"}
NR==401{$0=" resolution: {integrity: \"" integrity "\"}"} 1' in-browser-evm-verifier/pnpm-lock.yaml > temp.yaml && mv temp.yaml in-browser-evm-verifier/pnpm-lock.yaml
- name: Use pnpm 8
uses: pnpm/action-setup@v2
with:
version: 8
- name: Set up Node.js
uses: actions/setup-node@v3
with:
node-version: "18.12.1"
registry-url: "https://registry.npmjs.org"
- name: Publish to npm
run: |
cd in-browser-evm-verifier
pnpm install --frozen-lockfile
pnpm run build
pnpm publish
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

Cargo.lock (generated)
View File

@@ -58,7 +58,7 @@ checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5"
[[package]]
name = "alloy"
version = "0.1.0"
source = "git+https://github.com/alloy-rs/alloy#e60d64cff86d995657f0acea85c2bbd52f9bd810"
source = "git+https://github.com/alloy-rs/alloy?rev=5fbf57bac99edef9d8475190109a7ea9fb7e5e83#5fbf57bac99edef9d8475190109a7ea9fb7e5e83"
dependencies = [
"alloy-consensus",
"alloy-contract",
@@ -80,7 +80,7 @@ dependencies = [
[[package]]
name = "alloy-consensus"
version = "0.1.0"
source = "git+https://github.com/alloy-rs/alloy#e60d64cff86d995657f0acea85c2bbd52f9bd810"
source = "git+https://github.com/alloy-rs/alloy?rev=5fbf57bac99edef9d8475190109a7ea9fb7e5e83#5fbf57bac99edef9d8475190109a7ea9fb7e5e83"
dependencies = [
"alloy-eips",
"alloy-primitives 0.7.2",
@@ -93,7 +93,7 @@ dependencies = [
[[package]]
name = "alloy-contract"
version = "0.1.0"
source = "git+https://github.com/alloy-rs/alloy#e60d64cff86d995657f0acea85c2bbd52f9bd810"
source = "git+https://github.com/alloy-rs/alloy?rev=5fbf57bac99edef9d8475190109a7ea9fb7e5e83#5fbf57bac99edef9d8475190109a7ea9fb7e5e83"
dependencies = [
"alloy-dyn-abi",
"alloy-json-abi",
@@ -140,7 +140,7 @@ dependencies = [
[[package]]
name = "alloy-eips"
version = "0.1.0"
source = "git+https://github.com/alloy-rs/alloy#e60d64cff86d995657f0acea85c2bbd52f9bd810"
source = "git+https://github.com/alloy-rs/alloy?rev=5fbf57bac99edef9d8475190109a7ea9fb7e5e83#5fbf57bac99edef9d8475190109a7ea9fb7e5e83"
dependencies = [
"alloy-primitives 0.7.2",
"alloy-rlp",
@@ -154,7 +154,7 @@ dependencies = [
[[package]]
name = "alloy-genesis"
version = "0.1.0"
source = "git+https://github.com/alloy-rs/alloy#e60d64cff86d995657f0acea85c2bbd52f9bd810"
source = "git+https://github.com/alloy-rs/alloy?rev=5fbf57bac99edef9d8475190109a7ea9fb7e5e83#5fbf57bac99edef9d8475190109a7ea9fb7e5e83"
dependencies = [
"alloy-primitives 0.7.2",
"alloy-serde",
@@ -177,7 +177,7 @@ dependencies = [
[[package]]
name = "alloy-json-rpc"
version = "0.1.0"
source = "git+https://github.com/alloy-rs/alloy#e60d64cff86d995657f0acea85c2bbd52f9bd810"
source = "git+https://github.com/alloy-rs/alloy?rev=5fbf57bac99edef9d8475190109a7ea9fb7e5e83#5fbf57bac99edef9d8475190109a7ea9fb7e5e83"
dependencies = [
"alloy-primitives 0.7.2",
"serde",
@@ -189,7 +189,7 @@ dependencies = [
[[package]]
name = "alloy-network"
version = "0.1.0"
source = "git+https://github.com/alloy-rs/alloy#e60d64cff86d995657f0acea85c2bbd52f9bd810"
source = "git+https://github.com/alloy-rs/alloy?rev=5fbf57bac99edef9d8475190109a7ea9fb7e5e83#5fbf57bac99edef9d8475190109a7ea9fb7e5e83"
dependencies = [
"alloy-consensus",
"alloy-eips",
@@ -206,7 +206,7 @@ dependencies = [
[[package]]
name = "alloy-node-bindings"
version = "0.1.0"
source = "git+https://github.com/alloy-rs/alloy#e60d64cff86d995657f0acea85c2bbd52f9bd810"
source = "git+https://github.com/alloy-rs/alloy?rev=5fbf57bac99edef9d8475190109a7ea9fb7e5e83#5fbf57bac99edef9d8475190109a7ea9fb7e5e83"
dependencies = [
"alloy-genesis",
"alloy-primitives 0.7.2",
@@ -261,7 +261,7 @@ dependencies = [
[[package]]
name = "alloy-provider"
version = "0.1.0"
source = "git+https://github.com/alloy-rs/alloy#e60d64cff86d995657f0acea85c2bbd52f9bd810"
source = "git+https://github.com/alloy-rs/alloy?rev=5fbf57bac99edef9d8475190109a7ea9fb7e5e83#5fbf57bac99edef9d8475190109a7ea9fb7e5e83"
dependencies = [
"alloy-eips",
"alloy-json-rpc",
@@ -281,6 +281,7 @@ dependencies = [
"futures",
"futures-utils-wasm",
"lru",
"pin-project",
"reqwest",
"serde_json",
"tokio",
@@ -313,7 +314,7 @@ dependencies = [
[[package]]
name = "alloy-rpc-client"
version = "0.1.0"
source = "git+https://github.com/alloy-rs/alloy#e60d64cff86d995657f0acea85c2bbd52f9bd810"
source = "git+https://github.com/alloy-rs/alloy?rev=5fbf57bac99edef9d8475190109a7ea9fb7e5e83#5fbf57bac99edef9d8475190109a7ea9fb7e5e83"
dependencies = [
"alloy-json-rpc",
"alloy-transport",
@@ -333,7 +334,7 @@ dependencies = [
[[package]]
name = "alloy-rpc-types"
version = "0.1.0"
source = "git+https://github.com/alloy-rs/alloy#e60d64cff86d995657f0acea85c2bbd52f9bd810"
source = "git+https://github.com/alloy-rs/alloy?rev=5fbf57bac99edef9d8475190109a7ea9fb7e5e83#5fbf57bac99edef9d8475190109a7ea9fb7e5e83"
dependencies = [
"alloy-consensus",
"alloy-eips",
@@ -351,7 +352,7 @@ dependencies = [
[[package]]
name = "alloy-rpc-types-trace"
version = "0.1.0"
source = "git+https://github.com/alloy-rs/alloy#e60d64cff86d995657f0acea85c2bbd52f9bd810"
source = "git+https://github.com/alloy-rs/alloy?rev=5fbf57bac99edef9d8475190109a7ea9fb7e5e83#5fbf57bac99edef9d8475190109a7ea9fb7e5e83"
dependencies = [
"alloy-primitives 0.7.2",
"alloy-rpc-types",
@@ -363,7 +364,7 @@ dependencies = [
[[package]]
name = "alloy-serde"
version = "0.1.0"
source = "git+https://github.com/alloy-rs/alloy#e60d64cff86d995657f0acea85c2bbd52f9bd810"
source = "git+https://github.com/alloy-rs/alloy?rev=5fbf57bac99edef9d8475190109a7ea9fb7e5e83#5fbf57bac99edef9d8475190109a7ea9fb7e5e83"
dependencies = [
"alloy-primitives 0.7.2",
"serde",
@@ -373,7 +374,7 @@ dependencies = [
[[package]]
name = "alloy-signer"
version = "0.1.0"
source = "git+https://github.com/alloy-rs/alloy#e60d64cff86d995657f0acea85c2bbd52f9bd810"
source = "git+https://github.com/alloy-rs/alloy?rev=5fbf57bac99edef9d8475190109a7ea9fb7e5e83#5fbf57bac99edef9d8475190109a7ea9fb7e5e83"
dependencies = [
"alloy-primitives 0.7.2",
"async-trait",
@@ -386,7 +387,7 @@ dependencies = [
[[package]]
name = "alloy-signer-wallet"
version = "0.1.0"
source = "git+https://github.com/alloy-rs/alloy#e60d64cff86d995657f0acea85c2bbd52f9bd810"
source = "git+https://github.com/alloy-rs/alloy?rev=5fbf57bac99edef9d8475190109a7ea9fb7e5e83#5fbf57bac99edef9d8475190109a7ea9fb7e5e83"
dependencies = [
"alloy-consensus",
"alloy-network",
@@ -459,7 +460,7 @@ dependencies = [
[[package]]
name = "alloy-transport"
version = "0.1.0"
source = "git+https://github.com/alloy-rs/alloy#e60d64cff86d995657f0acea85c2bbd52f9bd810"
source = "git+https://github.com/alloy-rs/alloy?rev=5fbf57bac99edef9d8475190109a7ea9fb7e5e83#5fbf57bac99edef9d8475190109a7ea9fb7e5e83"
dependencies = [
"alloy-json-rpc",
"base64 0.22.1",
@@ -477,7 +478,7 @@ dependencies = [
[[package]]
name = "alloy-transport-http"
version = "0.1.0"
source = "git+https://github.com/alloy-rs/alloy#e60d64cff86d995657f0acea85c2bbd52f9bd810"
source = "git+https://github.com/alloy-rs/alloy?rev=5fbf57bac99edef9d8475190109a7ea9fb7e5e83#5fbf57bac99edef9d8475190109a7ea9fb7e5e83"
dependencies = [
"alloy-json-rpc",
"alloy-transport",
@@ -1207,6 +1208,15 @@ dependencies = [
"strsim",
]
[[package]]
name = "clap_complete"
version = "4.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd79504325bf38b10165b02e89b4347300f855f273c4cb30c4a3209e6583275e"
dependencies = [
"clap 4.5.3",
]
[[package]]
name = "clap_derive"
version = "4.5.3"
@@ -1831,6 +1841,7 @@ dependencies = [
"bincode",
"chrono",
"clap 4.5.3",
"clap_complete",
"colored",
"colored_json",
"console_error_panic_hook",
@@ -3913,9 +3924,9 @@ dependencies = [
[[package]]
name = "pyo3"
version = "0.20.3"
version = "0.21.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53bdbb96d49157e65d45cc287af5f32ffadd5f4761438b527b055fb0d4bb8233"
checksum = "a5e00b96a521718e08e03b1a622f01c8a8deb50719335de3f60b3b3950f069d8"
dependencies = [
"cfg-if",
"indoc",
@@ -3931,9 +3942,8 @@ dependencies = [
[[package]]
name = "pyo3-asyncio"
version = "0.20.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ea6b68e93db3622f3bb3bf363246cf948ed5375afe7abff98ccbdd50b184995"
version = "0.21.0"
source = "git+https://github.com/jopemachine/pyo3-asyncio/?branch=migration-pyo3-0.21#d1ec64076dd1b5c797db4b7b811f588466956d20"
dependencies = [
"futures",
"once_cell",
@@ -3945,9 +3955,8 @@ dependencies = [
[[package]]
name = "pyo3-asyncio-macros"
version = "0.20.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56c467178e1da6252c95c29ecf898b133f742e9181dca5def15dc24e19d45a39"
version = "0.21.0"
source = "git+https://github.com/jopemachine/pyo3-asyncio/?branch=migration-pyo3-0.21#d1ec64076dd1b5c797db4b7b811f588466956d20"
dependencies = [
"proc-macro2",
"quote",
@@ -3956,9 +3965,9 @@ dependencies = [
[[package]]
name = "pyo3-build-config"
version = "0.20.3"
version = "0.21.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "deaa5745de3f5231ce10517a1f5dd97d53e5a2fd77aa6b5842292085831d48d7"
checksum = "7883df5835fafdad87c0d888b266c8ec0f4c9ca48a5bed6bbb592e8dedee1b50"
dependencies = [
"once_cell",
"target-lexicon",
@@ -3966,9 +3975,9 @@ dependencies = [
[[package]]
name = "pyo3-ffi"
version = "0.20.3"
version = "0.21.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62b42531d03e08d4ef1f6e85a2ed422eb678b8cd62b762e53891c05faf0d4afa"
checksum = "01be5843dc60b916ab4dad1dca6d20b9b4e6ddc8e15f50c47fe6d85f1fb97403"
dependencies = [
"libc",
"pyo3-build-config",
@@ -3976,9 +3985,9 @@ dependencies = [
[[package]]
name = "pyo3-log"
version = "0.9.0"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c10808ee7250403bedb24bc30c32493e93875fef7ba3e4292226fe924f398bd"
checksum = "2af49834b8d2ecd555177e63b273b708dea75150abc6f5341d0a6e1a9623976c"
dependencies = [
"arc-swap",
"log",
@@ -3987,9 +3996,9 @@ dependencies = [
[[package]]
name = "pyo3-macros"
version = "0.20.3"
version = "0.21.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7305c720fa01b8055ec95e484a6eca7a83c841267f0dd5280f0c8b8551d2c158"
checksum = "77b34069fc0682e11b31dbd10321cbf94808394c56fd996796ce45217dfac53c"
dependencies = [
"proc-macro2",
"pyo3-macros-backend",
@@ -3999,9 +4008,9 @@ dependencies = [
[[package]]
name = "pyo3-macros-backend"
version = "0.20.3"
version = "0.21.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c7e9b68bb9c3149c5b0cade5d07f953d6d125eb4337723c4ccdb665f1f96185"
checksum = "08260721f32db5e1a5beae69a55553f56b99bd0e1c3e6e0a5e8851a9d0f5a85c"
dependencies = [
"heck 0.4.1",
"proc-macro2",

View File

@@ -23,6 +23,7 @@ halo2curves = { git = "https://github.com/privacy-scaling-explorations/halo2curv
rand = { version = "0.8", default_features = false }
itertools = { version = "0.10.3", default_features = false }
clap = { version = "4.5.3", features = ["derive"] }
clap_complete = "4.5.2"
serde = { version = "1.0.126", features = ["derive"], optional = true }
serde_json = { version = "1.0.97", default_features = false, features = [
"float_roundtrip",
@@ -47,7 +48,7 @@ metal = { git = "https://github.com/gfx-rs/metal-rs", optional = true }
# evm related deps
[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
alloy = { git = "https://github.com/alloy-rs/alloy", version = "0.1.0", features = ["provider-http", "signers", "contract", "rpc-types-eth", "signer-wallet", "node-bindings"] }
alloy = { git = "https://github.com/alloy-rs/alloy", version = "0.1.0", rev="5fbf57bac99edef9d8475190109a7ea9fb7e5e83", features = ["provider-http", "signers", "contract", "rpc-types-eth", "signer-wallet", "node-bindings"] }
foundry-compilers = {version = "0.4.1", features = ["svm-solc"]}
ethabi = "18"
indicatif = { version = "0.17.5", features = ["rayon"] }
@@ -71,16 +72,16 @@ tokio = { version = "1.35", default_features = false, features = [
"rt-multi-thread"
] }
tokio-util = { version = "0.7.9", features = ["codec"] }
pyo3 = { version = "0.20.2", features = [
pyo3 = { version = "0.21.2", features = [
"extension-module",
"abi3-py37",
"macros",
], default_features = false, optional = true }
pyo3-asyncio = { version="0.20.0", features = [
pyo3-asyncio = { git = "https://github.com/jopemachine/pyo3-asyncio/", branch="migration-pyo3-0.21", features = [
"attributes",
"tokio-runtime",
], default_features = false, optional = true }
pyo3-log = { version = "0.9.0", default_features = false, optional = true }
pyo3-log = { version = "0.10.0", default_features = false, optional = true }
tract-onnx = { git = "https://github.com/sonos/tract/", rev = "05ebf550aa9922b221af4635c21a67a8d2af12a9", default_features = false, optional = true }
tabled = { version = "0.12.0", optional = true }

View File

@@ -93,6 +93,79 @@ contract LoadInstances {
}
}
// Contract that checks that the COMMITMENT_KZG bytes is equal to the first part of the proof.
pragma solidity ^0.8.0;
// The kzg commitments of a given model, all aggregated into a single bytes array.
// At solidity generation time, the commitments are hardcoded into the contract via the COMMITMENT_KZG constant.
// It will be used to check that the proof commitments match the expected commitments.
bytes constant COMMITMENT_KZG = hex"";
contract SwapProofCommitments {
/**
* @dev Swap the proof commitments
* @notice must pass encoded bytes from memory
* @param encoded - verifier calldata
*/
function checkKzgCommits(
bytes calldata encoded
) internal pure returns (bool equal) {
bytes4 funcSig;
uint256 proof_offset;
uint256 proof_length;
assembly {
// fetch function sig. Either `verifyProof(bytes,uint256[])` or `verifyProof(address,bytes,uint256[])`
funcSig := calldataload(encoded.offset)
// Fetch proof offset which is 4 + 32 bytes away from
// start of encoded for `verifyProof(bytes,uint256[])`,
// and 4 + 32 + 32 away for `verifyProof(address,bytes,uint256[])`
proof_offset := calldataload(
add(
encoded.offset,
add(0x04, mul(0x20, eq(funcSig, 0xaf83a18d)))
)
)
proof_length := calldataload(
add(add(encoded.offset, 0x04), proof_offset)
)
}
// Check the length of the commitment against the proof bytes
if (proof_length < COMMITMENT_KZG.length) {
return false;
}
// Load COMMITMENT_KZG into memory
bytes memory commitment = COMMITMENT_KZG;
// Compare the first N bytes of the proof with COMMITMENT_KZG
uint words = (commitment.length + 31) / 32; // Calculate the number of 32-byte words
assembly {
// Now we compare the commitment with the proof,
// ensuring that the commitments divided up into 32 byte words are all equal.
for {
let i := 0x20
} lt(i, add(mul(words, 0x20), 0x20)) {
i := add(i, 0x20)
} {
let wordProof := calldataload(
add(add(encoded.offset, add(i, 0x04)), proof_offset)
)
let wordCommitment := mload(add(commitment, i))
equal := eq(wordProof, wordCommitment)
if eq(equal, 0) {
return(0, 0)
}
}
}
return equal; // Return true if the commitment comparison passed
}
}
// This contract serves as a Data Attestation Verifier for the EZKL model.
// It is designed to read and attest to instances of proofs generated from a specified circuit.
// It is particularly constructed to read only int256 data from specified on-chain contracts' view functions.
@@ -104,9 +177,10 @@ contract LoadInstances {
// 4. Field Element Conversion: The fixed-point representation is then converted into a field element modulo P using the `toFieldElement` method.
// 5. Data Attestation: The `attestData` method validates that the public instances match the data fetched and processed by the contract.
// 6. Proof Verification: The `verifyWithDataAttestation` method parses the instances out of the encoded calldata and calls the `attestData` method to validate the public instances,
// 6b. Optional KZG Commitment Verification: It also checks the KZG commitments in the proof against the expected commitments using the `checkKzgCommits` method.
// then calls the `verifyProof` method to verify the proof on the verifier.
contract DataAttestation is LoadInstances {
contract DataAttestation is LoadInstances, SwapProofCommitments {
/**
* @notice Struct used to make view only calls to accounts to fetch the data that EZKL reads from.
* @param the address of the account to make calls to
@@ -350,12 +424,18 @@ contract DataAttestation is LoadInstances {
}
}
/**
* @dev Verify the proof with the data attestation.
* @param verifier - The address of the verifier contract.
* @param encoded - The verifier calldata.
*/
function verifyWithDataAttestation(
address verifier,
bytes calldata encoded
) public view returns (bool) {
require(verifier.code.length > 0, "Address: call to non-contract");
attestData(getInstancesCalldata(encoded));
require(checkKzgCommits(encoded), "Invalid KZG commitments");
// static call the verifier contract to verify the proof
(bool success, bytes memory returndata) = verifier.staticcall(encoded);

View File

@@ -1,4 +1,4 @@
ezkl==0.0.0
ezkl==11.3.0
sphinx
sphinx-rtd-theme
sphinxcontrib-napoleon

View File

@@ -1,7 +1,7 @@
import ezkl
project = 'ezkl'
release = '0.0.0'
release = '11.3.0'
version = release

View File

@@ -0,0 +1,604 @@
{
"cells": [
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"# data-attest-kzg-vis\n",
"\n",
"Here's an example leveraging EZKL whereby the inputs to the model are read and attested to from an on-chain source and the params and outputs are committed to using kzg-commitments. \n",
"\n",
"In this setup:\n",
"- the inputs and outputs are publicly known to the prover and verifier\n",
"- the on chain inputs will be fetched and then fed directly into the circuit\n",
"- the quantization of the on-chain inputs happens within the evm and is replicated at proving time \n",
"- The kzg commitment to the params and inputs will be read from the proof and checked to make sure it matches the expected commitment stored on-chain.\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"First we import the necessary dependencies and set up logging to be as informative as possible. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# check if notebook is in colab\n",
"try:\n",
" # install ezkl\n",
" import google.colab\n",
" import subprocess\n",
" import sys\n",
" subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"ezkl\"])\n",
" subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"onnx\"])\n",
"\n",
"# rely on local installation of ezkl if the notebook is not in colab\n",
"except:\n",
" pass\n",
"\n",
"\n",
"from torch import nn\n",
"import ezkl\n",
"import os\n",
"import json\n",
"import logging\n",
"\n",
"# uncomment for more descriptive logging \n",
"FORMAT = '%(levelname)s %(name)s %(asctime)-15s %(filename)s:%(lineno)d %(message)s'\n",
"logging.basicConfig(format=FORMAT)\n",
"logging.getLogger().setLevel(logging.DEBUG)\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Now we define our model. It is a very simple PyTorch model that has just one layer, an average pooling 2D layer. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import torch\n",
"# Defines the model\n",
"\n",
"class MyModel(nn.Module):\n",
" def __init__(self):\n",
" super(MyModel, self).__init__()\n",
" self.layer = nn.AvgPool2d(2, 1, (1, 1))\n",
"\n",
" def forward(self, x):\n",
" return self.layer(x)[0]\n",
"\n",
"\n",
"circuit = MyModel()\n",
"\n",
"# this is where you'd train your model"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"We omit training for purposes of this demonstration. We've marked where training would happen in the cell above. \n",
"Now we export the model to onnx and create a corresponding (randomly generated) input. This input data will eventually be stored on chain and read from according to the call_data field in the graph input.\n",
"\n",
"You can replace the random `x` with real data if you so wish. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"x = 0.1*torch.rand(1,*[3, 2, 2], requires_grad=True)\n",
"\n",
"# Flips the neural net into inference mode\n",
"circuit.eval()\n",
"\n",
" # Export the model\n",
"torch.onnx.export(circuit, # model being run\n",
" x, # model input (or a tuple for multiple inputs)\n",
" \"network.onnx\", # where to save the model (can be a file or file-like object)\n",
" export_params=True, # store the trained parameter weights inside the model file\n",
" opset_version=10, # the ONNX version to export the model to\n",
" do_constant_folding=True, # whether to execute constant folding for optimization\n",
" input_names = ['input'], # the model's input names\n",
" output_names = ['output'], # the model's output names\n",
" dynamic_axes={'input' : {0 : 'batch_size'}, # variable length axes\n",
" 'output' : {0 : 'batch_size'}})\n",
"\n",
"data_array = ((x).detach().numpy()).reshape([-1]).tolist()\n",
"\n",
"data = dict(input_data = [data_array])\n",
"\n",
" # Serialize data into file:\n",
"json.dump(data, open(\"input.json\", 'w' ))\n",
"\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"We now define a function that will create a new anvil instance which we will deploy our test contract too. This contract will contain in its storage the data that we will read from and attest to. In production you would not need to set up a local anvil instance. Instead you would replace RPC_URL with the actual RPC endpoint of the chain you are deploying your verifiers too, reading from the data on said chain."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import subprocess\n",
"import time\n",
"import threading\n",
"\n",
"# make sure anvil is running locally\n",
"# $ anvil -p 3030\n",
"\n",
"RPC_URL = \"http://localhost:3030\"\n",
"\n",
"# Save process globally\n",
"anvil_process = None\n",
"\n",
"def start_anvil():\n",
" global anvil_process\n",
" if anvil_process is None:\n",
" anvil_process = subprocess.Popen([\"anvil\", \"-p\", \"3030\", \"--code-size-limit=41943040\"])\n",
" if anvil_process.returncode is not None:\n",
" raise Exception(\"failed to start anvil process\")\n",
" time.sleep(3)\n",
"\n",
"def stop_anvil():\n",
" global anvil_process\n",
" if anvil_process is not None:\n",
" anvil_process.terminate()\n",
" anvil_process = None\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"We define our `PyRunArgs` objects which contains the visibility parameters for out model. \n",
"- `input_visibility` defines the visibility of the model inputs\n",
"- `param_visibility` defines the visibility of the model weights and constants and parameters \n",
"- `output_visibility` defines the visibility of the model outputs\n",
"\n",
"Here we create the following setup:\n",
"- `input_visibility`: \"public\"\n",
"- `param_visibility`: \"polycommitment\" \n",
"- `output_visibility`: \"polycommitment\"\n",
"\n",
"**Note**:\n",
"When we set this to polycommitment, we are saying that the model parameters are committed to using a polynomial commitment scheme. This commitment will be stored on chain as a constant stored in the DA contract, and the proof will contain the commitment to the parameters. The DA verification will then check that the commitment in the proof matches the commitment stored on chain. \n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import ezkl\n",
"\n",
"model_path = os.path.join('network.onnx')\n",
"compiled_model_path = os.path.join('network.compiled')\n",
"pk_path = os.path.join('test.pk')\n",
"vk_path = os.path.join('test.vk')\n",
"settings_path = os.path.join('settings.json')\n",
"srs_path = os.path.join('kzg.srs')\n",
"data_path = os.path.join('input.json')\n",
"\n",
"run_args = ezkl.PyRunArgs()\n",
"run_args.input_visibility = \"public\"\n",
"run_args.param_visibility = \"polycommit\"\n",
"run_args.output_visibility = \"polycommit\"\n",
"run_args.num_inner_cols = 1\n",
"run_args.variables = [(\"batch_size\", 1)]\n",
"\n",
"\n",
"\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Now we generate a settings file. This file basically instantiates a bunch of parameters that determine their circuit shape, size etc... Because of the way we represent nonlinearities in the circuit (using Halo2's [lookup tables](https://zcash.github.io/halo2/design/proving-system/lookup.html)), it is often best to _calibrate_ this settings file as some data can fall out of range of these lookups.\n",
"\n",
"You can pass a dataset for calibration that will be representative of real inputs you might find if and when you deploy the prover. Here we create a dummy calibration dataset for demonstration purposes. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"!RUST_LOG=trace\n",
"# TODO: Dictionary outputs\n",
"res = ezkl.gen_settings(model_path, settings_path, py_run_args=run_args)\n",
"assert res == True"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# generate a bunch of dummy calibration data\n",
"cal_data = {\n",
" \"input_data\": [(0.1*torch.rand(2, *[3, 2, 2])).flatten().tolist()],\n",
"}\n",
"\n",
"cal_path = os.path.join('val_data.json')\n",
"# save as json file\n",
"with open(cal_path, \"w\") as f:\n",
" json.dump(cal_data, f)\n",
"\n",
"res = await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"res = ezkl.compile_circuit(model_path, compiled_model_path, settings_path)\n",
"assert res == True"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The graph input for on chain data sources is formatted completely differently compared to file based data sources.\n",
"\n",
"- For file data sources, the raw floating point values that eventually get quantized, converted into field elements and stored in `witness.json` to be consumed by the circuit are stored. The output data contains the expected floating point values returned as outputs from running your vanilla pytorch model on the given inputs.\n",
"- For on chain data sources, the input_data field contains all the data necessary to read and format the on chain data into something digestable by EZKL (aka field elements :-D). \n",
"Here is what the schema for an on-chain data source graph input file should look like:\n",
" \n",
"```json\n",
"{\n",
" \"input_data\": {\n",
" \"rpc\": \"http://localhost:3030\", // The rpc endpoint of the chain you are deploying your verifier to\n",
" \"calls\": [\n",
" {\n",
" \"call_data\": [\n",
" [\n",
" \"71e5ee5f0000000000000000000000000000000000000000000000000000000000000000\", // The abi encoded call data to a view function that returns a single on-chain data point (we only support uint256 returns for now)\n",
" 7 // The number of decimal places of the large uint256 value. This is our way of representing large wei values as floating points on chain, since the evm only natively supports integer values.\n",
" ],\n",
" [\n",
" \"71e5ee5f0000000000000000000000000000000000000000000000000000000000000001\",\n",
" 5\n",
" ],\n",
" [\n",
" \"71e5ee5f0000000000000000000000000000000000000000000000000000000000000002\",\n",
" 5\n",
" ]\n",
" ],\n",
" \"address\": \"5fbdb2315678afecb367f032d93f642f64180aa3\" // The address of the contract that we are calling to get the data. \n",
" }\n",
" ]\n",
" }\n",
"}"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"await ezkl.setup_test_evm_witness(\n",
" data_path,\n",
" compiled_model_path,\n",
" # we write the call data to the same file as the input data\n",
" data_path,\n",
" input_source=ezkl.PyTestDataSource.OnChain,\n",
" output_source=ezkl.PyTestDataSource.File,\n",
" rpc_url=RPC_URL)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"As we use Halo2 with KZG-commitments we need an SRS string from (preferably) a multi-party trusted setup ceremony. For an overview of the procedures for such a ceremony check out [this page](https://blog.ethereum.org/2023/01/16/announcing-kzg-ceremony). The `get_srs` command retrieves a correctly sized SRS given the calibrated settings file from [here](https://github.com/han0110/halo2-kzg-srs). \n",
"\n",
"These SRS were generated with [this](https://github.com/privacy-scaling-explorations/perpetualpowersoftau) ceremony. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"res = await ezkl.get_srs( settings_path)\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"We now need to generate the circuit witness. These are the model outputs (and any hashes) that are generated when feeding the previously generated `input.json` through the circuit / model. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"!export RUST_BACKTRACE=1\n",
"\n",
"witness_path = \"witness.json\"\n",
"\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path, vk_path)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Here we setup verifying and proving keys for the circuit. As the name suggests the proving key is needed for ... proving and the verifying key is needed for ... verifying. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# HERE WE SETUP THE CIRCUIT PARAMS\n",
"# WE GOT KEYS\n",
"# WE GOT CIRCUIT PARAMETERS\n",
"# EVERYTHING ANYONE HAS EVER NEEDED FOR ZK\n",
"res = ezkl.setup(\n",
" compiled_model_path,\n",
" vk_path,\n",
" pk_path,\n",
" )\n",
"\n",
"assert res == True\n",
"assert os.path.isfile(vk_path)\n",
"assert os.path.isfile(pk_path)\n",
"assert os.path.isfile(settings_path)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Now we generate a full proof. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# GENERATE A PROOF\n",
"\n",
"proof_path = os.path.join('test.pf')\n",
"\n",
"res = ezkl.prove(\n",
" witness_path,\n",
" compiled_model_path,\n",
" pk_path,\n",
" proof_path,\n",
" \n",
" \"single\",\n",
" )\n",
"\n",
"print(res)\n",
"assert os.path.isfile(proof_path)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"And verify it as a sanity check. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# VERIFY IT\n",
"\n",
"res = ezkl.verify(\n",
" proof_path,\n",
" settings_path,\n",
" vk_path,\n",
" \n",
" )\n",
"\n",
"assert res == True\n",
"print(\"verified\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"We can now create and then deploy a vanilla evm verifier."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"abi_path = 'test.abi'\n",
"sol_code_path = 'test.sol'\n",
"\n",
"res = await ezkl.create_evm_verifier(\n",
" vk_path,\n",
" \n",
" settings_path,\n",
" sol_code_path,\n",
" abi_path,\n",
" )\n",
"assert res == True"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"\n",
"addr_path_verifier = \"addr_verifier.txt\"\n",
"\n",
"res = await ezkl.deploy_evm(\n",
" addr_path_verifier,\n",
" sol_code_path,\n",
" 'http://127.0.0.1:3030'\n",
")\n",
"\n",
"assert res == True"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"When deploying a DA with kzg commitments, we need to make sure to also pass a witness file that contains the commitments to the parameters and inputs. This is because the verifier will need to check that the commitments in the proof match the commitments stored on chain."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"\n",
"abi_path = 'test.abi'\n",
"sol_code_path = 'test.sol'\n",
"input_path = 'input.json'\n",
"\n",
"res = await ezkl.create_evm_data_attestation(\n",
" input_path,\n",
" settings_path,\n",
" sol_code_path,\n",
" abi_path,\n",
" witness_path = witness_path,\n",
" )"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Now we can deploy the data attest verifier contract. For security reasons, this binding will only deploy to a local anvil instance, using accounts generated by anvil. \n",
"So should only be used for testing purposes."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"addr_path_da = \"addr_da.txt\"\n",
"\n",
"res = await ezkl.deploy_da_evm(\n",
" addr_path_da,\n",
" input_path,\n",
" settings_path,\n",
" sol_code_path,\n",
" RPC_URL,\n",
" )\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Call the view only verify method on the contract to verify the proof. Since it is a view function this is safe to use in production since you don't have to pass your private key."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# read the verifier address\n",
"addr_verifier = None\n",
"with open(addr_path_verifier, 'r') as f:\n",
" addr = f.read()\n",
"#read the data attestation address\n",
"addr_da = None\n",
"with open(addr_path_da, 'r') as f:\n",
" addr_da = f.read()\n",
"\n",
"res = await ezkl.verify_evm(\n",
" addr,\n",
" proof_path,\n",
" RPC_URL,\n",
" addr_da,\n",
")"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "ezkl",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.13"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}

View File

@@ -157,6 +157,7 @@
{
"cell_type": "code",
"execution_count": null,
"id": "b78d3cbf",
"metadata": {},
"outputs": [],
"source": [
@@ -192,7 +193,7 @@
"outputs": [],
"source": [
"# srs path\n",
"res = await ezkl.get_srs( settings_path)"
"res = ezkl.get_srs( settings_path)"
]
},
{
@@ -298,7 +299,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.15"
"version": "3.12.3"
}
},
"nbformat": 4,

View File

@@ -169,7 +169,7 @@
"json.dump(data, open(cal_path, 'w'))\n",
"\n",
"\n",
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
"await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
]
},
{
@@ -302,4 +302,4 @@
},
"nbformat": 4,
"nbformat_minor": 5
}
}

View File

@@ -170,7 +170,7 @@
"json.dump(data, open(cal_path, 'w'))\n",
"\n",
"\n",
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
"await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
]
},
{

View File

@@ -126,7 +126,6 @@ elif [ "$PLATFORM" == "macos" ]; then
echo "Cleaning up"
rm "$EZKL_DIR/build-artifacts.ezkl-macos-aarch64.tar.gz"
else
JSON_RESPONSE=$(curl -s "$RELEASE_URL")
FILE_URL=$(echo "$JSON_RESPONSE" | grep -o 'https://github.com[^"]*' | grep "build-artifacts.ezkl-macos.tar.gz")
@@ -155,7 +154,7 @@ elif [ "$PLATFORM" == "linux" ]; then
echo "Cleaning up"
rm "$EZKL_DIR/build-artifacts.ezkl-linux-gnu.tar.gz"
else if [ "$ARCHITECTURE" == "aarch64" ]; then
elif [ "$ARCHITECTURE" == "aarch64" ]; then
JSON_RESPONSE=$(curl -s "$RELEASE_URL")
FILE_URL=$(echo "$JSON_RESPONSE" | grep -o 'https://github.com[^"]*' | grep "build-artifacts.ezkl-linux-aarch64.tar.gz")

View File

@@ -1,7 +1,7 @@
// ignore file if compiling for wasm
#[cfg(not(target_arch = "wasm32"))]
use clap::Parser;
use clap::{CommandFactory, Parser};
#[cfg(not(target_arch = "wasm32"))]
use colored_json::ToColoredJson;
#[cfg(not(target_arch = "wasm32"))]
@@ -24,22 +24,30 @@ use std::error::Error;
#[cfg(not(target_arch = "wasm32"))]
pub async fn main() -> Result<(), Box<dyn Error>> {
let args = Cli::parse();
init_logger();
#[cfg(not(any(target_arch = "wasm32", feature = "no-banner")))]
banner();
#[cfg(feature = "icicle")]
if env::var("ENABLE_ICICLE_GPU").is_ok() {
info!("Running with ICICLE GPU");
if let Some(generator) = args.generator {
ezkl::commands::print_completions(generator, &mut Cli::command());
Ok(())
} else if let Some(command) = args.command {
init_logger();
#[cfg(not(any(target_arch = "wasm32", feature = "no-banner")))]
banner();
#[cfg(feature = "icicle")]
if env::var("ENABLE_ICICLE_GPU").is_ok() {
info!("Running with ICICLE GPU");
} else {
info!("Running with CPU");
}
info!("command: \n {}", &command.as_json().to_colored_json_auto()?);
let res = run(command).await;
match &res {
Ok(_) => info!("succeeded"),
Err(e) => error!("failed: {}", e),
};
res.map(|_| ())
} else {
info!("Running with CPU");
Err("No command provided".into())
}
info!("command: \n {}", &args.as_json()?.to_colored_json_auto()?);
let res = run(args.command).await;
match &res {
Ok(_) => info!("succeeded"),
Err(e) => error!("failed: {}", e),
};
res.map(|_| ())
}
#[cfg(target_arch = "wasm32")]

View File

@@ -3910,7 +3910,7 @@ pub(crate) fn range_check<F: PrimeField + TensorType + PartialOrd + std::hash::H
let int_values = w.get_int_evals()?;
for v in int_values.iter() {
if v < &range.0 || v > &range.1 {
log::error!("Value ({:?}) out of range: {:?}", v, range);
log::warn!("Value ({:?}) out of range: {:?}", v, range);
return Err(Box::new(TensorError::TableLookupError));
}
}

View File

@@ -1,6 +1,7 @@
#[cfg(not(target_arch = "wasm32"))]
use alloy::primitives::Address as H160;
use clap::{Parser, Subcommand};
use clap::{Command, Parser, Subcommand};
use clap_complete::{generate, Generator, Shell};
#[cfg(feature = "python-bindings")]
use pyo3::{
conversion::{FromPyObject, PyTryFrom},
@@ -10,7 +11,7 @@ use pyo3::{
};
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use std::{error::Error, str::FromStr};
use std::str::FromStr;
use tosubcommand::{ToFlags, ToSubcommand};
use crate::{pfsys::ProofType, Commitments, RunArgs};
@@ -52,6 +53,8 @@ pub const DEFAULT_VERIFIER_AGGREGATED_ABI: &str = "verifier_aggr_abi.json";
pub const DEFAULT_VERIFIER_DA_ABI: &str = "verifier_da_abi.json";
/// Default solidity code
pub const DEFAULT_SOL_CODE: &str = "evm_deploy.sol";
/// Default calldata path
pub const DEFAULT_CALLDATA: &str = "calldata.bytes";
/// Default solidity code for aggregated proofs
pub const DEFAULT_SOL_CODE_AGGREGATED: &str = "evm_deploy_aggr.sol";
/// Default solidity code for data attestation
@@ -78,7 +81,7 @@ pub const DEFAULT_CALIBRATION_FILE: &str = "calibration.json";
pub const DEFAULT_LOOKUP_SAFETY_MARGIN: &str = "2";
/// Default Compress selectors
pub const DEFAULT_DISABLE_SELECTOR_COMPRESSION: &str = "false";
/// Default render vk seperately
/// Default render vk separately
pub const DEFAULT_RENDER_VK_SEPERATELY: &str = "false";
/// Default VK sol path
pub const DEFAULT_VK_SOL: &str = "vk.sol";
@@ -253,33 +256,66 @@ lazy_static! {
};
}
#[allow(missing_docs)]
#[derive(Parser, Debug, Clone, Deserialize, Serialize)]
#[command(author, about, long_about = None)]
#[clap(version = *VERSION)]
pub struct Cli {
#[command(subcommand)]
#[allow(missing_docs)]
pub command: Commands,
/// Get the styles for the CLI
pub fn get_styles() -> clap::builder::Styles {
clap::builder::Styles::styled()
.usage(
clap::builder::styling::Style::new()
.bold()
.underline()
.fg_color(Some(clap::builder::styling::Color::Ansi(clap::builder::styling::AnsiColor::Cyan))),
)
.header(
clap::builder::styling::Style::new()
.bold()
.underline()
.fg_color(Some(clap::builder::styling::Color::Ansi(clap::builder::styling::AnsiColor::Cyan))),
)
.literal(
clap::builder::styling::Style::new().fg_color(Some(clap::builder::styling::Color::Ansi(clap::builder::styling::AnsiColor::Magenta))),
)
.invalid(
clap::builder::styling::Style::new()
.bold()
.fg_color(Some(clap::builder::styling::Color::Ansi(clap::builder::styling::AnsiColor::Red))),
)
.error(
clap::builder::styling::Style::new()
.bold()
.fg_color(Some(clap::builder::styling::Color::Ansi(clap::builder::styling::AnsiColor::Red))),
)
.valid(
clap::builder::styling::Style::new()
.bold()
.underline()
.fg_color(Some(clap::builder::styling::Color::Ansi(clap::builder::styling::AnsiColor::Green))),
)
.placeholder(
clap::builder::styling::Style::new().fg_color(Some(clap::builder::styling::Color::Ansi(clap::builder::styling::AnsiColor::White))),
)
}
impl Cli {
/// Export the ezkl configuration as json
pub fn as_json(&self) -> Result<String, Box<dyn Error>> {
let serialized = match serde_json::to_string(&self) {
Ok(s) => s,
Err(e) => {
return Err(Box::new(e));
}
};
Ok(serialized)
}
/// Parse an ezkl configuration from a json
pub fn from_json(arg_json: &str) -> Result<Self, serde_json::Error> {
serde_json::from_str(arg_json)
}
/// Print completions for the given generator
pub fn print_completions<G: Generator>(gen: G, cmd: &mut Command) {
generate(gen, cmd, cmd.get_name().to_string(), &mut std::io::stdout());
}
#[allow(missing_docs)]
#[derive(Parser, Debug, Clone)]
#[command(author, about, long_about = None)]
#[clap(version = *VERSION, styles = get_styles(), trailing_var_arg = true)]
pub struct Cli {
/// If provided, outputs the completion file for given shell
#[clap(long = "generate", value_parser)]
pub generator: Option<Shell>,
#[command(subcommand)]
#[allow(missing_docs)]
pub command: Option<Commands>,
}
#[allow(missing_docs)]
#[derive(Debug, Subcommand, Clone, Deserialize, Serialize, PartialEq, PartialOrd, ToSubcommand)]
pub enum Commands {
@@ -289,7 +325,7 @@ pub enum Commands {
/// Loads model and prints model table
Table {
/// The path to the .onnx model file
#[arg(short = 'M', long, default_value = DEFAULT_MODEL)]
#[arg(short = 'M', long, default_value = DEFAULT_MODEL, value_hint = clap::ValueHint::FilePath)]
model: Option<PathBuf>,
/// proving arguments
#[clap(flatten)]
@@ -299,29 +335,29 @@ pub enum Commands {
/// Generates the witness from an input file.
GenWitness {
/// The path to the .json data file
#[arg(short = 'D', long, default_value = DEFAULT_DATA)]
#[arg(short = 'D', long, default_value = DEFAULT_DATA, value_hint = clap::ValueHint::FilePath)]
data: Option<PathBuf>,
/// The path to the compiled model file (generated using the compile-circuit command)
#[arg(short = 'M', long, default_value = DEFAULT_COMPILED_CIRCUIT)]
#[arg(short = 'M', long, default_value = DEFAULT_COMPILED_CIRCUIT, value_hint = clap::ValueHint::FilePath)]
compiled_circuit: Option<PathBuf>,
/// Path to output the witness .json file
#[arg(short = 'O', long, default_value = DEFAULT_WITNESS)]
#[arg(short = 'O', long, default_value = DEFAULT_WITNESS, value_hint = clap::ValueHint::FilePath)]
output: Option<PathBuf>,
/// Path to the verification key file (optional - solely used to generate kzg commits)
#[arg(short = 'V', long)]
#[arg(short = 'V', long, value_hint = clap::ValueHint::FilePath)]
vk_path: Option<PathBuf>,
/// Path to the srs file (optional - solely used to generate kzg commits)
#[arg(short = 'P', long)]
#[arg(short = 'P', long, value_hint = clap::ValueHint::FilePath)]
srs_path: Option<PathBuf>,
},
/// Produces the proving hyperparameters, from run-args
GenSettings {
/// The path to the .onnx model file
#[arg(short = 'M', long, default_value = DEFAULT_MODEL)]
#[arg(short = 'M', long, default_value = DEFAULT_MODEL, value_hint = clap::ValueHint::FilePath)]
model: Option<PathBuf>,
/// The path to generate the circuit settings .json file to
#[arg(short = 'O', long, default_value = DEFAULT_SETTINGS)]
#[arg(short = 'O', long, default_value = DEFAULT_SETTINGS, value_hint = clap::ValueHint::FilePath)]
settings_path: Option<PathBuf>,
/// proving arguments
#[clap(flatten)]
@@ -332,33 +368,34 @@ pub enum Commands {
#[cfg(not(target_arch = "wasm32"))]
CalibrateSettings {
/// The path to the .json calibration data file.
#[arg(short = 'D', long, default_value = DEFAULT_CALIBRATION_FILE)]
#[arg(short = 'D', long, default_value = DEFAULT_CALIBRATION_FILE, value_hint = clap::ValueHint::FilePath)]
data: Option<PathBuf>,
/// The path to the .onnx model file
#[arg(short = 'M', long, default_value = DEFAULT_MODEL)]
#[arg(short = 'M', long, default_value = DEFAULT_MODEL, value_hint = clap::ValueHint::FilePath)]
model: Option<PathBuf>,
/// The path to load circuit settings .json file AND overwrite (generated using the gen-settings command).
#[arg(short = 'O', long, default_value = DEFAULT_SETTINGS)]
#[arg(short = 'O', long, default_value = DEFAULT_SETTINGS, value_hint = clap::ValueHint::FilePath)]
settings_path: Option<PathBuf>,
#[arg(long = "target", default_value = DEFAULT_CALIBRATION_TARGET)]
#[arg(long = "target", default_value = DEFAULT_CALIBRATION_TARGET, value_hint = clap::ValueHint::Other)]
/// Target for calibration. Set to "resources" to optimize for computational resource. Otherwise, set to "accuracy" to optimize for accuracy.
target: CalibrationTarget,
/// the lookup safety margin to use for calibration. if the max lookup is 2^k, then the max lookup will be 2^k * lookup_safety_margin. larger = safer but slower
#[arg(long, default_value = DEFAULT_LOOKUP_SAFETY_MARGIN)]
#[arg(long, default_value = DEFAULT_LOOKUP_SAFETY_MARGIN, value_hint = clap::ValueHint::Other)]
lookup_safety_margin: i64,
/// Optional scales to specifically try for calibration. Example, --scales 0,4
#[arg(long, value_delimiter = ',', allow_hyphen_values = true)]
#[arg(long, value_delimiter = ',', allow_hyphen_values = true, value_hint = clap::ValueHint::Other)]
scales: Option<Vec<crate::Scale>>,
/// Optional scale rebase multipliers to specifically try for calibration. This is the multiplier at which we divide to return to the input scale. Example, --scale-rebase-multipliers 0,4
#[arg(
long,
value_delimiter = ',',
allow_hyphen_values = true,
default_value = DEFAULT_SCALE_REBASE_MULTIPLIERS
default_value = DEFAULT_SCALE_REBASE_MULTIPLIERS,
value_hint = clap::ValueHint::Other
)]
scale_rebase_multiplier: Vec<u32>,
/// max logrows to use for calibration, 26 is the max public SRS size
#[arg(long)]
#[arg(long, value_hint = clap::ValueHint::Other)]
max_logrows: Option<u32>,
// whether to only range check rebases (instead of trying both range check and lookup)
#[arg(long, default_value = DEFAULT_ONLY_RANGE_CHECK_REBASE, action = clap::ArgAction::SetTrue)]
@@ -369,13 +406,13 @@ pub enum Commands {
#[command(name = "gen-srs", arg_required_else_help = true)]
GenSrs {
/// The path to output the generated SRS
#[arg(long)]
#[arg(long, value_hint = clap::ValueHint::FilePath)]
srs_path: PathBuf,
/// number of logrows to use for srs
#[arg(long)]
#[arg(long, value_hint = clap::ValueHint::Other)]
logrows: usize,
/// commitment used
#[arg(long, default_value = DEFAULT_COMMITMENT)]
#[arg(long, default_value = DEFAULT_COMMITMENT, value_hint = clap::ValueHint::Other)]
commitment: Option<Commitments>,
},
@@ -384,57 +421,57 @@ pub enum Commands {
#[command(name = "get-srs")]
GetSrs {
/// The path to output the desired srs file, if set to None will save to $EZKL_REPO_PATH/srs
#[arg(long)]
#[arg(long, default_value = None, value_hint = clap::ValueHint::FilePath)]
srs_path: Option<PathBuf>,
/// Path to the circuit settings .json file to read in logrows from. Overriden by logrows if specified.
#[arg(short = 'S', long, default_value = DEFAULT_SETTINGS)]
#[arg(short = 'S', long, default_value = DEFAULT_SETTINGS, value_hint = clap::ValueHint::FilePath)]
settings_path: Option<PathBuf>,
/// Number of logrows to use for srs. Overrides settings_path if specified.
#[arg(long, default_value = None)]
#[arg(long, default_value = None, value_hint = clap::ValueHint::Other)]
logrows: Option<u32>,
/// Commitment used
#[arg(long, default_value = None)]
#[arg(long, default_value = None, value_hint = clap::ValueHint::Other)]
commitment: Option<Commitments>,
},
/// Loads model and input and runs mock prover (for testing)
Mock {
/// The path to the .json witness file (generated using the gen-witness command)
#[arg(short = 'W', long, default_value = DEFAULT_WITNESS)]
#[arg(short = 'W', long, default_value = DEFAULT_WITNESS, value_hint = clap::ValueHint::FilePath)]
witness: Option<PathBuf>,
/// The path to the compiled model file (generated using the compile-circuit command)
#[arg(short = 'M', long, default_value = DEFAULT_COMPILED_CIRCUIT)]
#[arg(short = 'M', long, default_value = DEFAULT_COMPILED_CIRCUIT, value_hint = clap::ValueHint::FilePath)]
model: Option<PathBuf>,
},
/// Mock aggregate proofs
MockAggregate {
/// The path to the snarks to aggregate over (generated using the prove command with the --proof-type=for-aggr flag)
#[arg(long, default_value = DEFAULT_PROOF, value_delimiter = ',', allow_hyphen_values = true)]
#[arg(long, default_value = DEFAULT_PROOF, value_delimiter = ',', allow_hyphen_values = true, value_hint = clap::ValueHint::FilePath)]
aggregation_snarks: Vec<PathBuf>,
/// logrows used for aggregation circuit
#[arg(long, default_value = DEFAULT_AGGREGATED_LOGROWS)]
#[arg(long, default_value = DEFAULT_AGGREGATED_LOGROWS, value_hint = clap::ValueHint::Other)]
logrows: Option<u32>,
/// whether the accumulated are segments of a larger proof
#[arg(long, default_value = DEFAULT_SPLIT, action = clap::ArgAction::SetTrue)]
split_proofs: Option<bool>,
},
/// setup aggregation circuit :)
/// Setup aggregation circuit and generate pk and vk
SetupAggregate {
/// The path to samples of snarks that will be aggregated over (generated using the prove command with the --proof-type=for-aggr flag)
#[arg(long, default_value = DEFAULT_PROOF, value_delimiter = ',', allow_hyphen_values = true)]
#[arg(long, default_value = DEFAULT_PROOF, value_delimiter = ',', allow_hyphen_values = true, value_hint = clap::ValueHint::FilePath)]
sample_snarks: Vec<PathBuf>,
/// The path to save the desired verification key file to
#[arg(long, default_value = DEFAULT_VK_AGGREGATED)]
#[arg(long, default_value = DEFAULT_VK_AGGREGATED, value_hint = clap::ValueHint::FilePath)]
vk_path: Option<PathBuf>,
/// The path to save the proving key to
#[arg(long, default_value = DEFAULT_PK_AGGREGATED)]
#[arg(long, default_value = DEFAULT_PK_AGGREGATED, value_hint = clap::ValueHint::FilePath)]
pk_path: Option<PathBuf>,
/// The path to SRS, if None will use $EZKL_REPO_PATH/srs/kzg{logrows}.srs
#[arg(long)]
#[arg(long, value_hint = clap::ValueHint::FilePath)]
srs_path: Option<PathBuf>,
/// logrows used for aggregation circuit
#[arg(long, default_value = DEFAULT_AGGREGATED_LOGROWS)]
#[arg(long, default_value = DEFAULT_AGGREGATED_LOGROWS, value_hint = clap::ValueHint::Other)]
logrows: Option<u32>,
        /// whether the accumulated proofs are segments of a larger proof
#[arg(long, default_value = DEFAULT_SPLIT, action = clap::ArgAction::SetTrue)]
@@ -443,19 +480,19 @@ pub enum Commands {
#[arg(long, default_value = DEFAULT_DISABLE_SELECTOR_COMPRESSION, action = clap::ArgAction::SetTrue)]
disable_selector_compression: Option<bool>,
/// commitment used
#[arg(long, default_value = DEFAULT_COMMITMENT)]
#[arg(long, default_value = DEFAULT_COMMITMENT, value_hint = clap::ValueHint::Other)]
commitment: Option<Commitments>,
},
/// Aggregates proofs :)
/// Aggregates proofs
Aggregate {
/// The path to the snarks to aggregate over (generated using the prove command with the --proof-type=for-aggr flag)
#[arg(long, default_value = DEFAULT_PROOF, value_delimiter = ',', allow_hyphen_values = true)]
#[arg(long, default_value = DEFAULT_PROOF, value_delimiter = ',', allow_hyphen_values = true, value_hint = clap::ValueHint::FilePath)]
aggregation_snarks: Vec<PathBuf>,
/// The path to load the desired proving key file (generated using the setup-aggregate command)
#[arg(long, default_value = DEFAULT_PK_AGGREGATED)]
#[arg(long, default_value = DEFAULT_PK_AGGREGATED, value_hint = clap::ValueHint::FilePath)]
pk_path: Option<PathBuf>,
/// The path to output the proof file to
#[arg(long, default_value = DEFAULT_PROOF_AGGREGATED)]
#[arg(long, default_value = DEFAULT_PROOF_AGGREGATED, value_hint = clap::ValueHint::FilePath)]
proof_path: Option<PathBuf>,
/// The path to SRS, if None will use $EZKL_REPO_PATH/srs/kzg{logrows}.srs
#[arg(long)]
@@ -465,50 +502,51 @@ pub enum Commands {
require_equals = true,
num_args = 0..=1,
default_value_t = TranscriptType::default(),
value_enum
value_enum,
value_hint = clap::ValueHint::Other
)]
transcript: TranscriptType,
/// logrows used for aggregation circuit
#[arg(long, default_value = DEFAULT_AGGREGATED_LOGROWS)]
#[arg(long, default_value = DEFAULT_AGGREGATED_LOGROWS, value_hint = clap::ValueHint::Other)]
logrows: Option<u32>,
/// run sanity checks during calculations (safe or unsafe)
#[arg(long, default_value = DEFAULT_CHECKMODE)]
#[arg(long, default_value = DEFAULT_CHECKMODE, value_hint = clap::ValueHint::Other)]
check_mode: Option<CheckMode>,
/// whether the accumulated proofs are segments of a larger circuit
#[arg(long, default_value = DEFAULT_SPLIT, action = clap::ArgAction::SetTrue)]
split_proofs: Option<bool>,
/// commitment used
#[arg(long, default_value = DEFAULT_COMMITMENT)]
#[arg(long, default_value = DEFAULT_COMMITMENT, value_hint = clap::ValueHint::Other)]
commitment: Option<Commitments>,
},
/// Compiles a circuit from onnx to a simplified graph (einsum + other ops) and parameters as sets of field elements
CompileCircuit {
/// The path to the .onnx model file
#[arg(short = 'M', long, default_value = DEFAULT_MODEL)]
#[arg(short = 'M', long, default_value = DEFAULT_MODEL, value_hint = clap::ValueHint::FilePath)]
model: Option<PathBuf>,
/// The path to the compiled model file (generated using the compile-circuit command)
#[arg(long, default_value = DEFAULT_COMPILED_CIRCUIT)]
#[arg(long, default_value = DEFAULT_COMPILED_CIRCUIT, value_hint = clap::ValueHint::FilePath)]
compiled_circuit: Option<PathBuf>,
/// The path to load circuit settings .json file from (generated using the gen-settings command)
#[arg(short = 'S', long, default_value = DEFAULT_SETTINGS)]
#[arg(short = 'S', long, default_value = DEFAULT_SETTINGS, value_hint = clap::ValueHint::FilePath)]
settings_path: Option<PathBuf>,
},
/// Creates pk and vk
Setup {
/// The path to the compiled model file (generated using the compile-circuit command)
#[arg(short = 'M', long, default_value = DEFAULT_COMPILED_CIRCUIT)]
#[arg(short = 'M', long, default_value = DEFAULT_COMPILED_CIRCUIT, value_hint = clap::ValueHint::FilePath)]
compiled_circuit: Option<PathBuf>,
/// The path to SRS, if None will use $EZKL_REPO_PATH/srs/kzg{logrows}.srs
#[arg(long)]
#[arg(long, value_hint = clap::ValueHint::FilePath)]
srs_path: Option<PathBuf>,
/// The path to output the verification key file to
#[arg(long, default_value = DEFAULT_VK)]
#[arg(long, default_value = DEFAULT_VK, value_hint = clap::ValueHint::FilePath)]
vk_path: Option<PathBuf>,
/// The path to output the proving key file to
#[arg(long, default_value = DEFAULT_PK)]
#[arg(long, default_value = DEFAULT_PK, value_hint = clap::ValueHint::FilePath)]
pk_path: Option<PathBuf>,
/// The graph witness (optional - used to override fixed values in the circuit)
#[arg(short = 'W', long)]
#[arg(short = 'W', long, value_hint = clap::ValueHint::FilePath)]
witness: Option<PathBuf>,
/// compress selectors
#[arg(long, default_value = DEFAULT_DISABLE_SELECTOR_COMPRESSION, action = clap::ArgAction::SetTrue)]
@@ -519,24 +557,24 @@ pub enum Commands {
#[command(arg_required_else_help = true)]
SetupTestEvmData {
/// The path to the .json data file, which should include both the network input (possibly private) and the network output (public input to the proof)
#[arg(short = 'D', long)]
#[arg(short = 'D', long, value_hint = clap::ValueHint::FilePath)]
data: Option<PathBuf>,
/// The path to the compiled model file (generated using the compile-circuit command)
#[arg(short = 'M', long)]
#[arg(short = 'M', long, value_hint = clap::ValueHint::FilePath)]
compiled_circuit: Option<PathBuf>,
/// For testing purposes only. The optional path to the .json data file that will be generated that contains the OnChain data storage information
/// derived from the file information in the data .json file.
/// Should include both the network input (possibly private) and the network output (public input to the proof)
#[arg(short = 'T', long)]
#[arg(short = 'T', long, value_hint = clap::ValueHint::FilePath)]
test_data: PathBuf,
/// RPC URL for an Ethereum node, if None will use Anvil but WON'T persist state
#[arg(short = 'U', long)]
#[arg(short = 'U', long, value_hint = clap::ValueHint::Url)]
rpc_url: Option<String>,
/// where the input data come from
#[arg(long, default_value = "on-chain")]
#[arg(long, default_value = "on-chain", value_hint = clap::ValueHint::Other)]
input_source: TestDataSource,
/// where the output data come from
#[arg(long, default_value = "on-chain")]
#[arg(long, default_value = "on-chain", value_hint = clap::ValueHint::Other)]
output_source: TestDataSource,
},
#[cfg(not(target_arch = "wasm32"))]
@@ -544,23 +582,23 @@ pub enum Commands {
#[command(arg_required_else_help = true)]
TestUpdateAccountCalls {
        /// The verifier contract's address
#[arg(long)]
#[arg(long, value_hint = clap::ValueHint::Other)]
addr: H160Flag,
/// The path to the .json data file.
#[arg(short = 'D', long)]
#[arg(short = 'D', long, value_hint = clap::ValueHint::FilePath)]
data: Option<PathBuf>,
/// RPC URL for an Ethereum node, if None will use Anvil but WON'T persist state
#[arg(short = 'U', long)]
#[arg(short = 'U', long, value_hint = clap::ValueHint::Url)]
rpc_url: Option<String>,
},
#[cfg(not(target_arch = "wasm32"))]
/// Swaps the positions in the transcript that correspond to commitments
SwapProofCommitments {
/// The path to the proof file
#[arg(short = 'P', long, default_value = DEFAULT_PROOF)]
#[arg(short = 'P', long, default_value = DEFAULT_PROOF, value_hint = clap::ValueHint::FilePath)]
proof_path: Option<PathBuf>,
/// The path to the witness file
#[arg(short = 'W', long, default_value = DEFAULT_WITNESS)]
#[arg(short = 'W', long, default_value = DEFAULT_WITNESS, value_hint = clap::ValueHint::FilePath)]
witness_path: Option<PathBuf>,
},
@@ -568,50 +606,65 @@ pub enum Commands {
/// Loads model, data, and creates proof
Prove {
/// The path to the .json witness file (generated using the gen-witness command)
#[arg(short = 'W', long, default_value = DEFAULT_WITNESS)]
#[arg(short = 'W', long, default_value = DEFAULT_WITNESS, value_hint = clap::ValueHint::FilePath)]
witness: Option<PathBuf>,
/// The path to the compiled model file (generated using the compile-circuit command)
#[arg(short = 'M', long, default_value = DEFAULT_COMPILED_CIRCUIT)]
#[arg(short = 'M', long, default_value = DEFAULT_COMPILED_CIRCUIT, value_hint = clap::ValueHint::FilePath)]
compiled_circuit: Option<PathBuf>,
/// The path to load the desired proving key file (generated using the setup command)
#[arg(long, default_value = DEFAULT_PK)]
#[arg(long, default_value = DEFAULT_PK, value_hint = clap::ValueHint::FilePath)]
pk_path: Option<PathBuf>,
/// The path to output the proof file to
#[arg(long, default_value = DEFAULT_PROOF)]
#[arg(long, default_value = DEFAULT_PROOF, value_hint = clap::ValueHint::FilePath)]
proof_path: Option<PathBuf>,
/// The path to SRS, if None will use $EZKL_REPO_PATH/srs/kzg{logrows}.srs
#[arg(long)]
#[arg(long, value_hint = clap::ValueHint::FilePath)]
srs_path: Option<PathBuf>,
#[arg(
long,
require_equals = true,
num_args = 0..=1,
default_value_t = ProofType::Single,
value_enum
value_enum,
value_hint = clap::ValueHint::Other
)]
proof_type: ProofType,
/// run sanity checks during calculations (safe or unsafe)
#[arg(long, default_value = DEFAULT_CHECKMODE)]
#[arg(long, default_value = DEFAULT_CHECKMODE, value_hint = clap::ValueHint::Other)]
check_mode: Option<CheckMode>,
},
#[cfg(not(target_arch = "wasm32"))]
/// Encodes a proof into evm calldata
#[command(name = "encode-evm-calldata")]
EncodeEvmCalldata {
/// The path to the proof file (generated using the prove command)
#[arg(long, default_value = DEFAULT_PROOF, value_hint = clap::ValueHint::FilePath)]
proof_path: Option<PathBuf>,
/// The path to save the calldata to
#[arg(long, default_value = DEFAULT_CALLDATA, value_hint = clap::ValueHint::FilePath)]
calldata_path: Option<PathBuf>,
        /// The address of the verification key contract (only used if the vk is rendered as a separate contract)
#[arg(long, value_hint = clap::ValueHint::Other)]
addr_vk: Option<H160Flag>,
},
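As a quick usage note, the integration tests later in this diff exercise this subcommand by spawning the built binary. The following sketch shows the same invocation, assuming an ezkl binary on PATH and a placeholder proof path:

use std::process::Command;

fn main() {
    // Encode an existing proof into EVM calldata; add --addr-vk as well when the
    // verifying key was rendered and deployed as a separate contract.
    let status = Command::new("ezkl")
        .args(["encode-evm-calldata", "--proof-path", "proof.pf"])
        .status()
        .expect("failed to run ezkl");
    assert!(status.success());
}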
#[cfg(not(target_arch = "wasm32"))]
/// Creates an Evm verifier for a single proof
#[command(name = "create-evm-verifier")]
CreateEvmVerifier {
/// The path to SRS, if None will use $EZKL_REPO_PATH/srs/kzg{logrows}.srs
#[arg(long)]
#[arg(long, value_hint = clap::ValueHint::FilePath)]
srs_path: Option<PathBuf>,
/// The path to load circuit settings .json file from (generated using the gen-settings command)
#[arg(short = 'S', long, default_value = DEFAULT_SETTINGS)]
#[arg(short = 'S', long, default_value = DEFAULT_SETTINGS, value_hint = clap::ValueHint::FilePath)]
settings_path: Option<PathBuf>,
/// The path to load the desired verification key file
#[arg(long, default_value = DEFAULT_VK)]
#[arg(long, default_value = DEFAULT_VK, value_hint = clap::ValueHint::FilePath)]
vk_path: Option<PathBuf>,
/// The path to output the Solidity code
#[arg(long, default_value = DEFAULT_SOL_CODE)]
#[arg(long, default_value = DEFAULT_SOL_CODE, value_hint = clap::ValueHint::FilePath)]
sol_code_path: Option<PathBuf>,
/// The path to output the Solidity verifier ABI
#[arg(long, default_value = DEFAULT_VERIFIER_ABI)]
#[arg(long, default_value = DEFAULT_VERIFIER_ABI, value_hint = clap::ValueHint::FilePath)]
abi_path: Option<PathBuf>,
/// Whether the verifier key should be rendered as a separate contract.
/// We recommend disabling selector compression if this is enabled.
@@ -624,19 +677,19 @@ pub enum Commands {
#[command(name = "create-evm-vk")]
CreateEvmVK {
/// The path to SRS, if None will use $EZKL_REPO_PATH/srs/kzg{logrows}.srs
#[arg(long)]
#[arg(long, value_hint = clap::ValueHint::FilePath)]
srs_path: Option<PathBuf>,
/// The path to load circuit settings .json file from (generated using the gen-settings command)
#[arg(short = 'S', long, default_value = DEFAULT_SETTINGS)]
#[arg(short = 'S', long, default_value = DEFAULT_SETTINGS, value_hint = clap::ValueHint::FilePath)]
settings_path: Option<PathBuf>,
/// The path to load the desired verification key file
#[arg(long, default_value = DEFAULT_VK)]
#[arg(long, default_value = DEFAULT_VK, value_hint = clap::ValueHint::FilePath)]
vk_path: Option<PathBuf>,
/// The path to output the Solidity code
#[arg(long, default_value = DEFAULT_VK_SOL)]
#[arg(long, default_value = DEFAULT_VK_SOL, value_hint = clap::ValueHint::FilePath)]
sol_code_path: Option<PathBuf>,
/// The path to output the Solidity verifier ABI
#[arg(long, default_value = DEFAULT_VK_ABI)]
#[arg(long, default_value = DEFAULT_VK_ABI, value_hint = clap::ValueHint::FilePath)]
abi_path: Option<PathBuf>,
},
#[cfg(not(target_arch = "wasm32"))]
@@ -644,21 +697,24 @@ pub enum Commands {
#[command(name = "create-evm-da")]
CreateEvmDataAttestation {
/// The path to load circuit settings .json file from (generated using the gen-settings command)
#[arg(short = 'S', long, default_value = DEFAULT_SETTINGS)]
#[arg(short = 'S', long, default_value = DEFAULT_SETTINGS, value_hint = clap::ValueHint::FilePath)]
settings_path: Option<PathBuf>,
/// The path to output the Solidity code
#[arg(long, default_value = DEFAULT_SOL_CODE_DA)]
#[arg(long, default_value = DEFAULT_SOL_CODE_DA, value_hint = clap::ValueHint::FilePath)]
sol_code_path: Option<PathBuf>,
/// The path to output the Solidity verifier ABI
#[arg(long, default_value = DEFAULT_VERIFIER_DA_ABI)]
#[arg(long, default_value = DEFAULT_VERIFIER_DA_ABI, value_hint = clap::ValueHint::FilePath)]
abi_path: Option<PathBuf>,
/// The path to the .json data file, which should
/// contain the necessary calldata and account addresses
/// needed to read from all the on-chain
/// view functions that return the data that the network
/// ingests as inputs.
#[arg(short = 'D', long, default_value = DEFAULT_DATA)]
#[arg(short = 'D', long, default_value = DEFAULT_DATA, value_hint = clap::ValueHint::FilePath)]
data: Option<PathBuf>,
/// The path to the witness file. This is needed for proof swapping for kzg commitments.
#[arg(short = 'W', long, default_value = DEFAULT_WITNESS, value_hint = clap::ValueHint::FilePath)]
witness: Option<PathBuf>,
},
#[cfg(not(target_arch = "wasm32"))]
@@ -666,22 +722,22 @@ pub enum Commands {
#[command(name = "create-evm-verifier-aggr")]
CreateEvmVerifierAggr {
/// The path to SRS, if None will use $EZKL_REPO_PATH/srs/kzg{logrows}.srs
#[arg(long)]
#[arg(long, value_hint = clap::ValueHint::FilePath)]
srs_path: Option<PathBuf>,
/// The path to load the desired verification key file
#[arg(long, default_value = DEFAULT_VK_AGGREGATED)]
#[arg(long, default_value = DEFAULT_VK_AGGREGATED, value_hint = clap::ValueHint::FilePath)]
vk_path: Option<PathBuf>,
/// The path to the Solidity code
#[arg(long, default_value = DEFAULT_SOL_CODE_AGGREGATED)]
#[arg(long, default_value = DEFAULT_SOL_CODE_AGGREGATED, value_hint = clap::ValueHint::FilePath)]
sol_code_path: Option<PathBuf>,
/// The path to output the Solidity verifier ABI
#[arg(long, default_value = DEFAULT_VERIFIER_AGGREGATED_ABI)]
#[arg(long, default_value = DEFAULT_VERIFIER_AGGREGATED_ABI, value_hint = clap::ValueHint::FilePath)]
abi_path: Option<PathBuf>,
// aggregated circuit settings paths, used to calculate the number of instances in the aggregate proof
#[arg(long, default_value = DEFAULT_SETTINGS, value_delimiter = ',', allow_hyphen_values = true)]
#[arg(long, default_value = DEFAULT_SETTINGS, value_delimiter = ',', allow_hyphen_values = true, value_hint = clap::ValueHint::FilePath)]
aggregation_settings: Vec<PathBuf>,
// logrows used for aggregation circuit
#[arg(long, default_value = DEFAULT_AGGREGATED_LOGROWS)]
#[arg(long, default_value = DEFAULT_AGGREGATED_LOGROWS, value_hint = clap::ValueHint::Other)]
logrows: Option<u32>,
/// Whether the verifier key should be rendered as a separate contract.
/// We recommend disabling selector compression if this is enabled.
@@ -692,16 +748,16 @@ pub enum Commands {
/// Verifies a proof, returning accept or reject
Verify {
/// The path to load circuit settings .json file from (generated using the gen-settings command)
#[arg(short = 'S', long, default_value = DEFAULT_SETTINGS)]
#[arg(short = 'S', long, default_value = DEFAULT_SETTINGS, value_hint = clap::ValueHint::FilePath)]
settings_path: Option<PathBuf>,
/// The path to the proof file (generated using the prove command)
#[arg(long, default_value = DEFAULT_PROOF)]
#[arg(long, default_value = DEFAULT_PROOF, value_hint = clap::ValueHint::FilePath)]
proof_path: Option<PathBuf>,
/// The path to the verification key file (generated using the setup command)
#[arg(long, default_value = DEFAULT_VK)]
#[arg(long, default_value = DEFAULT_VK, value_hint = clap::ValueHint::FilePath)]
vk_path: Option<PathBuf>,
/// The path to SRS, if None will use $EZKL_REPO_PATH/srs/kzg{logrows}.srs
#[arg(long)]
#[arg(long, value_hint = clap::ValueHint::FilePath)]
srs_path: Option<PathBuf>,
/// Reduce SRS logrows to the number of instances rather than the number of logrows used for proofs (only works if the srs were generated in the same ceremony)
#[arg(long, default_value = DEFAULT_USE_REDUCED_SRS_FOR_VERIFICATION, action = clap::ArgAction::SetTrue)]
@@ -710,60 +766,60 @@ pub enum Commands {
/// Verifies an aggregate proof, returning accept or reject
VerifyAggr {
/// The path to the proof file (generated using the prove command)
#[arg(long, default_value = DEFAULT_PROOF_AGGREGATED)]
#[arg(long, default_value = DEFAULT_PROOF_AGGREGATED, value_hint = clap::ValueHint::FilePath)]
proof_path: Option<PathBuf>,
/// The path to the verification key file (generated using the setup-aggregate command)
#[arg(long, default_value = DEFAULT_VK_AGGREGATED)]
#[arg(long, default_value = DEFAULT_VK_AGGREGATED, value_hint = clap::ValueHint::FilePath)]
vk_path: Option<PathBuf>,
/// reduced srs
#[arg(long, default_value = DEFAULT_USE_REDUCED_SRS_FOR_VERIFICATION, action = clap::ArgAction::SetTrue)]
reduced_srs: Option<bool>,
/// The path to SRS, if None will use $EZKL_REPO_PATH/srs/kzg{logrows}.srs
#[arg(long)]
#[arg(long, value_hint = clap::ValueHint::FilePath)]
srs_path: Option<PathBuf>,
/// logrows used for aggregation circuit
#[arg(long, default_value = DEFAULT_AGGREGATED_LOGROWS)]
#[arg(long, default_value = DEFAULT_AGGREGATED_LOGROWS, value_hint = clap::ValueHint::Other)]
logrows: Option<u32>,
/// commitment
#[arg(long, default_value = DEFAULT_COMMITMENT)]
#[arg(long, default_value = DEFAULT_COMMITMENT, value_hint = clap::ValueHint::Other)]
commitment: Option<Commitments>,
},
#[cfg(not(target_arch = "wasm32"))]
/// Deploys an evm verifier that is generated by ezkl
DeployEvmVerifier {
/// The path to the Solidity code (generated using the create-evm-verifier command)
#[arg(long, default_value = DEFAULT_SOL_CODE)]
#[arg(long, default_value = DEFAULT_SOL_CODE, value_hint = clap::ValueHint::FilePath)]
sol_code_path: Option<PathBuf>,
/// RPC URL for an Ethereum node, if None will use Anvil but WON'T persist state
#[arg(short = 'U', long)]
#[arg(short = 'U', long, value_hint = clap::ValueHint::Url)]
rpc_url: Option<String>,
#[arg(long, default_value = DEFAULT_CONTRACT_ADDRESS)]
#[arg(long, default_value = DEFAULT_CONTRACT_ADDRESS, value_hint = clap::ValueHint::Other)]
/// The path to output the contract address
addr_path: Option<PathBuf>,
/// The optimizer runs to set on the verifier. Lower values optimize for deployment cost, while higher values optimize for gas cost.
#[arg(long, default_value = DEFAULT_OPTIMIZER_RUNS)]
#[arg(long, default_value = DEFAULT_OPTIMIZER_RUNS, value_hint = clap::ValueHint::Other)]
optimizer_runs: usize,
/// Private secp256K1 key in hex format, 64 chars, no 0x prefix, of the account signing transactions. If None the private key will be generated by Anvil
#[arg(short = 'P', long)]
#[arg(short = 'P', long, value_hint = clap::ValueHint::Other)]
private_key: Option<String>,
},
#[cfg(not(target_arch = "wasm32"))]
/// Deploys an evm verifier that is generated by ezkl
DeployEvmVK {
/// The path to the Solidity code (generated using the create-evm-verifier command)
#[arg(long, default_value = DEFAULT_VK_SOL)]
#[arg(long, default_value = DEFAULT_VK_SOL, value_hint = clap::ValueHint::FilePath)]
sol_code_path: Option<PathBuf>,
/// RPC URL for an Ethereum node, if None will use Anvil but WON'T persist state
#[arg(short = 'U', long)]
#[arg(short = 'U', long, value_hint = clap::ValueHint::Url)]
rpc_url: Option<String>,
#[arg(long, default_value = DEFAULT_CONTRACT_ADDRESS_VK)]
#[arg(long, default_value = DEFAULT_CONTRACT_ADDRESS_VK, value_hint = clap::ValueHint::Other)]
/// The path to output the contract address
addr_path: Option<PathBuf>,
/// The optimizer runs to set on the verifier. Lower values optimize for deployment cost, while higher values optimize for gas cost.
#[arg(long, default_value = DEFAULT_OPTIMIZER_RUNS)]
#[arg(long, default_value = DEFAULT_OPTIMIZER_RUNS, value_hint = clap::ValueHint::Other)]
optimizer_runs: usize,
/// Private secp256K1 key in hex format, 64 chars, no 0x prefix, of the account signing transactions. If None the private key will be generated by Anvil
#[arg(short = 'P', long)]
#[arg(short = 'P', long, value_hint = clap::ValueHint::Other)]
private_key: Option<String>,
},
#[cfg(not(target_arch = "wasm32"))]
@@ -771,25 +827,25 @@ pub enum Commands {
#[command(name = "deploy-evm-da")]
DeployEvmDataAttestation {
/// The path to the .json data file, which should include both the network input (possibly private) and the network output (public input to the proof)
#[arg(short = 'D', long, default_value = DEFAULT_DATA)]
#[arg(short = 'D', long, default_value = DEFAULT_DATA, value_hint = clap::ValueHint::FilePath)]
data: Option<PathBuf>,
/// The path to load circuit settings .json file from (generated using the gen-settings command)
#[arg(long, default_value = DEFAULT_SETTINGS)]
#[arg(long, default_value = DEFAULT_SETTINGS, value_hint = clap::ValueHint::FilePath)]
settings_path: Option<PathBuf>,
/// The path to the Solidity code
#[arg(long, default_value = DEFAULT_SOL_CODE_DA)]
#[arg(long, default_value = DEFAULT_SOL_CODE_DA, value_hint = clap::ValueHint::FilePath)]
sol_code_path: Option<PathBuf>,
/// RPC URL for an Ethereum node, if None will use Anvil but WON'T persist state
#[arg(short = 'U', long)]
#[arg(short = 'U', long, value_hint = clap::ValueHint::Url)]
rpc_url: Option<String>,
#[arg(long, default_value = DEFAULT_CONTRACT_ADDRESS_DA)]
#[arg(long, default_value = DEFAULT_CONTRACT_ADDRESS_DA, value_hint = clap::ValueHint::FilePath)]
/// The path to output the contract address
addr_path: Option<PathBuf>,
/// The optimizer runs to set on the verifier. (Lower values optimize for deployment, while higher values optimize for execution)
#[arg(long, default_value = DEFAULT_OPTIMIZER_RUNS)]
#[arg(long, default_value = DEFAULT_OPTIMIZER_RUNS, value_hint = clap::ValueHint::Other)]
optimizer_runs: usize,
/// Private secp256K1 key in hex format, 64 chars, no 0x prefix, of the account signing transactions. If None the private key will be generated by Anvil
#[arg(short = 'P', long)]
#[arg(short = 'P', long, value_hint = clap::ValueHint::Other)]
private_key: Option<String>,
},
#[cfg(not(target_arch = "wasm32"))]
@@ -797,19 +853,32 @@ pub enum Commands {
#[command(name = "verify-evm")]
VerifyEvm {
/// The path to the proof file (generated using the prove command)
#[arg(long, default_value = DEFAULT_PROOF)]
#[arg(long, default_value = DEFAULT_PROOF, value_hint = clap::ValueHint::FilePath)]
proof_path: Option<PathBuf>,
        /// The verifier contract's address
#[arg(long, default_value = DEFAULT_CONTRACT_ADDRESS)]
#[arg(long, default_value = DEFAULT_CONTRACT_ADDRESS, value_hint = clap::ValueHint::Other)]
addr_verifier: H160Flag,
/// RPC URL for an Ethereum node, if None will use Anvil but WON'T persist state
#[arg(short = 'U', long)]
#[arg(short = 'U', long, value_hint = clap::ValueHint::Url)]
rpc_url: Option<String>,
        /// does the verifier use data attestation?
#[arg(long)]
#[arg(long, value_hint = clap::ValueHint::Other)]
addr_da: Option<H160Flag>,
        // is the vk rendered separately, if so specify an address
#[arg(long)]
#[arg(long, value_hint = clap::ValueHint::Other)]
addr_vk: Option<H160Flag>,
},
}
impl Commands {
/// Converts the commands to a json string
pub fn as_json(&self) -> String {
serde_json::to_string(self).unwrap()
}
/// Converts a json string to a Commands struct
pub fn from_json(json: &str) -> Self {
serde_json::from_str(json).unwrap()
}
}
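A short sketch of the round trip these helpers enable, assuming it lives in the same crate as Commands; the variant and field values are placeholders:

#[test]
fn commands_json_roundtrip_sketch() {
    // Serialize a fully specified command to JSON and parse it back; handy for
    // shuttling invocations between processes or language bindings.
    let cmd = Commands::EncodeEvmCalldata {
        proof_path: Some("proof.pf".into()),
        calldata_path: Some("calldata.bytes".into()),
        addr_vk: None,
    };
    let json = cmd.as_json();
    // from_json unwraps internally, so only feed it strings produced by as_json.
    let roundtrip = Commands::from_json(&json);
    assert_eq!(json, roundtrip.as_json());
}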

View File

@@ -23,7 +23,6 @@ use alloy::providers::fillers::{
use alloy::providers::network::{Ethereum, EthereumSigner};
use alloy::providers::ProviderBuilder;
use alloy::providers::{Identity, Provider, RootProvider};
use alloy::rpc::types::eth::BlockId;
use alloy::rpc::types::eth::TransactionInput;
use alloy::rpc::types::eth::TransactionRequest;
use alloy::signers::wallet::LocalWallet;
@@ -591,7 +590,7 @@ pub async fn verify_proof_via_solidity(
return Err(Box::new(EvmVerificationError::InvalidProof));
}
let gas = client.estimate_gas(&tx, BlockId::default()).await?;
let gas = client.estimate_gas(&tx).await?;
info!("estimated verify gas cost: {:#?}", gas);
@@ -725,7 +724,7 @@ pub async fn verify_proof_with_data_attestation(
debug!("transaction {:#?}", tx);
info!(
"estimated verify gas cost: {:#?}",
client.estimate_gas(&tx, BlockId::default()).await?
client.estimate_gas(&tx).await?
);
let result = client.call(&tx).await;
@@ -923,13 +922,15 @@ pub async fn get_contract_artifacts(
return Err(format!("file not found: {:#?}", sol_code_path).into());
}
let mut settings = SolcSettings::default();
settings.optimizer = Optimizer {
enabled: Some(true),
runs: Some(runs),
details: None,
let settings = SolcSettings {
optimizer: Optimizer {
enabled: Some(true),
runs: Some(runs),
details: None,
},
output_selection: OutputSelection::default_output_selection(),
..Default::default()
};
settings.output_selection = OutputSelection::default_output_selection();
let input = SolcInput::build(
std::collections::BTreeMap::from([(
@@ -965,6 +966,7 @@ pub async fn get_contract_artifacts(
pub fn fix_da_sol(
input_data: Option<Vec<CallsToAccount>>,
output_data: Option<Vec<CallsToAccount>>,
commitment_bytes: Option<Vec<u8>>,
) -> Result<String, Box<dyn Error>> {
let mut accounts_len = 0;
let mut contract = ATTESTDATA_SOL.to_string();
@@ -989,5 +991,21 @@ pub fn fix_da_sol(
}
contract = contract.replace("AccountCall[]", &format!("AccountCall[{}]", accounts_len));
if commitment_bytes.clone().is_some() && !commitment_bytes.clone().unwrap().is_empty() {
let commitment_bytes = commitment_bytes.unwrap();
let hex_string = hex::encode(commitment_bytes);
contract = contract.replace(
"bytes constant COMMITMENT_KZG = hex\"\";",
&format!("bytes constant COMMITMENT_KZG = hex\"{}\";", hex_string),
);
} else {
// Remove the SwapProofCommitments inheritance and the checkKzgCommits function call if no commitment is provided
contract = contract.replace(", SwapProofCommitments", "");
contract = contract.replace(
"require(checkKzgCommits(encoded), \"Invalid KZG commitments\");",
"",
);
}
Ok(contract)
}
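The commitment injection above is a plain string substitution on the Solidity template. A small self-contained sketch of the same transformation, using the hex crate that fix_da_sol already relies on (the byte values are arbitrary):

fn main() {
    // Hex-encode the commitment bytes and splice them into the COMMITMENT_KZG
    // constant, mirroring the replace call in fix_da_sol.
    let template = r#"bytes constant COMMITMENT_KZG = hex"";"#;
    let commitment_bytes: Vec<u8> = vec![0xde, 0xad, 0xbe, 0xef];
    let hex_string = hex::encode(&commitment_bytes);
    let patched = template.replace(
        r#"bytes constant COMMITMENT_KZG = hex"";"#,
        &format!(r#"bytes constant COMMITMENT_KZG = hex"{}";"#, hex_string),
    );
    assert_eq!(patched, r#"bytes constant COMMITMENT_KZG = hex"deadbeef";"#);
}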

View File

@@ -204,6 +204,18 @@ pub async fn run(command: Commands) -> Result<String, Box<dyn Error>> {
)
.await
}
#[cfg(not(target_arch = "wasm32"))]
Commands::EncodeEvmCalldata {
proof_path,
calldata_path,
addr_vk,
} => encode_evm_calldata(
proof_path.unwrap_or(DEFAULT_PROOF.into()),
calldata_path.unwrap_or(DEFAULT_CALLDATA.into()),
addr_vk,
)
.map(|e| serde_json::to_string(&e).unwrap()),
Commands::CreateEvmVK {
vk_path,
srs_path,
@@ -226,12 +238,14 @@ pub async fn run(command: Commands) -> Result<String, Box<dyn Error>> {
sol_code_path,
abi_path,
data,
witness,
} => {
create_evm_data_attestation(
settings_path.unwrap_or(DEFAULT_SETTINGS.into()),
sol_code_path.unwrap_or(DEFAULT_SOL_CODE_DA.into()),
abi_path.unwrap_or(DEFAULT_VERIFIER_DA_ABI.into()),
data.unwrap_or(DEFAULT_DATA.into()),
witness,
)
.await
}
@@ -683,7 +697,7 @@ pub(crate) async fn gen_witness(
// these aren't real values so the sanity checks are mostly meaningless
let mut circuit = GraphCircuit::load(compiled_circuit_path)?;
let data = GraphData::from_path(data)?;
let data: GraphData = GraphData::from_path(data)?;
let settings = circuit.settings().clone();
let vk = if let Some(vk) = vk_path {
@@ -1178,7 +1192,6 @@ pub(crate) async fn calibrate(
);
num_passed += 1;
} else {
error!("calibration failed {}", res.err().unwrap());
num_failed += 1;
}
@@ -1426,16 +1439,19 @@ pub(crate) async fn create_evm_data_attestation(
_sol_code_path: PathBuf,
_abi_path: PathBuf,
_input: PathBuf,
_witness: Option<PathBuf>,
) -> Result<String, Box<dyn Error>> {
#[allow(unused_imports)]
use crate::graph::{DataSource, VarVisibility};
use crate::{graph::Visibility, pfsys::get_proof_commitments};
let settings = GraphSettings::load(&settings_path)?;
let visibility = VarVisibility::from_args(&settings.run_args)?;
trace!("params computed");
let data = GraphData::from_path(_input)?;
// if input is not provided, we just instantiate dummy input data
let data = GraphData::from_path(_input).unwrap_or(GraphData::new(DataSource::File(vec![])));
let output_data = if let Some(DataSource::OnChain(source)) = data.output_data {
if visibility.output.is_private() {
@@ -1463,19 +1479,34 @@ pub(crate) async fn create_evm_data_attestation(
None
};
if input_data.is_some() || output_data.is_some() {
let output = fix_da_sol(input_data, output_data)?;
let mut f = File::create(_sol_code_path.clone())?;
let _ = f.write(output.as_bytes());
// fetch abi of the contract
let (abi, _, _) = get_contract_artifacts(_sol_code_path, "DataAttestation", 0).await?;
// save abi to file
serde_json::to_writer(std::fs::File::create(_abi_path)?, &abi)?;
        // Read the settings file and check whether run_args.input_visibility, run_args.output_visibility, or run_args.param_visibility is KZGCommit;
        // if so, we need to load the witness
let commitment_bytes = if settings.run_args.input_visibility == Visibility::KZGCommit
|| settings.run_args.output_visibility == Visibility::KZGCommit
|| settings.run_args.param_visibility == Visibility::KZGCommit
{
let witness = GraphWitness::from_path(_witness.unwrap_or(DEFAULT_WITNESS.into()))?;
let commitments = witness.get_polycommitments();
let proof_first_bytes = get_proof_commitments::<
KZGCommitmentScheme<Bn256>,
_,
EvmTranscript<G1Affine, _, _, _>,
>(&commitments);
Some(proof_first_bytes.unwrap())
} else {
return Err(
"Neither input or output data source is on-chain. Atleast one must be on chain.".into(),
);
}
None
};
let output = fix_da_sol(input_data, output_data, commitment_bytes)?;
let mut f = File::create(_sol_code_path.clone())?;
let _ = f.write(output.as_bytes());
// fetch abi of the contract
let (abi, _, _) = get_contract_artifacts(_sol_code_path, "DataAttestation", 0).await?;
// save abi to file
serde_json::to_writer(std::fs::File::create(_abi_path)?, &abi)?;
Ok(String::new())
}
@@ -1531,6 +1562,32 @@ pub(crate) async fn deploy_evm(
Ok(String::new())
}
/// Encodes the calldata for the EVM verifier (both aggregated and single proof)
pub(crate) fn encode_evm_calldata(
proof_path: PathBuf,
calldata_path: PathBuf,
addr_vk: Option<H160Flag>,
) -> Result<Vec<u8>, Box<dyn Error>> {
let snark = Snark::load::<IPACommitmentScheme<G1Affine>>(&proof_path)?;
let flattened_instances = snark.instances.into_iter().flatten();
let encoded = halo2_solidity_verifier::encode_calldata(
addr_vk
.as_ref()
.map(|x| alloy::primitives::Address::from(*x).0)
.map(|x| x.0),
&snark.proof,
&flattened_instances.collect::<Vec<_>>(),
);
log::debug!("Encoded calldata: {:?}", encoded);
File::create(calldata_path)?.write_all(encoded.as_slice())?;
Ok(encoded)
}
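A minimal in-crate usage sketch of the function above; the paths are placeholders and the proof file must already exist (e.g. produced by the prove command):

fn encode_calldata_sketch() -> Result<(), Box<dyn std::error::Error>> {
    // None here means the verifying key is not deployed as a separate contract.
    let calldata = encode_evm_calldata("proof.pf".into(), "calldata.bytes".into(), None)?;
    // The returned bytes are exactly what was written to the calldata file.
    assert_eq!(calldata, std::fs::read("calldata.bytes")?);
    Ok(())
}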
#[cfg(not(target_arch = "wasm32"))]
pub(crate) async fn verify_evm(
proof_path: PathBuf,
@@ -1913,10 +1970,6 @@ pub(crate) fn swap_proof_commitments_cmd(
let witness = GraphWitness::from_path(witness)?;
let commitments = witness.get_polycommitments();
if commitments.is_empty() {
log::warn!("no commitments found in witness");
}
let snark_new = swap_proof_commitments_polycommit(&snark, &commitments)?;
if snark_new.proof != *snark.proof {

View File

@@ -15,7 +15,7 @@ use super::{VarVisibility, Visibility};
/// poseidon len to hash in tree
pub const POSEIDON_LEN_GRAPH: usize = 32;
/// Poseidon number of instancess
/// Poseidon number of instances
pub const POSEIDON_INSTANCES: usize = 1;
/// Poseidon module type

View File

@@ -550,7 +550,7 @@ impl Node {
.collect::<Result<Vec<_>, Box<dyn Error>>>()?;
let homogenous_inputs = opkind.requires_homogenous_input_scales();
// autoamtically increases a constant's scale if it is only used once and
// automatically increases a constant's scale if it is only used once and
for input in homogenous_inputs
.into_iter()
.filter(|i| !deleted_indices.contains(i))

View File

@@ -72,7 +72,7 @@ impl ToFlags for Visibility {
impl<'a> From<&'a str> for Visibility {
fn from(s: &'a str) -> Self {
if s.contains("hashed/private") {
// split on last occurence of '/'
// split on last occurrence of '/'
let (_, outlets) = s.split_at(s.rfind('/').unwrap());
let outlets = outlets
.trim_start_matches('/')

View File

@@ -178,37 +178,37 @@ impl From<String> for Commitments {
#[derive(Debug, Args, Deserialize, Serialize, Clone, PartialEq, PartialOrd, ToFlags)]
pub struct RunArgs {
/// The tolerance for error on model outputs
#[arg(short = 'T', long, default_value = "0")]
#[arg(short = 'T', long, default_value = "0", value_hint = clap::ValueHint::Other)]
pub tolerance: Tolerance,
/// The denominator in the fixed point representation used when quantizing inputs
#[arg(short = 'S', long, default_value = "7", allow_hyphen_values = true)]
#[arg(short = 'S', long, default_value = "7", value_hint = clap::ValueHint::Other)]
pub input_scale: Scale,
/// The denominator in the fixed point representation used when quantizing parameters
#[arg(long, default_value = "7", allow_hyphen_values = true)]
#[arg(long, default_value = "7", value_hint = clap::ValueHint::Other)]
pub param_scale: Scale,
    /// if the scale is ever > scale_rebase_multiplier * input_scale then the scale is rebased to input_scale (this is a more advanced parameter, use with caution)
#[arg(long, default_value = "1")]
#[arg(long, default_value = "1", value_hint = clap::ValueHint::Other)]
pub scale_rebase_multiplier: u32,
/// The min and max elements in the lookup table input column
#[arg(short = 'B', long, value_parser = parse_key_val::<i64, i64>, default_value = "-32768->32768")]
pub lookup_range: Range,
/// The log_2 number of rows
#[arg(short = 'K', long, default_value = "17")]
#[arg(short = 'K', long, default_value = "17", value_hint = clap::ValueHint::Other)]
pub logrows: u32,
    /// The number of inner columns
#[arg(short = 'N', long, default_value = "2")]
#[arg(short = 'N', long, default_value = "2", value_hint = clap::ValueHint::Other)]
pub num_inner_cols: usize,
/// Hand-written parser for graph variables, eg. batch_size=1
#[arg(short = 'V', long, value_parser = parse_key_val::<String, usize>, default_value = "batch_size->1", value_delimiter = ',')]
#[arg(short = 'V', long, value_parser = parse_key_val::<String, usize>, default_value = "batch_size->1", value_delimiter = ',', value_hint = clap::ValueHint::Other)]
pub variables: Vec<(String, usize)>,
/// Flags whether inputs are public, private, fixed, hashed, polycommit
#[arg(long, default_value = "private")]
#[arg(long, default_value = "private", value_hint = clap::ValueHint::Other)]
pub input_visibility: Visibility,
/// Flags whether outputs are public, private, fixed, hashed, polycommit
#[arg(long, default_value = "public")]
#[arg(long, default_value = "public", value_hint = clap::ValueHint::Other)]
pub output_visibility: Visibility,
/// Flags whether params are fixed, private, hashed, polycommit
#[arg(long, default_value = "private")]
#[arg(long, default_value = "private", value_hint = clap::ValueHint::Other)]
pub param_visibility: Visibility,
#[arg(long, default_value = "false")]
/// Rebase the scale using lookup table for division instead of using a range check
@@ -217,10 +217,10 @@ pub struct RunArgs {
#[arg(long, default_value = "false")]
pub rebase_frac_zero_constants: bool,
/// check mode (safe, unsafe, etc)
#[arg(long, default_value = "unsafe")]
#[arg(long, default_value = "unsafe", value_hint = clap::ValueHint::Other)]
pub check_mode: CheckMode,
/// commitment scheme
#[arg(long, default_value = "kzg")]
#[arg(long, default_value = "kzg", value_hint = clap::ValueHint::Other)]
pub commitment: Option<Commitments>,
}

View File

@@ -21,7 +21,7 @@ pub enum EvmVerificationError {
/// EVM verify errors
#[error("evm deployment failed")]
Deploy,
/// Invalid Visibilit
/// Invalid Visibility
#[error("Invalid visibility")]
InvalidVisibility,
}

View File

@@ -627,6 +627,34 @@ pub fn swap_proof_commitments<
snark: &Snark<Scheme::Scalar, Scheme::Curve>,
commitments: &[Scheme::Curve],
) -> Result<Snark<Scheme::Scalar, Scheme::Curve>, Box<dyn Error>>
where
Scheme::Scalar: SerdeObject
+ PrimeField
+ FromUniformBytes<64>
+ WithSmallOrderMulGroup<3>
+ Ord
+ Serialize
+ DeserializeOwned,
Scheme::Curve: Serialize + DeserializeOwned,
{
let proof_first_bytes = get_proof_commitments::<Scheme, E, TW>(commitments)?;
let mut snark_new = snark.clone();
// swap the proof bytes for the new ones
snark_new.proof[..proof_first_bytes.len()].copy_from_slice(&proof_first_bytes);
snark_new.create_hex_proof();
Ok(snark_new)
}
/// Returns the bytes encoded proof commitments
pub fn get_proof_commitments<
Scheme: CommitmentScheme,
E: EncodedChallenge<Scheme::Curve>,
TW: TranscriptWriterBuffer<Vec<u8>, Scheme::Curve, E>,
>(
commitments: &[Scheme::Curve],
) -> Result<Vec<u8>, Box<dyn Error>>
where
Scheme::Scalar: SerdeObject
+ PrimeField
@@ -639,7 +667,7 @@ where
{
let mut transcript_new: TW = TranscriptWriterBuffer::<_, Scheme::Curve, _>::init(vec![]);
// polycommit commitments are the first set of points in the proof, this we'll always be the first set of advice
// polycommit commitments are the first set of points in the proof, this will always be the first set of advice
for commit in commitments {
transcript_new
.write_point(*commit)
@@ -648,12 +676,11 @@ where
let proof_first_bytes = transcript_new.finalize();
let mut snark_new = snark.clone();
// swap the proof bytes for the new ones
snark_new.proof[..proof_first_bytes.len()].copy_from_slice(&proof_first_bytes);
snark_new.create_hex_proof();
if commitments.is_empty() {
log::warn!("no commitments found in witness");
}
Ok(snark_new)
Ok(proof_first_bytes)
}
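The refactor above isolates transcript serialization in get_proof_commitments; the swap itself is just a prefix overwrite, since the polycommit points are the first writes into the transcript. A small self-contained sketch of that byte-level operation:

fn main() {
    // Replace the leading bytes of a proof with freshly serialized commitments,
    // leaving the remainder of the proof untouched.
    let mut proof: Vec<u8> = vec![0u8; 8];
    let proof_first_bytes: Vec<u8> = vec![1, 2, 3];
    proof[..proof_first_bytes.len()].copy_from_slice(&proof_first_bytes);
    assert_eq!(proof, vec![1, 2, 3, 0, 0, 0, 0, 0]);
}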
/// Swap the proof commitments to a new set in the proof for KZG

View File

@@ -466,7 +466,7 @@ fn buffer_to_felts(buffer: Vec<u8>) -> PyResult<Vec<String>> {
/// Arguments
/// -------
/// message: list[str]
/// List of field elements represnted as strings
/// List of field elements represented as strings
///
/// Returns
/// -------
@@ -1430,6 +1430,47 @@ fn verify_aggr(
Ok(true)
}
/// Creates encoded evm calldata from a proof file
///
/// Arguments
/// ---------
/// proof: str
/// Path to the proof file
///
/// calldata: str
/// Path to the calldata file to save
///
/// addr_vk: str
/// The address of the verification key contract (if the verifier key is to be rendered as a separate contract)
///
/// Returns
/// -------
/// vec[u8]
/// The encoded calldata
///
#[pyfunction(signature = (
proof=PathBuf::from(DEFAULT_PROOF),
calldata=PathBuf::from(DEFAULT_CALLDATA),
addr_vk=None,
))]
fn encode_evm_calldata<'a>(
proof: PathBuf,
calldata: PathBuf,
addr_vk: Option<&'a str>,
) -> Result<Vec<u8>, PyErr> {
let addr_vk = if let Some(addr_vk) = addr_vk {
let addr_vk = H160Flag::from(addr_vk);
Some(addr_vk)
} else {
None
};
crate::execute::encode_evm_calldata(proof, calldata, addr_vk).map_err(|e| {
let err_str = format!("Failed to generate calldata: {}", e);
PyRuntimeError::new_err(err_str)
})
}
/// Creates an EVM compatible verifier, you will need solc installed in your environment to run this
///
/// Arguments
@@ -1517,6 +1558,7 @@ fn create_evm_verifier(
settings_path=PathBuf::from(DEFAULT_SETTINGS),
sol_code_path=PathBuf::from(DEFAULT_SOL_CODE_DA),
abi_path=PathBuf::from(DEFAULT_VERIFIER_DA_ABI),
witness_path=None,
))]
fn create_evm_data_attestation(
py: Python,
@@ -1524,6 +1566,7 @@ fn create_evm_data_attestation(
settings_path: PathBuf,
sol_code_path: PathBuf,
abi_path: PathBuf,
witness_path: Option<PathBuf>,
) -> PyResult<Bound<'_, PyAny>> {
pyo3_asyncio::tokio::future_into_py(py, async move {
crate::execute::create_evm_data_attestation(
@@ -1531,6 +1574,7 @@ fn create_evm_data_attestation(
sol_code_path,
abi_path,
input_data,
witness_path,
)
.await
.map_err(|e| {
@@ -1888,6 +1932,6 @@ fn ezkl(_py: Python<'_>, m: &PyModule) -> PyResult<()> {
m.add_function(wrap_pyfunction!(setup_test_evm_witness, m)?)?;
m.add_function(wrap_pyfunction!(create_evm_verifier_aggr, m)?)?;
m.add_function(wrap_pyfunction!(create_evm_data_attestation, m)?)?;
m.add_function(wrap_pyfunction!(encode_evm_calldata, m)?)?;
Ok(())
}

View File

@@ -1015,7 +1015,7 @@ mod native_tests {
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
let _anvil_child = crate::native_tests::start_anvil(true, hardfork);
kzg_evm_on_chain_input_prove_and_verify(path, test.to_string(), "on-chain", "file", "public", "private");
kzg_evm_on_chain_input_prove_and_verify(path, test.to_string(), "on-chain", "file", "public", "private", "private");
// test_dir.close().unwrap();
}
@@ -1025,7 +1025,7 @@ mod native_tests {
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
let _anvil_child = crate::native_tests::start_anvil(true, Hardfork::Latest);
kzg_evm_on_chain_input_prove_and_verify(path, test.to_string(), "file", "on-chain", "private", "public");
kzg_evm_on_chain_input_prove_and_verify(path, test.to_string(), "file", "on-chain", "private", "public", "private");
// test_dir.close().unwrap();
}
@@ -1035,7 +1035,7 @@ mod native_tests {
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
let _anvil_child = crate::native_tests::start_anvil(true, Hardfork::Latest);
kzg_evm_on_chain_input_prove_and_verify(path, test.to_string(), "on-chain", "on-chain", "public", "public");
kzg_evm_on_chain_input_prove_and_verify(path, test.to_string(), "on-chain", "on-chain", "public", "public", "private");
test_dir.close().unwrap();
}
@@ -1045,7 +1045,25 @@ mod native_tests {
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
let _anvil_child = crate::native_tests::start_anvil(true, Hardfork::Latest);
kzg_evm_on_chain_input_prove_and_verify(path, test.to_string(), "on-chain", "on-chain", "hashed", "hashed");
kzg_evm_on_chain_input_prove_and_verify(path, test.to_string(), "on-chain", "on-chain", "hashed", "hashed", "private");
test_dir.close().unwrap();
}
#(#[test_case(TESTS_ON_CHAIN_INPUT[N])])*
fn kzg_evm_on_chain_input_kzg_output_kzg_params_prove_and_verify_(test: &str) {
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
let _anvil_child = crate::native_tests::start_anvil(true, Hardfork::Latest);
kzg_evm_on_chain_input_prove_and_verify(path, test.to_string(), "on-chain", "file", "public", "polycommit", "polycommit");
test_dir.close().unwrap();
}
#(#[test_case(TESTS_ON_CHAIN_INPUT[N])])*
fn kzg_evm_on_chain_output_kzg_input_kzg_params_prove_and_verify_(test: &str) {
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
let _anvil_child = crate::native_tests::start_anvil(true, Hardfork::Latest);
kzg_evm_on_chain_input_prove_and_verify(path, test.to_string(), "file", "on-chain", "polycommit", "public", "polycommit");
test_dir.close().unwrap();
}
});
@@ -1814,6 +1832,18 @@ mod native_tests {
let settings_arg = format!("{}/{}/settings.json", test_dir, example_name);
let private_key = format!("--private-key={}", *ANVIL_DEFAULT_PRIVATE_KEY);
// create encoded calldata
let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
.args([
"encode-evm-calldata",
"--proof-path",
&format!("{}/{}/aggr.pf", test_dir, example_name),
])
.status()
.expect("failed to execute process");
assert!(status.success());
let base_args = vec![
"create-evm-verifier-aggr",
"--vk-path",
@@ -2036,6 +2066,18 @@ mod native_tests {
let addr_path_arg = format!("--addr-path={}/{}/addr.txt", test_dir, example_name);
let settings_arg = format!("--settings-path={}", settings_path);
// create encoded calldata
let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
.args([
"encode-evm-calldata",
"--proof-path",
&format!("{}/{}/proof.pf", test_dir, example_name),
])
.status()
.expect("failed to execute process");
assert!(status.success());
// create the verifier
let mut args = vec!["create-evm-verifier", "--vk-path", &vk_arg, &settings_arg];
@@ -2205,6 +2247,19 @@ mod native_tests {
let deployed_addr_arg_vk = format!("--addr-vk={}", addr_vk);
// create encoded calldata
let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
.args([
"encode-evm-calldata",
"--proof-path",
&format!("{}/{}/proof.pf", test_dir, example_name),
&deployed_addr_arg_vk,
])
.status()
.expect("failed to execute process");
assert!(status.success());
// now verify the proof
let pf_arg = format!("{}/{}/proof.pf", test_dir, example_name);
let mut args = vec![
@@ -2254,12 +2309,13 @@ mod native_tests {
output_source: &str,
input_visibility: &str,
output_visibility: &str,
param_visibility: &str,
) {
gen_circuit_settings_and_witness(
test_dir,
example_name.clone(),
input_visibility,
"private",
param_visibility,
output_visibility,
1,
"resources",
@@ -2376,6 +2432,18 @@ mod native_tests {
let settings_arg = format!("--settings-path={}", settings_path);
// create encoded calldata
let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
.args([
"encode-evm-calldata",
"--proof-path",
&format!("{}/{}/proof.pf", test_dir, example_name),
])
.status()
.expect("failed to execute process");
assert!(status.success());
// create the verifier
let mut args = vec!["create-evm-verifier", "--vk-path", &vk_arg, &settings_arg];
@@ -2413,15 +2481,23 @@ mod native_tests {
let sol_arg = format!("{}/{}/kzg.sol", test_dir, example_name);
let mut create_da_args = vec![
"create-evm-da",
&settings_arg,
"--sol-code-path",
sol_arg.as_str(),
"-W",
&witness_path,
];
        // if there is an on-chain source we add the data
if input_source != "file" || output_source != "file" {
create_da_args.push("-D");
create_da_args.push(test_on_chain_data_path.as_str());
}
let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
.args([
"create-evm-da",
&settings_arg,
"--sol-code-path",
sol_arg.as_str(),
"-D",
test_on_chain_data_path.as_str(),
])
.args(&create_da_args)
.status()
.expect("failed to execute process");
assert!(status.success());

View File

@@ -402,6 +402,15 @@ async def test_create_evm_verifier():
settings_path = os.path.join(folder_path, 'settings.json')
sol_code_path = os.path.join(folder_path, 'test.sol')
abi_path = os.path.join(folder_path, 'test.abi')
proof_path = os.path.join(folder_path, 'test_evm.pf')
calldata_path = os.path.join(folder_path, 'calldata.bytes')
# res is now a vector of bytes
res = ezkl.encode_evm_calldata(proof_path, calldata_path)
assert os.path.isfile(calldata_path)
assert len(res) > 0
res = await ezkl.create_evm_verifier(
vk_path,