Compare commits

...

8 Commits

Author SHA1 Message Date
github-actions[bot] de33fb8671 ci: update version string in docs 2024-07-11 23:33:35 +00:00
dante 6855ea1947 feat: parallel polynomial reads in halo2 (#826) 2024-07-12 00:33:14 +01:00
dante 2ca57bde2c chore: bump tract (#823) 2024-06-29 01:53:18 +01:00
Ethan Cemer 390de88194 feat: create_evm_vk python bindings (#818) 2024-06-23 22:21:59 -04:00
dante cd91f0af26 chore: allow for float lookup safety margins (#817) 2024-06-19 09:12:01 -04:00
dante 4771192823 fix: more verbose io / rw errors (#815) 2024-06-14 12:31:50 -04:00
dante a863ccc868 chore: update cmd feature flag (#814) 2024-06-11 16:07:49 -04:00
Ethan Cemer 8e6ccc863d feat: all file source kzg commit DA (#812) 2024-06-11 09:32:54 -04:00
18 changed files with 559 additions and 347 deletions

View File

@@ -345,6 +345,8 @@ jobs:
run: cargo nextest run --release --verbose tests_evm::kzg_evm_on_chain_input_kzg_output_kzg_params_prove_and_verify --test-threads 1
- name: KZG prove and verify tests (EVM + on chain outputs & kzg inputs + params)
run: cargo nextest run --release --verbose tests_evm::kzg_evm_on_chain_output_kzg_input_kzg_params_prove_and_verify --test-threads 1
- name: KZG prove and verify tests (EVM + on chain all kzg)
run: cargo nextest run --release --verbose tests_evm::kzg_evm_on_chain_all_kzg_params_prove_and_verify --test-threads 1
- name: KZG prove and verify tests (EVM + on chain inputs & outputs hashes)
run: cargo nextest run --release --verbose tests_evm::kzg_evm_on_chain_input_output_hashed_prove_and_verify --test-threads 1
- name: KZG prove and verify tests (EVM)

Cargo.lock generated (163 lines changed)
View File

@@ -504,6 +504,12 @@ dependencies = [
"libc",
]
[[package]]
name = "anes"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299"
[[package]]
name = "ansi-str"
version = "0.8.0"
@@ -897,17 +903,6 @@ dependencies = [
"syn 2.0.53",
]
[[package]]
name = "atty"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
dependencies = [
"hermit-abi 0.1.19",
"libc",
"winapi",
]
[[package]]
name = "auto_impl"
version = "1.2.0"
@@ -1176,14 +1171,30 @@ dependencies = [
]
[[package]]
name = "clap"
version = "2.34.0"
name = "ciborium"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c"
checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e"
dependencies = [
"bitflags 1.3.2",
"textwrap",
"unicode-width",
"ciborium-io",
"ciborium-ll",
"serde",
]
[[package]]
name = "ciborium-io"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757"
[[package]]
name = "ciborium-ll"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9"
dependencies = [
"ciborium-io",
"half 2.4.1",
]
[[package]]
@@ -1214,7 +1225,7 @@ version = "4.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd79504325bf38b10165b02e89b4347300f855f273c4cb30c4a3209e6583275e"
dependencies = [
"clap 4.5.3",
"clap",
]
[[package]]
@@ -1373,24 +1384,24 @@ dependencies = [
[[package]]
name = "criterion"
version = "0.3.6"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b01d6de93b2b6c65e17c634a26653a29d107b3c98c607c765bf38d041531cd8f"
checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f"
dependencies = [
"atty",
"anes",
"cast",
"clap 2.34.0",
"ciborium",
"clap",
"criterion-plot",
"csv",
"is-terminal",
"itertools 0.10.5",
"lazy_static",
"num-traits",
"once_cell",
"oorandom",
"plotters",
"rayon",
"regex",
"serde",
"serde_cbor",
"serde_derive",
"serde_json",
"tinytemplate",
@@ -1399,9 +1410,9 @@ dependencies = [
[[package]]
name = "criterion-plot"
version = "0.4.5"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2673cc8207403546f45f5fd319a974b1e6983ad1a3ee7e6041650013be041876"
checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1"
dependencies = [
"cast",
"itertools 0.10.5",
@@ -1469,27 +1480,6 @@ dependencies = [
"typenum",
]
[[package]]
name = "csv"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac574ff4d437a7b5ad237ef331c17ccca63c46479e5b5453eb8e10bb99a759fe"
dependencies = [
"csv-core",
"itoa",
"ryu",
"serde",
]
[[package]]
name = "csv-core"
version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5efa2b3d7902f4b634a20cae3c9c4e6209dc4779feb6863329607560143efa70"
dependencies = [
"memchr",
]
[[package]]
name = "cuda-config"
version = "0.1.0"
@@ -1837,10 +1827,9 @@ name = "ezkl"
version = "0.0.0"
dependencies = [
"alloy",
"ark-std 0.3.0",
"bincode",
"chrono",
"clap 4.5.3",
"clap",
"clap_complete",
"colored",
"colored_json",
@@ -1850,7 +1839,6 @@ dependencies = [
"env_logger",
"ethabi",
"foundry-compilers",
"futures-util",
"gag",
"getrandom",
"halo2_gadgets",
@@ -1870,7 +1858,6 @@ dependencies = [
"objc",
"openssl",
"pg_bigdecimal",
"plotters",
"portable-atomic",
"pyo3",
"pyo3-asyncio",
@@ -1893,7 +1880,6 @@ dependencies = [
"thiserror",
"tokio",
"tokio-postgres",
"tokio-util",
"tosubcommand",
"tract-onnx",
"unzip-n",
@@ -2267,10 +2253,11 @@ checksum = "1b43ede17f21864e81be2fa654110bf1e793774238d86ef8555c37e6519c0403"
[[package]]
name = "half"
version = "2.2.1"
version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "02b4af3693f1b705df946e9fe5631932443781d0aabb423b62fcd4d73f6d2fd0"
checksum = "6dd08c532ae367adf81c312a4580bc67f1d0fe8bc9c460520283f4c0ff277888"
dependencies = [
"cfg-if",
"crunchy",
"num-traits",
]
@@ -2295,7 +2282,7 @@ dependencies = [
[[package]]
name = "halo2_proofs"
version = "0.3.0"
source = "git+https://github.com/zkonduit/halo2?branch=ac/optional-selector-poly#54f54453cf186aa5d89579c4e7663f9a27cfb89a?branch=ac/optional-selector-poly#54f54453cf186aa5d89579c4e7663f9a27cfb89a"
source = "git+https://github.com/zkonduit/halo2#8cfca221f53069a0374687654882b99e729041d7#8cfca221f53069a0374687654882b99e729041d7"
dependencies = [
"blake2b_simd",
"env_logger",
@@ -2456,15 +2443,6 @@ version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
[[package]]
name = "hermit-abi"
version = "0.1.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
dependencies = [
"libc",
]
[[package]]
name = "hermit-abi"
version = "0.3.9"
@@ -2784,7 +2762,7 @@ version = "0.4.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f23ff5ef2b80d608d61efee834934d862cd92461afc0560dedf493e4c033738b"
dependencies = [
"hermit-abi 0.3.9",
"hermit-abi",
"libc",
"windows-sys 0.52.0",
]
@@ -3309,7 +3287,7 @@ version = "1.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43"
dependencies = [
"hermit-abi 0.3.9",
"hermit-abi",
"libc",
]
@@ -3410,9 +3388,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
[[package]]
name = "openssl-src"
version = "300.2.3+3.2.1"
version = "300.3.1+3.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5cff92b6f71555b61bb9315f7c64da3ca43d87531622120fea0195fc761b4843"
checksum = "7259953d42a81bf137fbbd73bd30a8e1914d6dce43c2b90ed575783a22608b91"
dependencies = [
"cc",
]
@@ -4126,9 +4104,9 @@ checksum = "60a357793950651c4ed0f3f52338f53b2f809f32d83a07f72909fa13e4c6c1e3"
[[package]]
name = "rayon"
version = "1.9.0"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e4963ed1bc86e4f3ee217022bd855b297cef07fb9eac5dfa1f788b220b49b3bd"
checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa"
dependencies = [
"either",
"rayon-core",
@@ -5155,15 +5133,6 @@ dependencies = [
"syn 1.0.109",
]
[[package]]
name = "textwrap"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060"
dependencies = [
"unicode-width",
]
[[package]]
name = "thiserror"
version = "1.0.58"
@@ -5463,8 +5432,8 @@ dependencies = [
[[package]]
name = "tract-core"
version = "0.21.5-pre"
source = "git+https://github.com/sonos/tract/?rev=05ebf550aa9922b221af4635c21a67a8d2af12a9#05ebf550aa9922b221af4635c21a67a8d2af12a9"
version = "0.21.6-pre"
source = "git+https://github.com/sonos/tract/?rev=7bf303b2ae9bddd5fa6951ae95848c0d52fb7f50#7bf303b2ae9bddd5fa6951ae95848c0d52fb7f50"
dependencies = [
"anyhow",
"bit-set",
@@ -5487,13 +5456,14 @@ dependencies = [
[[package]]
name = "tract-data"
version = "0.21.5-pre"
source = "git+https://github.com/sonos/tract/?rev=05ebf550aa9922b221af4635c21a67a8d2af12a9#05ebf550aa9922b221af4635c21a67a8d2af12a9"
version = "0.21.6-pre"
source = "git+https://github.com/sonos/tract/?rev=7bf303b2ae9bddd5fa6951ae95848c0d52fb7f50#7bf303b2ae9bddd5fa6951ae95848c0d52fb7f50"
dependencies = [
"anyhow",
"downcast-rs",
"dyn-clone",
"dyn-hash",
"half 2.2.1",
"half 2.4.1",
"itertools 0.12.1",
"lazy_static",
"maplit",
@@ -5508,8 +5478,8 @@ dependencies = [
[[package]]
name = "tract-hir"
version = "0.21.5-pre"
source = "git+https://github.com/sonos/tract/?rev=05ebf550aa9922b221af4635c21a67a8d2af12a9#05ebf550aa9922b221af4635c21a67a8d2af12a9"
version = "0.21.6-pre"
source = "git+https://github.com/sonos/tract/?rev=7bf303b2ae9bddd5fa6951ae95848c0d52fb7f50#7bf303b2ae9bddd5fa6951ae95848c0d52fb7f50"
dependencies = [
"derive-new",
"log",
@@ -5518,21 +5488,22 @@ dependencies = [
[[package]]
name = "tract-linalg"
version = "0.21.5-pre"
source = "git+https://github.com/sonos/tract/?rev=05ebf550aa9922b221af4635c21a67a8d2af12a9#05ebf550aa9922b221af4635c21a67a8d2af12a9"
version = "0.21.6-pre"
source = "git+https://github.com/sonos/tract/?rev=7bf303b2ae9bddd5fa6951ae95848c0d52fb7f50#7bf303b2ae9bddd5fa6951ae95848c0d52fb7f50"
dependencies = [
"cc",
"derive-new",
"downcast-rs",
"dyn-clone",
"dyn-hash",
"half 2.2.1",
"half 2.4.1",
"lazy_static",
"liquid",
"liquid-core",
"log",
"num-traits",
"paste",
"rayon",
"scan_fmt",
"smallvec",
"time",
@@ -5543,8 +5514,8 @@ dependencies = [
[[package]]
name = "tract-nnef"
version = "0.21.5-pre"
source = "git+https://github.com/sonos/tract/?rev=05ebf550aa9922b221af4635c21a67a8d2af12a9#05ebf550aa9922b221af4635c21a67a8d2af12a9"
version = "0.21.6-pre"
source = "git+https://github.com/sonos/tract/?rev=7bf303b2ae9bddd5fa6951ae95848c0d52fb7f50#7bf303b2ae9bddd5fa6951ae95848c0d52fb7f50"
dependencies = [
"byteorder",
"flate2",
@@ -5557,8 +5528,8 @@ dependencies = [
[[package]]
name = "tract-onnx"
version = "0.21.5-pre"
source = "git+https://github.com/sonos/tract/?rev=05ebf550aa9922b221af4635c21a67a8d2af12a9#05ebf550aa9922b221af4635c21a67a8d2af12a9"
version = "0.21.6-pre"
source = "git+https://github.com/sonos/tract/?rev=7bf303b2ae9bddd5fa6951ae95848c0d52fb7f50#7bf303b2ae9bddd5fa6951ae95848c0d52fb7f50"
dependencies = [
"bytes",
"derive-new",
@@ -5574,8 +5545,8 @@ dependencies = [
[[package]]
name = "tract-onnx-opl"
version = "0.21.5-pre"
source = "git+https://github.com/sonos/tract/?rev=05ebf550aa9922b221af4635c21a67a8d2af12a9#05ebf550aa9922b221af4635c21a67a8d2af12a9"
version = "0.21.6-pre"
source = "git+https://github.com/sonos/tract/?rev=7bf303b2ae9bddd5fa6951ae95848c0d52fb7f50#7bf303b2ae9bddd5fa6951ae95848c0d52fb7f50"
dependencies = [
"getrandom",
"log",

View File

@@ -39,7 +39,6 @@ snark-verifier = { git = "https://github.com/zkonduit/snark-verifier", branch =
halo2_solidity_verifier = { git = "https://github.com/alexander-camuto/halo2-solidity-verifier", branch = "main" }
maybe-rayon = { version = "0.1.1", default_features = false }
bincode = { version = "1.3.3", default_features = false }
ark-std = { version = "^0.3.0", default-features = false }
unzip-n = "0.1.2"
num = "0.4.1"
portable-atomic = "1.6.0"
@@ -63,16 +62,13 @@ reqwest = { version = "0.12.4", default-features = false, features = [
openssl = { version = "0.10.55", features = ["vendored"] }
tokio-postgres = "0.7.10"
pg_bigdecimal = "0.1.5"
futures-util = "0.3.30"
lazy_static = "1.4.0"
colored_json = { version = "3.0.1", default_features = false, optional = true }
plotters = { version = "0.3.0", default_features = false, optional = true }
regex = { version = "1", default_features = false }
tokio = { version = "1.35", default_features = false, features = [
"macros",
"rt-multi-thread"
] }
tokio-util = { version = "0.7.9", features = ["codec"] }
pyo3 = { version = "0.21.2", features = [
"extension-module",
"abi3-py37",
@@ -83,9 +79,8 @@ pyo3-asyncio = { git = "https://github.com/jopemachine/pyo3-asyncio/", branch="m
"tokio-runtime",
], default_features = false, optional = true }
pyo3-log = { version = "0.10.0", default_features = false, optional = true }
tract-onnx = { git = "https://github.com/sonos/tract/", rev = "05ebf550aa9922b221af4635c21a67a8d2af12a9", default_features = false, optional = true }
tract-onnx = { git = "https://github.com/sonos/tract/", rev = "7bf303b2ae9bddd5fa6951ae95848c0d52fb7f50", default_features = false, optional = true }
tabled = { version = "0.12.0", optional = true }
objc = { version = "0.2.4", optional = true }
@@ -108,8 +103,10 @@ console_error_panic_hook = "0.1.7"
wasm-bindgen-console-logger = "0.1.1"
[target.'cfg(not(all(target_arch = "wasm32", target_os = "unknown")))'.dev-dependencies]
criterion = { version = "0.5.1", features = ["html_reports"] }
[dev-dependencies]
criterion = { version = "0.3", features = ["html_reports"] }
tempfile = "3.3.0"
lazy_static = "1.4.0"
mnist = "0.5"
@@ -180,7 +177,7 @@ required-features = ["ezkl"]
[features]
web = ["wasm-bindgen-rayon"]
default = ["ezkl", "mv-lookup", "no-banner"]
default = ["ezkl", "mv-lookup", "no-banner", "parallel-poly-read"]
onnx = ["dep:tract-onnx"]
python-bindings = ["pyo3", "pyo3-log", "pyo3-asyncio"]
ezkl = [
@@ -194,6 +191,7 @@ ezkl = [
"colored_json",
"halo2_proofs/circuit-params",
]
parallel-poly-read = ["halo2_proofs/parallel-poly-read"]
mv-lookup = [
"halo2_proofs/mv-lookup",
"snark-verifier/mv-lookup",
@@ -203,6 +201,7 @@ det-prove = []
icicle = ["halo2_proofs/icicle_gpu"]
empty-cmd = []
no-banner = []
no-update = []
metal = ["dep:metal", "dep:objc"]
# icicle patch to 0.1.0 if feature icicle is enabled
@@ -210,7 +209,7 @@ metal = ["dep:metal", "dep:objc"]
icicle = { git = "https://github.com/ingonyama-zk/icicle?rev=45b00fb", package = "icicle", branch = "fix/vhnat/ezkl-build-fix" }
[patch.'https://github.com/zkonduit/halo2']
halo2_proofs = { git = "https://github.com/zkonduit/halo2?branch=ac/optional-selector-poly#54f54453cf186aa5d89579c4e7663f9a27cfb89a", package = "halo2_proofs", branch = "ac/optional-selector-poly" }
halo2_proofs = { git = "https://github.com/zkonduit/halo2#8cfca221f53069a0374687654882b99e729041d7", package = "halo2_proofs" }
[profile.release]

View File

@@ -93,9 +93,6 @@ contract LoadInstances {
}
}
// Contract that checks that the COMMITMENT_KZG bytes is equal to the first part of the proof.
pragma solidity ^0.8.0;
// The kzg commitments of a given model, all aggregated into a single bytes array.
// At solidity generation time, the commitments are hardcoded into the contract via the COMMITMENT_KZG constant.
// It will be used to check that the proof commitments match the expected commitments.
@@ -163,7 +160,7 @@ contract SwapProofCommitments {
}
return equal; // Return true if the commitment comparison passed
}
} /// end checkKzgCommits
}
// This contract serves as a Data Attestation Verifier for the EZKL model.

View File

@@ -1,4 +1,4 @@
ezkl==0.0.0
ezkl==11.6.0
sphinx
sphinx-rtd-theme
sphinxcontrib-napoleon

View File

@@ -1,7 +1,7 @@
import ezkl
project = 'ezkl'
release = '0.0.0'
release = '11.6.0'
version = release

View File

@@ -39,7 +39,7 @@
"import json\n",
"import numpy as np\n",
"from sklearn.svm import SVC\n",
"import sk2torch\n",
"from hummingbird.ml import convert\n",
"import torch\n",
"import ezkl\n",
"import os\n",
@@ -59,11 +59,11 @@
"# Train an SVM on the data and wrap it in PyTorch.\n",
"sk_model = SVC(probability=True)\n",
"sk_model.fit(xs, ys)\n",
"model = sk2torch.wrap(sk_model)\n",
"\n",
"model = convert(sk_model, \"torch\").model\n",
"\n",
"\n",
"\n",
"model\n",
"\n"
]
},
@@ -84,33 +84,6 @@
"data_path = os.path.join('input.json')"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "7f0ca328",
"metadata": {},
"outputs": [],
"source": [
"import matplotlib.pyplot as plt\n",
"# Create a coordinate grid to compute a vector field on.\n",
"spaced = np.linspace(-2, 2, num=25)\n",
"grid_xs = torch.tensor([[x, y] for x in spaced for y in spaced], requires_grad=True)\n",
"\n",
"\n",
"# Compute the gradients of the SVM output.\n",
"outputs = model.predict_proba(grid_xs)[:, 1]\n",
"(input_grads,) = torch.autograd.grad(outputs.sum(), (grid_xs,))\n",
"\n",
"\n",
"# Create a quiver plot of the vector field.\n",
"plt.quiver(\n",
" grid_xs[:, 0].detach().numpy(),\n",
" grid_xs[:, 1].detach().numpy(),\n",
" input_grads[:, 0].detach().numpy(),\n",
" input_grads[:, 1].detach().numpy(),\n",
")\n"
]
},
{
"cell_type": "code",
"execution_count": null,
@@ -119,14 +92,14 @@
"outputs": [],
"source": [
"\n",
"\n",
"spaced = np.linspace(-2, 2, num=25)\n",
"grid_xs = torch.tensor([[x, y] for x in spaced for y in spaced], requires_grad=True)\n",
"# export to onnx format\n",
"# !!!!!!!!!!!!!!!!! This will flash a warning but it is fine !!!!!!!!!!!!!!!!!!!!!\n",
"\n",
"# Input to the model\n",
"shape = xs.shape[1:]\n",
"x = grid_xs[0:1]\n",
"torch_out = model.predict(x)\n",
"# Export the model\n",
"torch.onnx.export(model, # model being run\n",
" # model input (or a tuple for multiple inputs)\n",
@@ -143,9 +116,7 @@
"\n",
"d = ((x).detach().numpy()).reshape([-1]).tolist()\n",
"\n",
"data = dict(input_shapes=[shape],\n",
" input_data=[d],\n",
" output_data=[o.reshape([-1]).tolist() for o in torch_out])\n",
"data = dict(input_data=[d])\n",
"\n",
"# Serialize data into file:\n",
"json.dump(data, open(\"input.json\", 'w'))\n"
@@ -167,6 +138,7 @@
{
"cell_type": "code",
"execution_count": null,
"id": "0bee4d7f",
"metadata": {},
"outputs": [],
"source": [
@@ -220,7 +192,7 @@
},
{
"cell_type": "code",
"execution_count": 10,
"execution_count": null,
"id": "b1c561a8",
"metadata": {},
"outputs": [],
@@ -441,9 +413,9 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.15"
"version": "3.12.3"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
}

View File

@@ -379,9 +379,9 @@ pub enum Commands {
#[arg(long = "target", default_value = DEFAULT_CALIBRATION_TARGET, value_hint = clap::ValueHint::Other)]
/// Target for calibration. Set to "resources" to optimize for computational resources. Otherwise, set to "accuracy" to optimize for accuracy.
target: CalibrationTarget,
/// the lookup safety margin to use for calibration. if the max lookup is 2^k, then the max lookup will be 2^k * lookup_safety_margin. larger = safer but slower
/// the lookup safety margin to use for calibration. if the max lookup is 2^k, then the max lookup will be ceil(2^k * lookup_safety_margin). larger = safer but slower
#[arg(long, default_value = DEFAULT_LOOKUP_SAFETY_MARGIN, value_hint = clap::ValueHint::Other)]
lookup_safety_margin: i64,
lookup_safety_margin: f64,
/// Optional scales to specifically try for calibration. Example, --scales 0,4
#[arg(long, value_delimiter = ',', allow_hyphen_values = true, value_hint = clap::ValueHint::Other)]
scales: Option<Vec<crate::Scale>>,
@@ -868,6 +868,7 @@ pub enum Commands {
#[arg(long, value_hint = clap::ValueHint::Other)]
addr_vk: Option<H160Flag>,
},
#[cfg(not(feature = "no-update"))]
/// Updates ezkl binary to version specified (or latest if not specified)
Update {
/// The version to update to

View File

@@ -327,11 +327,7 @@ pub async fn setup_eth_backend(
ProviderBuilder::new()
.with_recommended_fillers()
.signer(EthereumSigner::from(wallet))
.on_http(
endpoint
.parse()
.map_err(|_| EthError::UrlParse(endpoint.clone()))?,
),
.on_http(endpoint.parse().map_err(|_| EthError::UrlParse(endpoint))?),
);
let chain_id = client.get_chain_id().await?;
@@ -354,8 +350,7 @@ pub async fn deploy_contract_via_solidity(
let (abi, bytecode, runtime_bytecode) =
get_contract_artifacts(sol_code_path, contract_name, runs).await?;
let factory =
get_sol_contract_factory(abi, bytecode, runtime_bytecode, client.clone(), None::<()>)?;
let factory = get_sol_contract_factory(abi, bytecode, runtime_bytecode, client, None::<()>)?;
let contract = factory.deploy().await?;
Ok(contract)
@@ -452,20 +447,30 @@ pub async fn deploy_da_verifier_via_solidity(
}
}
let (abi, bytecode, runtime_bytecode) =
get_contract_artifacts(sol_code_path, "DataAttestation", runs).await?;
let (contract_addresses, call_data, decimals) = if !calls_to_accounts.is_empty() {
parse_calls_to_accounts(calls_to_accounts)?
} else {
return Err(EthError::OnChainDataSource);
// if calls_to_accounts is empty then we need to check that there is at least kzg (polycommit) visibility in the settings file
let kzg_visibility = settings.run_args.input_visibility.is_polycommit()
|| settings.run_args.output_visibility.is_polycommit()
|| settings.run_args.param_visibility.is_polycommit();
if !kzg_visibility {
return Err(EthError::OnChainDataSource);
}
let factory =
get_sol_contract_factory::<_, ()>(abi, bytecode, runtime_bytecode, client, None)?;
let contract = factory.deploy().await?;
return Ok(contract);
};
let (abi, bytecode, runtime_bytecode) =
get_contract_artifacts(sol_code_path, "DataAttestation", runs).await?;
let factory = get_sol_contract_factory(
abi,
bytecode,
runtime_bytecode,
client.clone(),
client,
Some((
// address[] memory _contractAddresses,
DynSeqToken(
@@ -506,7 +511,7 @@ pub async fn deploy_da_verifier_via_solidity(
),
// uint8 _instanceOffset,
WordToken(U256::from(contract_instance_offset as u32).into()),
//address _admin
// address _admin
WordToken(client_address.into_word()),
)),
)?;
@@ -529,7 +534,7 @@ fn parse_calls_to_accounts(
let mut call_data = vec![];
let mut decimals: Vec<Vec<U256>> = vec![];
for (i, val) in calls_to_accounts.iter().enumerate() {
let contract_address_bytes = hex::decode(val.address.clone())?;
let contract_address_bytes = hex::decode(&val.address)?;
let contract_address = H160::from_slice(&contract_address_bytes);
contract_addresses.push(contract_address);
call_data.push(vec![]);
@@ -573,7 +578,7 @@ pub async fn update_account_calls(
let (client, client_address) = setup_eth_backend(rpc_url, None).await?;
let contract = DataAttestation::new(addr, client.clone());
let contract = DataAttestation::new(addr, &client);
info!("contract_addresses: {:#?}", contract_addresses);
@@ -804,7 +809,7 @@ pub async fn test_on_chain_data<M: 'static + Provider<Http<Client>, Ethereum>>(
client: Arc<M>,
data: &[Vec<FileSourceInner>],
) -> Result<Vec<CallsToAccount>, EthError> {
let (contract, decimals) = setup_test_contract(client.clone(), data).await?;
let (contract, decimals) = setup_test_contract(client, data).await?;
// Get the encoded call data for each input
let mut calldata = vec![];
@@ -836,10 +841,10 @@ pub async fn read_on_chain_inputs<M: 'static + Provider<Http<Client>, Ethereum>>
let mut decimals = vec![];
for on_chain_data in data {
// Construct the address
let contract_address_bytes = hex::decode(on_chain_data.address.clone())?;
let contract_address_bytes = hex::decode(&on_chain_data.address)?;
let contract_address = H160::from_slice(&contract_address_bytes);
for (call_data, decimal) in &on_chain_data.call_data {
let call_data_bytes = hex::decode(call_data.clone())?;
let call_data_bytes = hex::decode(call_data)?;
let input: TransactionInput = call_data_bytes.into();
let tx = TransactionRequest::default()
@@ -866,8 +871,8 @@ pub async fn evm_quantize<M: 'static + Provider<Http<Client>, Ethereum>>(
) -> Result<Vec<Fr>, EthError> {
let contract = QuantizeData::deploy(&client).await?;
let fetched_inputs = data.0.clone();
let decimals = data.1.clone();
let fetched_inputs = &data.0;
let decimals = &data.1;
let fetched_inputs = fetched_inputs
.iter()
@@ -943,7 +948,7 @@ fn get_sol_contract_factory<'a, M: 'static + Provider<Http<Client>, Ethereum>, T
(None, false) => {
return Err(EthError::NoConstructor);
}
(None, true) => bytecode.clone(),
(None, true) => bytecode,
(Some(_), _) => {
let mut data = bytecode.to_vec();
@@ -955,7 +960,7 @@ fn get_sol_contract_factory<'a, M: 'static + Provider<Http<Client>, Ethereum>, T
}
};
Ok(CallBuilder::new_raw_deploy(client.clone(), data))
Ok(CallBuilder::new_raw_deploy(client, data))
}
/// Compiles a solidity verifier contract and returns the abi, bytecode, and runtime bytecode
@@ -1030,7 +1035,7 @@ pub fn fix_da_sol(
// fill in the quantization params and total calls
// as constants to the contract to save on gas
if let Some(input_data) = input_data {
if let Some(input_data) = &input_data {
let input_calls: usize = input_data.iter().map(|v| v.call_data.len()).sum();
accounts_len = input_data.len();
contract = contract.replace(
@@ -1038,7 +1043,7 @@ pub fn fix_da_sol(
&format!("uint256 constant INPUT_CALLS = {};", input_calls),
);
}
if let Some(output_data) = output_data {
if let Some(output_data) = &output_data {
let output_calls: usize = output_data.iter().map(|v| v.call_data.len()).sum();
accounts_len += output_data.len();
contract = contract.replace(
@@ -1048,8 +1053,9 @@ pub fn fix_da_sol(
}
contract = contract.replace("AccountCall[]", &format!("AccountCall[{}]", accounts_len));
if commitment_bytes.clone().is_some() && !commitment_bytes.clone().unwrap().is_empty() {
let commitment_bytes = commitment_bytes.unwrap();
// The case where a combination of on-chain data source + kzg commit is provided.
if commitment_bytes.is_some() && !commitment_bytes.as_ref().unwrap().is_empty() {
let commitment_bytes = commitment_bytes.as_ref().unwrap();
let hex_string = hex::encode(commitment_bytes);
contract = contract.replace(
"bytes constant COMMITMENT_KZG = hex\"\";",
@@ -1064,5 +1070,44 @@ pub fn fix_da_sol(
);
}
// if both input and output data are none then we will only deploy the DataAttestation contract, adding in the verifyWithDataAttestation function
if input_data.is_none()
&& output_data.is_none()
&& commitment_bytes.as_ref().is_some()
&& !commitment_bytes.as_ref().unwrap().is_empty()
{
contract = contract.replace(
"contract SwapProofCommitments {",
"contract DataAttestation {",
);
// Remove everything past the end of the checkKzgCommits function
if let Some(pos) = contract.find(" } /// end checkKzgCommits") {
contract.truncate(pos);
contract.push('}');
}
// Add the Solidity function below checkKzgCommits
contract.push_str(
r#"
function verifyWithDataAttestation(
address verifier,
bytes calldata encoded
) public view returns (bool) {
require(verifier.code.length > 0, "Address: call to non-contract");
require(checkKzgCommits(encoded), "Invalid KZG commitments");
// static call the verifier contract to verify the proof
(bool success, bytes memory returndata) = verifier.staticcall(encoded);
if (success) {
return abi.decode(returndata, (bool));
} else {
revert("low-level call to verifier failed");
}
}
}"#,
);
}
Ok(contract)
}
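The new branch above is plain string surgery on the generated template: find the end-of-function marker, truncate there, re-close the contract, and append the new function. A stripped-down sketch of the same splice (toy marker and tail, not the real template):

```rust
// Cut the template at a marker, re-close the block, append new code,
// mirroring the truncate/push_str pattern in fix_da_sol.
fn splice(mut contract: String, marker: &str, tail: &str) -> String {
    if let Some(pos) = contract.find(marker) {
        contract.truncate(pos); // drop everything from the marker onward
        contract.push('}');     // re-close the enclosing block
    }
    contract.push_str(tail); // append the replacement tail
    contract
}

fn main() {
    let template = "contract C { function f() {} } /// end f\n// trailing".to_string();
    let out = splice(template, " } /// end f", "\nfunction g() {}");
    println!("{out}");
}
```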

View File

@@ -506,10 +506,12 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
)
.await
}
#[cfg(not(feature = "no-update"))]
Commands::Update { version } => update_ezkl_binary(&version).map(|e| e.to_string()),
}
}
#[cfg(not(feature = "no-update"))]
/// Assert that the version is valid
fn assert_version_is_valid(version: &str) -> Result<(), EZKLError> {
let err_string = "Invalid version string. Must be in the format v0.0.0";
@@ -527,8 +529,10 @@ fn assert_version_is_valid(version: &str) -> Result<(), EZKLError> {
Ok(())
}
#[cfg(not(feature = "no-update"))]
const INSTALL_BYTES: &[u8] = include_bytes!("../install_ezkl_cli.sh");
#[cfg(not(feature = "no-update"))]
fn update_ezkl_binary(version: &Option<String>) -> Result<String, EZKLError> {
// run the install script with the version
let install_script = std::str::from_utf8(INSTALL_BYTES)?;
@@ -1009,7 +1013,7 @@ pub(crate) async fn calibrate(
data: PathBuf,
settings_path: PathBuf,
target: CalibrationTarget,
lookup_safety_margin: i64,
lookup_safety_margin: f64,
scales: Option<Vec<crate::Scale>>,
scale_rebase_multiplier: Vec<u32>,
only_range_check_rebase: bool,
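The no-update feature gated throughout this file is a compile-time kill switch: when enabled, the Update subcommand, the version check, and the embedded installer bytes are simply absent from the binary. A minimal sketch of the cfg pattern (toy standalone crate; assumes a no-update feature declared in its own Cargo.toml):

```rust
// With `--features no-update`, this function does not exist in the binary.
#[cfg(not(feature = "no-update"))]
fn update_ezkl_binary() -> Result<String, String> {
    Ok("updated".to_string())
}

fn main() {
    #[cfg(not(feature = "no-update"))]
    println!("{:?}", update_ezkl_binary());

    #[cfg(feature = "no-update")]
    println!("self-update compiled out");
}
```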

View File

@@ -41,16 +41,16 @@ pub enum GraphError {
/// Error when attempting to rescale an operation
#[error("failed to rescale inputs for {0}")]
RescalingError(String),
/// Error when attempting to load a model from a file
#[error("failed to load model")]
ModelLoad(#[from] std::io::Error),
/// Reading a file failed
#[error("[io] ({0}) {1}")]
ReadWriteFileError(String, String),
/// Model serialization error
#[error("failed to ser/deser model: {0}")]
ModelSerialize(#[from] bincode::Error),
/// Tract error
#[cfg(not(all(target_arch = "wasm32", target_os = "unknown")))]
#[error("[tract] {0}")]
TractError(#[from] tract_onnx::tract_core::anyhow::Error),
TractError(#[from] tract_onnx::prelude::TractError),
/// Packing exponent is too large
#[error("largest packing exponent exceeds max. try reducing the scale")]
PackingExponent,

View File

@@ -485,18 +485,25 @@ impl GraphData {
/// Load the model input from a file
pub fn from_path(path: std::path::PathBuf) -> Result<Self, GraphError> {
let reader = std::fs::File::open(path)?;
let reader = std::fs::File::open(&path).map_err(|e| {
GraphError::ReadWriteFileError(path.display().to_string(), e.to_string())
})?;
let mut reader = BufReader::with_capacity(*EZKL_BUF_CAPACITY, reader);
let mut buf = String::new();
reader.read_to_string(&mut buf)?;
reader.read_to_string(&mut buf).map_err(|e| {
GraphError::ReadWriteFileError(path.display().to_string(), e.to_string())
})?;
let graph_input = serde_json::from_str(&buf)?;
Ok(graph_input)
}
/// Save the model input to a file
pub fn save(&self, path: std::path::PathBuf) -> Result<(), GraphError> {
let file = std::fs::File::create(path.clone()).map_err(|e| {
GraphError::ReadWriteFileError(path.display().to_string(), e.to_string())
})?;
// buf writer
let writer = BufWriter::with_capacity(*EZKL_BUF_CAPACITY, std::fs::File::create(path)?);
let writer = BufWriter::with_capacity(*EZKL_BUF_CAPACITY, file);
serde_json::to_writer(writer, self)?;
Ok(())
}
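The same map_err-with-path pattern now recurs in every save/load above. A hypothetical helper (not in the diff, assuming the crate's existing thiserror dependency) shows how it could be centralized:

```rust
use std::path::Path;

#[derive(Debug, thiserror::Error)]
enum GraphError {
    /// mirrors the diff's variant: "[io] (path) message"
    #[error("[io] ({0}) {1}")]
    ReadWriteFileError(String, String),
}

// Hypothetical helper: attach the offending path to an io::Error so the
// caller's error message names the file that failed.
fn with_path<T>(res: std::io::Result<T>, path: &Path) -> Result<T, GraphError> {
    res.map_err(|e| GraphError::ReadWriteFileError(path.display().to_string(), e.to_string()))
}

fn main() {
    let path = Path::new("does_not_exist.json");
    match with_path(std::fs::read_to_string(path), path) {
        Ok(s) => println!("{} bytes", s.len()),
        Err(e) => println!("{}", e),
    }
}
```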

View File

@@ -267,7 +267,9 @@ impl GraphWitness {
/// Load the model input from a file
pub fn from_path(path: std::path::PathBuf) -> Result<Self, GraphError> {
let file = std::fs::File::open(path.clone())?;
let file = std::fs::File::open(path.clone()).map_err(|e| {
GraphError::ReadWriteFileError(path.display().to_string(), e.to_string())
})?;
let reader = std::io::BufReader::with_capacity(*EZKL_BUF_CAPACITY, file);
serde_json::from_reader(reader).map_err(|e| e.into())
@@ -275,9 +277,11 @@ impl GraphWitness {
/// Save the model input to a file
pub fn save(&self, path: std::path::PathBuf) -> Result<(), GraphError> {
let file = std::fs::File::create(path.clone()).map_err(|e| {
GraphError::ReadWriteFileError(path.display().to_string(), e.to_string())
})?;
// use buf writer
let writer =
std::io::BufWriter::with_capacity(*EZKL_BUF_CAPACITY, std::fs::File::create(path)?);
let writer = std::io::BufWriter::with_capacity(*EZKL_BUF_CAPACITY, file);
serde_json::to_writer(writer, &self).map_err(|e| e.into())
}
@@ -640,7 +644,9 @@ impl GraphCircuit {
}
///
pub fn save(&self, path: std::path::PathBuf) -> Result<(), GraphError> {
let f = std::fs::File::create(path)?;
let f = std::fs::File::create(&path).map_err(|e| {
GraphError::ReadWriteFileError(path.display().to_string(), e.to_string())
})?;
let writer = std::io::BufWriter::with_capacity(*EZKL_BUF_CAPACITY, f);
bincode::serialize_into(writer, &self)?;
Ok(())
@@ -649,7 +655,9 @@ impl GraphCircuit {
///
pub fn load(path: std::path::PathBuf) -> Result<Self, GraphError> {
// read bytes from file
let f = std::fs::File::open(path)?;
let f = std::fs::File::open(&path).map_err(|e| {
GraphError::ReadWriteFileError(path.display().to_string(), e.to_string())
})?;
let reader = std::io::BufReader::with_capacity(*EZKL_BUF_CAPACITY, f);
let result: GraphCircuit = bincode::deserialize_from(reader)?;
@@ -1026,10 +1034,10 @@ impl GraphCircuit {
Ok(data)
}
fn calc_safe_lookup_range(min_max_lookup: Range, lookup_safety_margin: i64) -> Range {
fn calc_safe_lookup_range(min_max_lookup: Range, lookup_safety_margin: f64) -> Range {
(
lookup_safety_margin * min_max_lookup.0,
lookup_safety_margin * min_max_lookup.1,
(lookup_safety_margin * min_max_lookup.0 as f64).floor() as i64,
(lookup_safety_margin * min_max_lookup.1 as f64).ceil() as i64,
)
}
@@ -1062,7 +1070,7 @@ impl GraphCircuit {
min_max_lookup: Range,
max_range_size: i64,
max_logrows: Option<u32>,
lookup_safety_margin: i64,
lookup_safety_margin: f64,
) -> Result<(), GraphError> {
// load the max logrows
let max_logrows = max_logrows.unwrap_or(MAX_PUBLIC_SRS);
@@ -1072,9 +1080,13 @@ impl GraphCircuit {
let safe_lookup_range = Self::calc_safe_lookup_range(min_max_lookup, lookup_safety_margin);
let lookup_size = (safe_lookup_range.1 - safe_lookup_range.0).abs();
// check if subtraction overflows
let lookup_size =
(safe_lookup_range.1.saturating_sub(safe_lookup_range.0)).saturating_abs();
// check if it has overflowed the max lookup input
if lookup_size > MAX_LOOKUP_ABS / lookup_safety_margin {
if lookup_size > (MAX_LOOKUP_ABS as f64 / lookup_safety_margin).floor() as i64 {
return Err(GraphError::LookupRangeTooLarge(
lookup_size.unsigned_abs() as usize
));
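To make the float margin concrete, here is a standalone sketch of the new calc_safe_lookup_range rounding plus the overflow-safe size computation, taken directly from the diff above (Range assumed to be (i64, i64)):

```rust
type Range = (i64, i64);

// Scale both lookup bounds by a fractional safety margin, rounding outward
// (floor on the min, ceil on the max) so the safe range never shrinks.
fn calc_safe_lookup_range(min_max_lookup: Range, lookup_safety_margin: f64) -> Range {
    (
        (lookup_safety_margin * min_max_lookup.0 as f64).floor() as i64,
        (lookup_safety_margin * min_max_lookup.1 as f64).ceil() as i64,
    )
}

fn main() {
    // a margin of 1.5 widens (-100, 200) to (-150, 300)
    let safe = calc_safe_lookup_range((-100, 200), 1.5);
    assert_eq!(safe, (-150, 300));

    // size uses saturating ops so extreme ranges cannot overflow and panic
    let lookup_size = safe.1.saturating_sub(safe.0).saturating_abs();
    assert_eq!(lookup_size, 450);
    println!("ok");
}
```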

View File

@@ -483,7 +483,9 @@ impl Model {
///
pub fn save(&self, path: PathBuf) -> Result<(), GraphError> {
let f = std::fs::File::create(path)?;
let f = std::fs::File::create(&path).map_err(|e| {
GraphError::ReadWriteFileError(path.display().to_string(), e.to_string())
})?;
let writer = std::io::BufWriter::new(f);
bincode::serialize_into(writer, &self)?;
Ok(())
@@ -492,10 +494,16 @@ impl Model {
///
pub fn load(path: PathBuf) -> Result<Self, GraphError> {
// read bytes from file
let mut f = std::fs::File::open(&path)?;
let metadata = fs::metadata(&path)?;
let mut f = std::fs::File::open(&path).map_err(|e| {
GraphError::ReadWriteFileError(path.display().to_string(), e.to_string())
})?;
let metadata = fs::metadata(&path).map_err(|e| {
GraphError::ReadWriteFileError(path.display().to_string(), e.to_string())
})?;
let mut buffer = vec![0; metadata.len() as usize];
f.read_exact(&mut buffer)?;
f.read_exact(&mut buffer).map_err(|e| {
GraphError::ReadWriteFileError(path.display().to_string(), e.to_string())
})?;
let result = bincode::deserialize(&buffer)?;
Ok(result)
}
@@ -975,8 +983,11 @@ impl Model {
) -> Result<Vec<Vec<Tensor<f32>>>, GraphError> {
use tract_onnx::tract_core::internal::IntoArcTensor;
let (model, _) =
Model::load_onnx_using_tract(&mut std::fs::File::open(model_path)?, run_args)?;
let mut file = std::fs::File::open(model_path).map_err(|e| {
GraphError::ReadWriteFileError(model_path.display().to_string(), e.to_string())
})?;
let (model, _) = Model::load_onnx_using_tract(&mut file, run_args)?;
let datum_types: Vec<DatumType> = model
.input_outlets()?
@@ -1005,7 +1016,10 @@ impl Model {
/// * `params` - A [GraphSettings] struct holding parsed CLI arguments.
#[cfg(not(target_arch = "wasm32"))]
pub fn from_run_args(run_args: &RunArgs, model: &std::path::Path) -> Result<Self, GraphError> {
Model::new(&mut std::fs::File::open(model)?, run_args)
let mut file = std::fs::File::open(model).map_err(|e| {
GraphError::ReadWriteFileError(model.display().to_string(), e.to_string())
})?;
Model::new(&mut file, run_args)
}
/// Configures a model for the circuit

View File

@@ -85,6 +85,34 @@ pub fn multiplier_to_scale(mult: f64) -> crate::Scale {
mult.log2().round() as crate::Scale
}
#[cfg(not(target_arch = "wasm32"))]
/// Extracts the padding from an onnx node.
pub fn extract_padding(
pool_spec: &PoolSpec,
num_dims: usize,
) -> Result<Vec<(usize, usize)>, GraphError> {
let padding = match &pool_spec.padding {
PaddingSpec::Explicit(b, a) | PaddingSpec::ExplicitOnnxPool(b, a, _) => {
b.iter().zip(a.iter()).map(|(b, a)| (*b, *a)).collect()
}
PaddingSpec::Valid => vec![(0, 0); num_dims],
_ => {
return Err(GraphError::MissingParams("padding".to_string()));
}
};
Ok(padding)
}
#[cfg(not(target_arch = "wasm32"))]
/// Extracts the strides from an onnx node.
pub fn extract_strides(pool_spec: &PoolSpec) -> Result<Vec<usize>, GraphError> {
Ok(pool_spec
.strides
.clone()
.ok_or(GraphError::MissingParams("stride".to_string()))?
.to_vec())
}
/// Gets the shape of an onnx node's outlets.
#[cfg(not(target_arch = "wasm32"))]
pub fn node_output_shapes(
@@ -255,6 +283,12 @@ pub fn new_op_from_onnx(
.flat_map(|x| x.out_scales())
.collect::<Vec<_>>();
let input_dims = inputs
.iter()
.flat_map(|x| x.out_dims())
.collect::<Vec<_>>();
let mut replace_const = |scale: crate::Scale,
index: usize,
default_op: SupportedOp|
@@ -1073,18 +1107,8 @@ pub fn new_op_from_onnx(
));
}
let stride = pool_spec
.strides
.clone()
.ok_or(GraphError::MissingParams("stride".to_string()))?;
let padding = match &pool_spec.padding {
PaddingSpec::Explicit(b, a) | PaddingSpec::ExplicitOnnxPool(b, a, _) => {
b.iter().zip(a.iter()).map(|(b, a)| (*b, *a)).collect()
}
_ => {
return Err(GraphError::MissingParams("padding".to_string()));
}
};
let stride = extract_strides(&pool_spec)?;
let padding = extract_padding(&pool_spec, input_dims[0].len())?;
let kernel_shape = &pool_spec.kernel_shape;
SupportedOp::Hybrid(HybridOp::MaxPool {
@@ -1151,21 +1175,10 @@ pub fn new_op_from_onnx(
));
}
let stride = match conv_node.pool_spec.strides.clone() {
Some(s) => s.to_vec(),
None => {
return Err(GraphError::MissingParams("strides".to_string()));
}
};
let pool_spec = &conv_node.pool_spec;
let padding = match &conv_node.pool_spec.padding {
PaddingSpec::Explicit(b, a) | PaddingSpec::ExplicitOnnxPool(b, a, _) => {
b.iter().zip(a.iter()).map(|(b, a)| (*b, *a)).collect()
}
_ => {
return Err(GraphError::MissingParams("padding".to_string()));
}
};
let stride = extract_strides(&pool_spec)?;
let padding = extract_padding(&pool_spec, input_dims[0].len())?;
// if bias exists then rescale it to the input + kernel scale
if input_scales.len() == 3 {
@@ -1214,21 +1227,10 @@ pub fn new_op_from_onnx(
));
}
let stride = match deconv_node.pool_spec.strides.clone() {
Some(s) => s.to_vec(),
None => {
return Err(GraphError::MissingParams("strides".to_string()));
}
};
let padding = match &deconv_node.pool_spec.padding {
PaddingSpec::Explicit(b, a) | PaddingSpec::ExplicitOnnxPool(b, a, _) => {
b.iter().zip(a.iter()).map(|(b, a)| (*b, *a)).collect()
}
_ => {
return Err(GraphError::MissingParams("padding".to_string()));
}
};
let pool_spec = &deconv_node.pool_spec;
let stride = extract_strides(&pool_spec)?;
let padding = extract_padding(&pool_spec, input_dims[0].len())?;
// if bias exists then rescale it to the input + kernel scale
if input_scales.len() == 3 {
let bias_scale = input_scales[2];
@@ -1339,18 +1341,8 @@ pub fn new_op_from_onnx(
));
}
let stride = pool_spec
.strides
.clone()
.ok_or(GraphError::MissingParams("stride".to_string()))?;
let padding = match &pool_spec.padding {
PaddingSpec::Explicit(b, a) | PaddingSpec::ExplicitOnnxPool(b, a, _) => {
b.iter().zip(a.iter()).map(|(b, a)| (*b, *a)).collect()
}
_ => {
return Err(GraphError::MissingParams("padding".to_string()));
}
};
let stride = extract_strides(&pool_spec)?;
let padding = extract_padding(&pool_spec, input_dims[0].len())?;
SupportedOp::Hybrid(HybridOp::SumPool {
padding,

View File

@@ -887,7 +887,7 @@ fn calibrate_settings(
model: PathBuf,
settings: PathBuf,
target: CalibrationTarget,
lookup_safety_margin: i64,
lookup_safety_margin: f64,
scales: Option<Vec<crate::Scale>>,
scale_rebase_multiplier: Vec<u32>,
max_logrows: Option<u32>,
@@ -1491,7 +1491,7 @@ fn encode_evm_calldata<'a>(
/// The path to the SRS file
///
/// render_vk_separately: bool
/// Whether the verifier key should be rendered as a separate contract. We recommend disabling selector compression if this is enabled. To save the verifier key as a separate contract, set this to true and then call the create-evm-vk command
/// Whether the verifier key should be rendered as a separate contract. We recommend disabling selector compression if this is enabled. To save the verifier key as a separate contract, set this to true and then call the create_evm_vk command
///
/// Returns
/// -------
@@ -1533,6 +1533,56 @@ fn create_evm_verifier(
})
}
/// Creates an EVM verifier key. This command should be called after create_evm_verifier with the render_vk_separately arg set to true. By rendering a verification key separately you can reuse the same verifier for similar circuit setups with different verifying keys, helping to reduce the amount of state our verifiers store on the blockchain.
///
/// Arguments
/// ---------
/// vk_path: str
/// The path to the verification key file
///
/// settings_path: str
/// The path to the settings file
///
/// sol_code_path: str
/// The path to create the solidity verifying key.
///
/// abi_path: str
/// The path to create the ABI for the solidity verifier
///
/// srs_path: str
/// The path to the SRS file
///
/// Returns
/// -------
/// bool
///
#[pyfunction(signature = (
vk_path=PathBuf::from(DEFAULT_VK),
settings_path=PathBuf::from(DEFAULT_SETTINGS),
sol_code_path=PathBuf::from(DEFAULT_VK_SOL),
abi_path=PathBuf::from(DEFAULT_VERIFIER_ABI),
srs_path=None
))]
fn create_evm_vk(
py: Python,
vk_path: PathBuf,
settings_path: PathBuf,
sol_code_path: PathBuf,
abi_path: PathBuf,
srs_path: Option<PathBuf>,
) -> PyResult<Bound<'_, PyAny>> {
pyo3_asyncio::tokio::future_into_py(py, async move {
crate::execute::create_evm_vk(vk_path, srs_path, settings_path, sol_code_path, abi_path)
.await
.map_err(|e| {
let err_str = format!("Failed to run create_evm_verifier: {}", e);
PyRuntimeError::new_err(err_str)
})?;
Ok(true)
})
}
/// Creates an EVM compatible data attestation verifier, you will need solc installed in your environment to run this
///
/// Arguments
@@ -1762,7 +1812,7 @@ fn deploy_da_evm(
/// Arguments
/// ---------
/// addr_verifier: str
/// The path to verifier contract's address
/// The verifier contract's address as a hex string
///
/// proof_path: str
/// The path to the proof file (generated using the prove command)
@@ -1774,7 +1824,7 @@ fn deploy_da_evm(
/// does the verifier use data attestation ?
///
/// addr_vk: str
///
/// The address of the separate VK contract (if the verifier key is rendered as a separate contract)
/// Returns
/// -------
/// bool
@@ -1925,6 +1975,7 @@ fn ezkl(_py: Python<'_>, m: &PyModule) -> PyResult<()> {
m.add_function(wrap_pyfunction!(compile_circuit, m)?)?;
m.add_function(wrap_pyfunction!(verify_aggr, m)?)?;
m.add_function(wrap_pyfunction!(create_evm_verifier, m)?)?;
m.add_function(wrap_pyfunction!(create_evm_vk, m)?)?;
m.add_function(wrap_pyfunction!(deploy_evm, m)?)?;
m.add_function(wrap_pyfunction!(deploy_vk_evm, m)?)?;
m.add_function(wrap_pyfunction!(deploy_da_evm, m)?)?;

View File

@@ -1066,6 +1066,15 @@ mod native_tests {
kzg_evm_on_chain_input_prove_and_verify(path, test.to_string(), "file", "on-chain", "polycommit", "public", "polycommit");
test_dir.close().unwrap();
}
#(#[test_case(TESTS_ON_CHAIN_INPUT[N])])*
fn kzg_evm_on_chain_all_kzg_params_prove_and_verify_(test: &str) {
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
let _anvil_child = crate::native_tests::start_anvil(true, Hardfork::Latest);
kzg_evm_on_chain_input_prove_and_verify(path, test.to_string(), "file", "file", "polycommit", "polycommit", "polycommit");
test_dir.close().unwrap();
}
});
@@ -2330,7 +2339,6 @@ mod native_tests {
let model_path = format!("{}/{}/network.compiled", test_dir, example_name);
let settings_path = format!("{}/{}/settings.json", test_dir, example_name);
init_params(settings_path.clone().into());
let data_path = format!("{}/{}/input.json", test_dir, example_name);
@@ -2342,62 +2350,6 @@ mod native_tests {
let test_input_source = format!("--input-source={}", input_source);
let test_output_source = format!("--output-source={}", output_source);
// load witness
let witness: GraphWitness = GraphWitness::from_path(witness_path.clone().into()).unwrap();
let mut input: GraphData = GraphData::from_path(data_path.clone().into()).unwrap();
if input_visibility == "hashed" {
let hashes = witness.processed_inputs.unwrap().poseidon_hash.unwrap();
input.input_data = DataSource::File(
hashes
.iter()
.map(|h| vec![FileSourceInner::Field(*h)])
.collect(),
);
}
if output_visibility == "hashed" {
let hashes = witness.processed_outputs.unwrap().poseidon_hash.unwrap();
input.output_data = Some(DataSource::File(
hashes
.iter()
.map(|h| vec![FileSourceInner::Field(*h)])
.collect(),
));
} else {
input.output_data = Some(DataSource::File(
witness
.pretty_elements
.unwrap()
.rescaled_outputs
.iter()
.map(|o| {
o.iter()
.map(|f| FileSourceInner::Float(f.parse().unwrap()))
.collect()
})
.collect(),
));
}
input.save(data_path.clone().into()).unwrap();
let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
.args([
"setup-test-evm-data",
"-D",
data_path.as_str(),
"-M",
&model_path,
"--test-data",
test_on_chain_data_path.as_str(),
rpc_arg.as_str(),
test_input_source.as_str(),
test_output_source.as_str(),
])
.status()
.expect("failed to execute process");
assert!(status.success());
let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
.args([
"setup",
@@ -2412,6 +2364,82 @@ mod native_tests {
.expect("failed to execute process");
assert!(status.success());
// generate the witness, passing the vk path to generate the necessary kzg commits
let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
.args([
"gen-witness",
"-D",
&data_path,
"-M",
&model_path,
"-O",
&witness_path,
"--vk-path",
&format!("{}/{}/key.vk", test_dir, example_name),
])
.status()
.expect("failed to execute process");
assert!(status.success());
// load witness
let witness: GraphWitness = GraphWitness::from_path(witness_path.clone().into()).unwrap();
// print out the witness
println!("WITNESS: {:?}", witness);
let mut input: GraphData = GraphData::from_path(data_path.clone().into()).unwrap();
if input_source != "file" || output_source != "file" {
println!("on chain input");
if input_visibility == "hashed" {
let hashes = witness.processed_inputs.unwrap().poseidon_hash.unwrap();
input.input_data = DataSource::File(
hashes
.iter()
.map(|h| vec![FileSourceInner::Field(*h)])
.collect(),
);
}
if output_visibility == "hashed" {
let hashes = witness.processed_outputs.unwrap().poseidon_hash.unwrap();
input.output_data = Some(DataSource::File(
hashes
.iter()
.map(|h| vec![FileSourceInner::Field(*h)])
.collect(),
));
} else {
input.output_data = Some(DataSource::File(
witness
.pretty_elements
.unwrap()
.rescaled_outputs
.iter()
.map(|o| {
o.iter()
.map(|f| FileSourceInner::Float(f.parse().unwrap()))
.collect()
})
.collect(),
));
}
input.save(data_path.clone().into()).unwrap();
let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
.args([
"setup-test-evm-data",
"-D",
data_path.as_str(),
"-M",
&model_path,
"--test-data",
test_on_chain_data_path.as_str(),
rpc_arg.as_str(),
test_input_source.as_str(),
test_output_source.as_str(),
])
.status()
.expect("failed to execute process");
assert!(status.success());
}
let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
.args([
"prove",
@@ -2502,13 +2530,19 @@ mod native_tests {
.expect("failed to execute process");
assert!(status.success());
let deploy_evm_data_path = if input_source != "file" || output_source != "file" {
test_on_chain_data_path.clone()
} else {
data_path.clone()
};
let addr_path_da_arg = format!("--addr-path={}/{}/addr_da.txt", test_dir, example_name);
let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
.args([
"deploy-evm-da",
format!("--settings-path={}", settings_path).as_str(),
"-D",
test_on_chain_data_path.as_str(),
deploy_evm_data_path.as_str(),
"--sol-code-path",
sol_arg.as_str(),
rpc_arg.as_str(),
@@ -2546,40 +2580,42 @@ mod native_tests {
.status()
.expect("failed to execute process");
assert!(status.success());
// Create a new set of test on chain data
let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
.args([
"setup-test-evm-data",
// Create a new set of test on chain data only for the on-chain input source
if input_source != "file" || output_source != "file" {
let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
.args([
"setup-test-evm-data",
"-D",
data_path.as_str(),
"-M",
&model_path,
"--test-data",
test_on_chain_data_path.as_str(),
rpc_arg.as_str(),
test_input_source.as_str(),
test_output_source.as_str(),
])
.status()
.expect("failed to execute process");
assert!(status.success());
let deployed_addr_arg = format!("--addr={}", addr_da);
let args: Vec<&str> = vec![
"test-update-account-calls",
deployed_addr_arg.as_str(),
"-D",
data_path.as_str(),
"-M",
&model_path,
"--test-data",
test_on_chain_data_path.as_str(),
rpc_arg.as_str(),
test_input_source.as_str(),
test_output_source.as_str(),
])
.status()
.expect("failed to execute process");
];
let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
.args(&args)
.status()
.expect("failed to execute process");
assert!(status.success());
let deployed_addr_arg = format!("--addr={}", addr_da);
let args = vec![
"test-update-account-calls",
deployed_addr_arg.as_str(),
"-D",
test_on_chain_data_path.as_str(),
rpc_arg.as_str(),
];
let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
.args(&args)
.status()
.expect("failed to execute process");
assert!(status.success());
assert!(status.success());
}
// As sanity check, add example that should fail.
let args = vec![
"verify-evm",

View File

@@ -423,6 +423,74 @@ async def test_create_evm_verifier():
assert res == True
assert os.path.isfile(sol_code_path)
async def test_create_evm_verifier_separate_vk():
"""
Create an EVM verifier with solidity code and a separate vk
In order to run this test you will need to install solc in your environment
"""
vk_path = os.path.join(folder_path, 'test_evm.vk')
settings_path = os.path.join(folder_path, 'settings.json')
sol_code_path = os.path.join(folder_path, 'test_separate.sol')
vk_code_path = os.path.join(folder_path, 'test_vk.sol')
abi_path = os.path.join(folder_path, 'test_separate.abi')
abi_vk_path = os.path.join(folder_path, 'test_vk_separate.abi')
proof_path = os.path.join(folder_path, 'test_evm.pf')
calldata_path = os.path.join(folder_path, 'calldata.bytes')
# # res is now a vector of bytes
# res = ezkl.encode_evm_calldata(proof_path, calldata_path)
# assert os.path.isfile(calldata_path)
# assert len(res) > 0
res = await ezkl.create_evm_verifier(
vk_path,
settings_path,
sol_code_path,
abi_path,
srs_path=srs_path,
render_vk_seperately=True
)
res = await ezkl.create_evm_vk(
vk_path,
settings_path,
vk_code_path,
abi_vk_path,
srs_path=srs_path,
)
assert res == True
assert os.path.isfile(sol_code_path)
async def test_deploy_evm_separate_vk():
"""
Test deployment of the separate verifier smart contract + vk
In order to run this you will need to install solc in your environment
"""
addr_path_verifier = os.path.join(folder_path, 'address_separate.json')
addr_path_vk = os.path.join(folder_path, 'address_vk.json')
sol_code_path = os.path.join(folder_path, 'test_separate.sol')
vk_code_path = os.path.join(folder_path, 'test_vk.sol')
# TODO: without optimization there will be out of gas errors
# sol_code_path = os.path.join(folder_path, 'test.sol')
res = await ezkl.deploy_evm(
addr_path_verifier,
sol_code_path,
rpc_url=anvil_url,
)
res = await ezkl.deploy_vk_evm(
addr_path_vk,
vk_code_path,
rpc_url=anvil_url,
)
assert res == True
async def test_deploy_evm():
"""
@@ -503,6 +571,47 @@ async def test_verify_evm():
assert res == True
async def test_verify_evm_separate_vk():
"""
Verifies an evm proof
In order to run this you will need to install solc in your environment
"""
proof_path = os.path.join(folder_path, 'test_evm.pf')
addr_path_verifier = os.path.join(folder_path, 'address_separate.json')
addr_path_vk = os.path.join(folder_path, 'address_vk.json')
calldata_path = os.path.join(folder_path, 'calldata_separate.bytes')
with open(addr_path_verifier, 'r') as file:
addr_verifier = file.read().rstrip()
print(addr_verifier)
with open(addr_path_vk, 'r') as file:
addr_vk = file.read().rstrip()
print(addr_vk)
# res is now a vector of bytes
res = ezkl.encode_evm_calldata(proof_path, calldata_path, addr_vk=addr_vk)
assert os.path.isfile(calldata_path)
assert len(res) > 0
# TODO: without optimization there will be out of gas errors
# sol_code_path = os.path.join(folder_path, 'test.sol')
res = await ezkl.verify_evm(
addr_verifier,
proof_path,
rpc_url=anvil_url,
addr_vk=addr_vk,
# sol_code_path
# optimizer_runs
)
assert res == True
async def test_aggregate_and_verify_aggr():
data_path = os.path.join(