Compare commits

...

8 Commits

Author SHA1 Message Date
dante
df72e01414 feat: make selector compression optional (#696) 2024-01-24 00:09:00 +00:00
Tobin South
172e26c00d fix: link for CLI auto-install (#695) 2024-01-22 13:00:27 +00:00
Jason Morton
11ac120f23 fix: large test numbering(#689)
Co-authored-by: dante <45801863+alexander-camuto@users.noreply.github.com>
2024-01-21 21:01:46 +00:00
Jseam
0fdd92e9f3 fix: move install_ezkl_cli.sh into main repo (#694)
Co-authored-by: dante <45801863+alexander-camuto@users.noreply.github.com>
2024-01-21 20:59:39 +00:00
Alexander Camuto
31f58056a5 chore: bump py03 2024-01-21 20:58:32 +00:00
dante
ddbcc1d2d8 fix: calibration should only consider local scales (#691) 2024-01-18 23:28:49 +00:00
Vehorny
feccc5feed chore(examples): proofreading the notebooks (#687)
---------

Co-authored-by: dante <45801863+alexander-camuto@users.noreply.github.com>
2024-01-18 14:48:02 +00:00
dante
db24577c5d fix: calibrate from total min/max on lookups rather than individual x (#690) 2024-01-17 23:59:15 +00:00
60 changed files with 7073 additions and 13086 deletions

View File

@@ -6,7 +6,7 @@ on:
description: "Test scenario tags"
jobs:
large-tests:
runs-on: self-hosted
runs-on: kaiju
steps:
- uses: actions/checkout@v4
- uses: actions-rs/toolchain@v1
@@ -23,6 +23,6 @@ jobs:
- name: Self Attention KZG prove and verify large tests
run: cargo test --release --verbose tests::large_kzg_prove_and_verify_::large_tests_0_expects -- --include-ignored
- name: mobilenet Mock
run: cargo test --release --verbose tests::large_mock_::large_tests_2_expects -- --include-ignored
run: cargo test --release --verbose tests::large_mock_::large_tests_3_expects -- --include-ignored
- name: mobilenet KZG prove and verify large tests
run: cargo test --release --verbose tests::large_kzg_prove_and_verify_::large_tests_2_expects -- --include-ignored
run: cargo test --release --verbose tests::large_kzg_prove_and_verify_::large_tests_3_expects -- --include-ignored

View File

@@ -465,7 +465,7 @@ jobs:
# run: cargo nextest run --release --verbose tests::kzg_fuzz_ --test-threads 6
prove-and-verify-mock-aggr-tests:
runs-on: ubuntu-latest-32-cores
runs-on: self-hosted
needs: [build, library-tests]
steps:
- uses: actions/checkout@v4
@@ -611,27 +611,7 @@ jobs:
run: source .env/bin/activate; cargo nextest run --release --verbose tests::resources_accuracy_measurement_public_outputs_
python-integration-tests:
runs-on:
large-self-hosted
# Service containers to run with `container-job`
services:
# Label used to access the service container
postgres:
# Docker Hub image
image: postgres
env:
POSTGRES_USER: ubuntu
POSTGRES_HOST_AUTH_METHOD: trust
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
# Maps tcp port 5432 on service container to the host
- 5432:5432
# needs: [build, library-tests, docs]
runs-on: large-self-hosted
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
@@ -663,13 +643,14 @@ jobs:
# # now dump the contents of the file into a file called kaggle.json
# echo $KAGGLE_API_KEY > /home/ubuntu/.kaggle/kaggle.json
# chmod 600 /home/ubuntu/.kaggle/kaggle.json
- name: Tictactoe tutorials
run: source .env/bin/activate; cargo nextest run py_tests::tests::tictactoe_ --test-threads 1
# - name: Postgres tutorials
# run: source .env/bin/activate; cargo nextest run py_tests::tests::postgres_ --test-threads 1
- name: All notebooks
run: source .env/bin/activate; cargo nextest run py_tests::tests::run_notebook_ --test-threads 1
- name: NBEATS tutorial
run: source .env/bin/activate; cargo nextest run py_tests::tests::nbeats_
run: source .env/bin/activate; cargo nextest run py_tests::tests::run_notebook_ --no-capture
- name: Voice tutorial
run: source .env/bin/activate; cargo nextest run py_tests::tests::voice_
- name: NBEATS tutorial
run: source .env/bin/activate; cargo nextest run py_tests::tests::nbeats_
- name: Tictactoe tutorials
run: source .env/bin/activate; cargo nextest run py_tests::tests::tictactoe_ --no-capture
# - name: Postgres tutorials
# run: source .env/bin/activate; cargo nextest run py_tests::tests::postgres_ --test-threads 1

141
Cargo.lock generated
View File

@@ -1058,16 +1058,6 @@ dependencies = [
"itertools 0.10.5",
]
[[package]]
name = "crossbeam-channel"
version = "0.5.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200"
dependencies = [
"cfg-if",
"crossbeam-utils",
]
[[package]]
name = "crossbeam-deque"
version = "0.8.3"
@@ -1088,7 +1078,7 @@ dependencies = [
"autocfg",
"cfg-if",
"crossbeam-utils",
"memoffset 0.9.0",
"memoffset",
"scopeguard",
]
@@ -1378,7 +1368,7 @@ checksum = "68b0cf012f1230e43cd00ebb729c6bb58707ecfa8ad08b52ef3a4ccd2697fc30"
[[package]]
name = "ecc"
version = "0.1.0"
source = "git+https://github.com/zkonduit/halo2wrong?branch=ac/chunked-mv-lookup#c1d7551c82953829caee30fe218759b0d2657d26"
source = "git+https://github.com/zkonduit/halo2wrong?branch=ac/chunked-mv-lookup#b43ebe30e84825d0d004fa27803d99c4187d419f"
dependencies = [
"integer",
"num-bigint",
@@ -1862,7 +1852,7 @@ dependencies = [
"halo2_gadgets",
"halo2_proofs",
"halo2_solidity_verifier",
"halo2curves 0.1.0",
"halo2curves 0.6.0",
"hex",
"indicatif",
"instant",
@@ -2253,7 +2243,7 @@ dependencies = [
[[package]]
name = "halo2_gadgets"
version = "0.2.0"
source = "git+https://github.com/zkonduit/halo2?branch=ac/lookup-modularity#57b9123835aa7d8482f4182ede3e8f4b0aea5c0a"
source = "git+https://github.com/zkonduit/halo2?branch=main#6a2b9ada9804807ddba03bbadaf6e63822cec275"
dependencies = [
"arrayvec 0.7.4",
"bitvec 1.0.1",
@@ -2269,14 +2259,14 @@ dependencies = [
[[package]]
name = "halo2_proofs"
version = "0.2.0"
source = "git+https://github.com/zkonduit/halo2?branch=ac/lookup-modularity#57b9123835aa7d8482f4182ede3e8f4b0aea5c0a"
version = "0.3.0"
source = "git+https://github.com/zkonduit/halo2?branch=main#6a2b9ada9804807ddba03bbadaf6e63822cec275"
dependencies = [
"blake2b_simd",
"env_logger",
"ff",
"group",
"halo2curves 0.1.0",
"halo2curves 0.6.0",
"icicle",
"log",
"maybe-rayon",
@@ -2292,7 +2282,7 @@ dependencies = [
[[package]]
name = "halo2_solidity_verifier"
version = "0.1.0"
source = "git+https://github.com/alexander-camuto/halo2-solidity-verifier?branch=ac/lookup-modularity#cf9a3128bb583680dd4c418defd8d37bd8e5c3f1"
source = "git+https://github.com/alexander-camuto/halo2-solidity-verifier?branch=main#eb04be1f7d005e5b9dd3ff41efa30aeb5e0c34a3"
dependencies = [
"askama",
"blake2b_simd",
@@ -2319,8 +2309,6 @@ dependencies = [
"paste",
"rand 0.8.5",
"rand_core 0.6.4",
"serde",
"serde_arrays",
"static_assertions",
"subtle",
]
@@ -2343,10 +2331,35 @@ dependencies = [
"subtle",
]
[[package]]
name = "halo2curves"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3675880dc0cc7cd468943266297198a28f88210ba60ca5e0e04d121edf86b46"
dependencies = [
"blake2b_simd",
"ff",
"group",
"hex",
"lazy_static",
"num-bigint",
"num-traits",
"pairing",
"pasta_curves",
"paste",
"rand 0.8.5",
"rand_core 0.6.4",
"rayon",
"serde",
"serde_arrays",
"static_assertions",
"subtle",
]
[[package]]
name = "halo2wrong"
version = "0.1.0"
source = "git+https://github.com/zkonduit/halo2wrong?branch=ac/chunked-mv-lookup#c1d7551c82953829caee30fe218759b0d2657d26"
source = "git+https://github.com/zkonduit/halo2wrong?branch=ac/chunked-mv-lookup#b43ebe30e84825d0d004fa27803d99c4187d419f"
dependencies = [
"halo2_proofs",
"num-bigint",
@@ -2423,6 +2436,9 @@ name = "hex"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
dependencies = [
"serde",
]
[[package]]
name = "hex-literal"
@@ -2668,9 +2684,9 @@ dependencies = [
[[package]]
name = "indoc"
version = "1.0.9"
version = "2.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfa799dd5ed20a7e349f3b4639aa80d74549c81716d9ec4f994c9b5815598306"
checksum = "1e186cfbae8084e513daff4240b4797e342f988cecda4fb6c939150f96315fd8"
[[package]]
name = "inout"
@@ -2696,7 +2712,7 @@ dependencies = [
[[package]]
name = "integer"
version = "0.1.0"
source = "git+https://github.com/zkonduit/halo2wrong?branch=ac/chunked-mv-lookup#c1d7551c82953829caee30fe218759b0d2657d26"
source = "git+https://github.com/zkonduit/halo2wrong?branch=ac/chunked-mv-lookup#b43ebe30e84825d0d004fa27803d99c4187d419f"
dependencies = [
"maingate",
"num-bigint",
@@ -2941,7 +2957,7 @@ checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4"
[[package]]
name = "maingate"
version = "0.1.0"
source = "git+https://github.com/zkonduit/halo2wrong?branch=ac/chunked-mv-lookup#c1d7551c82953829caee30fe218759b0d2657d26"
source = "git+https://github.com/zkonduit/halo2wrong?branch=ac/chunked-mv-lookup#b43ebe30e84825d0d004fa27803d99c4187d419f"
dependencies = [
"halo2wrong",
"num-bigint",
@@ -3001,15 +3017,6 @@ dependencies = [
"libc",
]
[[package]]
name = "memoffset"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1"
dependencies = [
"autocfg",
]
[[package]]
name = "memoffset"
version = "0.9.0"
@@ -3335,6 +3342,15 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
[[package]]
name = "pairing"
version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81fec4625e73cf41ef4bb6846cafa6d44736525f442ba45e407c4a000a13996f"
dependencies = [
"group",
]
[[package]]
name = "papergrid"
version = "0.9.1"
@@ -3840,14 +3856,14 @@ dependencies = [
[[package]]
name = "pyo3"
version = "0.18.3"
version = "0.20.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3b1ac5b3731ba34fdaa9785f8d74d17448cd18f30cf19e0c7e7b1fdb5272109"
checksum = "9a89dc7a5850d0e983be1ec2a463a171d20990487c3cfcd68b5363f1ee3d6fe0"
dependencies = [
"cfg-if",
"indoc",
"libc",
"memoffset 0.8.0",
"memoffset",
"parking_lot",
"pyo3-build-config",
"pyo3-ffi",
@@ -3857,9 +3873,9 @@ dependencies = [
[[package]]
name = "pyo3-asyncio"
version = "0.18.0"
version = "0.20.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3564762e37035cfc486228e10b0528460fa026d681b5763873c693aa0d5c260"
checksum = "6ea6b68e93db3622f3bb3bf363246cf948ed5375afe7abff98ccbdd50b184995"
dependencies = [
"futures",
"once_cell",
@@ -3871,9 +3887,9 @@ dependencies = [
[[package]]
name = "pyo3-asyncio-macros"
version = "0.18.0"
version = "0.20.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be72d4cd43a27530306bd0d20d3932182fbdd072c6b98d3638bc37efb9d559dd"
checksum = "56c467178e1da6252c95c29ecf898b133f742e9181dca5def15dc24e19d45a39"
dependencies = [
"proc-macro2",
"quote",
@@ -3882,9 +3898,9 @@ dependencies = [
[[package]]
name = "pyo3-build-config"
version = "0.18.3"
version = "0.20.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9cb946f5ac61bb61a5014924910d936ebd2b23b705f7a4a3c40b05c720b079a3"
checksum = "07426f0d8fe5a601f26293f300afd1a7b1ed5e78b2a705870c5f30893c5163be"
dependencies = [
"once_cell",
"target-lexicon",
@@ -3892,9 +3908,9 @@ dependencies = [
[[package]]
name = "pyo3-ffi"
version = "0.18.3"
version = "0.20.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd4d7c5337821916ea2a1d21d1092e8443cf34879e53a0ac653fbb98f44ff65c"
checksum = "dbb7dec17e17766b46bca4f1a4215a85006b4c2ecde122076c562dd058da6cf1"
dependencies = [
"libc",
"pyo3-build-config",
@@ -3902,9 +3918,9 @@ dependencies = [
[[package]]
name = "pyo3-log"
version = "0.8.2"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c94ff6535a6bae58d7d0b85e60d4c53f7f84d0d0aa35d6a28c3f3e70bfe51444"
checksum = "4c10808ee7250403bedb24bc30c32493e93875fef7ba3e4292226fe924f398bd"
dependencies = [
"arc-swap",
"log",
@@ -3913,25 +3929,26 @@ dependencies = [
[[package]]
name = "pyo3-macros"
version = "0.18.3"
version = "0.20.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a9d39c55dab3fc5a4b25bbd1ac10a2da452c4aca13bb450f22818a002e29648d"
checksum = "05f738b4e40d50b5711957f142878cfa0f28e054aa0ebdfc3fd137a843f74ed3"
dependencies = [
"proc-macro2",
"pyo3-macros-backend",
"quote",
"syn 1.0.109",
"syn 2.0.22",
]
[[package]]
name = "pyo3-macros-backend"
version = "0.18.3"
version = "0.20.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97daff08a4c48320587b5224cc98d609e3c27b6d437315bd40b605c98eeb5918"
checksum = "0fc910d4851847827daf9d6cdd4a823fbdaab5b8818325c5e97a86da79e8881f"
dependencies = [
"heck",
"proc-macro2",
"quote",
"syn 1.0.109",
"syn 2.0.22",
]
[[package]]
@@ -4040,9 +4057,9 @@ checksum = "60a357793950651c4ed0f3f52338f53b2f809f32d83a07f72909fa13e4c6c1e3"
[[package]]
name = "rayon"
version = "1.7.0"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d2df5196e37bcc87abebc0053e20787d73847bb33134a69841207dd0a47f03b"
checksum = "fa7237101a77a10773db45d62004a272517633fbcc3df19d96455ede1122e051"
dependencies = [
"either",
"rayon-core",
@@ -4050,14 +4067,12 @@ dependencies = [
[[package]]
name = "rayon-core"
version = "1.11.0"
version = "1.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b8f95bd6966f5c87776639160a66bd8ab9895d9d4ab01ddba9fc60661aebe8d"
checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2"
dependencies = [
"crossbeam-channel",
"crossbeam-deque",
"crossbeam-utils",
"num_cpus",
]
[[package]]
@@ -4745,11 +4760,11 @@ dependencies = [
[[package]]
name = "snark-verifier"
version = "0.1.1"
source = "git+https://github.com/zkonduit/snark-verifier?branch=ac/chunked-mv-lookup#22ee76bee1a24f3732e994b72b10ec09939348de"
source = "git+https://github.com/zkonduit/snark-verifier?branch=ac/chunked-mv-lookup#574b65ea6b4d43eebac5565146519a95b435815c"
dependencies = [
"ecc",
"halo2_proofs",
"halo2curves 0.1.0",
"halo2curves 0.6.0",
"hex",
"itertools 0.10.5",
"lazy_static",
@@ -5552,9 +5567,9 @@ checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
[[package]]
name = "unindent"
version = "0.1.11"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1766d682d402817b5ac4490b3c3002d91dfa0d22812f341609f97b08757359c"
checksum = "c7de7d73e1754487cb58364ee906a499937a0dfabd86bcb980fa99ec8c8fa2ce"
[[package]]
name = "unzip-n"

View File

@@ -15,9 +15,9 @@ crate-type = ["cdylib", "rlib"]
[dependencies]
halo2_gadgets = { git = "https://github.com/zkonduit/halo2", branch= "ac/lookup-modularity" }
halo2_proofs = { git = "https://github.com/zkonduit/halo2", branch= "ac/lookup-modularity" }
halo2curves = { version = "0.1.0" }
halo2_gadgets = { git = "https://github.com/zkonduit/halo2", branch= "main" }
halo2_proofs = { git = "https://github.com/zkonduit/halo2", branch= "main" }
halo2curves = { version = "0.6.0", features = ["derive_serde"] }
rand = { version = "0.8", default_features = false }
itertools = { version = "0.10.3", default_features = false }
clap = { version = "4.3.3", features = ["derive"]}
@@ -28,7 +28,7 @@ thiserror = { version = "1.0.38", default_features = false }
hex = { version = "0.4.3", default_features = false }
halo2_wrong_ecc = { git = "https://github.com/zkonduit/halo2wrong", branch = "ac/chunked-mv-lookup", package = "ecc" }
snark-verifier = { git = "https://github.com/zkonduit/snark-verifier", branch = "ac/chunked-mv-lookup", features=["derive_serde"]}
halo2_solidity_verifier = { git = "https://github.com/alexander-camuto/halo2-solidity-verifier", branch= "ac/lookup-modularity" }
halo2_solidity_verifier = { git = "https://github.com/alexander-camuto/halo2-solidity-verifier", branch= "main" }
maybe-rayon = { version = "0.1.1", default_features = false }
bincode = { version = "1.3.3", default_features = false }
ark-std = { version = "^0.3.0", default-features = false }
@@ -51,9 +51,9 @@ plotters = { version = "0.3.0", default_features = false, optional = true }
regex = { version = "1", default_features = false }
tokio = { version = "1.26.0", default_features = false, features = ["macros", "rt"] }
tokio-util = { version = "0.7.9", features = ["codec"] }
pyo3 = { version = "0.18.3", features = ["extension-module", "abi3-py37", "macros"], default_features = false, optional = true }
pyo3-asyncio = { version = "0.18.0", features = ["attributes", "tokio-runtime"], default_features = false, optional = true }
pyo3-log = { version = "0.8.1", default_features = false, optional = true }
pyo3 = { version = "0.20.2", features = ["extension-module", "abi3-py37", "macros"], default_features = false, optional = true }
pyo3-asyncio = { version = "0.20.0", features = ["attributes", "tokio-runtime"], default_features = false, optional = true }
pyo3-log = { version = "0.9.0", default_features = false, optional = true }
tract-onnx = { git = "https://github.com/sonos/tract/", rev= "7b1aa33b2f7d1f19b80e270c83320f0f94daff69", default_features = false, optional = true }
tabled = { version = "0.12.0", optional = true }

View File

@@ -64,8 +64,8 @@ More notebook tutorials can be found within `examples/notebooks`.
#### CLI
Install the CLI
```bash
curl https://hub.ezkl.xyz/install_ezkl_cli.sh | bash
``` shell
curl https://raw.githubusercontent.com/zkonduit/ezkl/main/install_ezkl_cli.sh | bash
```
https://user-images.githubusercontent.com/45801863/236771676-5bbbbfd1-ba6f-418a-902e-20738ce0e9f0.mp4

View File

@@ -121,13 +121,16 @@ fn runcnvrl(c: &mut Criterion) {
group.throughput(Throughput::Elements(*size as u64));
group.bench_with_input(BenchmarkId::new("pk", size), &size, |b, &_| {
b.iter(|| {
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params)
.unwrap();
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(
&circuit, &params, true,
)
.unwrap();
});
});
let pk = create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params)
.unwrap();
let pk =
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params, true)
.unwrap();
group.throughput(Throughput::Elements(*size as u64));
group.bench_with_input(BenchmarkId::new("prove", size), &size, |b, &_| {

View File

@@ -90,13 +90,13 @@ fn rundot(c: &mut Criterion) {
group.throughput(Throughput::Elements(len as u64));
group.bench_with_input(BenchmarkId::new("pk", len), &len, |b, &_| {
b.iter(|| {
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params)
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params, true)
.unwrap();
});
});
let pk =
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params).unwrap();
let pk = create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params, true)
.unwrap();
group.throughput(Throughput::Elements(len as u64));
group.bench_with_input(BenchmarkId::new("prove", len), &len, |b, &_| {

View File

@@ -94,13 +94,13 @@ fn runmatmul(c: &mut Criterion) {
group.throughput(Throughput::Elements(len as u64));
group.bench_with_input(BenchmarkId::new("pk", len), &len, |b, &_| {
b.iter(|| {
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params)
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params, true)
.unwrap();
});
});
let pk =
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params).unwrap();
let pk = create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params, true)
.unwrap();
group.throughput(Throughput::Elements(len as u64));
group.bench_with_input(BenchmarkId::new("prove", len), &len, |b, &_| {

View File

@@ -111,13 +111,13 @@ fn runmatmul(c: &mut Criterion) {
group.throughput(Throughput::Elements(len as u64));
group.bench_with_input(BenchmarkId::new("pk", len), &len, |b, &_| {
b.iter(|| {
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params)
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params, true)
.unwrap();
});
});
let pk =
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params).unwrap();
let pk = create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params, true)
.unwrap();
group.throughput(Throughput::Elements(len as u64));
group.bench_with_input(BenchmarkId::new("prove", len), &len, |b, &_| {

View File

@@ -114,13 +114,13 @@ fn runmatmul(c: &mut Criterion) {
group.throughput(Throughput::Elements(k as u64));
group.bench_with_input(BenchmarkId::new("pk", k), &k, |b, &_| {
b.iter(|| {
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params)
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params, true)
.unwrap();
});
});
let pk =
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params).unwrap();
let pk = create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params, true)
.unwrap();
group.throughput(Throughput::Elements(k as u64));
group.bench_with_input(BenchmarkId::new("prove", k), &k, |b, &_| {

View File

@@ -86,13 +86,13 @@ fn runsum(c: &mut Criterion) {
group.throughput(Throughput::Elements(len as u64));
group.bench_with_input(BenchmarkId::new("pk", len), &len, |b, &_| {
b.iter(|| {
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params)
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params, true)
.unwrap();
});
});
let pk =
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params).unwrap();
let pk = create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params, true)
.unwrap();
group.throughput(Throughput::Elements(len as u64));
group.bench_with_input(BenchmarkId::new("prove", len), &len, |b, &_| {

View File

@@ -101,13 +101,16 @@ fn runsumpool(c: &mut Criterion) {
group.throughput(Throughput::Elements(*size as u64));
group.bench_with_input(BenchmarkId::new("pk", size), &size, |b, &_| {
b.iter(|| {
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params)
.unwrap();
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(
&circuit, &params, true,
)
.unwrap();
});
});
let pk = create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params)
.unwrap();
let pk =
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params, true)
.unwrap();
group.throughput(Throughput::Elements(*size as u64));
group.bench_with_input(BenchmarkId::new("prove", size), &size, |b, &_| {

View File

@@ -84,13 +84,13 @@ fn runadd(c: &mut Criterion) {
group.throughput(Throughput::Elements(len as u64));
group.bench_with_input(BenchmarkId::new("pk", len), &len, |b, &_| {
b.iter(|| {
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params)
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params, true)
.unwrap();
});
});
let pk =
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params).unwrap();
let pk = create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params, true)
.unwrap();
group.throughput(Throughput::Elements(len as u64));
group.bench_with_input(BenchmarkId::new("prove", len), &len, |b, &_| {

View File

@@ -83,13 +83,13 @@ fn runpow(c: &mut Criterion) {
group.throughput(Throughput::Elements(len as u64));
group.bench_with_input(BenchmarkId::new("pk", len), &len, |b, &_| {
b.iter(|| {
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params)
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params, true)
.unwrap();
});
});
let pk =
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params).unwrap();
let pk = create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params, true)
.unwrap();
group.throughput(Throughput::Elements(len as u64));
group.bench_with_input(BenchmarkId::new("prove", len), &len, |b, &_| {

View File

@@ -76,13 +76,13 @@ fn runposeidon(c: &mut Criterion) {
group.throughput(Throughput::Elements(*size as u64));
group.bench_with_input(BenchmarkId::new("pk", size), &size, |b, &_| {
b.iter(|| {
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params)
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params, true)
.unwrap();
});
});
let pk =
create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params).unwrap();
let pk = create_keys::<KZGCommitmentScheme<Bn256>, Fr, MyCircuit>(&circuit, &params, true)
.unwrap();
group.throughput(Throughput::Elements(*size as u64));
group.bench_with_input(BenchmarkId::new("prove", size), &size, |b, &_| {

View File

@@ -90,13 +90,13 @@ fn runrelu(c: &mut Criterion) {
group.throughput(Throughput::Elements(len as u64));
group.bench_with_input(BenchmarkId::new("pk", len), &len, |b, &_| {
b.iter(|| {
create_keys::<KZGCommitmentScheme<Bn256>, Fr, NLCircuit>(&circuit, &params)
create_keys::<KZGCommitmentScheme<Bn256>, Fr, NLCircuit>(&circuit, &params, true)
.unwrap();
});
});
let pk =
create_keys::<KZGCommitmentScheme<Bn256>, Fr, NLCircuit>(&circuit, &params).unwrap();
let pk = create_keys::<KZGCommitmentScheme<Bn256>, Fr, NLCircuit>(&circuit, &params, true)
.unwrap();
group.throughput(Throughput::Elements(len as u64));
group.bench_with_input(BenchmarkId::new("prove", len), &len, |b, &_| {

View File

@@ -271,7 +271,7 @@
"The graph input for on chain data sources is formatted completely differently compared to file based data sources.\n",
"\n",
"- For file data sources, the raw floating point values that eventually get quantized, converted into field elements and stored in `witness.json` to be consumed by the circuit are stored. The output data contains the expected floating point values returned as outputs from running your vanilla pytorch model on the given inputs.\n",
"- For on chain data sources, the input_data field contains all the data necessary to read and format the on chain data into something digestable by EZKL (aka field elemenets :-D). \n",
"- For on chain data sources, the input_data field contains all the data necessary to read and format the on chain data into something digestable by EZKL (aka field elements :-D). \n",
"Here is what the schema for an on-chain data source graph input file should look like:\n",
" \n",
"```json\n",

View File

@@ -309,7 +309,7 @@
"metadata": {},
"outputs": [],
"source": [
"print(ezkl.vecu64_to_felt(res['processed_outputs']['poseidon_hash'][0]))"
"print(ezkl.string_to_felt(res['processed_outputs']['poseidon_hash'][0]))"
]
},
{
@@ -338,7 +338,7 @@
"\n",
"def test_on_chain_data(res):\n",
" # Step 0: Convert the tensor to a flat list\n",
" data = [int(ezkl.vecu64_to_felt(res['processed_outputs']['poseidon_hash'][0]), 0)]\n",
" data = [int(ezkl.string_to_felt(res['processed_outputs']['poseidon_hash'][0]), 0)]\n",
"\n",
" # Step 1: Prepare the data\n",
" # Step 2: Prepare and compile the contract.\n",

View File

@@ -42,7 +42,7 @@
},
{
"cell_type": "code",
"execution_count": 1,
"execution_count": null,
"metadata": {
"id": "gvQ5HL1bTDWF"
},
@@ -441,9 +441,9 @@
"# Serialize calibration data into file:\n",
"json.dump(data, open(cal_data_path, 'w'))\n",
"\n",
"# Optimize for resources, we cap logrows at 17 to reduce setup and proving time, at the expense of accuracy\n",
"# Optimize for resources, we cap logrows at 12 to reduce setup and proving time, at the expense of accuracy\n",
"# You may want to increase the max logrows if accuracy is a concern\n",
"res = ezkl.calibrate_settings(cal_data_path, model_path, settings_path, \"resources\", max_logrows = 17)"
"res = ezkl.calibrate_settings(cal_data_path, model_path, settings_path, \"resources\", max_logrows = 12, scales = [2])"
]
},
{
@@ -508,9 +508,8 @@
" compiled_model_path,\n",
" vk_path,\n",
" pk_path,\n",
" \n",
" )\n",
" \n",
"\n",
"\n",
"assert res == True\n",
"assert os.path.isfile(vk_path)\n",
@@ -565,7 +564,6 @@
" compiled_model_path,\n",
" pk_path,\n",
" proof_path,\n",
" \n",
" \"single\",\n",
" )\n",
"\n",
@@ -697,7 +695,7 @@
"formatted_output = \"[\"\n",
"for i, value in enumerate(proof[\"instances\"]):\n",
" for j, field_element in enumerate(value):\n",
" onchain_input_array.append(ezkl.vecu64_to_felt(field_element))\n",
" onchain_input_array.append(ezkl.string_to_felt(field_element))\n",
" formatted_output += str(onchain_input_array[-1])\n",
" if j != len(value) - 1:\n",
" formatted_output += \", \"\n",

View File

@@ -7,7 +7,7 @@
"source": [
"# kzg-ezkl\n",
"\n",
"Here's an example leveraging EZKL whereby the inputs to the model, and the model params themselves, are commited to using kzg-commitments inside a circuit.\n",
"Here's an example leveraging EZKL whereby the inputs to the model, and the model params themselves, are committed to using kzg-commitments inside a circuit.\n",
"\n",
"In this setup:\n",
"- the commitments are publicly known to the prover and verifier\n",
@@ -166,7 +166,7 @@
"Shoutouts: \n",
"\n",
"- [summa-solvency](https://github.com/summa-dev/summa-solvency) for their help with the poseidon hashing chip. \n",
"- [timeofey](https://github.com/timoftime) for providing inspiration in our developement of the el-gamal encryption circuit in Halo2. "
"- [timeofey](https://github.com/timoftime) for providing inspiration in our development of the el-gamal encryption circuit in Halo2. "
]
},
{

View File

@@ -300,13 +300,14 @@
},
{
"cell_type": "code",
"execution_count": 9,
"execution_count": 20,
"metadata": {},
"outputs": [],
"source": [
"# iterate over each submodel gen-settings, compile circuit and setup zkSNARK\n",
"\n",
"def setup(i):\n",
" print(\"Setting up split model \"+str(i))\n",
" # file names\n",
" model_path = os.path.join('network_split_'+str(i)+'.onnx')\n",
" settings_path = os.path.join('settings_split_'+str(i)+'.json')\n",
@@ -342,12 +343,12 @@
" compiled_model_path,\n",
" vk_path,\n",
" pk_path,\n",
" compress_selectors=True,\n",
" )\n",
"\n",
" assert res == True\n",
" assert os.path.isfile(vk_path)\n",
" assert os.path.isfile(pk_path)\n",
" \n",
" res = ezkl.gen_witness(data_path, compiled_model_path, witness_path, vk_path)\n",
" run_args.input_scale = settings[\"model_output_scales\"][0]\n",
"\n",
@@ -383,7 +384,6 @@
" compiled_model_path,\n",
" pk_path,\n",
" proof_path,\n",
" \n",
" \"for-aggr\",\n",
" )\n",
"\n",
@@ -413,7 +413,6 @@
" proof_path,\n",
" settings_path,\n",
" vk_path,\n",
" \n",
" )\n",
"\n",
" assert res == True\n",
@@ -442,7 +441,7 @@
" proof_path = os.path.join('proof_split_'+str(i)+'.json')\n",
" proofs.append(proof_path)\n",
"\n",
"ezkl.mock_aggregate(proofs, logrows=23, split_proofs = True)"
"ezkl.mock_aggregate(proofs, logrows=22, split_proofs = True)"
]
}
],

View File

@@ -780,7 +780,7 @@
"pk_path = os.path.join('test.pk')\n",
"vk_path = os.path.join('test.vk')\n",
"settings_path = os.path.join('settings.json')\n",
"",
"\n",
"witness_path = os.path.join('witness.json')\n",
"data_path = os.path.join('input.json')"
]
@@ -845,7 +845,7 @@
"res = ezkl.gen_settings(model_path, settings_path)\n",
"assert res == True\n",
"\n",
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\", max_logrows = 20, scales = [5,6])\n",
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\", max_logrows = 20, scales = [3])\n",
"assert res == True"
]
},
@@ -887,11 +887,28 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 30,
"metadata": {
"id": "12YIcFr85X9-"
},
"outputs": [],
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"spawning module 2\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"quotient_poly_degree 4\n",
"n 262144\n",
"extended_k 20\n"
]
}
],
"source": [
"res = ezkl.setup(\n",
" compiled_model_path,\n",
@@ -971,9 +988,9 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.13"
"version": "3.9.15"
}
},
"nbformat": 4,
"nbformat_minor": 0
}
}

File diff suppressed because one or more lines are too long

View File

@@ -302,7 +302,7 @@
" assert res == True\n",
" assert os.path.isfile(vk_path)\n",
" assert os.path.isfile(pk_path)\n",
" \n",
"\n",
" res = ezkl.gen_witness(data_path, compiled_model_path, witness_path, vk_path)\n",
" run_args.input_scale = settings[\"model_output_scales\"][0]\n",
"\n",
@@ -330,14 +330,14 @@
" compiled_model_path,\n",
" pk_path,\n",
" proof_path,\n",
" \n",
" \"for-aggr\",\n",
" )\n",
"\n",
" print(res)\n",
" res_1_proof = res[\"proof\"]\n",
" assert os.path.isfile(proof_path)\n",
"\n",
" # Verify the proof\n",
" # # Verify the proof\n",
" if i > 0:\n",
" print(\"swapping commitments\")\n",
" # swap the proof commitments if we are not the first model\n",
@@ -356,12 +356,19 @@
"\n",
" res = ezkl.swap_proof_commitments(proof_path, witness_path)\n",
" print(res)\n",
" \n",
" # load proof and then print \n",
" proof = json.load(open(proof_path, 'r'))\n",
" res_2_proof = proof[\"hex_proof\"]\n",
" # show diff in hex strings\n",
" print(res_1_proof)\n",
" print(res_2_proof)\n",
" assert res_1_proof == res_2_proof\n",
"\n",
" res = ezkl.verify(\n",
" proof_path,\n",
" settings_path,\n",
" vk_path,\n",
" \n",
" )\n",
"\n",
" assert res == True\n",
@@ -439,7 +446,7 @@
" proof_path = os.path.join('proof_split_'+str(i)+'.json')\n",
" proofs.append(proof_path)\n",
"\n",
"ezkl.mock_aggregate(proofs, logrows=23, split_proofs = True)"
"ezkl.mock_aggregate(proofs, logrows=22, split_proofs = True)"
]
}
],

View File

@@ -78,7 +78,7 @@
"pk_path = os.path.join('test.pk')\n",
"vk_path = os.path.join('test.vk')\n",
"settings_path = os.path.join('settings.json')\n",
"",
"\n",
"witness_path = os.path.join('witness.json')\n",
"data_path = os.path.join('input.json')"
]
@@ -122,8 +122,8 @@
"# Loop through each element in the y tensor\n",
"for e in y_input:\n",
" # Apply the custom function and append the result to the list\n",
" print(ezkl.float_to_vecu64(e,7))\n",
" result.append(ezkl.poseidon_hash([ezkl.float_to_vecu64(e, 7)])[0])\n",
" print(ezkl.float_to_string(e,7))\n",
" result.append(ezkl.poseidon_hash([ezkl.float_to_string(e, 7)])[0])\n",
"\n",
"y = y.unsqueeze(0)\n",
"y = y.reshape(1, 9)\n",
@@ -343,7 +343,7 @@
"# we force the output to be 0 this corresponds to the set membership test being true -- and we set this to a fixed vis output\n",
"# this means that the output is fixed and the verifier can see it but that if the input is not in the set the output will not be 0 and the verifier will reject\n",
"witness = json.load(open(witness_path, \"r\"))\n",
"witness[\"outputs\"][0] = [[0, 0, 0, 0]]\n",
"witness[\"outputs\"][0] = [\"0000000000000000000000000000000000000000000000000000000000000000\"]\n",
"json.dump(witness, open(witness_path, \"w\"))\n",
"\n",
"witness = json.load(open(witness_path, \"r\"))\n",
@@ -353,7 +353,6 @@
" compiled_model_path,\n",
" vk_path,\n",
" pk_path,\n",
" \n",
" witness_path = witness_path,\n",
" )\n",
"\n",
@@ -520,4 +519,4 @@
},
"nbformat": 4,
"nbformat_minor": 5
}
}

View File

@@ -8,7 +8,7 @@
"source": [
"## EZKL Jupyter Notebook Demo \n",
"\n",
"Here we demonstrate how to use the EZKL package to run a publicly known / committted to network on some private data, producing a public output.\n"
"Here we demonstrate how to use the EZKL package to run a publicly known / committed to network on some private data, producing a public output.\n"
]
},
{
@@ -210,7 +210,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 9,
"id": "b1c561a8",
"metadata": {},
"outputs": [],

View File

@@ -126,7 +126,7 @@
"# Loop through each element in the y tensor\n",
"for e in user_preimages:\n",
" # Apply the custom function and append the result to the list\n",
" users.append(ezkl.poseidon_hash([ezkl.float_to_vecu64(e, 0)])[0])\n",
" users.append(ezkl.poseidon_hash([ezkl.float_to_string(e, 0)])[0])\n",
"\n",
"users_t = torch.tensor(user_preimages)\n",
"users_t = users_t.reshape(1, 6)\n",
@@ -303,7 +303,7 @@
"# we force the output to be 1 this corresponds to the solvency test being true -- and we set this to a fixed vis output\n",
"# this means that the output is fixed and the verifier can see it but that if the input is not in the set the output will not be 0 and the verifier will reject\n",
"witness = json.load(open(witness_path, \"r\"))\n",
"witness[\"outputs\"][0] = [ezkl.float_to_vecu64(1.0, 0)]\n",
"witness[\"outputs\"][0] = [ezkl.float_to_string(1.0, 0)]\n",
"json.dump(witness, open(witness_path, \"w\"))"
]
},
@@ -417,7 +417,7 @@
"# we force the output to be 1 this corresponds to the solvency test being true -- and we set this to a fixed vis output\n",
"# this means that the output is fixed and the verifier can see it but that if the input is not in the set the output will not be 0 and the verifier will reject\n",
"witness = json.load(open(witness_path, \"r\"))\n",
"witness[\"outputs\"][0] = [ezkl.float_to_vecu64(1.0, 0)]\n",
"witness[\"outputs\"][0] = [ezkl.float_to_string(1.0, 0)]\n",
"json.dump(witness, open(witness_path, \"w\"))\n"
]
},

View File

@@ -633,7 +633,7 @@
"json.dump(data, open(cal_path, 'w'))\n",
"\n",
"\n",
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\", scales = [4])"
]
},
{

View File

@@ -154,7 +154,7 @@
"source": [
"## Create a neural net to verify the execution of the tic tac toe model\n",
"\n",
"1. Given the data generated above classify whether the tic tac toe games are valid. This approach uses a binary classification as the tic tac toe state space is fairly small. For larger state spaces we will want to use anomaly detection based approachs"
"1. Given the data generated above classify whether the tic tac toe games are valid. This approach uses a binary classification as the tic tac toe state space is fairly small. For larger state spaces, we will want to use anomaly detection based approaches."
]
},
{
@@ -520,7 +520,7 @@
"json.dump(data, open(cal_path, 'w'))\n",
"\n",
"\n",
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\", scales = [4])"
]
},
{
@@ -636,7 +636,8 @@
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3"
"pygments_lexer": "ipython3",
"version": "3.9.15"
}
},
"nbformat": 4,

View File

@@ -237,7 +237,7 @@
"\n",
"ezkl.gen_settings(onnx_filename, settings_filename)\n",
"ezkl.calibrate_settings(\n",
" input_filename, onnx_filename, settings_filename, \"resources\")\n",
" input_filename, onnx_filename, settings_filename, \"resources\", scales = [4])\n",
"res = ezkl.get_srs(settings_filename)\n",
"ezkl.compile_circuit(onnx_filename, compiled_filename, settings_filename)\n",
"\n",
@@ -255,7 +255,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 7,
"metadata": {
"id": "fULvvnK7_CMb"
},
@@ -451,7 +451,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.13"
"version": "3.9.15"
}
},
"nbformat": 4,

View File

@@ -25,17 +25,9 @@
},
{
"cell_type": "code",
"execution_count": 1,
"execution_count": null,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"voice_data_dir: .\n"
]
}
],
"outputs": [],
"source": [
"\n",
"import os\n",
@@ -43,7 +35,7 @@
"\n",
"voice_data_dir = os.environ.get('VOICE_DATA_DIR')\n",
"\n",
"# if is none set to \"\" \n",
"# if is none set to \"\"\n",
"if voice_data_dir is None:\n",
" voice_data_dir = \"\"\n",
"\n",
@@ -637,7 +629,7 @@
"source": [
"\n",
"\n",
"res = ezkl.calibrate_settings(val_data, model_path, settings_path, \"resources\")\n",
"res = ezkl.calibrate_settings(val_data, model_path, settings_path, \"resources\", scales = [4])\n",
"assert res == True\n",
"print(\"verified\")\n"
]
@@ -908,7 +900,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.13"
"version": "3.9.15"
}
},
"nbformat": 4,

View File

@@ -49,7 +49,7 @@
"import torch\n",
"import math\n",
"\n",
"# these are constatns for the rotation\n",
"# these are constants for the rotation\n",
"phi = torch.tensor(5 * math.pi / 180)\n",
"s = torch.sin(phi)\n",
"c = torch.cos(phi)\n",
@@ -503,11 +503,11 @@
"pyplot.arrow(0, 0, 1, 0, width=0.02, alpha=0.5)\n",
"pyplot.arrow(0, 0, 0, 1, width=0.02, alpha=0.5)\n",
"\n",
"arrow_x = ezkl.vecu64_to_float(witness['outputs'][0][0], out_scale)\n",
"arrow_y = ezkl.vecu64_to_float(witness['outputs'][0][1], out_scale)\n",
"arrow_x = ezkl.string_to_float(witness['outputs'][0][0], out_scale)\n",
"arrow_y = ezkl.string_to_float(witness['outputs'][0][1], out_scale)\n",
"pyplot.arrow(0, 0, arrow_x, arrow_y, width=0.02)\n",
"arrow_x = ezkl.vecu64_to_float(witness['outputs'][0][2], out_scale)\n",
"arrow_y = ezkl.vecu64_to_float(witness['outputs'][0][3], out_scale)\n",
"arrow_x = ezkl.string_to_float(witness['outputs'][0][2], out_scale)\n",
"arrow_y = ezkl.string_to_float(witness['outputs'][0][3], out_scale)\n",
"pyplot.arrow(0, 0, arrow_x, arrow_y, width=0.02)"
]
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

170
install_ezkl_cli.sh Normal file
View File

@@ -0,0 +1,170 @@
#!/usr/bin/env bash
# Installer for the ezkl CLI.
#
# Usage: install_ezkl_cli.sh [version-tag]
#   With no argument, the latest GitHub release is installed; otherwise the
#   release matching the given tag is installed.
#
# The binary is unpacked into $EZKL_DIR (default: $XDG_CONFIG_HOME/.ezkl or
# $HOME/.ezkl) and that directory is appended to PATH in the user's shell
# profile if not already present.
set -e

BASE_DIR=${XDG_CONFIG_HOME:-$HOME}
EZKL_DIR=${EZKL_DIR-"$BASE_DIR/.ezkl"}

# Create the .ezkl bin directory if it doesn't exist
mkdir -p "$EZKL_DIR"

# Store the correct profile file (i.e. .profile for bash or .zshenv for ZSH).
case $SHELL in
*/zsh)
    PROFILE=${ZDOTDIR-"$HOME"}/.zshenv
    PREF_SHELL=zsh
    ;;
*/bash)
    PROFILE=$HOME/.bashrc
    PREF_SHELL=bash
    ;;
*/fish)
    PROFILE=$HOME/.config/fish/config.fish
    PREF_SHELL=fish
    ;;
*/ash)
    PROFILE=$HOME/.profile
    PREF_SHELL=ash
    ;;
*)
    echo "NOTICE: Shell could not be detected, you will need to manually add ${EZKL_DIR} to your PATH."
esac

# Check for non standard installation of ezkl
if [ "$(which ezkl)s" != "s" ] && [ "$(which ezkl)" != "$EZKL_DIR/ezkl" ] ; then
    echo "ezkl is installed in a non-standard directory, $(which ezkl). To use the automated installer, remove the existing ezkl from path to prevent conflicts"
    exit 1
fi

# BUGFIX: the variable was previously misspelled (${EZKl_DIR}), so this check
# always failed and the export line was appended to the profile on every run.
if [[ ":$PATH:" != *":${EZKL_DIR}:"* ]]; then
    # Add the ezkl directory to the path and ensure the old PATH variables remain.
    echo >> "$PROFILE" && echo "export PATH=\"\$PATH:$EZKL_DIR\"" >> "$PROFILE"
fi

# Install latest ezkl version
# Get the right release URL
if [ -z "$1" ]
then
    RELEASE_URL="https://api.github.com/repos/zkonduit/ezkl/releases/latest"
    echo "No version tags provided, installing the latest ezkl version"
else
    RELEASE_URL="https://api.github.com/repos/zkonduit/ezkl/releases/tags/$1"
    echo "Installing ezkl version $1"
fi

PLATFORM=""
case "$(uname -s)" in
    Darwin*)
        PLATFORM="macos"
        ;;
    Linux*Microsoft*)
        PLATFORM="linux"
        ;;
    Linux*)
        PLATFORM="linux"
        ;;
    CYGWIN*|MINGW*|MINGW32*|MSYS*)
        PLATFORM="windows-msvc"
        ;;
    *)
        echo "Platform is not supported. If you would need support for the platform please submit an issue https://github.com/zkonduit/ezkl/issues/new/choose"
        exit 1
        ;;
esac

# Check arch
ARCHITECTURE="$(uname -m)"
if [ "${ARCHITECTURE}" = "x86_64" ]; then
    # Redirect stderr to /dev/null to avoid printing errors if non Rosetta.
    if [ "$(sysctl -n sysctl.proc_translated 2>/dev/null)" = "1" ]; then
        ARCHITECTURE="arm64" # Rosetta.
    else
        ARCHITECTURE="amd64" # Intel.
    fi
elif [ "${ARCHITECTURE}" = "arm64" ] || [ "${ARCHITECTURE}" = "aarch64" ]; then
    ARCHITECTURE="aarch64" # Arm.
elif [ "${ARCHITECTURE}" = "amd64" ]; then
    ARCHITECTURE="amd64" # Amd
else
    echo "Architecture is not supported. If you would need support for the architecture please submit an issue https://github.com/zkonduit/ezkl/issues/new/choose"
    exit 1
fi

# Remove existing ezkl
# BUGFIX: this previously tested/removed the literal filename "file", so the
# old binary was never actually deleted before unpacking the new one.
echo "Removing old ezkl binary if it exists"
[ -e "$EZKL_DIR/ezkl" ] && rm "$EZKL_DIR/ezkl"

# Download the release asset whose name matches $1 from $RELEASE_URL, unpack
# it into $EZKL_DIR, and remove the downloaded tarball afterwards.
download_and_unpack() {
    local tarball="$1"
    JSON_RESPONSE=$(curl -s "$RELEASE_URL")
    FILE_URL=$(echo "$JSON_RESPONSE" | grep -o 'https://github.com[^"]*' | grep "$tarball")
    echo "Downloading package"
    curl -L "$FILE_URL" -o "$EZKL_DIR/$tarball"
    echo "Unpacking package"
    tar -xzf "$EZKL_DIR/$tarball" -C "$EZKL_DIR"
    echo "Cleaning up"
    rm "$EZKL_DIR/$tarball"
}

# download the release and unpack the right tarball for this platform/arch
if [ "$PLATFORM" == "windows-msvc" ]; then
    download_and_unpack "build-artifacts.ezkl-windows-msvc.tar.gz"
elif [ "$PLATFORM" == "macos" ]; then
    if [ "$ARCHITECTURE" == "aarch64" ] || [ "$ARCHITECTURE" == "arm64" ]; then
        download_and_unpack "build-artifacts.ezkl-macos-aarch64.tar.gz"
    else
        download_and_unpack "build-artifacts.ezkl-macos.tar.gz"
    fi
elif [ "$PLATFORM" == "linux" ]; then
    if [ "${ARCHITECTURE}" = "amd64" ]; then
        download_and_unpack "build-artifacts.ezkl-linux-gnu.tar.gz"
    else
        echo "ARM architectures are not supported for Linux at the moment. If you would need support for the ARM architectures on linux please submit an issue https://github.com/zkonduit/ezkl/issues/new/choose"
        exit 1
    fi
else
    echo "Platform and Architecture is not supported. If you would need support for the platform and architecture please submit an issue https://github.com/zkonduit/ezkl/issues/new/choose"
    exit 1
fi

echo && echo "Successfully downloaded ezkl at ${EZKL_DIR}"
echo "We detected that your preferred shell is ${PREF_SHELL} and added ezkl to PATH. Run 'source ${PROFILE}' or start a new terminal session to use ezkl."

View File

@@ -219,7 +219,7 @@ mod tests {
};
let prover = halo2_proofs::dev::MockProver::run(K as u32, &circuit, vec![]).unwrap();
assert_eq!(prover.verify_par(), Ok(()))
assert_eq!(prover.verify(), Ok(()))
}
}
@@ -240,6 +240,6 @@ mod tests {
message: message.into(),
};
let prover = halo2_proofs::dev::MockProver::run(K as u32, &circuit, vec![]).unwrap();
assert_eq!(prover.verify_par(), Ok(()))
assert_eq!(prover.verify(), Ok(()))
}
}

View File

@@ -499,7 +499,7 @@ mod tests {
_spec: PhantomData,
};
let prover = halo2_proofs::dev::MockProver::run(k, &circuit, output).unwrap();
assert_eq!(prover.verify_par(), Ok(()))
assert_eq!(prover.verify(), Ok(()))
}
#[test]
@@ -518,7 +518,7 @@ mod tests {
_spec: PhantomData,
};
let prover = halo2_proofs::dev::MockProver::run(k, &circuit, output).unwrap();
assert_eq!(prover.verify_par(), Ok(()))
assert_eq!(prover.verify(), Ok(()))
}
#[test]
@@ -551,7 +551,7 @@ mod tests {
};
let prover = halo2_proofs::dev::MockProver::run(k, &circuit, output).unwrap();
assert_eq!(prover.verify_par(), Ok(()))
assert_eq!(prover.verify(), Ok(()))
}
}
@@ -573,6 +573,6 @@ mod tests {
_spec: PhantomData,
};
let prover = halo2_proofs::dev::MockProver::run(k, &circuit, output).unwrap();
assert_eq!(prover.verify_par(), Ok(()))
assert_eq!(prover.verify(), Ok(()))
}
}

View File

@@ -1837,15 +1837,6 @@ pub fn deconv<F: PrimeField + TensorType + PartialOrd + std::marker::Send + std:
)));
}
if has_bias {
let bias = &inputs[2];
if (bias.dims().len() != 1) || (bias.dims()[0] != kernel.dims()[0]) {
return Err(Box::new(TensorError::DimMismatch(
"deconv bias".to_string(),
)));
}
}
let (kernel_height, kernel_width) = (kernel.dims()[2], kernel.dims()[3]);
let null_val = ValType::Constant(F::ZERO);

View File

@@ -90,7 +90,7 @@ mod matmul {
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied_par();
prover.assert_satisfied();
}
}
@@ -165,7 +165,7 @@ mod matmul_col_overflow_double_col {
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied_par();
prover.assert_satisfied();
}
}
@@ -239,7 +239,7 @@ mod matmul_col_overflow {
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied_par();
prover.assert_satisfied();
}
}
@@ -327,7 +327,7 @@ mod matmul_col_ultra_overflow_double_col {
halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme<halo2curves::bn256::Bn256>,
F,
MatmulCircuit<F>,
>(&circuit, &params)
>(&circuit, &params, true)
.unwrap();
let prover = crate::pfsys::create_proof_circuit_kzg(
@@ -441,7 +441,7 @@ mod matmul_col_ultra_overflow {
halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme<halo2curves::bn256::Bn256>,
F,
MatmulCircuit<F>,
>(&circuit, &params)
>(&circuit, &params, true)
.unwrap();
let prover = crate::pfsys::create_proof_circuit_kzg(
@@ -543,7 +543,7 @@ mod dot {
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied_par();
prover.assert_satisfied();
}
}
@@ -620,7 +620,7 @@ mod dot_col_overflow_triple_col {
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied_par();
prover.assert_satisfied();
}
}
@@ -693,7 +693,7 @@ mod dot_col_overflow {
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied_par();
prover.assert_satisfied();
}
}
@@ -762,7 +762,7 @@ mod sum {
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied_par();
prover.assert_satisfied();
}
}
@@ -832,7 +832,7 @@ mod sum_col_overflow_double_col {
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied_par();
prover.assert_satisfied();
}
}
@@ -901,7 +901,7 @@ mod sum_col_overflow {
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied_par();
prover.assert_satisfied();
}
}
@@ -994,7 +994,7 @@ mod composition {
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied_par();
prover.assert_satisfied();
}
}
@@ -1095,7 +1095,7 @@ mod conv {
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied_par();
prover.assert_satisfied();
}
#[test]
@@ -1133,7 +1133,7 @@ mod conv {
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied_par();
prover.assert_satisfied();
}
}
@@ -1240,7 +1240,7 @@ mod conv_col_ultra_overflow {
halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme<halo2curves::bn256::Bn256>,
F,
ConvCircuit<F>,
>(&circuit, &params)
>(&circuit, &params, true)
.unwrap();
let prover = crate::pfsys::create_proof_circuit_kzg(
@@ -1390,7 +1390,7 @@ mod conv_relu_col_ultra_overflow {
halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme<halo2curves::bn256::Bn256>,
F,
ConvCircuit<F>,
>(&circuit, &params)
>(&circuit, &params, true)
.unwrap();
let prover = crate::pfsys::create_proof_circuit_kzg(
@@ -1484,7 +1484,7 @@ mod add_w_shape_casting {
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied_par();
prover.assert_satisfied();
}
}
@@ -1551,7 +1551,7 @@ mod add {
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied_par();
prover.assert_satisfied();
}
}
@@ -1618,7 +1618,7 @@ mod add_with_overflow {
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied_par();
prover.assert_satisfied();
}
}
@@ -1727,7 +1727,7 @@ mod add_with_overflow_and_poseidon {
let prover =
MockProver::run(K as u32, &circuit, vec![vec![commitment_a, commitment_b]]).unwrap();
prover.assert_satisfied_par();
prover.assert_satisfied();
}
#[test]
@@ -1822,7 +1822,7 @@ mod sub {
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied_par();
prover.assert_satisfied();
}
}
@@ -1889,7 +1889,7 @@ mod mult {
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied_par();
prover.assert_satisfied();
}
}
@@ -1954,7 +1954,7 @@ mod pow {
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied_par();
prover.assert_satisfied();
}
}
@@ -2023,7 +2023,7 @@ mod pack {
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied_par();
prover.assert_satisfied();
}
}
@@ -2116,7 +2116,7 @@ mod matmul_relu {
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied_par();
prover.assert_satisfied();
}
}
@@ -2222,7 +2222,7 @@ mod rangecheckpercent {
_marker: PhantomData,
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied_par();
prover.assert_satisfied();
}
{
let inp = Tensor::new(Some(&[Value::<F>::known(F::from(200_u64))]), &[1]).unwrap();
@@ -2233,7 +2233,7 @@ mod rangecheckpercent {
_marker: PhantomData,
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied_par();
prover.assert_satisfied();
}
// Unsuccessful case
@@ -2328,7 +2328,7 @@ mod relu {
};
let prover = MockProver::run(4_u32, &circuit, vec![]).unwrap();
prover.assert_satisfied_par();
prover.assert_satisfied();
}
}
@@ -2421,7 +2421,7 @@ mod lookup_ultra_overflow {
halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme<halo2curves::bn256::Bn256>,
F,
ReLUCircuit<F>,
>(&circuit, &params)
>(&circuit, &params, true)
.unwrap();
let prover = crate::pfsys::create_proof_circuit_kzg(
@@ -2557,6 +2557,6 @@ mod softmax {
_marker: PhantomData,
};
let prover = MockProver::run(K as u32, &circuit, vec![]).unwrap();
prover.assert_satisfied_par();
prover.assert_satisfied();
}
}

View File

@@ -73,6 +73,8 @@ pub const DEFAULT_FUZZ_RUNS: &str = "10";
pub const DEFAULT_CALIBRATION_FILE: &str = "calibration.json";
/// Default lookup safety margin
pub const DEFAULT_LOOKUP_SAFETY_MARGIN: &str = "2";
/// Default Compress selectors
pub const DEFAULT_COMPRESS_SELECTORS: &str = "false";
impl std::fmt::Display for TranscriptType {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
@@ -389,6 +391,9 @@ pub enum Commands {
/// whether the accumulated are segments of a larger proof
#[arg(long, default_value = DEFAULT_SPLIT)]
split_proofs: bool,
/// compress selectors
#[arg(long, default_value = DEFAULT_COMPRESS_SELECTORS)]
compress_selectors: bool,
},
/// Aggregates proofs :)
Aggregate {
@@ -451,6 +456,9 @@ pub enum Commands {
/// The graph witness (optional - used to override fixed values in the circuit)
#[arg(short = 'W', long)]
witness: Option<PathBuf>,
/// compress selectors
#[arg(long, default_value = DEFAULT_COMPRESS_SELECTORS)]
compress_selectors: bool,
},
#[cfg(not(target_arch = "wasm32"))]
@@ -473,6 +481,9 @@ pub enum Commands {
/// number of fuzz iterations
#[arg(long, default_value = DEFAULT_FUZZ_RUNS)]
num_runs: usize,
/// compress selectors
#[arg(long, default_value = DEFAULT_COMPRESS_SELECTORS)]
compress_selectors: bool,
},
#[cfg(not(target_arch = "wasm32"))]
/// Deploys a test contact that the data attester reads from and creates a data attestation formatted input.json file that contains call data information

View File

@@ -140,8 +140,14 @@ pub async fn run(command: Commands) -> Result<String, Box<dyn Error>> {
compiled_circuit,
transcript,
num_runs,
} => fuzz(compiled_circuit, witness, transcript, num_runs),
compress_selectors,
} => fuzz(
compiled_circuit,
witness,
transcript,
num_runs,
compress_selectors,
),
Commands::GenSrs { srs_path, logrows } => gen_srs_cmd(srs_path, logrows as u32),
#[cfg(not(target_arch = "wasm32"))]
Commands::GetSrs {
@@ -233,7 +239,15 @@ pub async fn run(command: Commands) -> Result<String, Box<dyn Error>> {
vk_path,
pk_path,
witness,
} => setup(compiled_circuit, srs_path, vk_path, pk_path, witness),
compress_selectors,
} => setup(
compiled_circuit,
srs_path,
vk_path,
pk_path,
witness,
compress_selectors,
),
#[cfg(not(target_arch = "wasm32"))]
Commands::SetupTestEVMData {
data,
@@ -296,6 +310,7 @@ pub async fn run(command: Commands) -> Result<String, Box<dyn Error>> {
srs_path,
logrows,
split_proofs,
compress_selectors,
} => setup_aggregate(
sample_snarks,
vk_path,
@@ -303,6 +318,7 @@ pub async fn run(command: Commands) -> Result<String, Box<dyn Error>> {
srs_path,
logrows,
split_proofs,
compress_selectors,
),
Commands::Aggregate {
proof_path,
@@ -823,8 +839,6 @@ pub(crate) fn calibrate(
"input scale: {}, param scale: {}, scale rebase multiplier: {}",
input_scale, param_scale, scale_rebase_multiplier
));
// vec of settings copied chunks.len() times
let run_args_iterable = vec![settings.run_args.clone(); chunks.len()];
#[cfg(unix)]
let _r = match Gag::stdout() {
@@ -836,41 +850,41 @@ pub(crate) fn calibrate(
Ok(r) => Some(r),
Err(_) => None,
};
let key = (input_scale, param_scale, scale_rebase_multiplier);
forward_pass_res.insert(key, vec![]);
let tasks = chunks
let local_run_args = RunArgs {
input_scale,
param_scale,
scale_rebase_multiplier,
..settings.run_args.clone()
};
let mut circuit = match GraphCircuit::from_run_args(&local_run_args, &model_path) {
Ok(c) => c,
Err(e) => {
// drop the gag
#[cfg(unix)]
std::mem::drop(_r);
#[cfg(unix)]
std::mem::drop(_q);
debug!("circuit creation from run args failed: {:?}", e);
continue;
}
};
chunks
.iter()
.zip(run_args_iterable)
.map(|(chunk, run_args)| {
// we need to create a new run args for each chunk
// time it
.map(|chunk| {
let chunk = chunk.clone();
let local_run_args = RunArgs {
input_scale,
param_scale,
scale_rebase_multiplier,
..run_args.clone()
};
let original_settings = settings.clone();
let mut circuit = match GraphCircuit::from_run_args(&local_run_args, &model_path) {
Ok(c) => c,
Err(_) => {
return Err(format!("failed to create circuit from run args"))
as Result<GraphSettings, String>
}
};
let data = circuit
.load_graph_from_file_exclusively(&chunk)
.map_err(|e| format!("failed to load circuit inputs: {}", e))?;
let forward_res = circuit
.calibrate(&data, max_logrows, lookup_safety_margin)
.map_err(|e| format!("failed to calibrate: {}", e))?;
.forward(&mut data.clone(), None, None)
.map_err(|e| format!("failed to forward: {}", e))?;
// push result to the hashmap
forward_pass_res
@@ -878,38 +892,32 @@ pub(crate) fn calibrate(
.ok_or("key not found")?
.push(forward_res);
let settings = circuit.settings().clone();
let found_run_args = RunArgs {
input_scale: settings.run_args.input_scale,
param_scale: settings.run_args.param_scale,
lookup_range: settings.run_args.lookup_range,
logrows: settings.run_args.logrows,
scale_rebase_multiplier: settings.run_args.scale_rebase_multiplier,
..run_args.clone()
};
let found_settings = GraphSettings {
run_args: found_run_args,
required_lookups: settings.required_lookups,
model_output_scales: settings.model_output_scales,
model_input_scales: settings.model_input_scales,
num_rows: settings.num_rows,
total_assignments: settings.total_assignments,
total_const_size: settings.total_const_size,
..original_settings.clone()
};
Ok(found_settings) as Result<GraphSettings, String>
Ok(()) as Result<(), String>
})
.collect::<Vec<Result<GraphSettings, String>>>();
.collect::<Result<Vec<()>, String>>()?;
let mut res: Vec<GraphSettings> = vec![];
for task in tasks {
if let Ok(task) = task {
res.push(task);
}
}
let min_lookup_range = forward_pass_res
.get(&key)
.unwrap()
.iter()
.map(|x| x.min_lookup_inputs)
.min()
.unwrap_or(0);
let max_lookup_range = forward_pass_res
.get(&key)
.unwrap()
.iter()
.map(|x| x.max_lookup_inputs)
.max()
.unwrap_or(0);
let res = circuit.calibrate_from_min_max(
min_lookup_range,
max_lookup_range,
max_logrows,
lookup_safety_margin,
);
// drop the gag
#[cfg(unix)]
@@ -917,31 +925,37 @@ pub(crate) fn calibrate(
#[cfg(unix)]
std::mem::drop(_q);
let max_lookup_range = res
.iter()
.map(|x| x.run_args.lookup_range.1)
.max()
.unwrap_or(0);
let min_lookup_range = res
.iter()
.map(|x| x.run_args.lookup_range.0)
.min()
.unwrap_or(0);
if res.is_ok() {
let new_settings = circuit.settings().clone();
let found_run_args = RunArgs {
input_scale: new_settings.run_args.input_scale,
param_scale: new_settings.run_args.param_scale,
lookup_range: new_settings.run_args.lookup_range,
logrows: new_settings.run_args.logrows,
scale_rebase_multiplier: new_settings.run_args.scale_rebase_multiplier,
..settings.run_args.clone()
};
let found_settings = GraphSettings {
run_args: found_run_args,
required_lookups: new_settings.required_lookups,
model_output_scales: new_settings.model_output_scales,
model_input_scales: new_settings.model_input_scales,
num_rows: new_settings.num_rows,
total_assignments: new_settings.total_assignments,
total_const_size: new_settings.total_const_size,
..settings.clone()
};
found_params.push(found_settings.clone());
if let Some(mut best) = res.into_iter().max_by_key(|p| {
(
p.run_args.logrows,
p.run_args.input_scale,
p.run_args.param_scale,
)
}) {
best.run_args.lookup_range = (min_lookup_range, max_lookup_range);
// pick the one with the largest logrows
found_params.push(best.clone());
debug!(
"found settings: \n {}",
best.as_json()?.to_colored_json_auto()?
found_settings.as_json()?.to_colored_json_auto()?
);
} else {
debug!("calibration failed {}", res.err().unwrap());
}
pb.inc(1);
@@ -1034,7 +1048,7 @@ pub(crate) fn calibrate(
let tear_sheet_table = Table::new(vec![accuracy_res]);
println!(
warn!(
"\n\n <------------- Numerical Fidelity Report (input_scale: {}, param_scale: {}, scale_input_multiplier: {}) ------------->\n\n{}\n\n",
best_params.run_args.input_scale,
best_params.run_args.param_scale,
@@ -1098,7 +1112,7 @@ pub(crate) fn mock(
)
.map_err(Box::<dyn Error>::from)?;
prover
.verify_par()
.verify()
.map_err(|e| Box::<dyn Error>::from(ExecutionError::VerifyError(e)))?;
Ok(String::new())
}
@@ -1392,6 +1406,7 @@ pub(crate) fn setup(
vk_path: PathBuf,
pk_path: PathBuf,
witness: Option<PathBuf>,
compress_selectors: bool,
) -> Result<String, Box<dyn Error>> {
// these aren't real values so the sanity checks are mostly meaningless
let mut circuit = GraphCircuit::load(compiled_circuit)?;
@@ -1402,8 +1417,12 @@ pub(crate) fn setup(
let params = load_params_cmd(srs_path, circuit.settings().run_args.logrows)?;
let pk = create_keys::<KZGCommitmentScheme<Bn256>, Fr, GraphCircuit>(&circuit, &params)
.map_err(Box::<dyn Error>::from)?;
let pk = create_keys::<KZGCommitmentScheme<Bn256>, Fr, GraphCircuit>(
&circuit,
&params,
compress_selectors,
)
.map_err(Box::<dyn Error>::from)?;
save_vk::<KZGCommitmentScheme<Bn256>>(&vk_path, pk.get_vk())?;
save_pk::<KZGCommitmentScheme<Bn256>>(&pk_path, &pk)?;
@@ -1542,6 +1561,7 @@ pub(crate) fn fuzz(
data_path: PathBuf,
transcript: TranscriptType,
num_runs: usize,
compress_selectors: bool,
) -> Result<String, Box<dyn Error>> {
check_solc_requirement();
let passed = AtomicBool::new(true);
@@ -1557,8 +1577,12 @@ pub(crate) fn fuzz(
let data = GraphWitness::from_path(data_path)?;
let pk = create_keys::<KZGCommitmentScheme<Bn256>, Fr, GraphCircuit>(&circuit, &params)
.map_err(Box::<dyn Error>::from)?;
let pk = create_keys::<KZGCommitmentScheme<Bn256>, Fr, GraphCircuit>(
&circuit,
&params,
compress_selectors,
)
.map_err(Box::<dyn Error>::from)?;
circuit.load_graph_witness(&data)?;
@@ -1574,9 +1598,12 @@ pub(crate) fn fuzz(
let fuzz_pk = || {
let new_params = gen_srs::<KZGCommitmentScheme<Bn256>>(logrows);
let bad_pk =
create_keys::<KZGCommitmentScheme<Bn256>, Fr, GraphCircuit>(&circuit, &new_params)
.map_err(|_| ())?;
let bad_pk = create_keys::<KZGCommitmentScheme<Bn256>, Fr, GraphCircuit>(
&circuit,
&new_params,
compress_selectors,
)
.map_err(|_| ())?;
let bad_proof = create_proof_circuit_kzg(
circuit.clone(),
@@ -1647,9 +1674,12 @@ pub(crate) fn fuzz(
let fuzz_vk = || {
let new_params = gen_srs::<KZGCommitmentScheme<Bn256>>(logrows);
let bad_pk =
create_keys::<KZGCommitmentScheme<Bn256>, Fr, GraphCircuit>(&circuit, &new_params)
.map_err(|_| ())?;
let bad_pk = create_keys::<KZGCommitmentScheme<Bn256>, Fr, GraphCircuit>(
&circuit,
&new_params,
compress_selectors,
)
.map_err(|_| ())?;
let bad_vk = bad_pk.get_vk();
@@ -1809,7 +1839,7 @@ pub(crate) fn mock_aggregate(
let prover = halo2_proofs::dev::MockProver::run(logrows, &circuit, vec![circuit.instances()])
.map_err(Box::<dyn Error>::from)?;
prover
.verify_par()
.verify()
.map_err(|e| Box::<dyn Error>::from(ExecutionError::VerifyError(e)))?;
#[cfg(not(target_arch = "wasm32"))]
pb.finish_with_message("Done.");
@@ -1823,6 +1853,7 @@ pub(crate) fn setup_aggregate(
srs_path: Option<PathBuf>,
logrows: u32,
split_proofs: bool,
compress_selectors: bool,
) -> Result<String, Box<dyn Error>> {
// the K used for the aggregation circuit
let params = load_params_cmd(srs_path, logrows)?;
@@ -1833,8 +1864,11 @@ pub(crate) fn setup_aggregate(
}
let agg_circuit = AggregationCircuit::new(&params.get_g()[0].into(), snarks, split_proofs)?;
let agg_pk =
create_keys::<KZGCommitmentScheme<Bn256>, Fr, AggregationCircuit>(&agg_circuit, &params)?;
let agg_pk = create_keys::<KZGCommitmentScheme<Bn256>, Fr, AggregationCircuit>(
&agg_circuit,
&params,
compress_selectors,
)?;
let agg_vk = agg_pk.get_vk();

View File

@@ -617,13 +617,13 @@ impl ToPyObject for DataSource {
}
#[cfg(feature = "python-bindings")]
use crate::pfsys::field_to_vecu64_montgomery;
use crate::pfsys::field_to_string_montgomery;
#[cfg(feature = "python-bindings")]
impl ToPyObject for FileSourceInner {
fn to_object(&self, py: Python) -> PyObject {
match self {
FileSourceInner::Field(data) => field_to_vecu64_montgomery(data).to_object(py),
FileSourceInner::Field(data) => field_to_string_montgomery(data).to_object(py),
FileSourceInner::Bool(data) => data.to_object(py),
FileSourceInner::Float(data) => data.to_object(py),
}

View File

@@ -53,7 +53,7 @@ pub use utilities::*;
pub use vars::*;
#[cfg(feature = "python-bindings")]
use crate::pfsys::field_to_vecu64_montgomery;
use crate::pfsys::field_to_string_montgomery;
/// The safety factor for the range of the lookup table.
pub const RANGE_MULTIPLIER: i128 = 2;
@@ -332,16 +332,16 @@ impl ToPyObject for GraphWitness {
let dict_params = PyDict::new(py);
let dict_outputs = PyDict::new(py);
let inputs: Vec<Vec<[u64; 4]>> = self
let inputs: Vec<Vec<String>> = self
.inputs
.iter()
.map(|x| x.iter().map(field_to_vecu64_montgomery).collect())
.map(|x| x.iter().map(field_to_string_montgomery).collect())
.collect();
let outputs: Vec<Vec<[u64; 4]>> = self
let outputs: Vec<Vec<String>> = self
.outputs
.iter()
.map(|x| x.iter().map(field_to_vecu64_montgomery).collect())
.map(|x| x.iter().map(field_to_string_montgomery).collect())
.collect();
dict.set_item("inputs", inputs).unwrap();
@@ -389,9 +389,9 @@ impl ToPyObject for GraphWitness {
#[cfg(feature = "python-bindings")]
fn insert_poseidon_hash_pydict(pydict: &PyDict, poseidon_hash: &Vec<Fp>) -> Result<(), PyErr> {
let poseidon_hash: Vec<[u64; 4]> = poseidon_hash
let poseidon_hash: Vec<String> = poseidon_hash
.iter()
.map(field_to_vecu64_montgomery)
.map(field_to_string_montgomery)
.collect();
pydict.set_item("poseidon_hash", poseidon_hash)?;
@@ -956,10 +956,14 @@ impl GraphCircuit {
(ASSUMED_BLINDING_FACTORS + RESERVED_BLINDING_ROWS_PAD) as f64
}
fn calc_safe_lookup_range(res: &GraphWitness, lookup_safety_margin: i128) -> (i128, i128) {
fn calc_safe_lookup_range(
min_lookup_inputs: i128,
max_lookup_inputs: i128,
lookup_safety_margin: i128,
) -> (i128, i128) {
let mut margin = (
lookup_safety_margin * res.min_lookup_inputs,
lookup_safety_margin * res.max_lookup_inputs,
lookup_safety_margin * min_lookup_inputs,
lookup_safety_margin * max_lookup_inputs,
);
if lookup_safety_margin == 1 {
margin.0 -= 1;
@@ -978,7 +982,8 @@ impl GraphCircuit {
fn calc_min_logrows(
&mut self,
res: &GraphWitness,
min_lookup_inputs: i128,
max_lookup_inputs: i128,
max_logrows: Option<u32>,
lookup_safety_margin: i128,
) -> Result<(), Box<dyn std::error::Error>> {
@@ -986,19 +991,23 @@ impl GraphCircuit {
let max_logrows = max_logrows.unwrap_or(MAX_PUBLIC_SRS);
let max_logrows = std::cmp::min(max_logrows, MAX_PUBLIC_SRS);
let mut max_logrows = std::cmp::max(max_logrows, MIN_LOGROWS);
let mut min_logrows = MIN_LOGROWS;
let reserved_blinding_rows = Self::reserved_blinding_rows();
// check if has overflowed max lookup input
if res.max_lookup_inputs > MAX_LOOKUP_ABS / lookup_safety_margin
|| res.min_lookup_inputs < -MAX_LOOKUP_ABS / lookup_safety_margin
if max_lookup_inputs > MAX_LOOKUP_ABS / lookup_safety_margin
|| min_lookup_inputs < -MAX_LOOKUP_ABS / lookup_safety_margin
{
let err_string = format!("max lookup input ({}) is too large", res.max_lookup_inputs);
error!("{}", err_string);
let err_string = format!("max lookup input ({}) is too large", max_lookup_inputs);
return Err(err_string.into());
}
let safe_range = Self::calc_safe_lookup_range(res, lookup_safety_margin);
let mut min_logrows = MIN_LOGROWS;
let safe_range = Self::calc_safe_lookup_range(
min_lookup_inputs,
max_lookup_inputs,
lookup_safety_margin,
);
// degrade the max logrows until the extended k is small enough
while min_logrows < max_logrows
&& !self.extended_k_is_small_enough(
@@ -1020,8 +1029,7 @@ impl GraphCircuit {
return Err(err_string.into());
}
// degrade the max logrows until the extended k is small enough
while max_logrows > min_logrows
while min_logrows < max_logrows
&& !self.extended_k_is_small_enough(
max_logrows,
Self::calc_num_cols(safe_range, max_logrows),
@@ -1030,6 +1038,17 @@ impl GraphCircuit {
max_logrows -= 1;
}
if !self
.extended_k_is_small_enough(max_logrows, Self::calc_num_cols(safe_range, max_logrows))
{
let err_string = format!(
"extended k is too large to accommodate the quotient polynomial with logrows {}",
max_logrows
);
error!("{}", err_string);
return Err(err_string.into());
}
let min_bits = ((safe_range.1 - safe_range.0) as f64 + reserved_blinding_rows + 1.)
.log2()
.ceil() as usize;
@@ -1111,22 +1130,31 @@ impl GraphCircuit {
// n = 2^k
let n = 1u64 << k;
let mut extended_k = k;
while (1 << extended_k) < (n * quotient_poly_degree) {
extended_k += 1;
if !(extended_k <= bn256::Fr::S) {
return false;
}
}
extended_k <= bn256::Fr::S
true
}
/// Calibrate the circuit to the supplied data.
pub fn calibrate(
pub fn calibrate_from_min_max(
&mut self,
input: &[Tensor<Fp>],
min_lookup_inputs: i128,
max_lookup_inputs: i128,
max_logrows: Option<u32>,
lookup_safety_margin: i128,
) -> Result<GraphWitness, Box<dyn std::error::Error>> {
let res = self.forward(&mut input.to_vec(), None, None)?;
self.calc_min_logrows(&res, max_logrows, lookup_safety_margin)?;
Ok(res)
) -> Result<(), Box<dyn std::error::Error>> {
self.calc_min_logrows(
min_lookup_inputs,
max_lookup_inputs,
max_logrows,
lookup_safety_margin,
)?;
Ok(())
}
/// Runs the forward pass of the model / graph of computations and any associated hashing.

View File

@@ -11,7 +11,7 @@ use crate::tensor::TensorType;
use clap::ValueEnum;
use halo2_proofs::circuit::Value;
use halo2_proofs::plonk::{
create_proof, keygen_pk, keygen_vk, verify_proof, Circuit, ProvingKey, VerifyingKey,
create_proof, keygen_pk, keygen_vk_custom, verify_proof, Circuit, ProvingKey, VerifyingKey,
};
use halo2_proofs::poly::commitment::{CommitmentScheme, Params, ParamsProver, Prover, Verifier};
use halo2_proofs::poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG};
@@ -167,8 +167,8 @@ impl ToPyObject for TranscriptType {
#[cfg(feature = "python-bindings")]
///
pub fn g1affine_to_pydict(g1affine_dict: &PyDict, g1affine: &G1Affine) {
let g1affine_x = field_to_vecu64_montgomery(&g1affine.x);
let g1affine_y = field_to_vecu64_montgomery(&g1affine.y);
let g1affine_x = field_to_string_montgomery(&g1affine.x);
let g1affine_y = field_to_string_montgomery(&g1affine.y);
g1affine_dict.set_item("x", g1affine_x).unwrap();
g1affine_dict.set_item("y", g1affine_y).unwrap();
}
@@ -178,24 +178,24 @@ use halo2curves::bn256::G1;
#[cfg(feature = "python-bindings")]
///
pub fn g1_to_pydict(g1_dict: &PyDict, g1: &G1) {
let g1_x = field_to_vecu64_montgomery(&g1.x);
let g1_y = field_to_vecu64_montgomery(&g1.y);
let g1_z = field_to_vecu64_montgomery(&g1.z);
let g1_x = field_to_string_montgomery(&g1.x);
let g1_y = field_to_string_montgomery(&g1.y);
let g1_z = field_to_string_montgomery(&g1.z);
g1_dict.set_item("x", g1_x).unwrap();
g1_dict.set_item("y", g1_y).unwrap();
g1_dict.set_item("z", g1_z).unwrap();
}
/// converts fp into `Vec<u64>` in Montgomery form
pub fn field_to_vecu64_montgomery<F: PrimeField + SerdeObject + Serialize>(fp: &F) -> [u64; 4] {
pub fn field_to_string_montgomery<F: PrimeField + SerdeObject + Serialize>(fp: &F) -> String {
let repr = serde_json::to_string(&fp).unwrap();
let b: [u64; 4] = serde_json::from_str(&repr).unwrap();
let b: String = serde_json::from_str(&repr).unwrap();
b
}
/// converts `Vec<u64>` in Montgomery form into fp
pub fn vecu64_to_field_montgomery<F: PrimeField + SerdeObject + Serialize + DeserializeOwned>(
b: &[u64; 4],
pub fn string_to_field_montgomery<F: PrimeField + SerdeObject + Serialize + DeserializeOwned>(
b: &String,
) -> F {
let repr = serde_json::to_string(&b).unwrap();
let fp: F = serde_json::from_str(&repr).unwrap();
@@ -256,10 +256,10 @@ where
{
fn to_object(&self, py: Python) -> PyObject {
let dict = PyDict::new(py);
let field_elems: Vec<Vec<[u64; 4]>> = self
let field_elems: Vec<Vec<String>> = self
.instances
.iter()
.map(|x| x.iter().map(|fp| field_to_vecu64_montgomery(fp)).collect())
.map(|x| x.iter().map(|fp| field_to_string_montgomery(fp)).collect())
.collect::<Vec<_>>();
dict.set_item("instances", field_elems).unwrap();
let hex_proof = hex::encode(&self.proof);
@@ -306,6 +306,12 @@ where
}
}
/// create hex proof from proof
pub fn create_hex_proof(&mut self) {
let hex_proof = hex::encode(&self.proof);
self.hex_proof = Some(format!("0x{}", hex_proof));
}
/// Saves the Proof to a specified `proof_path`.
pub fn save(&self, proof_path: &PathBuf) -> Result<(), Box<dyn Error>> {
let file = std::fs::File::create(proof_path)?;
@@ -427,6 +433,7 @@ where
pub fn create_keys<Scheme: CommitmentScheme, F: PrimeField + TensorType, C: Circuit<F>>(
circuit: &C,
params: &'_ Scheme::ParamsProver,
compress_selectors: bool,
) -> Result<ProvingKey<Scheme::Curve>, halo2_proofs::plonk::Error>
where
C: Circuit<Scheme::Scalar>,
@@ -438,7 +445,7 @@ where
// Initialize verifying key
let now = Instant::now();
trace!("preparing VK");
let vk = keygen_vk(params, &empty_circuit)?;
let vk = keygen_vk_custom(params, &empty_circuit, compress_selectors)?;
let elapsed = now.elapsed();
info!("VK took {}.{}", elapsed.as_secs(), elapsed.subsec_millis());
@@ -594,6 +601,7 @@ where
let mut snark_new = snark.clone();
// swap the proof bytes for the new ones
snark_new.proof[..proof_first_bytes.len()].copy_from_slice(&proof_first_bytes);
snark_new.create_hex_proof();
Ok(snark_new)
}

View File

@@ -30,7 +30,7 @@ use std::str::FromStr;
use std::{fs::File, path::PathBuf};
use tokio::runtime::Runtime;
type PyFelt = [u64; 4];
type PyFelt = String;
#[pyclass]
#[derive(Debug, Clone)]
@@ -65,9 +65,9 @@ struct PyG1 {
impl From<G1> for PyG1 {
fn from(g1: G1) -> Self {
PyG1 {
x: crate::pfsys::field_to_vecu64_montgomery::<Fq>(&g1.x),
y: crate::pfsys::field_to_vecu64_montgomery::<Fq>(&g1.y),
z: crate::pfsys::field_to_vecu64_montgomery::<Fq>(&g1.z),
x: crate::pfsys::field_to_string_montgomery::<Fq>(&g1.x),
y: crate::pfsys::field_to_string_montgomery::<Fq>(&g1.y),
z: crate::pfsys::field_to_string_montgomery::<Fq>(&g1.z),
}
}
}
@@ -75,9 +75,9 @@ impl From<G1> for PyG1 {
impl From<PyG1> for G1 {
fn from(val: PyG1) -> Self {
G1 {
x: crate::pfsys::vecu64_to_field_montgomery::<Fq>(&val.x),
y: crate::pfsys::vecu64_to_field_montgomery::<Fq>(&val.y),
z: crate::pfsys::vecu64_to_field_montgomery::<Fq>(&val.z),
x: crate::pfsys::string_to_field_montgomery::<Fq>(&val.x),
y: crate::pfsys::string_to_field_montgomery::<Fq>(&val.y),
z: crate::pfsys::string_to_field_montgomery::<Fq>(&val.z),
}
}
}
@@ -108,8 +108,8 @@ pub struct PyG1Affine {
impl From<G1Affine> for PyG1Affine {
fn from(g1: G1Affine) -> Self {
PyG1Affine {
x: crate::pfsys::field_to_vecu64_montgomery::<Fq>(&g1.x),
y: crate::pfsys::field_to_vecu64_montgomery::<Fq>(&g1.y),
x: crate::pfsys::field_to_string_montgomery::<Fq>(&g1.x),
y: crate::pfsys::field_to_string_montgomery::<Fq>(&g1.y),
}
}
}
@@ -117,8 +117,8 @@ impl From<G1Affine> for PyG1Affine {
impl From<PyG1Affine> for G1Affine {
fn from(val: PyG1Affine) -> Self {
G1Affine {
x: crate::pfsys::vecu64_to_field_montgomery::<Fq>(&val.x),
y: crate::pfsys::vecu64_to_field_montgomery::<Fq>(&val.y),
x: crate::pfsys::string_to_field_montgomery::<Fq>(&val.x),
y: crate::pfsys::string_to_field_montgomery::<Fq>(&val.y),
}
}
}
@@ -211,10 +211,10 @@ impl Into<PyRunArgs> for RunArgs {
#[pyfunction(signature = (
array,
))]
fn vecu64_to_felt(array: PyFelt) -> PyResult<String> {
fn string_to_felt(array: PyFelt) -> PyResult<String> {
Ok(format!(
"{:?}",
crate::pfsys::vecu64_to_field_montgomery::<Fr>(&array)
crate::pfsys::string_to_field_montgomery::<Fr>(&array)
))
}
@@ -222,8 +222,8 @@ fn vecu64_to_felt(array: PyFelt) -> PyResult<String> {
#[pyfunction(signature = (
array,
))]
fn vecu64_to_int(array: PyFelt) -> PyResult<i128> {
let felt = crate::pfsys::vecu64_to_field_montgomery::<Fr>(&array);
fn string_to_int(array: PyFelt) -> PyResult<i128> {
let felt = crate::pfsys::string_to_field_montgomery::<Fr>(&array);
let int_rep = felt_to_i128(felt);
Ok(int_rep)
}
@@ -233,8 +233,8 @@ fn vecu64_to_int(array: PyFelt) -> PyResult<i128> {
array,
scale
))]
fn vecu64_to_float(array: PyFelt, scale: crate::Scale) -> PyResult<f64> {
let felt = crate::pfsys::vecu64_to_field_montgomery::<Fr>(&array);
fn string_to_float(array: PyFelt, scale: crate::Scale) -> PyResult<f64> {
let felt = crate::pfsys::string_to_field_montgomery::<Fr>(&array);
let int_rep = felt_to_i128(felt);
let multiplier = scale_to_multiplier(scale);
let float_rep = int_rep as f64 / multiplier;
@@ -246,11 +246,11 @@ fn vecu64_to_float(array: PyFelt, scale: crate::Scale) -> PyResult<f64> {
input,
scale
))]
fn float_to_vecu64(input: f64, scale: crate::Scale) -> PyResult<PyFelt> {
fn float_to_string(input: f64, scale: crate::Scale) -> PyResult<PyFelt> {
let int_rep = quantize_float(&input, 0.0, scale)
.map_err(|_| PyIOError::new_err("Failed to quantize input"))?;
let felt = i128_to_felt(int_rep);
Ok(crate::pfsys::field_to_vecu64_montgomery::<Fr>(&felt))
Ok(crate::pfsys::field_to_string_montgomery::<Fr>(&felt))
}
/// Converts a buffer to vector of 4 u64s representing a fixed point field element
@@ -318,7 +318,7 @@ fn buffer_to_felts(buffer: Vec<u8>) -> PyResult<Vec<String>> {
fn poseidon_hash(message: Vec<PyFelt>) -> PyResult<Vec<PyFelt>> {
let message: Vec<Fr> = message
.iter()
.map(crate::pfsys::vecu64_to_field_montgomery::<Fr>)
.map(crate::pfsys::string_to_field_montgomery::<Fr>)
.collect::<Vec<_>>();
let output =
@@ -329,7 +329,7 @@ fn poseidon_hash(message: Vec<PyFelt>) -> PyResult<Vec<PyFelt>> {
let hash = output[0]
.iter()
.map(crate::pfsys::field_to_vecu64_montgomery::<Fr>)
.map(crate::pfsys::field_to_string_montgomery::<Fr>)
.collect::<Vec<_>>();
Ok(hash)
}
@@ -337,8 +337,8 @@ fn poseidon_hash(message: Vec<PyFelt>) -> PyResult<Vec<PyFelt>> {
/// Generate a kzg commitment.
#[pyfunction(signature = (
message,
vk_path,
settings_path,
vk_path=PathBuf::from(DEFAULT_VK),
settings_path=PathBuf::from(DEFAULT_SETTINGS),
srs_path=None
))]
fn kzg_commit(
@@ -349,7 +349,7 @@ fn kzg_commit(
) -> PyResult<Vec<PyG1Affine>> {
let message: Vec<Fr> = message
.iter()
.map(crate::pfsys::vecu64_to_field_montgomery::<Fr>)
.map(crate::pfsys::string_to_field_montgomery::<Fr>)
.collect::<Vec<_>>();
let settings = GraphSettings::load(&settings_path)
@@ -387,9 +387,9 @@ fn swap_proof_commitments(proof_path: PathBuf, witness_path: PathBuf) -> PyResul
/// Generates a vk from a pk for a model circuit and saves it to a file
#[pyfunction(signature = (
path_to_pk,
circuit_settings_path,
vk_output_path
path_to_pk=PathBuf::from(DEFAULT_PK),
circuit_settings_path=PathBuf::from(DEFAULT_SETTINGS),
vk_output_path=PathBuf::from(DEFAULT_VK),
))]
fn gen_vk_from_pk_single(
path_to_pk: PathBuf,
@@ -413,8 +413,8 @@ fn gen_vk_from_pk_single(
/// Generates a vk from a pk for an aggregate circuit and saves it to a file
#[pyfunction(signature = (
path_to_pk,
vk_output_path
path_to_pk=PathBuf::from(DEFAULT_PK_AGGREGATED),
vk_output_path=PathBuf::from(DEFAULT_VK_AGGREGATED),
))]
fn gen_vk_from_pk_aggr(path_to_pk: PathBuf, vk_output_path: PathBuf) -> PyResult<bool> {
let pk = load_pk::<KZGCommitmentScheme<Bn256>, Fr, AggregationCircuit>(path_to_pk, ())
@@ -543,7 +543,7 @@ fn calibrate_settings(
#[pyfunction(signature = (
data=PathBuf::from(DEFAULT_DATA),
model=PathBuf::from(DEFAULT_MODEL),
output=None,
output=PathBuf::from(DEFAULT_WITNESS),
vk_path=None,
srs_path=None,
))]
@@ -604,7 +604,8 @@ fn mock_aggregate(
vk_path=PathBuf::from(DEFAULT_VK),
pk_path=PathBuf::from(DEFAULT_PK),
srs_path=None,
witness_path = None
witness_path = None,
compress_selectors=DEFAULT_COMPRESS_SELECTORS.parse().unwrap(),
))]
fn setup(
model: PathBuf,
@@ -612,8 +613,17 @@ fn setup(
pk_path: PathBuf,
srs_path: Option<PathBuf>,
witness_path: Option<PathBuf>,
compress_selectors: bool,
) -> Result<bool, PyErr> {
crate::execute::setup(model, srs_path, vk_path, pk_path, witness_path).map_err(|e| {
crate::execute::setup(
model,
srs_path,
vk_path,
pk_path,
witness_path,
compress_selectors,
)
.map_err(|e| {
let err_str = format!("Failed to run setup: {}", e);
PyRuntimeError::new_err(err_str)
})?;
@@ -682,7 +692,8 @@ fn verify(
pk_path=PathBuf::from(DEFAULT_PK_AGGREGATED),
logrows=DEFAULT_AGGREGATED_LOGROWS.parse().unwrap(),
split_proofs = false,
srs_path = None
srs_path = None,
compress_selectors=DEFAULT_COMPRESS_SELECTORS.parse().unwrap(),
))]
fn setup_aggregate(
sample_snarks: Vec<PathBuf>,
@@ -691,6 +702,7 @@ fn setup_aggregate(
logrows: u32,
split_proofs: bool,
srs_path: Option<PathBuf>,
compress_selectors: bool,
) -> Result<bool, PyErr> {
crate::execute::setup_aggregate(
sample_snarks,
@@ -699,6 +711,7 @@ fn setup_aggregate(
srs_path,
logrows,
split_proofs,
compress_selectors,
)
.map_err(|e| {
let err_str = format!("Failed to setup aggregate: {}", e);
@@ -1022,19 +1035,18 @@ fn print_proof_hex(proof_path: PathBuf) -> Result<String, PyErr> {
// Python Module
#[pymodule]
fn ezkl(_py: Python<'_>, m: &PyModule) -> PyResult<()> {
// NOTE: DeployVerifierEVM and SendProofEVM will be implemented in python in pyezkl
pyo3_log::init();
m.add_class::<PyRunArgs>()?;
m.add_class::<PyG1Affine>()?;
m.add_class::<PyG1>()?;
m.add_class::<PyTestDataSource>()?;
m.add_function(wrap_pyfunction!(vecu64_to_felt, m)?)?;
m.add_function(wrap_pyfunction!(vecu64_to_int, m)?)?;
m.add_function(wrap_pyfunction!(vecu64_to_float, m)?)?;
m.add_function(wrap_pyfunction!(string_to_felt, m)?)?;
m.add_function(wrap_pyfunction!(string_to_int, m)?)?;
m.add_function(wrap_pyfunction!(string_to_float, m)?)?;
m.add_function(wrap_pyfunction!(kzg_commit, m)?)?;
m.add_function(wrap_pyfunction!(swap_proof_commitments, m)?)?;
m.add_function(wrap_pyfunction!(poseidon_hash, m)?)?;
m.add_function(wrap_pyfunction!(float_to_vecu64, m)?)?;
m.add_function(wrap_pyfunction!(float_to_string, m)?)?;
m.add_function(wrap_pyfunction!(buffer_to_felts, m)?)?;
m.add_function(wrap_pyfunction!(gen_vk_from_pk_aggr, m)?)?;
m.add_function(wrap_pyfunction!(gen_vk_from_pk_single, m)?)?;

View File

@@ -72,7 +72,7 @@ pub fn encodeVerifierCalldata(
/// Converts 4 u64s to a field element
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn vecU64ToFelt(array: wasm_bindgen::Clamped<Vec<u8>>) -> Result<String, JsError> {
pub fn stringToFelt(array: wasm_bindgen::Clamped<Vec<u8>>) -> Result<String, JsError> {
let felt: Fr = serde_json::from_slice(&array[..])
.map_err(|e| JsError::new(&format!("Failed to deserialize field element: {}", e)))?;
Ok(format!("{:?}", felt))
@@ -81,7 +81,7 @@ pub fn vecU64ToFelt(array: wasm_bindgen::Clamped<Vec<u8>>) -> Result<String, JsE
/// Converts 4 u64s representing a field element directly to an integer
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn vecU64ToInt(
pub fn stringToInt(
array: wasm_bindgen::Clamped<Vec<u8>>,
) -> Result<wasm_bindgen::Clamped<Vec<u8>>, JsError> {
let felt: Fr = serde_json::from_slice(&array[..])
@@ -95,7 +95,7 @@ pub fn vecU64ToInt(
/// Converts 4 u64s representing a field element directly to a (rescaled from fixed point scaling) floating point
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn vecU64ToFloat(
pub fn stringToFloat(
array: wasm_bindgen::Clamped<Vec<u8>>,
scale: crate::Scale,
) -> Result<f64, JsError> {
@@ -109,23 +109,23 @@ pub fn vecU64ToFloat(
/// Converts a floating point element to 4 u64s representing a fixed point field element
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn floatToVecU64(
pub fn floatTostring(
input: f64,
scale: crate::Scale,
) -> Result<wasm_bindgen::Clamped<Vec<u8>>, JsError> {
let int_rep =
quantize_float(&input, 0.0, scale).map_err(|e| JsError::new(&format!("{}", e)))?;
let felt = i128_to_felt(int_rep);
let vec = crate::pfsys::field_to_vecu64_montgomery::<halo2curves::bn256::Fr>(&felt);
let vec = crate::pfsys::field_to_string_montgomery::<halo2curves::bn256::Fr>(&felt);
Ok(wasm_bindgen::Clamped(serde_json::to_vec(&vec).map_err(
|e| JsError::new(&format!("Failed to serialize vecu64_montgomery{}", e)),
|e| JsError::new(&format!("Failed to serialize string_montgomery{}", e)),
)?))
}
/// Converts a buffer to vector of 4 u64s representing a fixed point field element
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn bufferToVecOfVecU64(
pub fn bufferToVecOfstring(
buffer: wasm_bindgen::Clamped<Vec<u8>>,
) -> Result<wasm_bindgen::Clamped<Vec<u8>>, JsError> {
// Convert the buffer to a slice
@@ -224,6 +224,7 @@ pub fn genWitness(
pub fn genVk(
compiled_circuit: wasm_bindgen::Clamped<Vec<u8>>,
params_ser: wasm_bindgen::Clamped<Vec<u8>>,
compress_selectors: bool,
) -> Result<Vec<u8>, JsError> {
// Read in kzg params
let mut reader = std::io::BufReader::new(&params_ser[..]);
@@ -235,9 +236,13 @@ pub fn genVk(
.map_err(|e| JsError::new(&format!("Failed to deserialize compiled model: {}", e)))?;
// Create verifying key
let vk = create_vk_wasm::<KZGCommitmentScheme<Bn256>, Fr, GraphCircuit>(&circuit, &params)
.map_err(Box::<dyn std::error::Error>::from)
.map_err(|e| JsError::new(&format!("Failed to create verifying key: {}", e)))?;
let vk = create_vk_wasm::<KZGCommitmentScheme<Bn256>, Fr, GraphCircuit>(
&circuit,
&params,
compress_selectors,
)
.map_err(Box::<dyn std::error::Error>::from)
.map_err(|e| JsError::new(&format!("Failed to create verifying key: {}", e)))?;
let mut serialized_vk = Vec::new();
vk.write(&mut serialized_vk, halo2_proofs::SerdeFormat::RawBytes)
@@ -497,6 +502,7 @@ pub fn srsValidation(srs: wasm_bindgen::Clamped<Vec<u8>>) -> Result<bool, JsErro
pub fn create_vk_wasm<Scheme: CommitmentScheme, F: PrimeField + TensorType, C: Circuit<F>>(
circuit: &C,
params: &'_ Scheme::ParamsProver,
compress_selectors: bool,
) -> Result<VerifyingKey<Scheme::Curve>, halo2_proofs::plonk::Error>
where
C: Circuit<Scheme::Scalar>,
@@ -506,7 +512,7 @@ where
let empty_circuit = <C as Circuit<F>>::without_witnesses(circuit);
// Initialize the verifying key
let vk = keygen_vk(params, &empty_circuit)?;
let vk = keygen_vk_custom(params, &empty_circuit, compress_selectors)?;
Ok(vk)
}
/// Creates a [ProvingKey] from a [VerifyingKey] for a [GraphCircuit] (`circuit`) with specific [CommitmentScheme] parameters (`params`) for the WASM target

View File

@@ -170,9 +170,9 @@ mod native_tests {
}
}
const PF_FAILURE: &str = "examples/test_failure.proof";
const PF_FAILURE: &str = "examples/test_failure_proof.json";
const PF_FAILURE_AGGR: &str = "examples/test_failure_aggr.proof";
const PF_FAILURE_AGGR: &str = "examples/test_failure_aggr_proof.json";
const LARGE_TESTS: [&str; 5] = [
"self_attention",
@@ -360,7 +360,7 @@ mod native_tests {
#[cfg(feature = "icicle")]
const TESTS_AGGR: [&str; 3] = ["1l_mlp", "1l_flatten", "1l_average"];
const TESTS_EVM: [&str; 21] = [
const TESTS_EVM: [&str; 23] = [
"1l_mlp",
"1l_flatten",
"1l_average",
@@ -376,12 +376,14 @@ mod native_tests {
"1l_tanh",
"2l_relu_sigmoid_small",
"2l_relu_small",
"2l_relu_fc",
"min",
"max",
"1l_max_pool",
"idolmodel",
"1l_identity",
"lstm",
"rnn",
"quantize_dequantize",
];
const TESTS_EVM_AGGR: [&str; 18] = [
@@ -516,7 +518,7 @@ mod native_tests {
crate::native_tests::setup_py_env();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
accuracy_measurement(path, test.to_string(), "private", "private", "public", 1, "accuracy", 1.2);
accuracy_measurement(path, test.to_string(), "private", "private", "public", 1, "accuracy", 2.6);
test_dir.close().unwrap();
}
@@ -526,7 +528,7 @@ mod native_tests {
crate::native_tests::setup_py_env();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
accuracy_measurement(path, test.to_string(), "private", "fixed", "private", 1, "accuracy", 1.2);
accuracy_measurement(path, test.to_string(), "private", "fixed", "private", 1, "accuracy", 2.6);
test_dir.close().unwrap();
}
@@ -536,7 +538,7 @@ mod native_tests {
crate::native_tests::setup_py_env();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
accuracy_measurement(path, test.to_string(), "public", "private", "private", 1, "accuracy", 1.2);
accuracy_measurement(path, test.to_string(), "public", "private", "private", 1, "accuracy", 2.6);
test_dir.close().unwrap();
}
@@ -955,7 +957,7 @@ mod native_tests {
});
seq!(N in 0..= 17 {
seq!(N in 0..=17 {
// these take a particularly long time to run
#(#[test_case(TESTS_EVM_AGGR[N])])*
#[ignore]
@@ -971,7 +973,7 @@ mod native_tests {
});
seq!(N in 0..= 20 {
seq!(N in 0..=22 {
#(#[test_case(TESTS_EVM[N])])*
fn kzg_evm_prove_and_verify_(test: &str) {
@@ -979,9 +981,9 @@ mod native_tests {
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
let _anvil_child = crate::native_tests::start_anvil(false, Hardfork::Latest);
kzg_evm_prove_and_verify(path, test.to_string(), "private", "private", "public");
#[cfg(not(feature = "icicle"))]
run_js_tests(path, test.to_string(), "testBrowserEvmVerify");
kzg_evm_prove_and_verify(2, path, test.to_string(), "private", "private", "public");
// #[cfg(not(feature = "icicle"))]
// run_js_tests(path, test.to_string(), "testBrowserEvmVerify");
test_dir.close().unwrap();
}
@@ -993,9 +995,9 @@ mod native_tests {
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
let mut _anvil_child = crate::native_tests::start_anvil(false, Hardfork::Latest);
kzg_evm_prove_and_verify(path, test.to_string(), "hashed", "private", "private");
#[cfg(not(feature = "icicle"))]
run_js_tests(path, test.to_string(), "testBrowserEvmVerify");
kzg_evm_prove_and_verify(2, path, test.to_string(), "hashed", "private", "private");
// #[cfg(not(feature = "icicle"))]
// run_js_tests(path, test.to_string(), "testBrowserEvmVerify");
test_dir.close().unwrap();
}
@@ -1010,9 +1012,9 @@ mod native_tests {
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
let mut _anvil_child = crate::native_tests::start_anvil(false, hardfork);
kzg_evm_prove_and_verify(path, test.to_string(), "kzgcommit", "private", "public");
#[cfg(not(feature = "icicle"))]
run_js_tests(path, test.to_string(), "testBrowserEvmVerify");
kzg_evm_prove_and_verify(2, path, test.to_string(), "kzgcommit", "private", "public");
// #[cfg(not(feature = "icicle"))]
// run_js_tests(path, test.to_string(), "testBrowserEvmVerify");
test_dir.close().unwrap();
}
@@ -1023,9 +1025,9 @@ mod native_tests {
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
let _anvil_child = crate::native_tests::start_anvil(false, Hardfork::Latest);
kzg_evm_prove_and_verify(path, test.to_string(), "private", "hashed", "public");
#[cfg(not(feature = "icicle"))]
run_js_tests(path, test.to_string(), "testBrowserEvmVerify");
kzg_evm_prove_and_verify(2, path, test.to_string(), "private", "hashed", "public");
// #[cfg(not(feature = "icicle"))]
// run_js_tests(path, test.to_string(), "testBrowserEvmVerify");
test_dir.close().unwrap();
}
@@ -1036,9 +1038,9 @@ mod native_tests {
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
let _anvil_child = crate::native_tests::start_anvil(false, Hardfork::Latest);
kzg_evm_prove_and_verify(path, test.to_string(), "private", "private", "hashed");
#[cfg(not(feature = "icicle"))]
run_js_tests(path, test.to_string(), "testBrowserEvmVerify");
kzg_evm_prove_and_verify(2, path, test.to_string(), "private", "private", "hashed");
// #[cfg(not(feature = "icicle"))]
// run_js_tests(path, test.to_string(), "testBrowserEvmVerify");
test_dir.close().unwrap();
}
@@ -1049,9 +1051,9 @@ mod native_tests {
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
let _anvil_child = crate::native_tests::start_anvil(false, Hardfork::Latest);
kzg_evm_prove_and_verify(path, test.to_string(), "private", "kzgcommit", "public");
#[cfg(not(feature = "icicle"))]
run_js_tests(path, test.to_string(), "testBrowserEvmVerify");
kzg_evm_prove_and_verify(2, path, test.to_string(), "private", "kzgcommit", "public");
// #[cfg(not(feature = "icicle"))]
// run_js_tests(path, test.to_string(), "testBrowserEvmVerify");
test_dir.close().unwrap();
}
@@ -1062,9 +1064,9 @@ mod native_tests {
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
let _anvil_child = crate::native_tests::start_anvil(false, Hardfork::Latest);
kzg_evm_prove_and_verify(path, test.to_string(), "private", "private", "kzgcommit");
#[cfg(not(feature = "icicle"))]
run_js_tests(path, test.to_string(), "testBrowserEvmVerify");
kzg_evm_prove_and_verify(2, path, test.to_string(), "private", "private", "kzgcommit");
// #[cfg(not(feature = "icicle"))]
// run_js_tests(path, test.to_string(), "testBrowserEvmVerify");
test_dir.close().unwrap();
}
@@ -1074,9 +1076,9 @@ mod native_tests {
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
let _anvil_child = crate::native_tests::start_anvil(false, Hardfork::Latest);
kzg_evm_prove_and_verify(path, test.to_string(), "kzgcommit", "kzgcommit", "kzgcommit");
#[cfg(not(feature = "icicle"))]
run_js_tests(path, test.to_string(), "testBrowserEvmVerify");
kzg_evm_prove_and_verify(2, path, test.to_string(), "kzgcommit", "kzgcommit", "kzgcommit");
// #[cfg(not(feature = "icicle"))]
// run_js_tests(path, test.to_string(), "testBrowserEvmVerify");
test_dir.close().unwrap();
}
@@ -1740,6 +1742,7 @@ mod native_tests {
// prove-serialize-verify, the usual full path
fn kzg_evm_prove_and_verify(
num_inner_columns: usize,
test_dir: &str,
example_name: String,
input_visibility: &str,
@@ -1755,7 +1758,7 @@ mod native_tests {
input_visibility,
param_visibility,
output_visibility,
2,
num_inner_columns,
None,
false,
"single",
@@ -1901,6 +1904,20 @@ mod native_tests {
.map(|h| vec![FileSourceInner::Field(*h)])
.collect(),
));
} else {
input.output_data = Some(DataSource::File(
witness
.pretty_elements
.unwrap()
.rescaled_outputs
.iter()
.map(|o| {
o.iter()
.map(|f| FileSourceInner::Float(f.parse().unwrap()))
.collect()
})
.collect(),
));
}
input.save(data_path.clone().into()).unwrap();

View File

@@ -12,7 +12,7 @@ def get_ezkl_output(witness_file, settings_file):
outputs = witness_output['outputs']
with open(settings_file) as f:
settings = json.load(f)
ezkl_outputs = [[ezkl.vecu64_to_float(
ezkl_outputs = [[ezkl.string_to_float(
outputs[i][j], settings['model_output_scales'][i]) for j in range(len(outputs[i]))] for i in range(len(outputs))]
return ezkl_outputs

View File

@@ -118,38 +118,38 @@ mod py_tests {
}
const TESTS: [&str; 32] = [
"proof_splitting.ipynb",
"mnist_gan_proof_splitting.ipynb",
"proof_splitting.ipynb", // 0
"variance.ipynb",
"mnist_gan.ipynb",
// "mnist_vae.ipynb",
"keras_simple_demo.ipynb",
"hashed_vis.ipynb",
"mnist_gan_proof_splitting.ipynb", // 4
"hashed_vis.ipynb", // 5
"simple_demo_all_public.ipynb",
"data_attest.ipynb",
"variance.ipynb",
"little_transformer.ipynb",
"simple_demo_aggregated_proofs.ipynb",
"ezkl_demo.ipynb",
"ezkl_demo.ipynb", // 10
"lstm.ipynb",
"set_membership.ipynb",
"decision_tree.ipynb",
"random_forest.ipynb",
"gradient_boosted_trees.ipynb",
"gradient_boosted_trees.ipynb", // 15
"xgboost.ipynb",
"lightgbm.ipynb",
"svm.ipynb",
"simple_demo_public_input_output.ipynb",
"simple_demo_public_network_output.ipynb",
"simple_demo_public_network_output.ipynb", // 20
"gcn.ipynb",
"linear_regression.ipynb",
"stacked_regression.ipynb",
"data_attest_hashed.ipynb",
"kzg_vis.ipynb",
"kzg_vis.ipynb", // 25
"kmeans.ipynb",
"solvency.ipynb",
"sklearn_mlp.ipynb",
"generalized_inverse.ipynb",
"mnist_classifier.ipynb",
"mnist_classifier.ipynb", // 30
"world_rotation.ipynb",
];

View File

@@ -56,9 +56,9 @@ def test_poseidon_hash():
Test for poseidon_hash
"""
message = [1.0, 2.0, 3.0, 4.0]
message = [ezkl.float_to_vecu64(x, 7) for x in message]
message = [ezkl.float_to_string(x, 7) for x in message]
res = ezkl.poseidon_hash(message)
assert ezkl.vecu64_to_felt(
assert ezkl.string_to_felt(
res[0]) == "0x0da7e5e5c8877242fa699f586baf770d731defd54f952d4adeb85047a0e32f45"
@@ -70,14 +70,14 @@ def test_field_serialization():
input = 890
scale = 7
felt = ezkl.float_to_vecu64(input, scale)
roundtrip_input = ezkl.vecu64_to_float(felt, scale)
felt = ezkl.float_to_string(input, scale)
roundtrip_input = ezkl.string_to_float(felt, scale)
assert input == roundtrip_input
input = -700
scale = 7
felt = ezkl.float_to_vecu64(input, scale)
roundtrip_input = ezkl.vecu64_to_float(felt, scale)
felt = ezkl.float_to_string(input, scale)
roundtrip_input = ezkl.string_to_float(felt, scale)
assert input == roundtrip_input

View File

@@ -8,10 +8,10 @@ mod wasm32 {
use ezkl::graph::GraphWitness;
use ezkl::pfsys;
use ezkl::wasm::{
bufferToVecOfVecU64, compiledCircuitValidation, encodeVerifierCalldata, genPk, genVk,
bufferToVecOfstring, compiledCircuitValidation, encodeVerifierCalldata, genPk, genVk,
genWitness, inputValidation, pkValidation, poseidonHash, printProofHex, proofValidation,
prove, settingsValidation, srsValidation, u8_array_to_u128_le, vecU64ToFelt, vecU64ToFloat,
vecU64ToInt, verify, vkValidation, witnessValidation,
prove, settingsValidation, srsValidation, stringToFelt, stringToFloat, stringToInt,
u8_array_to_u128_le, verify, vkValidation, witnessValidation,
};
use halo2_solidity_verifier::encode_calldata;
use halo2curves::bn256::{Fr, G1Affine};
@@ -26,7 +26,7 @@ mod wasm32 {
pub const NETWORK_COMPILED: &[u8] = include_bytes!("../tests/wasm/model.compiled");
pub const NETWORK: &[u8] = include_bytes!("../tests/wasm/network.onnx");
pub const INPUT: &[u8] = include_bytes!("../tests/wasm/input.json");
pub const PROOF: &[u8] = include_bytes!("../tests/wasm/test.proof");
pub const PROOF: &[u8] = include_bytes!("../tests/wasm/proof.json");
pub const SETTINGS: &[u8] = include_bytes!("../tests/wasm/settings.json");
pub const PK: &[u8] = include_bytes!("../tests/wasm/pk.key");
pub const VK: &[u8] = include_bytes!("../tests/wasm/vk.key");
@@ -78,19 +78,19 @@ mod wasm32 {
let serialized = serde_json::to_vec(&field_element).unwrap();
let clamped = wasm_bindgen::Clamped(serialized);
let scale = 2;
let floating_point = vecU64ToFloat(clamped.clone(), scale)
let floating_point = stringToFloat(clamped.clone(), scale)
.map_err(|_| "failed")
.unwrap();
assert_eq!(floating_point, (i as f64) / 4.0);
let integer: i128 = serde_json::from_slice(
&vecU64ToInt(clamped.clone()).map_err(|_| "failed").unwrap(),
&stringToInt(clamped.clone()).map_err(|_| "failed").unwrap(),
)
.unwrap();
assert_eq!(integer, i as i128);
let hex_string = format!("{:?}", field_element);
let returned_string = vecU64ToFelt(clamped).map_err(|_| "failed").unwrap();
let returned_string = stringToFelt(clamped).map_err(|_| "failed").unwrap();
assert_eq!(hex_string, returned_string);
}
}
@@ -101,7 +101,7 @@ mod wasm32 {
let mut buffer = string_high.clone().into_bytes();
let clamped = wasm_bindgen::Clamped(buffer.clone());
let field_elements_ser = bufferToVecOfVecU64(clamped).map_err(|_| "failed").unwrap();
let field_elements_ser = bufferToVecOfstring(clamped).map_err(|_| "failed").unwrap();
let field_elements: Vec<Fr> = serde_json::from_slice(&field_elements_ser[..]).unwrap();
@@ -118,7 +118,7 @@ mod wasm32 {
let buffer = string_sample.clone().into_bytes();
let clamped = wasm_bindgen::Clamped(buffer.clone());
let field_elements_ser = bufferToVecOfVecU64(clamped).map_err(|_| "failed").unwrap();
let field_elements_ser = bufferToVecOfstring(clamped).map_err(|_| "failed").unwrap();
let field_elements: Vec<Fr> = serde_json::from_slice(&field_elements_ser[..]).unwrap();
@@ -133,7 +133,7 @@ mod wasm32 {
let buffer = string_concat.into_bytes();
let clamped = wasm_bindgen::Clamped(buffer.clone());
let field_elements_ser = bufferToVecOfVecU64(clamped).map_err(|_| "failed").unwrap();
let field_elements_ser = bufferToVecOfstring(clamped).map_err(|_| "failed").unwrap();
let field_elements: Vec<Fr> = serde_json::from_slice(&field_elements_ser[..]).unwrap();
@@ -186,6 +186,7 @@ mod wasm32 {
let vk = genVk(
wasm_bindgen::Clamped(NETWORK_COMPILED.to_vec()),
wasm_bindgen::Clamped(SRS.to_vec()),
true,
)
.map_err(|_| "failed")
.unwrap();
@@ -206,6 +207,7 @@ mod wasm32 {
let vk = genVk(
wasm_bindgen::Clamped(NETWORK_COMPILED.to_vec()),
wasm_bindgen::Clamped(SRS.to_vec()),
true,
)
.map_err(|_| "failed")
.unwrap();
@@ -218,6 +220,7 @@ mod wasm32 {
let vk = genVk(
wasm_bindgen::Clamped(NETWORK_COMPILED.to_vec()),
wasm_bindgen::Clamped(SRS.to_vec()),
true,
)
.map_err(|_| "failed")
.unwrap();

Binary file not shown.

Binary file not shown.

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1,7 +1,6 @@
import * as fs from 'fs/promises';
import * as fsSync from 'fs'
import JSONBig from 'json-bigint';
import { vecU64ToFelt } from '@ezkljs/engine/nodejs'
const solc = require('solc');
// import os module

Binary file not shown.

View File

@@ -1 +1 @@
{"inputs":[[[6425625360762666998,7924344314350639699,14762033076929465436,2023505479389396574],[12436184717236109307,3962172157175319849,7381016538464732718,1011752739694698287],[12436184717236109307,3962172157175319849,7381016538464732718,1011752739694698287]]],"pretty_elements":{"rescaled_inputs":[["2","1","1"]],"inputs":[["0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000001"]],"processed_inputs":[],"processed_params":[],"processed_outputs":[],"rescaled_outputs":[["0","0","0","0"]],"outputs":[["0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000"]]},"outputs":[[[0,0,0,0],[0,0,0,0],[0,0,0,0],[0,0,0,0]]],"processed_inputs":null,"processed_params":null,"processed_outputs":null,"max_lookup_inputs":0,"min_lookup_inputs":-1}
{"inputs":[["0200000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000","0100000000000000000000000000000000000000000000000000000000000000"]],"pretty_elements":{"rescaled_inputs":[["2","1","1"]],"inputs":[["0x0000000000000000000000000000000000000000000000000000000000000002","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000001"]],"processed_inputs":[],"processed_params":[],"processed_outputs":[],"rescaled_outputs":[["0","0","0","0"]],"outputs":[["0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000"]]},"outputs":[["0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000","0000000000000000000000000000000000000000000000000000000000000000"]],"processed_inputs":null,"processed_params":null,"processed_outputs":null,"max_lookup_inputs":0,"min_lookup_inputs":-1}