Compare commits

...

28 Commits

Author  SHA1  Message  Date
dante  f0dbd2ee8a  Update Cargo.toml  2025-04-29 11:58:48 -04:00
dante  3f1b28b7a2  Update Cargo.toml  2025-04-29 11:56:47 -04:00
dante  c47d3bf05c  chore: feature-gate eth  2025-04-29 11:49:46 -04:00
dante  68b2c96b97  Merge branch 'vka-hashing' of https://github.com/zkonduit/ezkl into vka-hashing  2025-04-29 11:31:20 -04:00
dante  9a0ab22fdb  fix matches  2025-04-29 11:31:13 -04:00
dante  f2b1de3740  Merge branch 'main' into vka-hashing  2025-04-29 11:26:04 -04:00
Ethan  dcb888ff1e  fix wasm package graph data import error  2025-04-28 16:29:09 -05:00
Ethan  26f465e70c  bring back zizmor analysis  2025-04-28 08:21:26 -05:00
Ethan  8eef53213d  rmv data attestation  2025-04-27 19:36:54 -05:00
Ethan  a1345966d7  configure Git credentials more persistently  2025-04-27 18:09:37 -05:00
Ethan  640061c850  set git config after action checkouts  2025-04-27 17:48:30 -05:00
Ethan  da7db7d88d  use git config local instead of global  2025-04-27 17:20:24 -05:00
Ethan  a55f75ff3f  rmv debug statement on token  2025-04-24 11:19:19 -05:00
Ethan  bf6f704827  debug token  2025-04-24 10:56:27 -05:00
Ethan  0dbfdf4672  debug token  2025-04-24 10:54:56 -05:00
Ethan  98299356a6  *fix syntax error on yaml  2025-04-24 10:51:24 -05:00
Ethan  04805d2a91  move token env to job level  2025-04-24 10:42:35 -05:00
Ethan  ca18cf29bb  set token as global env var  2025-04-24 10:36:37 -05:00
Ethan  78f8e23b55  use verification ezkl token  2025-04-24 10:26:15 -05:00
Ethan  7d40926082  activate git fetch with cli on runner  2025-04-24 09:53:20 -05:00
Ethan  e2c8182871  *update python bindings  2025-04-24 09:43:55 -05:00
Ethan  4f077c9134  *use https for loading h2 sol verifier crate  2025-04-23 21:57:07 -05:00
Ethan  038805ce02  Merge branch 'main' into vka-hashing  2025-04-23 21:32:56 -05:00
Ethan  0fb87c9a20  *update lock  2025-04-23 21:30:43 -05:00
Ethan  77423a6d07  *check that on-chain rescaled instances match what is stored in proof file.  2025-04-23 21:25:35 -05:00
Ethan  8b416c7a00  *comment out swift package test  2025-04-21 04:31:51 -05:00
Ethan  73ec5e549a  *temporarily disable zizmor + swift package on ci.  2025-04-21 04:27:36 -05:00
Ethan  28386d8442  vka hashing + rescaling  2025-04-21 04:13:31 -05:00
22 changed files with 1004 additions and 4726 deletions

View File

@@ -24,10 +24,31 @@ jobs:
permissions:
contents: read
runs-on: large-self-hosted
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set the URL replacement as before
git config --global \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2025-02-17
@@ -44,10 +65,31 @@ jobs:
permissions:
contents: read
runs-on: ubuntu-latest
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set the URL replacement as before
git config --global \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2025-02-17
@@ -60,10 +102,31 @@ jobs:
permissions:
contents: read
runs-on: ubuntu-latest
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set the URL replacement as before
git config --global \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2025-02-17
@@ -76,10 +139,31 @@ jobs:
permissions:
contents: read
runs-on: ubuntu-latest-32-cores
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set the URL replacement as before
git config --global \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2025-02-17
@@ -95,7 +179,7 @@ jobs:
- name: Library tests
run: cargo nextest run --lib --verbose
- name: Library tests (original lookup)
run: cargo nextest run --lib --verbose --no-default-features --features ezkl
run: cargo nextest run --lib --verbose --no-default-features --features ezkl,eth-original-lookup
# ultra-overflow-tests-gpu:
# runs-on: GPU
@@ -134,10 +218,31 @@ jobs:
permissions:
contents: read
runs-on: non-gpu
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set the URL replacement as before
git config --global \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2025-02-17
@@ -155,22 +260,43 @@ jobs:
# - name: Conv overflow (wasi)
# run: cargo wasi test conv_col_ultra_overflow -- --include-ignored --nocapture
- name: lookup overflow
run: cargo nextest run --release lookup_ultra_overflow --no-capture --no-default-features --features ezkl -- --include-ignored
run: cargo nextest run --release lookup_ultra_overflow --no-capture --no-default-features --features ezkl,eth-original-lookup -- --include-ignored
- name: Matmul overflow
run: RUST_LOG=debug cargo nextest run --release matmul_col_ultra_overflow --no-capture --no-default-features --features ezkl -- --include-ignored
run: RUST_LOG=debug cargo nextest run --release matmul_col_ultra_overflow --no-capture --no-default-features --features ezkl,eth-original-lookup -- --include-ignored
- name: Conv overflow
run: RUST_LOG=debug cargo nextest run --release conv_col_ultra_overflow --no-capture --no-default-features --features ezkl -- --include-ignored
run: RUST_LOG=debug cargo nextest run --release conv_col_ultra_overflow --no-capture --no-default-features --features ezkl,eth-original-lookup -- --include-ignored
- name: Conv + relu overflow
run: cargo nextest run --release conv_relu_col_ultra_overflow --no-capture --no-default-features --features ezkl -- --include-ignored
run: cargo nextest run --release conv_relu_col_ultra_overflow --no-capture --no-default-features --features ezkl,eth-original-lookup -- --include-ignored
ultra-overflow-tests:
permissions:
contents: read
runs-on: non-gpu
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set the URL replacement as before
git config --global \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2025-02-17
@@ -200,10 +326,31 @@ jobs:
permissions:
contents: read
runs-on: ubuntu-latest-16-cores
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set the URL replacement as before
git config --global \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2025-02-17
@@ -220,10 +367,31 @@ jobs:
permissions:
contents: read
runs-on: non-gpu
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set the URL replacement as before
git config --global \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2025-02-17
@@ -268,10 +436,31 @@ jobs:
permissions:
contents: read
runs-on: non-gpu
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set the URL replacement as before
git config --global \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2025-02-17
@@ -335,10 +524,31 @@ jobs:
contents: read
runs-on: non-gpu
needs: [build, library-tests, docs, python-tests, python-integration-tests]
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set the URL replacement as before
git config --global \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2025-02-17
@@ -454,10 +664,31 @@ jobs:
contents: read
runs-on: non-gpu
needs: [build, library-tests, docs]
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set the URL replacement as before
git config --global \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2025-02-17
@@ -573,10 +804,31 @@ jobs:
contents: read
runs-on: self-hosted
needs: [build, library-tests, docs, python-tests, python-integration-tests]
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set the URL replacement as before
git config --global \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: dtolnay/rust-toolchain@4f94fbe7e03939b0e674bcc9ca609a16088f63ff #nightly branch, TODO: update when required
with:
toolchain: nightly-2025-02-17
@@ -614,10 +866,31 @@ jobs:
contents: read
runs-on: large-self-hosted
needs: [build, library-tests, docs, python-tests, python-integration-tests]
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set the URL replacement as before
git config --global \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2025-02-17
@@ -635,10 +908,31 @@ jobs:
contents: read
runs-on: large-self-hosted
needs: [build, library-tests, docs, python-tests, python-integration-tests]
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set the URL replacement as before
git config --global \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2025-02-17
@@ -660,10 +954,31 @@ jobs:
contents: read
runs-on: ubuntu-latest-32-cores
needs: [build, library-tests, docs]
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set the URL replacement as before
git config --global \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2025-02-17
@@ -681,10 +996,31 @@ jobs:
contents: read
runs-on: non-gpu
needs: [build, library-tests, docs]
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set the URL replacement as before
git config --global \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions/setup-python@b64ffcaf5b410884ad320a9cfac8866006a109aa #v4.8.0
with:
python-version: "3.12"
@@ -711,10 +1047,31 @@ jobs:
contents: read
runs-on: non-gpu
needs: [build, library-tests, docs, python-tests, python-integration-tests]
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set the URL replacement as before
git config --global \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions/setup-python@b64ffcaf5b410884ad320a9cfac8866006a109aa #v4.8.0
with:
python-version: "3.12"
@@ -762,10 +1119,31 @@ jobs:
ports:
# Maps tcp port 5432 on service container to the host
- 5432:5432
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set the URL replacement as before
git config --global \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions/setup-python@b64ffcaf5b410884ad320a9cfac8866006a109aa #v4.8.0
with:
python-version: "3.11"
@@ -812,16 +1190,39 @@ jobs:
- name: NBEATS tutorial
run: source .env/bin/activate; cargo nextest run py_tests::tests::nbeats_
# - name: Reusable verifier tutorial
# run: source .env/bin/activate; cargo nextest run py_tests::tests::reusable_
# run: source .env/bin/activate; cargo nextest run py_tests::tests::reusable_verifier_ --no-capture
- name: Reusable verifier tutorial
run: source .env/bin/activate; cargo nextest run py_tests::tests::reusable_verifier_ --no-capture --test-threads 1
ios-integration-tests:
permissions:
contents: read
runs-on: macos-latest
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set the URL replacement as before
git config --global \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2025-02-17
@@ -840,10 +1241,31 @@ jobs:
runs-on: macos-latest
needs: [ios-integration-tests]
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set the URL replacement as before
git config --global \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2025-02-17
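The "Configure Git credentials" step repeated across the jobs above persists the token in a git credential store and installs an insteadOf rewrite, so every https://github.com/ fetch (including cargo's git dependencies, such as the private verification-ezkl repo pinned below) authenticates with VERIFICATION_EZKL_TOKEN. A minimal sketch for checking the rewrite locally, assuming git is installed and the token is exported in the shell (the ls-remote target is illustrative):

```bash
# List the insteadOf rewrite installed by the workflow step.
git config --global --get-regexp '^url\.'
# -> url.https://<token>:x-oauth-basic@github.com/.insteadof https://github.com/

# Confirm an authenticated fetch succeeds without an interactive prompt;
# GIT_TERMINAL_PROMPT=0 makes git fail fast if the token is rejected.
GIT_TERMINAL_PROMPT=0 git ls-remote https://github.com/zkonduit/verification-ezkl HEAD \
  && echo "token rewrite works"
```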

Cargo.lock generated
View File

@@ -2513,13 +2513,14 @@ dependencies = [
[[package]]
name = "halo2_solidity_verifier"
version = "0.1.0"
source = "git+https://github.com/alexander-camuto/halo2-solidity-verifier#3d237d566ca6714c9ee6fcf3f2dcefffa79f914c"
source = "git+https://github.com/zkonduit/verification-ezkl?branch=vka-hash#409f977e461b435b9afc33ed38edba09fe2eaee4"
dependencies = [
"askama",
"blake2b_simd",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"regex",
"ruint",
"sha3 0.10.8",
]
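The lock entry now points at the vka-hash branch of zkonduit/verification-ezkl instead of the public alexander-camuto fork (see the matching Cargo.toml change below). A sketch of how such a pin is typically regenerated after editing the git source, assuming the token rewrite from the workflow above is in place so cargo can reach the private repo:

```bash
# Re-resolve just the moved dependency; cargo rewrites the
# `source = "git+...#<commit>"` line in Cargo.lock accordingly.
cargo update -p halo2_solidity_verifier
```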

View File

@@ -35,7 +35,7 @@ halo2_wrong_ecc = { git = "https://github.com/zkonduit/halo2wrong", branch = "ac
snark-verifier = { git = "https://github.com/zkonduit/snark-verifier", branch = "ac/chunked-mv-lookup", features = [
"derive_serde",
] }
halo2_solidity_verifier = { git = "https://github.com/alexander-camuto/halo2-solidity-verifier", optional = true }
halo2_solidity_verifier = { git = "https://github.com/zkonduit/verification-ezkl", branch = "vka-hash", optional = true }
maybe-rayon = { version = "0.1.1", default-features = false }
bincode = { version = "1.3.3", default-features = false }
unzip-n = "0.1.2"
@@ -217,8 +217,8 @@ required-features = ["python-bindings"]
[features]
web = ["wasm-bindgen-rayon"]
default = [
"eth-mv-lookup",
"ezkl",
"mv-lookup",
"precompute-coset",
"no-banner",
"parallel-poly-read",
@@ -234,8 +234,6 @@ ezkl = [
"tabled/color",
"serde_json/std",
"colored_json",
"dep:alloy",
"dep:foundry-compilers",
"dep:ethabi",
"dep:indicatif",
"dep:gag",
@@ -247,11 +245,30 @@ ezkl = [
"dep:chrono",
"dep:sha256",
"dep:clap_complete",
"dep:halo2_solidity_verifier",
"dep:semver",
"dep:clap",
"dep:tosubcommand",
]
eth = [
"dep:alloy",
"dep:foundry-compilers",
"dep:ethabi",
]
solidity-verifier = [
"dep:halo2_solidity_verifier",
]
solidity-verifier-mv-lookup = [
"halo2_solidity_verifier/mv-lookup",
]
eth-mv-lookup = [
"solidity-verifier-mv-lookup",
"mv-lookup",
"eth",
]
eth-original-lookup = [
"eth",
"solidity-verifier",
]
parallel-poly-read = [
"halo2_proofs/circuit-params",
"halo2_proofs/parallel-poly-read",
@@ -259,7 +276,6 @@ parallel-poly-read = [
mv-lookup = [
"halo2_proofs/mv-lookup",
"snark-verifier/mv-lookup",
"halo2_solidity_verifier/mv-lookup",
]
asm = ["halo2curves/asm", "halo2_proofs/asm"]
precompute-coset = ["halo2_proofs/precompute-coset"]
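After this refactor the Ethereum tooling (eth), the Solidity verifier (solidity-verifier), and the lookup argument are separate gates: eth-mv-lookup composes eth with the mv-lookup stack for the default build, while eth-original-lookup pairs eth with the plain Solidity verifier for the original lookup argument. A few example invocations under the new gates, sketched on the assumption that cargo-nextest is installed (the last command mirrors the updated CI steps above):

```bash
# Default build: ezkl plus eth-mv-lookup (Ethereum tooling + Solidity verifier + mv-lookup).
cargo build

# Build with no Ethereum/Solidity-verifier code at all, keeping the mv-lookup argument:
cargo build --no-default-features --features ezkl,mv-lookup

# Library tests against the original (non-mv) lookup argument with eth enabled,
# as in the updated CI step:
cargo nextest run --lib --verbose --no-default-features --features ezkl,eth-original-lookup
```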

View File

@@ -1,589 +0,0 @@
{
"cells": [
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"# data-attest-ezkl\n",
"\n",
"Here's an example leveraging EZKL whereby the inputs to the model are read and attested to from an on-chain source.\n",
"\n",
"In this setup:\n",
"- the inputs and outputs are publicly known to the prover and verifier\n",
"- the on chain inputs will be fetched and then fed directly into the circuit\n",
"- the quantization of the on-chain inputs happens within the evm and is replicated at proving time \n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"First we import the necessary dependencies and set up logging to be as informative as possible. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# check if notebook is in colab\n",
"try:\n",
" # install ezkl\n",
" import google.colab\n",
" import subprocess\n",
" import sys\n",
" subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"ezkl\"])\n",
" subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"onnx\"])\n",
"\n",
"# rely on local installation of ezkl if the notebook is not in colab\n",
"except:\n",
" pass\n",
"\n",
"\n",
"from torch import nn\n",
"import ezkl\n",
"import os\n",
"import json\n",
"import logging\n",
"\n",
"# uncomment for more descriptive logging \n",
"FORMAT = '%(levelname)s %(name)s %(asctime)-15s %(filename)s:%(lineno)d %(message)s'\n",
"logging.basicConfig(format=FORMAT)\n",
"logging.getLogger().setLevel(logging.DEBUG)\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Now we define our model. It is a very simple PyTorch model that has just one layer, an average pooling 2D layer. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import torch\n",
"# Defines the model\n",
"\n",
"class MyModel(nn.Module):\n",
" def __init__(self):\n",
" super(MyModel, self).__init__()\n",
" self.layer = nn.AvgPool2d(2, 1, (1, 1))\n",
"\n",
" def forward(self, x):\n",
" return self.layer(x)[0]\n",
"\n",
"\n",
"circuit = MyModel()\n",
"\n",
"# this is where you'd train your model"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"We omit training for purposes of this demonstration. We've marked where training would happen in the cell above. \n",
"Now we export the model to onnx and create a corresponding (randomly generated) input. This input data will eventually be stored on chain and read from according to the call_data field in the graph input.\n",
"\n",
"You can replace the random `x` with real data if you so wish. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"x = 0.1*torch.rand(1,*[3, 2, 2], requires_grad=True)\n",
"\n",
"# Flips the neural net into inference mode\n",
"circuit.eval()\n",
"\n",
" # Export the model\n",
"torch.onnx.export(circuit, # model being run\n",
" x, # model input (or a tuple for multiple inputs)\n",
" \"network.onnx\", # where to save the model (can be a file or file-like object)\n",
" export_params=True, # store the trained parameter weights inside the model file\n",
" opset_version=10, # the ONNX version to export the model to\n",
" do_constant_folding=True, # whether to execute constant folding for optimization\n",
" input_names = ['input'], # the model's input names\n",
" output_names = ['output'], # the model's output names\n",
" dynamic_axes={'input' : {0 : 'batch_size'}, # variable length axes\n",
" 'output' : {0 : 'batch_size'}})\n",
"\n",
"data_array = ((x).detach().numpy()).reshape([-1]).tolist()\n",
"\n",
"data = dict(input_data = [data_array])\n",
"\n",
" # Serialize data into file:\n",
"json.dump(data, open(\"input.json\", 'w' ))\n",
"\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"We now define a function that will create a new anvil instance which we will deploy our test contract too. This contract will contain in its storage the data that we will read from and attest to. In production you would not need to set up a local anvil instance. Instead you would replace RPC_URL with the actual RPC endpoint of the chain you are deploying your verifiers too, reading from the data on said chain."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import subprocess\n",
"import time\n",
"import threading\n",
"\n",
"# make sure anvil is running locally\n",
"# $ anvil -p 3030\n",
"\n",
"RPC_URL = \"http://localhost:3030\"\n",
"\n",
"# Save process globally\n",
"anvil_process = None\n",
"\n",
"def start_anvil():\n",
" global anvil_process\n",
" if anvil_process is None:\n",
" anvil_process = subprocess.Popen([\"anvil\", \"-p\", \"3030\", \"--code-size-limit=41943040\"])\n",
" if anvil_process.returncode is not None:\n",
" raise Exception(\"failed to start anvil process\")\n",
" time.sleep(3)\n",
"\n",
"def stop_anvil():\n",
" global anvil_process\n",
" if anvil_process is not None:\n",
" anvil_process.terminate()\n",
" anvil_process = None\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"We define our `PyRunArgs` objects which contains the visibility parameters for out model. \n",
"- `input_visibility` defines the visibility of the model inputs\n",
"- `param_visibility` defines the visibility of the model weights and constants and parameters \n",
"- `output_visibility` defines the visibility of the model outputs\n",
"\n",
"Here we create the following setup:\n",
"- `input_visibility`: \"public\"\n",
"- `param_visibility`: \"private\"\n",
"- `output_visibility`: public\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import ezkl\n",
"\n",
"model_path = os.path.join('network.onnx')\n",
"compiled_model_path = os.path.join('network.compiled')\n",
"pk_path = os.path.join('test.pk')\n",
"vk_path = os.path.join('test.vk')\n",
"settings_path = os.path.join('settings.json')\n",
"srs_path = os.path.join('kzg.srs')\n",
"data_path = os.path.join('input.json')\n",
"\n",
"run_args = ezkl.PyRunArgs()\n",
"run_args.input_visibility = \"public\"\n",
"run_args.param_visibility = \"private\"\n",
"run_args.output_visibility = \"public\"\n",
"run_args.num_inner_cols = 1\n",
"run_args.variables = [(\"batch_size\", 1)]\n",
"\n",
"\n",
"\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Now we generate a settings file. This file basically instantiates a bunch of parameters that determine their circuit shape, size etc... Because of the way we represent nonlinearities in the circuit (using Halo2's [lookup tables](https://zcash.github.io/halo2/design/proving-system/lookup.html)), it is often best to _calibrate_ this settings file as some data can fall out of range of these lookups.\n",
"\n",
"You can pass a dataset for calibration that will be representative of real inputs you might find if and when you deploy the prover. Here we create a dummy calibration dataset for demonstration purposes. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"!RUST_LOG=trace\n",
"# TODO: Dictionary outputs\n",
"res = ezkl.gen_settings(model_path, settings_path, py_run_args=run_args)\n",
"assert res == True"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# generate a bunch of dummy calibration data\n",
"cal_data = {\n",
" \"input_data\": [(0.1*torch.rand(2, *[3, 2, 2])).flatten().tolist()],\n",
"}\n",
"\n",
"cal_path = os.path.join('val_data.json')\n",
"# save as json file\n",
"with open(cal_path, \"w\") as f:\n",
" json.dump(cal_data, f)\n",
"\n",
"res = await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"res = ezkl.compile_circuit(model_path, compiled_model_path, settings_path)\n",
"assert res == True"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The graph input for on chain data sources is formatted completely differently compared to file based data sources.\n",
"\n",
"- For file data sources, the raw floating point values that eventually get quantized, converted into field elements and stored in `witness.json` to be consumed by the circuit are stored. The output data contains the expected floating point values returned as outputs from running your vanilla pytorch model on the given inputs.\n",
"- For on chain data sources, the input_data field contains all the data necessary to read and format the on chain data into something digestable by EZKL (aka field elements :-D). \n",
"Here is what the schema for an on-chain data source graph input file should look like for a single call data source:\n",
" \n",
"```json\n",
"{\n",
" \"input_data\": {\n",
" \"rpc\": \"http://localhost:3030\", // The rpc endpoint of the chain you are deploying your verifier to\n",
" \"calls\": {\n",
" \"call_data\": \"1f3be514000000000000000000000000c6962004f452be9203591991d15f6b388e09e8d00000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000000b000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000009000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000070000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000500000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000\", // The abi encoded call data to a view function that returns an array of on-chain data points we are attesting to. \n",
" \"decimals\": 0, // The number of decimal places of the large uint256 value. This is our way of representing large wei values as floating points on chain, since the evm only natively supports integer values.\n",
" \"address\": \"9A213F53334279C128C37DA962E5472eCD90554f\", // The address of the contract that we are calling to get the data. \n",
" \"len\": 12 // The number of data points returned by the view function (the length of the array)\n",
" }\n",
" }\n",
"}\n",
"```"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"await ezkl.setup_test_evm_data(\n",
" data_path,\n",
" compiled_model_path,\n",
" # we write the call data to the same file as the input data\n",
" data_path,\n",
" input_source=ezkl.PyTestDataSource.OnChain,\n",
" output_source=ezkl.PyTestDataSource.File,\n",
" rpc_url=RPC_URL)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"As we use Halo2 with KZG-commitments we need an SRS string from (preferably) a multi-party trusted setup ceremony. For an overview of the procedures for such a ceremony check out [this page](https://blog.ethereum.org/2023/01/16/announcing-kzg-ceremony). The `get_srs` command retrieves a correctly sized SRS given the calibrated settings file from [here](https://github.com/han0110/halo2-kzg-srs). \n",
"\n",
"These SRS were generated with [this](https://github.com/privacy-scaling-explorations/perpetualpowersoftau) ceremony. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"res = await ezkl.get_srs( settings_path)\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"We now need to generate the circuit witness. These are the model outputs (and any hashes) that are generated when feeding the previously generated `input.json` through the circuit / model. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"!export RUST_BACKTRACE=1\n",
"\n",
"witness_path = \"witness.json\"\n",
"\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Here we setup verifying and proving keys for the circuit. As the name suggests the proving key is needed for ... proving and the verifying key is needed for ... verifying. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# HERE WE SETUP THE CIRCUIT PARAMS\n",
"# WE GOT KEYS\n",
"# WE GOT CIRCUIT PARAMETERS\n",
"# EVERYTHING ANYONE HAS EVER NEEDED FOR ZK\n",
"res = ezkl.setup(\n",
" compiled_model_path,\n",
" vk_path,\n",
" pk_path,\n",
" \n",
" )\n",
"\n",
"assert res == True\n",
"assert os.path.isfile(vk_path)\n",
"assert os.path.isfile(pk_path)\n",
"assert os.path.isfile(settings_path)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Now we generate a full proof. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# GENERATE A PROOF\n",
"\n",
"proof_path = os.path.join('test.pf')\n",
"\n",
"res = ezkl.prove(\n",
" witness_path,\n",
" compiled_model_path,\n",
" pk_path,\n",
" proof_path,\n",
" \n",
" \"single\",\n",
" )\n",
"\n",
"print(res)\n",
"assert os.path.isfile(proof_path)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"And verify it as a sanity check. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# VERIFY IT\n",
"\n",
"res = ezkl.verify(\n",
" proof_path,\n",
" settings_path,\n",
" vk_path,\n",
" \n",
" )\n",
"\n",
"assert res == True\n",
"print(\"verified\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"We can now create and then deploy a vanilla evm verifier."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"abi_path = 'test.abi'\n",
"sol_code_path = 'test.sol'\n",
"\n",
"res = await ezkl.create_evm_verifier(\n",
" vk_path,\n",
" \n",
" settings_path,\n",
" sol_code_path,\n",
" abi_path,\n",
" )\n",
"assert res == True"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import json\n",
"\n",
"addr_path_verifier = \"addr_verifier.txt\"\n",
"\n",
"res = await ezkl.deploy_evm(\n",
" addr_path_verifier,\n",
" 'http://127.0.0.1:3030',\n",
" sol_code_path,\n",
")\n",
"\n",
"assert res == True"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"With the vanilla verifier deployed, we can now create the data attestation contract, which will read in the instances from the calldata to the verifier, attest to them, call the verifier and then return the result. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"\n",
"abi_path = 'test.abi'\n",
"sol_code_path = 'test.sol'\n",
"input_path = 'input.json'\n",
"\n",
"res = await ezkl.create_evm_data_attestation(\n",
" input_path,\n",
" settings_path,\n",
" sol_code_path,\n",
" abi_path,\n",
" )"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Now we can deploy the data attest verifier contract. For security reasons, this binding will only deploy to a local anvil instance, using accounts generated by anvil. \n",
"So should only be used for testing purposes."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"addr_path_da = \"addr_da.txt\"\n",
"\n",
"res = await ezkl.deploy_da_evm(\n",
" addr_path_da,\n",
" input_path,\n",
" RPC_URL,\n",
" settings_path,\n",
" sol_code_path,\n",
" )\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Call the view only verify method on the contract to verify the proof. Since it is a view function this is safe to use in production since you don't have to pass your private key."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# read the verifier address\n",
"addr_verifier = None\n",
"with open(addr_path_verifier, 'r') as f:\n",
" addr = f.read()\n",
"#read the data attestation address\n",
"addr_da = None\n",
"with open(addr_path_da, 'r') as f:\n",
" addr_da = f.read()\n",
"\n",
"res = await ezkl.verify_evm(\n",
" addr,\n",
" RPC_URL,\n",
" proof_path,\n",
" addr_da,\n",
")"
]
}
],
"metadata": {
"kernelspec": {
"display_name": ".env",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.9"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}

View File

@@ -1,660 +0,0 @@
{
"cells": [
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"# data-attest-ezkl hashed\n",
"\n",
"Here's an example leveraging EZKL whereby the hashes of the outputs to the model are read and attested to from an on-chain source.\n",
"\n",
"In this setup:\n",
"- the hashes of outputs are publicly known to the prover and verifier\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"First we import the necessary dependencies and set up logging to be as informative as possible. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# check if notebook is in colab\n",
"try:\n",
" # install ezkl\n",
" import google.colab\n",
" import subprocess\n",
" import sys\n",
" subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"ezkl\"])\n",
" subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"onnx\"])\n",
"\n",
"# rely on local installation of ezkl if the notebook is not in colab\n",
"except:\n",
" pass\n",
"\n",
"\n",
"from torch import nn\n",
"import ezkl\n",
"import os\n",
"import json\n",
"import logging\n",
"\n",
"# uncomment for more descriptive logging \n",
"# FORMAT = '%(levelname)s %(name)s %(asctime)-15s %(filename)s:%(lineno)d %(message)s'\n",
"# logging.basicConfig(format=FORMAT)\n",
"# logging.getLogger().setLevel(logging.DEBUG)\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Now we define our model. It is a very simple PyTorch model that has just one layer, an average pooling 2D layer. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import torch\n",
"# Defines the model\n",
"\n",
"class MyModel(nn.Module):\n",
" def __init__(self):\n",
" super(MyModel, self).__init__()\n",
" self.layer = nn.AvgPool2d(2, 1, (1, 1))\n",
"\n",
" def forward(self, x):\n",
" return self.layer(x)[0]\n",
"\n",
"\n",
"circuit = MyModel()\n",
"\n",
"# this is where you'd train your model\n",
"\n",
"\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"We omit training for purposes of this demonstration. We've marked where training would happen in the cell above. \n",
"Now we export the model to onnx and create a corresponding (randomly generated) input. This input data will eventually be stored on chain and read from according to the call_data field in the graph input.\n",
"\n",
"You can replace the random `x` with real data if you so wish. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"x = 0.1*torch.rand(1,*[3, 2, 2], requires_grad=True)\n",
"\n",
"# Flips the neural net into inference mode\n",
"circuit.eval()\n",
"\n",
" # Export the model\n",
"torch.onnx.export(circuit, # model being run\n",
" x, # model input (or a tuple for multiple inputs)\n",
" \"network.onnx\", # where to save the model (can be a file or file-like object)\n",
" export_params=True, # store the trained parameter weights inside the model file\n",
" opset_version=10, # the ONNX version to export the model to\n",
" do_constant_folding=True, # whether to execute constant folding for optimization\n",
" input_names = ['input'], # the model's input names\n",
" output_names = ['output'], # the model's output names\n",
" dynamic_axes={'input' : {0 : 'batch_size'}, # variable length axes\n",
" 'output' : {0 : 'batch_size'}})\n",
"\n",
"data_array = ((x).detach().numpy()).reshape([-1]).tolist()\n",
"\n",
"data = dict(input_data = [data_array])\n",
"\n",
" # Serialize data into file:\n",
"json.dump(data, open(\"input.json\", 'w' ))\n",
"\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"We now define a function that will create a new anvil instance which we will deploy our test contract too. This contract will contain in its storage the data that we will read from and attest to. In production you would not need to set up a local anvil instance. Instead you would replace RPC_URL with the actual RPC endpoint of the chain you are deploying your verifiers too, reading from the data on said chain."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import subprocess\n",
"import time\n",
"import threading\n",
"\n",
"# make sure anvil is running locally\n",
"# $ anvil -p 3030\n",
"\n",
"RPC_URL = \"http://localhost:3030\"\n",
"\n",
"# Save process globally\n",
"anvil_process = None\n",
"\n",
"def start_anvil():\n",
" global anvil_process\n",
" if anvil_process is None:\n",
" anvil_process = subprocess.Popen([\"anvil\", \"-p\", \"3030\", \"--code-size-limit=41943040\"])\n",
" if anvil_process.returncode is not None:\n",
" raise Exception(\"failed to start anvil process\")\n",
" time.sleep(3)\n",
"\n",
"def stop_anvil():\n",
" global anvil_process\n",
" if anvil_process is not None:\n",
" anvil_process.terminate()\n",
" anvil_process = None\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"We define our `PyRunArgs` objects which contains the visibility parameters for out model. \n",
"- `input_visibility` defines the visibility of the model inputs\n",
"- `param_visibility` defines the visibility of the model weights and constants and parameters \n",
"- `output_visibility` defines the visibility of the model outputs\n",
"\n",
"Here we create the following setup:\n",
"- `input_visibility`: \"private\"\n",
"- `param_visibility`: \"private\"\n",
"- `output_visibility`: hashed\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import ezkl\n",
"\n",
"model_path = os.path.join('network.onnx')\n",
"compiled_model_path = os.path.join('network.compiled')\n",
"pk_path = os.path.join('test.pk')\n",
"vk_path = os.path.join('test.vk')\n",
"settings_path = os.path.join('settings.json')\n",
"srs_path = os.path.join('kzg.srs')\n",
"data_path = os.path.join('input.json')\n",
"\n",
"run_args = ezkl.PyRunArgs()\n",
"run_args.input_visibility = \"private\"\n",
"run_args.param_visibility = \"private\"\n",
"run_args.output_visibility = \"hashed\"\n",
"run_args.variables = [(\"batch_size\", 1)]\n",
"\n",
"\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Now we generate a settings file. This file basically instantiates a bunch of parameters that determine their circuit shape, size etc... Because of the way we represent nonlinearities in the circuit (using Halo2's [lookup tables](https://zcash.github.io/halo2/design/proving-system/lookup.html)), it is often best to _calibrate_ this settings file as some data can fall out of range of these lookups.\n",
"\n",
"You can pass a dataset for calibration that will be representative of real inputs you might find if and when you deploy the prover. Here we create a dummy calibration dataset for demonstration purposes. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"!RUST_LOG=trace\n",
"# TODO: Dictionary outputs\n",
"res = ezkl.gen_settings(model_path, settings_path, py_run_args=run_args)\n",
"assert res == True"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# generate a bunch of dummy calibration data\n",
"cal_data = {\n",
" \"input_data\": [(0.1*torch.rand(2, *[3, 2, 2])).flatten().tolist()],\n",
"}\n",
"\n",
"cal_path = os.path.join('val_data.json')\n",
"# save as json file\n",
"with open(cal_path, \"w\") as f:\n",
" json.dump(cal_data, f)\n",
"\n",
"res = await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"res = ezkl.compile_circuit(model_path, compiled_model_path, settings_path)\n",
"assert res == True"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"As we use Halo2 with KZG-commitments we need an SRS string from (preferably) a multi-party trusted setup ceremony. For an overview of the procedures for such a ceremony check out [this page](https://blog.ethereum.org/2023/01/16/announcing-kzg-ceremony). The `get_srs` command retrieves a correctly sized SRS given the calibrated settings file from [here](https://github.com/han0110/halo2-kzg-srs). \n",
"\n",
"These SRS were generated with [this](https://github.com/privacy-scaling-explorations/perpetualpowersoftau) ceremony. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"res = await ezkl.get_srs( settings_path)\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"We now need to generate the circuit witness. These are the model outputs (and any hashes) that are generated when feeding the previously generated `input.json` through the circuit / model. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"!export RUST_BACKTRACE=1\n",
"\n",
"witness_path = \"witness.json\"\n",
"\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"print(ezkl.felt_to_big_endian(res['processed_outputs']['poseidon_hash'][0]))"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"We now post the hashes of the outputs to the chain. This is the data that will be read from and attested to."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from web3 import Web3, HTTPProvider\n",
"from solcx import compile_standard\n",
"from decimal import Decimal\n",
"import json\n",
"import os\n",
"import torch\n",
"\n",
"\n",
"# setup web3 instance\n",
"w3 = Web3(HTTPProvider(RPC_URL))\n",
"\n",
"def test_on_chain_data(res):\n",
" print(f'poseidon_hash: {res[\"processed_outputs\"][\"poseidon_hash\"]}')\n",
" # Step 0: Convert the tensor to a flat list\n",
" data = [int(ezkl.felt_to_big_endian(res['processed_outputs']['poseidon_hash'][0]), 0)]\n",
"\n",
" # Step 1: Prepare the data\n",
" # Step 2: Prepare and compile the contract.\n",
" # We are using a test contract here but in production you would\n",
" # use whatever contract you are fetching data from.\n",
" contract_source_code = '''\n",
" // SPDX-License-Identifier: UNLICENSED\n",
" pragma solidity ^0.8.17;\n",
"\n",
" contract TestReads {\n",
"\n",
" uint[] public arr;\n",
" constructor(uint256[] memory _numbers) {\n",
" for(uint256 i = 0; i < _numbers.length; i++) {\n",
" arr.push(_numbers[i]);\n",
" }\n",
" }\n",
" function getArr() public view returns (uint[] memory) {\n",
" return arr;\n",
" }\n",
" }\n",
" '''\n",
"\n",
" compiled_sol = compile_standard({\n",
" \"language\": \"Solidity\",\n",
" \"sources\": {\"testreads.sol\": {\"content\": contract_source_code}},\n",
" \"settings\": {\"outputSelection\": {\"*\": {\"*\": [\"metadata\", \"evm.bytecode\", \"abi\"]}}}\n",
" })\n",
"\n",
" # Get bytecode\n",
" bytecode = compiled_sol['contracts']['testreads.sol']['TestReads']['evm']['bytecode']['object']\n",
"\n",
" # Get ABI\n",
" # In production if you are reading from really large contracts you can just use\n",
" # a stripped down version of the ABI of the contract you are calling, containing only the view functions you will fetch data from.\n",
" abi = json.loads(compiled_sol['contracts']['testreads.sol']['TestReads']['metadata'])['output']['abi']\n",
"\n",
" # Step 3: Deploy the contract\n",
" TestReads = w3.eth.contract(abi=abi, bytecode=bytecode)\n",
" tx_hash = TestReads.constructor(data).transact()\n",
" tx_receipt = w3.eth.wait_for_transaction_receipt(tx_hash)\n",
" # If you are deploying to production you can skip the 3 lines of code above and just instantiate the contract like this,\n",
" # passing the address and abi of the contract you are fetching data from.\n",
" contract = w3.eth.contract(address=tx_receipt['contractAddress'], abi=abi)\n",
"\n",
" # Step 4: Interact with the contract\n",
" calldata = contract.functions.getArr().build_transaction()['data'][2:]\n",
"\n",
" # Prepare the calls_to_account object\n",
" # If you were calling view functions across multiple contracts,\n",
" # you would have multiple entries in the calls_to_account array,\n",
" # one for each contract.\n",
" decimals = [0] * len(data)\n",
" call_to_account = {\n",
" 'call_data': calldata,\n",
" 'decimals': decimals,\n",
" 'address': contract.address[2:], # remove the '0x' prefix\n",
" }\n",
"\n",
" print(f'call_to_account: {call_to_account}')\n",
"\n",
" return call_to_account\n",
"\n",
"# Now let's start the Anvil process. You don't need to do this if you are deploying to a non-local chain.\n",
"start_anvil()\n",
"\n",
"# Now let's call our function, passing in the same input tensor we used to export the model 2 cells above.\n",
"call_to_account = test_on_chain_data(res)\n",
"\n",
"data = dict(input_data = [data_array], output_data = {'rpc': RPC_URL, 'call': call_to_account })\n",
"\n",
"# Serialize on-chain data into file:\n",
"json.dump(data, open(\"input.json\", 'w'))\n",
"\n"
]
},
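    {
    "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
    "As a quick aside, here is a minimal sketch of the felt-to-integer conversion used above, assuming (as the cell above does) that `ezkl.felt_to_big_endian` returns a `0x`-prefixed hex string which `int(..., 0)` parses by auto-detecting the base:"
    ]
    },
    {
    "cell_type": "code",
    "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
    "# sketch: the poseidon hash felt becomes the uint256 we post on-chain\n",
    "felt = res['processed_outputs']['poseidon_hash'][0]\n",
    "as_int = int(ezkl.felt_to_big_endian(felt), 0)  # base 0 auto-detects the 0x prefix\n",
    "print(felt, '->', as_int)"
    ]
    },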
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Here we setup verifying and proving keys for the circuit. As the name suggests the proving key is needed for ... proving and the verifying key is needed for ... verifying. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# HERE WE SETUP THE CIRCUIT PARAMS\n",
"# WE GOT KEYS\n",
"# WE GOT CIRCUIT PARAMETERS\n",
"# EVERYTHING ANYONE HAS EVER NEEDED FOR ZK\n",
"res = ezkl.setup(\n",
" compiled_model_path,\n",
" vk_path,\n",
" pk_path,\n",
" \n",
" )\n",
"\n",
"assert res == True\n",
"assert os.path.isfile(vk_path)\n",
"assert os.path.isfile(pk_path)\n",
"assert os.path.isfile(settings_path)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Now we generate a full proof. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# GENERATE A PROOF\n",
"\n",
"proof_path = os.path.join('test.pf')\n",
"\n",
"res = ezkl.prove(\n",
" witness_path,\n",
" compiled_model_path,\n",
" pk_path,\n",
" proof_path,\n",
" \n",
" \"single\",\n",
" )\n",
"\n",
"print(res)\n",
"assert os.path.isfile(proof_path)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"And verify it as a sanity check. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# VERIFY IT\n",
"\n",
"res = ezkl.verify(\n",
" proof_path,\n",
" settings_path,\n",
" vk_path,\n",
" \n",
" )\n",
"\n",
"assert res == True\n",
"print(\"verified\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"We can now create and then deploy a vanilla evm verifier."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"abi_path = 'test.abi'\n",
"sol_code_path = 'test.sol'\n",
"\n",
"res = await ezkl.create_evm_verifier(\n",
" vk_path,\n",
" \n",
" settings_path,\n",
" sol_code_path,\n",
" abi_path,\n",
" )\n",
"assert res == True"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import json\n",
"\n",
"addr_path_verifier = \"addr_verifier.txt\"\n",
"\n",
"res = await ezkl.deploy_evm(\n",
" addr_path_verifier,\n",
" 'http://127.0.0.1:3030',\n",
" sol_code_path,\n",
")\n",
"\n",
"assert res == True"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"With the vanilla verifier deployed, we can now create the data attestation contract, which will read in the instances from the calldata to the verifier, attest to them, call the verifier and then return the result. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"\n",
"abi_path = 'test.abi'\n",
"sol_code_path = 'test.sol'\n",
"input_path = 'input.json'\n",
"\n",
"res = await ezkl.create_evm_data_attestation(\n",
" input_path,\n",
" settings_path,\n",
" sol_code_path,\n",
" abi_path,\n",
" )"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Now we can deploy the data attest verifier contract. For security reasons, this binding will only deploy to a local anvil instance, using accounts generated by anvil. \n",
"So should only be used for testing purposes."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"addr_path_da = \"addr_da.txt\"\n",
"\n",
"res = await ezkl.deploy_da_evm(\n",
" addr_path_da,\n",
" input_path,\n",
" RPC_URL,\n",
" settings_path,\n",
" sol_code_path,\n",
" )\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Call the view only verify method on the contract to verify the proof. Since it is a view function this is safe to use in production since you don't have to pass your private key."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# read the verifier address\n",
"addr_verifier = None\n",
"with open(addr_path_verifier, 'r') as f:\n",
" addr = f.read()\n",
"#read the data attestation address\n",
"addr_da = None\n",
"with open(addr_path_da, 'r') as f:\n",
" addr_da = f.read()\n",
"\n",
"res = await ezkl.verify_evm(\n",
" addr,\n",
" RPC_URL,\n",
" proof_path,\n",
" addr_da,\n",
")"
]
}
],
"metadata": {
"kernelspec": {
"display_name": ".env",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.9"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}

View File

@@ -1,592 +0,0 @@
{
"cells": [
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"# data-attest-kzg-vis\n",
"\n",
"Here's an example leveraging EZKL whereby the inputs to the model are read and attested to from an on-chain source and the params and outputs are committed to using kzg-commitments. \n",
"\n",
"In this setup:\n",
"- the inputs and outputs are publicly known to the prover and verifier\n",
"- the on chain inputs will be fetched and then fed directly into the circuit\n",
"- the quantization of the on-chain inputs happens within the evm and is replicated at proving time \n",
"- The kzg commitment to the params and inputs will be read from the proof and checked to make sure it matches the expected commitment stored on-chain.\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"First we import the necessary dependencies and set up logging to be as informative as possible. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# check if notebook is in colab\n",
"try:\n",
" # install ezkl\n",
" import google.colab\n",
" import subprocess\n",
" import sys\n",
" subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"ezkl\"])\n",
" subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"onnx\"])\n",
"\n",
"# rely on local installation of ezkl if the notebook is not in colab\n",
"except:\n",
" pass\n",
"\n",
"\n",
"from torch import nn\n",
"import ezkl\n",
"import os\n",
"import json\n",
"import logging\n",
"\n",
"# uncomment for more descriptive logging \n",
"FORMAT = '%(levelname)s %(name)s %(asctime)-15s %(filename)s:%(lineno)d %(message)s'\n",
"logging.basicConfig(format=FORMAT)\n",
"logging.getLogger().setLevel(logging.DEBUG)\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Now we define our model. It is a very simple PyTorch model that has just one layer, an average pooling 2D layer. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import torch\n",
"# Defines the model\n",
"\n",
"class MyModel(nn.Module):\n",
" def __init__(self):\n",
" super(MyModel, self).__init__()\n",
" self.layer = nn.AvgPool2d(2, 1, (1, 1))\n",
"\n",
" def forward(self, x):\n",
" return self.layer(x)[0]\n",
"\n",
"\n",
"circuit = MyModel()\n",
"\n",
"# this is where you'd train your model"
]
},
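    {
    "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
    "A short added note on shapes: with kernel size $k=2$, stride $s=1$ and padding $p=1$, average pooling maps each spatial dimension $n$ to $\\lfloor (n + 2p - k)/s \\rfloor + 1$, so a $2 \\times 2$ spatial input becomes $3 \\times 3$, and the `[0]` in `forward` drops the batch dimension from the output."
    ]
    },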
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"We omit training for purposes of this demonstration. We've marked where training would happen in the cell above. \n",
"Now we export the model to onnx and create a corresponding (randomly generated) input. This input data will eventually be stored on chain and read from according to the call_data field in the graph input.\n",
"\n",
"You can replace the random `x` with real data if you so wish. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"x = 0.1*torch.rand(1,*[3, 2, 2], requires_grad=True)\n",
"\n",
"# Flips the neural net into inference mode\n",
"circuit.eval()\n",
"\n",
" # Export the model\n",
"torch.onnx.export(circuit, # model being run\n",
" x, # model input (or a tuple for multiple inputs)\n",
" \"network.onnx\", # where to save the model (can be a file or file-like object)\n",
" export_params=True, # store the trained parameter weights inside the model file\n",
" opset_version=10, # the ONNX version to export the model to\n",
" do_constant_folding=True, # whether to execute constant folding for optimization\n",
" input_names = ['input'], # the model's input names\n",
" output_names = ['output'], # the model's output names\n",
" dynamic_axes={'input' : {0 : 'batch_size'}, # variable length axes\n",
" 'output' : {0 : 'batch_size'}})\n",
"\n",
"data_array = ((x).detach().numpy()).reshape([-1]).tolist()\n",
"\n",
"data = dict(input_data = [data_array])\n",
"\n",
" # Serialize data into file:\n",
"json.dump(data, open(\"input.json\", 'w' ))\n",
"\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"We now define a function that will create a new anvil instance which we will deploy our test contract too. This contract will contain in its storage the data that we will read from and attest to. In production you would not need to set up a local anvil instance. Instead you would replace RPC_URL with the actual RPC endpoint of the chain you are deploying your verifiers too, reading from the data on said chain."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import subprocess\n",
"import time\n",
"import threading\n",
"\n",
"# make sure anvil is running locally\n",
"# $ anvil -p 3030\n",
"\n",
"RPC_URL = \"http://localhost:3030\"\n",
"\n",
"# Save process globally\n",
"anvil_process = None\n",
"\n",
"def start_anvil():\n",
" global anvil_process\n",
" if anvil_process is None:\n",
" anvil_process = subprocess.Popen([\"anvil\", \"-p\", \"3030\", \"--code-size-limit=41943040\"])\n",
" if anvil_process.returncode is not None:\n",
" raise Exception(\"failed to start anvil process\")\n",
" time.sleep(3)\n",
"\n",
"def stop_anvil():\n",
" global anvil_process\n",
" if anvil_process is not None:\n",
" anvil_process.terminate()\n",
" anvil_process = None\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"We define our `PyRunArgs` objects which contains the visibility parameters for out model. \n",
"- `input_visibility` defines the visibility of the model inputs\n",
"- `param_visibility` defines the visibility of the model weights and constants and parameters \n",
"- `output_visibility` defines the visibility of the model outputs\n",
"\n",
"Here we create the following setup:\n",
"- `input_visibility`: \"public\"\n",
"- `param_visibility`: \"polycommitment\" \n",
"- `output_visibility`: \"polycommitment\"\n",
"\n",
"**Note**:\n",
"When we set this to polycommitment, we are saying that the model parameters are committed to using a polynomial commitment scheme. This commitment will be stored on chain as a constant stored in the DA contract, and the proof will contain the commitment to the parameters. The DA verification will then check that the commitment in the proof matches the commitment stored on chain. \n"
]
},
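    {
    "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
    "For reference (an added sketch of the underlying scheme): a KZG commitment to a polynomial $p(X) = \\sum_i p_i X^i$ over an SRS $\\{[\\tau^i]_1\\}$ is the single group element\n",
    "\n",
    "$$\\mathrm{com}(p) = \\sum_i p_i \\cdot [\\tau^i]_1 = [p(\\tau)]_1,$$\n",
    "\n",
    "so the DA check that the commitment in the proof matches the constant stored on-chain is a comparison of single group elements."
    ]
    },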
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import ezkl\n",
"\n",
"model_path = os.path.join('network.onnx')\n",
"compiled_model_path = os.path.join('network.compiled')\n",
"pk_path = os.path.join('test.pk')\n",
"vk_path = os.path.join('test.vk')\n",
"settings_path = os.path.join('settings.json')\n",
"srs_path = os.path.join('kzg.srs')\n",
"data_path = os.path.join('input.json')\n",
"\n",
"run_args = ezkl.PyRunArgs()\n",
"run_args.input_visibility = \"public\"\n",
"run_args.param_visibility = \"polycommit\"\n",
"run_args.output_visibility = \"polycommit\"\n",
"run_args.num_inner_cols = 1\n",
"run_args.variables = [(\"batch_size\", 1)]\n",
"\n",
"\n",
"\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Now we generate a settings file. This file basically instantiates a bunch of parameters that determine their circuit shape, size etc... Because of the way we represent nonlinearities in the circuit (using Halo2's [lookup tables](https://zcash.github.io/halo2/design/proving-system/lookup.html)), it is often best to _calibrate_ this settings file as some data can fall out of range of these lookups.\n",
"\n",
"You can pass a dataset for calibration that will be representative of real inputs you might find if and when you deploy the prover. Here we create a dummy calibration dataset for demonstration purposes. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"!RUST_LOG=trace\n",
"# TODO: Dictionary outputs\n",
"res = ezkl.gen_settings(model_path, settings_path, py_run_args=run_args)\n",
"assert res == True"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# generate a bunch of dummy calibration data\n",
"cal_data = {\n",
" \"input_data\": [(0.1*torch.rand(2, *[3, 2, 2])).flatten().tolist()],\n",
"}\n",
"\n",
"cal_path = os.path.join('val_data.json')\n",
"# save as json file\n",
"with open(cal_path, \"w\") as f:\n",
" json.dump(cal_data, f)\n",
"\n",
"res = await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"res = ezkl.compile_circuit(model_path, compiled_model_path, settings_path)\n",
"assert res == True"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The graph input for on chain data sources is formatted completely differently compared to file based data sources.\n",
"\n",
"- For file data sources, the raw floating point values that eventually get quantized, converted into field elements and stored in `witness.json` to be consumed by the circuit are stored. The output data contains the expected floating point values returned as outputs from running your vanilla pytorch model on the given inputs.\n",
"- For on chain data sources, the input_data field contains all the data necessary to read and format the on chain data into something digestable by EZKL (aka field elements :-D). \n",
"Here is what the schema for an on-chain data source graph input file should look like for a single call data source:\n",
" \n",
"```json\n",
"{\n",
" \"input_data\": {\n",
" \"rpc\": \"http://localhost:3030\", // The rpc endpoint of the chain you are deploying your verifier to\n",
" \"calls\": {\n",
" \"call_data\": \"1f3be514000000000000000000000000c6962004f452be9203591991d15f6b388e09e8d00000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000000b000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000009000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000070000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000500000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000\", // The abi encoded call data to a view function that returns an array of on-chain data points we are attesting to. \n",
" \"decimals\": 0, // The number of decimal places of the large uint256 value. This is our way of representing large wei values as floating points on chain, since the evm only natively supports integer values.\n",
" \"address\": \"9A213F53334279C128C37DA962E5472eCD90554f\", // The address of the contract that we are calling to get the data. \n",
" \"len\": 3 // The number of data points returned by the view function (the length of the array)\n",
" }\n",
" }\n",
"}\n",
"```"
]
},
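    {
    "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
    "To make the `decimals` field concrete, here is a tiny sketch (with made-up numbers) of how a uint256 returned by a view function is interpreted as a floating point input:"
    ]
    },
    {
    "cell_type": "code",
    "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
    "# sketch with hypothetical values: a raw on-chain integer plus a `decimals`\n",
    "# declaration yields the floating point value the circuit ingests\n",
    "raw_value = 1234567   # uint256 returned by the view function\n",
    "decimals = 4          # declared in the graph input\n",
    "print(raw_value / 10**decimals)  # 123.4567\n",
    "# with decimals = 0, as in this notebook, the integer is used as-is"
    ]
    },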
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"await ezkl.setup_test_evm_data(\n",
" data_path,\n",
" compiled_model_path,\n",
" # we write the call data to the same file as the input data\n",
" data_path,\n",
" input_source=ezkl.PyTestDataSource.OnChain,\n",
" output_source=ezkl.PyTestDataSource.File,\n",
" rpc_url=RPC_URL)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"As we use Halo2 with KZG-commitments we need an SRS string from (preferably) a multi-party trusted setup ceremony. For an overview of the procedures for such a ceremony check out [this page](https://blog.ethereum.org/2023/01/16/announcing-kzg-ceremony). The `get_srs` command retrieves a correctly sized SRS given the calibrated settings file from [here](https://github.com/han0110/halo2-kzg-srs). \n",
"\n",
"These SRS were generated with [this](https://github.com/privacy-scaling-explorations/perpetualpowersoftau) ceremony. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"res = await ezkl.get_srs( settings_path)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"We now need to generate the circuit witness. These are the model outputs (and any hashes) that are generated when feeding the previously generated `input.json` through the circuit / model. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# HERE WE SETUP THE CIRCUIT PARAMS\n",
"# WE GOT KEYS\n",
"# WE GOT CIRCUIT PARAMETERS\n",
"# EVERYTHING ANYONE HAS EVER NEEDED FOR ZK\n",
"res = ezkl.setup(\n",
" compiled_model_path,\n",
" vk_path,\n",
" pk_path,\n",
" )\n",
"\n",
"assert res == True\n",
"assert os.path.isfile(vk_path)\n",
"assert os.path.isfile(pk_path)\n",
"assert os.path.isfile(settings_path)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"!export RUST_BACKTRACE=1\n",
"\n",
"witness_path = \"witness.json\"\n",
"\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path, vk_path)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Now we generate a full proof. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# GENERATE A PROOF\n",
"\n",
"proof_path = os.path.join('test.pf')\n",
"\n",
"res = ezkl.prove(\n",
" witness_path,\n",
" compiled_model_path,\n",
" pk_path,\n",
" proof_path,\n",
" \n",
" \"single\",\n",
" )\n",
"\n",
"print(res)\n",
"assert os.path.isfile(proof_path)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"And verify it as a sanity check. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# VERIFY IT\n",
"\n",
"res = ezkl.verify(\n",
" proof_path,\n",
" settings_path,\n",
" vk_path,\n",
" \n",
" )\n",
"\n",
"assert res == True\n",
"print(\"verified\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"We can now create and then deploy a vanilla evm verifier."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"abi_path = 'test.abi'\n",
"sol_code_path = 'test.sol'\n",
"\n",
"res = await ezkl.create_evm_verifier(\n",
" vk_path,\n",
" \n",
" settings_path,\n",
" sol_code_path,\n",
" abi_path,\n",
" )\n",
"assert res == True"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"\n",
"addr_path_verifier = \"addr_verifier.txt\"\n",
"\n",
"res = await ezkl.deploy_evm(\n",
" addr_path_verifier,\n",
" 'http://127.0.0.1:3030',\n",
" sol_code_path,\n",
")\n",
"\n",
"assert res == True"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"When deploying a DA with kzg commitments, we need to make sure to also pass a witness file that contains the commitments to the parameters and inputs. This is because the verifier will need to check that the commitments in the proof match the commitments stored on chain."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"\n",
"abi_path = 'test.abi'\n",
"sol_code_path = 'test.sol'\n",
"input_path = 'input.json'\n",
"\n",
"res = await ezkl.create_evm_data_attestation(\n",
" input_path,\n",
" settings_path,\n",
" sol_code_path,\n",
" abi_path,\n",
" witness_path = witness_path,\n",
" )"
]
},
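    {
    "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
    "As an optional sanity check (an added sketch; the exact key layout of the witness file may vary across ezkl versions), we can peek at the commitments that the data attestation contract will check against:"
    ]
    },
    {
    "cell_type": "code",
    "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
    "# sketch: inspect the commitments recorded in the witness file\n",
    "with open(witness_path, 'r') as f:\n",
    "    wit = json.load(f)\n",
    "# assumption: 'processed_params' / 'processed_outputs' hold the polycommit\n",
    "# commitments under this visibility configuration\n",
    "print(wit.get('processed_params'))\n",
    "print(wit.get('processed_outputs'))"
    ]
    },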
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Now we can deploy the data attest verifier contract. For security reasons, this binding will only deploy to a local anvil instance, using accounts generated by anvil. \n",
"So should only be used for testing purposes."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"addr_path_da = \"addr_da.txt\"\n",
"\n",
"res = await ezkl.deploy_da_evm(\n",
" addr_path_da,\n",
" input_path,\n",
" RPC_URL,\n",
" settings_path,\n",
" sol_code_path,\n",
" )\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Call the view only verify method on the contract to verify the proof. Since it is a view function this is safe to use in production since you don't have to pass your private key."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# read the verifier address\n",
"addr_verifier = None\n",
"with open(addr_path_verifier, 'r') as f:\n",
" addr = f.read()\n",
"#read the data attestation address\n",
"addr_da = None\n",
"with open(addr_path_da, 'r') as f:\n",
" addr_da = f.read()\n",
"\n",
"res = await ezkl.verify_evm(\n",
" addr,\n",
" RPC_URL,\n",
" proof_path,\n",
" addr_da,\n",
")"
]
}
],
"metadata": {
"kernelspec": {
"display_name": ".env",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.5"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}

View File

@@ -125,7 +125,7 @@
"\n",
" witness_path = os.path.join(name, \"witness.json\")\n",
" sol_code_path = os.path.join(name, 'test.sol')\n",
" sol_key_code_path = os.path.join(name, 'test_key.sol')\n",
" vka_path = os.path.join(name, 'vka.bytes')\n",
" abi_path = os.path.join(name, 'test.abi')\n",
" proof_path = os.path.join(name, \"proof.json\")\n",
"\n",
@@ -177,7 +177,7 @@
" res = await ezkl.create_evm_verifier(vk_path, settings_path, sol_code_path, abi_path, reusable=True)\n",
" assert res == True\n",
"\n",
" res = await ezkl.create_evm_vka(vk_path, settings_path, sol_key_code_path, abi_path)\n",
" res = await ezkl.create_evm_vka(vk_path, settings_path, vka_path)\n",
" assert res == True\n"
]
},
@@ -287,12 +287,13 @@
"source": [
"for name in names:\n",
" addr_path_vk = \"addr_vk.txt\"\n",
" sol_key_code_path = os.path.join(name, 'test_key.sol')\n",
" res = await ezkl.deploy_evm(addr_path_vk, 'http://127.0.0.1:3030', sol_key_code_path, \"vka\")\n",
" vka_path = os.path.join(name, 'vka.bytes')\n",
" res = await ezkl.register_vka(\n",
" addr,\n",
" 'http://127.0.0.1:3030',\n",
" vka_path=vka_path,\n",
" )\n",
" assert res == True\n",
"\n",
" with open(addr_path_vk, 'r') as file:\n",
" addr_vk = file.read().rstrip()\n",
" \n",
" proof_path = os.path.join(name, \"proof.json\")\n",
" sol_code_path = os.path.join(name, 'vk.sol')\n",
@@ -300,7 +301,7 @@
" addr,\n",
" \"http://127.0.0.1:3030\",\n",
" proof_path,\n",
" addr_vk = addr_vk\n",
" vka_path = vka_path\n",
" )\n",
" assert res == True"
]

View File

@@ -1,685 +0,0 @@
{
"cells": [
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"# univ3-da-ezkl\n",
"\n",
"Here's an example leveraging EZKL whereby the inputs to the model are read and attested to from an on-chain source. For this setup we make a single call to a view function that returns an array of UniV3 historical TWAP price data that we will attest to on-chain. \n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"First we import the necessary dependencies and set up logging to be as informative as possible. "
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"# check if notebook is in colab\n",
"try:\n",
" # install ezkl\n",
" import google.colab\n",
" import subprocess\n",
" import sys\n",
" subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"ezkl\"])\n",
" subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"onnx\"])\n",
"\n",
"# rely on local installation of ezkl if the notebook is not in colab\n",
"except:\n",
" pass\n",
"\n",
"\n",
"from torch import nn\n",
"import ezkl\n",
"import os\n",
"import json\n",
"import logging\n",
"\n",
"# uncomment for more descriptive logging \n",
"FORMAT = '%(levelname)s %(name)s %(asctime)-15s %(filename)s:%(lineno)d %(message)s'\n",
"logging.basicConfig(format=FORMAT)\n",
"logging.getLogger().setLevel(logging.DEBUG)\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Now we define our model. It is a very simple PyTorch model that has just one layer, an average pooling 2D layer. "
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"import torch\n",
"# Defines the model\n",
"\n",
"class MyModel(nn.Module):\n",
" def __init__(self):\n",
" super(MyModel, self).__init__()\n",
" self.layer = nn.AvgPool2d(2, 1, (1, 1))\n",
"\n",
" def forward(self, x):\n",
" return self.layer(x)[0]\n",
"\n",
"\n",
"circuit = MyModel()\n",
"\n",
"# this is where you'd train your model"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"We omit training for purposes of this demonstration. We've marked where training would happen in the cell above. \n",
"Now we export the model to onnx and create a corresponding (randomly generated) input. This input data will eventually be stored on chain and read from according to the call_data field in the graph input.\n",
"\n",
"You can replace the random `x` with real data if you so wish. "
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"x = 0.1*torch.rand(1,*[3, 2, 2], requires_grad=True)\n",
"\n",
"# Flips the neural net into inference mode\n",
"circuit.eval()\n",
"\n",
" # Export the model\n",
"torch.onnx.export(circuit, # model being run\n",
" x, # model input (or a tuple for multiple inputs)\n",
" \"network.onnx\", # where to save the model (can be a file or file-like object)\n",
" export_params=True, # store the trained parameter weights inside the model file\n",
" opset_version=10, # the ONNX version to export the model to\n",
" do_constant_folding=True, # whether to execute constant folding for optimization\n",
" input_names = ['input'], # the model's input names\n",
" output_names = ['output'], # the model's output names\n",
" dynamic_axes={'input' : {0 : 'batch_size'}, # variable length axes\n",
" 'output' : {0 : 'batch_size'}})\n",
"\n",
"data_array = ((x).detach().numpy()).reshape([-1]).tolist()\n",
"\n",
"data = dict(input_data = [data_array])\n",
"\n",
" # Serialize data into file:\n",
"json.dump(data, open(\"input.json\", 'w' ))\n",
"\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"We now define a function that will create a new anvil instance which we will deploy our test contract too. This contract will contain in its storage the data that we will read from and attest to. In production you would not need to set up a local anvil instance. Instead you would replace RPC_URL with the actual RPC endpoint of the chain you are deploying your verifiers too, reading from the data on said chain."
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"import subprocess\n",
"import time\n",
"import threading\n",
"\n",
"# make sure anvil is running locally\n",
"# $ anvil -p 3030\n",
"\n",
"RPC_URL = \"http://localhost:3030\"\n",
"\n",
"# Save process globally\n",
"anvil_process = None\n",
"\n",
"def start_anvil():\n",
" global anvil_process\n",
" if anvil_process is None:\n",
" anvil_process = subprocess.Popen([\"anvil\", \"-p\", \"3030\", \"--fork-url\", \"https://arb1.arbitrum.io/rpc\", \"--code-size-limit=41943040\"])\n",
" if anvil_process.returncode is not None:\n",
" raise Exception(\"failed to start anvil process\")\n",
" time.sleep(3)\n",
"\n",
"def stop_anvil():\n",
" global anvil_process\n",
" if anvil_process is not None:\n",
" anvil_process.terminate()\n",
" anvil_process = None\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"We define our `PyRunArgs` objects which contains the visibility parameters for out model. \n",
"- `input_visibility` defines the visibility of the model inputs\n",
"- `param_visibility` defines the visibility of the model weights and constants and parameters \n",
"- `output_visibility` defines the visibility of the model outputs\n",
"\n",
"Here we create the following setup:\n",
"- `input_visibility`: \"public\"\n",
"- `param_visibility`: \"private\"\n",
"- `output_visibility`: public\n"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"import ezkl\n",
"\n",
"model_path = os.path.join('network.onnx')\n",
"compiled_model_path = os.path.join('network.compiled')\n",
"pk_path = os.path.join('test.pk')\n",
"vk_path = os.path.join('test.vk')\n",
"settings_path = os.path.join('settings.json')\n",
"srs_path = os.path.join('kzg.srs')\n",
"data_path = os.path.join('input.json')\n",
"\n",
"run_args = ezkl.PyRunArgs()\n",
"run_args.input_visibility = \"public\"\n",
"run_args.param_visibility = \"private\"\n",
"run_args.output_visibility = \"public\"\n",
"run_args.decomp_legs=5\n",
"run_args.num_inner_cols = 1\n",
"run_args.variables = [(\"batch_size\", 1)]"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Now we generate a settings file. This file basically instantiates a bunch of parameters that determine their circuit shape, size etc... Because of the way we represent nonlinearities in the circuit (using Halo2's [lookup tables](https://zcash.github.io/halo2/design/proving-system/lookup.html)), it is often best to _calibrate_ this settings file as some data can fall out of range of these lookups.\n",
"\n",
"You can pass a dataset for calibration that will be representative of real inputs you might find if and when you deploy the prover. Here we create a dummy calibration dataset for demonstration purposes. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# TODO: Dictionary outputs\n",
"res = ezkl.gen_settings(model_path, settings_path, py_run_args=run_args)\n",
"assert res == True"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# generate a bunch of dummy calibration data\n",
"cal_data = {\n",
" \"input_data\": [(0.1*torch.rand(2, *[3, 2, 2])).flatten().tolist()],\n",
"}\n",
"\n",
"cal_path = os.path.join('val_data.json')\n",
"# save as json file\n",
"with open(cal_path, \"w\") as f:\n",
" json.dump(cal_data, f)\n",
"\n",
"res = await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"res = ezkl.compile_circuit(model_path, compiled_model_path, settings_path)\n",
"assert res == True"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The graph input for on chain data sources is formatted completely differently compared to file based data sources.\n",
"\n",
"- For file data sources, the raw floating point values that eventually get quantized, converted into field elements and stored in `witness.json` to be consumed by the circuit are stored. The output data contains the expected floating point values returned as outputs from running your vanilla pytorch model on the given inputs.\n",
"- For on chain data sources, the input_data field contains all the data necessary to read and format the on chain data into something digestable by EZKL (aka field elements :-D). \n",
"Here is what the schema for an on-chain data source graph input file should look like for a single call data source:\n",
" \n",
"```json\n",
"{\n",
" \"input_data\": {\n",
" \"rpc\": \"http://localhost:3030\", // The rpc endpoint of the chain you are deploying your verifier to\n",
" \"call\": {\n",
" \"call_data\": \"1f3be514000000000000000000000000c6962004f452be9203591991d15f6b388e09e8d00000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000000b000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000009000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000070000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000500000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000\", // The abi encoded call data to a view function that returns an array of on-chain data points we are attesting to. \n",
" \"decimals\": 0, // The number of decimal places of the large uint256 value. This is our way of representing large wei values as floating points on chain, since the evm only natively supports integer values.\n",
" \"address\": \"9A213F53334279C128C37DA962E5472eCD90554f\", // The address of the contract that we are calling to get the data. \n",
" \"len\": 12 // The number of data points returned by the view function (the length of the array)\n",
" }\n",
" }\n",
"}\n",
"```"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from web3 import Web3, HTTPProvider\n",
"from solcx import compile_standard\n",
"from decimal import Decimal\n",
"import json\n",
"import os\n",
"import torch\n",
"import requests\n",
"\n",
"def count_decimal_places(num):\n",
" num_str = str(num)\n",
" if '.' in num_str:\n",
" return len(num_str) - 1 - num_str.index('.')\n",
" else:\n",
" return 0\n",
"\n",
"w3 = Web3(HTTPProvider(RPC_URL)) \n",
"\n",
"def on_chain_data(tensor):\n",
" data = tensor.view(-1).tolist()\n",
" secondsAgo = [len(data) - 1 - i for i in range(len(data))]\n",
"\n",
" contract_source_code = '''\n",
" // SPDX-License-Identifier: MIT\n",
" pragma solidity ^0.8.20;\n",
"\n",
" interface IUniswapV3PoolDerivedState {\n",
" function observe(\n",
" uint32[] calldata secondsAgos\n",
" ) external view returns (\n",
" int56[] memory tickCumulatives,\n",
" uint160[] memory secondsPerLiquidityCumulativeX128s\n",
" );\n",
" }\n",
"\n",
" contract UniTickAttestor {\n",
" int256[] private cachedTicks;\n",
"\n",
" function consult(\n",
" IUniswapV3PoolDerivedState pool,\n",
" uint32[] memory secondsAgo\n",
" ) public view returns (int256[] memory tickCumulatives) {\n",
" tickCumulatives = new int256[](secondsAgo.length);\n",
" (int56[] memory _ticks,) = pool.observe(secondsAgo);\n",
" for (uint256 i = 0; i < secondsAgo.length; i++) {\n",
" tickCumulatives[i] = int256(_ticks[i]);\n",
" }\n",
" }\n",
"\n",
" function cache_price(\n",
" IUniswapV3PoolDerivedState pool,\n",
" uint32[] memory secondsAgo\n",
" ) public {\n",
" (int56[] memory _ticks,) = pool.observe(secondsAgo);\n",
" cachedTicks = new int256[](_ticks.length);\n",
" for (uint256 i = 0; i < _ticks.length; i++) {\n",
" cachedTicks[i] = int256(_ticks[i]);\n",
" }\n",
" }\n",
"\n",
" function readPriceCache() public view returns (int256[] memory) {\n",
" return cachedTicks;\n",
" }\n",
" }\n",
" '''\n",
"\n",
" compiled_sol = compile_standard({\n",
" \"language\": \"Solidity\",\n",
" \"sources\": {\"UniTickAttestor.sol\": {\"content\": contract_source_code}},\n",
" \"settings\": {\"outputSelection\": {\"*\": {\"*\": [\"metadata\", \"evm.bytecode\", \"abi\"]}}}\n",
" })\n",
"\n",
" bytecode = compiled_sol['contracts']['UniTickAttestor.sol']['UniTickAttestor']['evm']['bytecode']['object']\n",
" abi = json.loads(compiled_sol['contracts']['UniTickAttestor.sol']['UniTickAttestor']['metadata'])['output']['abi']\n",
"\n",
" # Deploy contract\n",
" UniTickAttestor = w3.eth.contract(abi=abi, bytecode=bytecode)\n",
" tx_hash = UniTickAttestor.constructor().transact()\n",
" tx_receipt = w3.eth.wait_for_transaction_receipt(tx_hash)\n",
" contract = w3.eth.contract(address=tx_receipt['contractAddress'], abi=abi)\n",
"\n",
" # Step 4: Store data via cache_price transaction\n",
" tx_hash = contract.functions.cache_price(\n",
" \"0xC6962004f452bE9203591991D15f6b388e09E8D0\",\n",
" secondsAgo\n",
" ).transact()\n",
" tx_receipt = w3.eth.wait_for_transaction_receipt(tx_hash)\n",
"\n",
" # Step 5: Prepare calldata for readPriceCache\n",
" call = contract.functions.readPriceCache().build_transaction()\n",
" calldata = call['data'][2:]\n",
"\n",
" # Get stored data\n",
" result = contract.functions.readPriceCache().call()\n",
" print(f'Cached ticks: {result}')\n",
"\n",
" decimals = [0] * len(data)\n",
"\n",
" call_to_account = {\n",
" 'call_data': calldata,\n",
" 'decimals': decimals,\n",
" 'address': contract.address[2:],\n",
" }\n",
"\n",
" return call_to_account\n",
"\n",
"start_anvil()\n",
"call_to_account = on_chain_data(x)\n",
"\n",
"data = dict(input_data = {'rpc': RPC_URL, 'call': call_to_account })\n",
"json.dump(data, open(\"input.json\", 'w'))"
]
},
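    {
    "attachments": {},
    "cell_type": "markdown",
    "metadata": {},
    "source": [
    "For context (an added note on the data itself): `observe` returns *cumulative* ticks, so the time-weighted average tick between two observations $i$ and $j$ is $(\\mathrm{tickCum}_j - \\mathrm{tickCum}_i)/(t_j - t_i)$, and the corresponding TWAP price is $1.0001^{\\bar t}$, where $\\bar t$ is that average tick."
    ]
    },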
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"As we use Halo2 with KZG-commitments we need an SRS string from (preferably) a multi-party trusted setup ceremony. For an overview of the procedures for such a ceremony check out [this page](https://blog.ethereum.org/2023/01/16/announcing-kzg-ceremony). The `get_srs` command retrieves a correctly sized SRS given the calibrated settings file from [here](https://github.com/han0110/halo2-kzg-srs). \n",
"\n",
"These SRS were generated with [this](https://github.com/privacy-scaling-explorations/perpetualpowersoftau) ceremony. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"res = await ezkl.get_srs( settings_path)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"We now need to generate the circuit witness. These are the model outputs (and any hashes) that are generated when feeding the previously generated `input.json` through the circuit / model. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# !export RUST_BACKTRACE=1\n",
"\n",
"witness_path = \"witness.json\"\n",
"\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Here we setup verifying and proving keys for the circuit. As the name suggests the proving key is needed for ... proving and the verifying key is needed for ... verifying. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# HERE WE SETUP THE CIRCUIT PARAMS\n",
"# WE GOT KEYS\n",
"# WE GOT CIRCUIT PARAMETERS\n",
"# EVERYTHING ANYONE HAS EVER NEEDED FOR ZK\n",
"res = ezkl.setup(\n",
" compiled_model_path,\n",
" vk_path,\n",
" pk_path,\n",
" )\n",
"\n",
"assert res == True\n",
"assert os.path.isfile(vk_path)\n",
"assert os.path.isfile(pk_path)\n",
"assert os.path.isfile(settings_path)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Now we generate a full proof. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# GENERATE A PROOF\n",
"\n",
"proof_path = os.path.join('test.pf')\n",
"\n",
"res = ezkl.prove(\n",
" witness_path,\n",
" compiled_model_path,\n",
" pk_path,\n",
" proof_path,\n",
" \"single\",\n",
" )\n",
"\n",
"print(res)\n",
"assert os.path.isfile(proof_path)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"And verify it as a sanity check. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# VERIFY IT\n",
"\n",
"res = ezkl.verify(\n",
" proof_path,\n",
" settings_path,\n",
" vk_path,\n",
" )\n",
"\n",
"assert res == True\n",
"print(\"verified\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"We can now create and then deploy a vanilla evm verifier."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"abi_path = 'test.abi'\n",
"sol_code_path = 'test.sol'\n",
"\n",
"res = await ezkl.create_evm_verifier(\n",
" vk_path,\n",
" settings_path,\n",
" sol_code_path,\n",
" abi_path,\n",
" )\n",
"assert res == True"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import json\n",
"\n",
"addr_path_verifier = \"addr_verifier.txt\"\n",
"\n",
"res = await ezkl.deploy_evm(\n",
" addr_path_verifier,\n",
" 'http://127.0.0.1:3030',\n",
" sol_code_path,\n",
")\n",
"\n",
"assert res == True"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"With the vanilla verifier deployed, we can now create the data attestation contract, which will read in the instances from the calldata to the verifier, attest to them, call the verifier and then return the result. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"\n",
"abi_path = 'test.abi'\n",
"sol_code_path = 'test.sol'\n",
"input_path = 'input.json'\n",
"\n",
"res = await ezkl.create_evm_data_attestation(\n",
" input_path,\n",
" settings_path,\n",
" sol_code_path,\n",
" abi_path,\n",
" )"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Now we can deploy the data attest verifier contract. For security reasons, this binding will only deploy to a local anvil instance, using accounts generated by anvil. \n",
"So should only be used for testing purposes."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"addr_path_da = \"addr_da.txt\"\n",
"\n",
"res = await ezkl.deploy_da_evm(\n",
" addr_path_da,\n",
" input_path,\n",
" RPC_URL,\n",
" settings_path,\n",
" sol_code_path,\n",
" )\n"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Here we need to regenerate the witness, prove and then verify all within the same cell. This is because we want to reduce the amount of latency between reading on-chain state and verifying it on-chain. This is because the attest input values read from the oracle are time sensitive (their values are derived from computing on block.timestamp) and can change between the time of reading and the time of verifying.\n",
"\n",
"Call the view only verify method on the contract to verify the proof. Since it is a view function this is safe to use in production since you don't have to pass your private key."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# !export RUST_BACKTRACE=1\n",
"\n",
"# print(res)\n",
"assert os.path.isfile(proof_path)\n",
"# read the verifier address\n",
"addr_verifier = None\n",
"with open(addr_path_verifier, 'r') as f:\n",
" addr = f.read()\n",
"#read the data attestation address\n",
"addr_da = None\n",
"with open(addr_path_da, 'r') as f:\n",
" addr_da = f.read()\n",
"\n",
"res = await ezkl.verify_evm(\n",
" addr,\n",
" RPC_URL,\n",
" proof_path,\n",
" addr_da,\n",
")"
]
}
],
"metadata": {
"kernelspec": {
"display_name": ".env",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.5"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}

View File

@@ -1,539 +0,0 @@
{
"cells": [
{
"attachments": {},
"cell_type": "markdown",
"id": "cf69bb3f-94e6-4dba-92cd-ce08df117d67",
"metadata": {},
"source": [
"## World rotation\n",
"\n",
"Here we demonstrate how to use the EZKL package to rotate an on-chain world. \n",
"\n",
"![zk-gaming-diagram-transformed](https://hackmd.io/_uploads/HkApuQGV6.png)\n",
"> **A typical ZK application flow**. For the shape rotators out there — this is an easily digestible example. A user computes a ZK-proof that they have calculated a valid rotation of a world. They submit this proof to a verifier contract which governs an on-chain world, along with a new set of coordinates, and the world rotation updates. Observe that its possible for one player to initiate a *global* change.\n"
]
},
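    {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
    "Concretely, the model below applies the standard 2D rotation by $\\phi = 5^\\circ$:\n",
    "\n",
    "$$R = \\begin{pmatrix} \\cos\\phi & -\\sin\\phi \\\\\\\\ \\sin\\phi & \\cos\\phi \\end{pmatrix}, \\qquad x_{\\mathrm{rot}} = x R^{\\top},$$\n",
    "\n",
    "with the transpose of $R$ stored up front so that the rotation is a single matrix multiplication."
    ]
    },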
{
"cell_type": "code",
"execution_count": null,
"id": "95613ee9",
"metadata": {},
"outputs": [],
"source": [
"# check if notebook is in colab\n",
"try:\n",
" # install ezkl\n",
" import google.colab\n",
" import subprocess\n",
" import sys\n",
" subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"ezkl\"])\n",
" subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"onnx\"])\n",
"\n",
"# rely on local installation of ezkl if the notebook is not in colab\n",
"except:\n",
" pass\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from torch import nn\n",
"import ezkl\n",
"import os\n",
"import json\n",
"import torch\n",
"import math\n",
"\n",
"# these are constants for the rotation\n",
"phi = torch.tensor(5 * math.pi / 180)\n",
"s = torch.sin(phi)\n",
"c = torch.cos(phi)\n",
"\n",
"\n",
"class RotateStuff(nn.Module):\n",
" def __init__(self):\n",
" super(RotateStuff, self).__init__()\n",
"\n",
" # create a rotation matrix -- the matrix is constant and is transposed for convenience\n",
" self.rot = torch.stack([torch.stack([c, -s]),\n",
" torch.stack([s, c])]).t()\n",
"\n",
" def forward(self, x):\n",
" x_rot = x @ self.rot # same as x_rot = (rot @ x.t()).t() due to rot in O(n) (SO(n) even)\n",
" return x_rot\n",
"\n",
"\n",
"circuit = RotateStuff()"
]
},
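    {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
    "As a quick added sanity check (a sketch, not part of the original flow): rotation matrices are orthogonal, so multiplying `circuit.rot` by its transpose should give (numerically) the identity."
    ]
    },
    {
    "cell_type": "code",
    "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
    "# rot @ rot.t() should be approximately the 2x2 identity matrix\n",
    "print(circuit.rot @ circuit.rot.t())"
    ]
    },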
{
"cell_type": "markdown",
"metadata": {},
"source": [
"This will showcase the principle directions of rotation by plotting the rotation of a single unit vector."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from matplotlib import pyplot\n",
"pyplot.figure(figsize=(3, 3))\n",
"pyplot.arrow(0, 0, 1, 0, width=0.02, alpha=0.5)\n",
"pyplot.arrow(0, 0, 0, 1, width=0.02, alpha=0.5)\n",
"pyplot.arrow(0, 0, circuit.rot[0, 0].item(), circuit.rot[0, 1].item(), width=0.02)\n",
"pyplot.arrow(0, 0, circuit.rot[1, 0].item(), circuit.rot[1, 1].item(), width=0.02)\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b37637c4",
"metadata": {},
"outputs": [],
"source": [
"model_path = os.path.join('network.onnx')\n",
"compiled_model_path = os.path.join('network.compiled')\n",
"pk_path = os.path.join('test.pk')\n",
"vk_path = os.path.join('test.vk')\n",
"settings_path = os.path.join('settings.json')\n",
"srs_path = os.path.join('kzg.srs')\n",
"witness_path = os.path.join('witness.json')\n",
"data_path = os.path.join('input.json')"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "82db373a",
"metadata": {},
"outputs": [],
"source": [
"\n",
"\n",
"# initial principle vectors for the rotation are as in the plot above\n",
"x = torch.tensor([[1, 0], [0, 1]], dtype=torch.float32)\n",
"\n",
"# Flips the neural net into inference mode\n",
"circuit.eval()\n",
"\n",
" # Export the model\n",
"torch.onnx.export(circuit, # model being run\n",
" x, # model input (or a tuple for multiple inputs)\n",
" model_path, # where to save the model (can be a file or file-like object)\n",
" export_params=True, # store the trained parameter weights inside the model file\n",
" opset_version=10, # the ONNX version to export the model to\n",
" do_constant_folding=True, # whether to execute constant folding for optimization\n",
" input_names = ['input'], # the model's input names\n",
" output_names = ['output'], # the model's output names\n",
" )\n",
"\n",
"data_array = ((x).detach().numpy()).reshape([-1]).tolist()\n",
"\n",
"data = dict(input_data = [data_array])\n",
"\n",
" # Serialize data into file:\n",
"json.dump( data, open(data_path, 'w' ))\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### World rotation in 2D on-chain"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"For demo purposes we deploy these coordinates to a contract running locally using Anvil. This creates our on-chain world. We then rotate the world using the EZKL package and submit the proof to the contract. The contract then updates the world rotation. For demo purposes we do this repeatedly, rotating the world by 1 transform each time."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import subprocess\n",
"import time\n",
"import threading\n",
"\n",
"# make sure anvil is running locally\n",
"# $ anvil -p 3030\n",
"\n",
"RPC_URL = \"http://localhost:3030\"\n",
"\n",
"# Save process globally\n",
"anvil_process = None\n",
"\n",
"def start_anvil():\n",
" global anvil_process\n",
" if anvil_process is None:\n",
" anvil_process = subprocess.Popen([\"anvil\", \"-p\", \"3030\", \"--code-size-limit=41943040\"])\n",
" if anvil_process.returncode is not None:\n",
" raise Exception(\"failed to start anvil process\")\n",
" time.sleep(3)\n",
"\n",
"def stop_anvil():\n",
" global anvil_process\n",
" if anvil_process is not None:\n",
" anvil_process.terminate()\n",
" anvil_process = None\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"We define our `PyRunArgs` objects which contains the visibility parameters for out model. \n",
"- `input_visibility` defines the visibility of the model inputs\n",
"- `param_visibility` defines the visibility of the model weights and constants and parameters \n",
"- `output_visibility` defines the visibility of the model outputs\n",
"\n",
"Here we create the following setup:\n",
"- `input_visibility`: \"public\"\n",
"- `param_visibility`: \"fixed\"\n",
"- `output_visibility`: public"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d5e374a2",
"metadata": {},
"outputs": [],
"source": [
"py_run_args = ezkl.PyRunArgs()\n",
"py_run_args.input_visibility = \"public\"\n",
"py_run_args.output_visibility = \"public\"\n",
"py_run_args.param_visibility = \"private\" # private by default\n",
"py_run_args.scale_rebase_multiplier = 10\n",
"\n",
"res = ezkl.gen_settings(model_path, settings_path, py_run_args=py_run_args)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "3aa4f090",
"metadata": {},
"outputs": [],
"source": [
"res = ezkl.compile_circuit(model_path, compiled_model_path, settings_path)\n",
"assert res == True"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"We also define a contract that holds out test data. This contract will contain in its storage the data that we will read from and attest to. In production you would not need to set up a local anvil instance. Instead you would replace RPC_URL with the actual RPC endpoint of the chain you are deploying your verifiers too, reading from the data on said chain."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "2007dc77",
"metadata": {},
"outputs": [],
"source": [
"ezkl.setup_test_evm_data(\n",
" data_path,\n",
" compiled_model_path,\n",
" # we write the call data to the same file as the input data\n",
" data_path,\n",
" input_source=ezkl.PyTestDataSource.OnChain,\n",
" output_source=ezkl.PyTestDataSource.File,\n",
" rpc_url=RPC_URL)"
]
},
{
"cell_type": "markdown",
"id": "ab993958",
"metadata": {},
"source": [
"As we use Halo2 with KZG-commitments we need an SRS string from (preferably) a multi-party trusted setup ceremony. For an overview of the procedures for such a ceremony check out [this page](https://blog.ethereum.org/2023/01/16/announcing-kzg-ceremony). The `get_srs` command retrieves a correctly sized SRS given the calibrated settings file from [here](https://github.com/han0110/halo2-kzg-srs). \n",
"\n",
"These SRS were generated with [this](https://github.com/privacy-scaling-explorations/perpetualpowersoftau) ceremony. "
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "8b74dcee",
"metadata": {},
"outputs": [],
"source": [
"# srs path\n",
"res = await ezkl.get_srs( settings_path)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "18c8b7c7",
"metadata": {},
"outputs": [],
"source": [
"# now generate the witness file \n",
"\n",
"witness = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},
{
"cell_type": "markdown",
"id": "ad58432e",
"metadata": {},
"source": [
"Here we setup verifying and proving keys for the circuit. As the name suggests the proving key is needed for ... proving and the verifying key is needed for ... verifying. "
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b1c561a8",
"metadata": {},
"outputs": [],
"source": [
"res = ezkl.setup(\n",
" compiled_model_path,\n",
" vk_path,\n",
" pk_path,\n",
" \n",
" )\n",
"\n",
"assert res == True\n",
"assert os.path.isfile(vk_path)\n",
"assert os.path.isfile(pk_path)\n",
"assert os.path.isfile(settings_path)"
]
},
{
"cell_type": "markdown",
"id": "1746c8d1",
"metadata": {},
"source": [
"We can now create an EVM verifier contract from our circuit. This contract will be deployed to the chain we are using. In this case we are using a local anvil instance."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d1920c0f",
"metadata": {},
"outputs": [],
"source": [
"abi_path = 'test.abi'\n",
"sol_code_path = 'test.sol'\n",
"\n",
"res = await ezkl.create_evm_verifier(\n",
" vk_path,\n",
" settings_path,\n",
" sol_code_path,\n",
" abi_path,\n",
" )\n",
"assert res == True"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "0fd7f22b",
"metadata": {},
"outputs": [],
"source": [
"import json\n",
"\n",
"addr_path_verifier = \"addr_verifier.txt\"\n",
"\n",
"res = await ezkl.deploy_evm(\n",
" addr_path_verifier,\n",
" 'http://127.0.0.1:3030',\n",
" sol_code_path,\n",
")\n",
"\n",
"assert res == True"
]
},
{
"cell_type": "markdown",
"id": "9c0dffab",
"metadata": {},
"source": [
"With the vanilla verifier deployed, we can now create the data attestation contract, which will read in the instances from the calldata to the verifier, attest to them, call the verifier and then return the result. \n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "c2db14d7",
"metadata": {},
"outputs": [],
"source": [
"abi_path = 'test.abi'\n",
"sol_code_path = 'test.sol'\n",
"input_path = 'input.json'\n",
"\n",
"res = await ezkl.create_evm_data_attestation(\n",
" input_path,\n",
" settings_path,\n",
" sol_code_path,\n",
" abi_path,\n",
" )"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "5a018ba6",
"metadata": {},
"outputs": [],
"source": [
"addr_path_da = \"addr_da.txt\"\n",
"\n",
"res = await ezkl.deploy_da_evm(\n",
" addr_path_da,\n",
" input_path,\n",
" RPC_URL,\n",
" settings_path,\n",
" sol_code_path,\n",
" )"
]
},
{
"cell_type": "markdown",
"id": "2adad845",
"metadata": {},
"source": [
"Now we can pull in the data from the contract and calculate a new set of coordinates. We then rotate the world by 1 transform and submit the proof to the contract. The contract could then update the world rotation (logic not inserted here). For demo purposes we do this repeatedly, rotating the world by 1 transform. "
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "c384cbc8",
"metadata": {},
"outputs": [],
"source": [
"# GENERATE A PROOF\n",
"\n",
"\n",
"proof_path = os.path.join('test.pf')\n",
"\n",
"res = ezkl.prove(\n",
" witness_path,\n",
" compiled_model_path,\n",
" pk_path,\n",
" proof_path,\n",
" \n",
" \"single\",\n",
" )\n",
"\n",
"print(res)\n",
"assert os.path.isfile(proof_path)"
]
},
{
"cell_type": "markdown",
"id": "90eda56e",
"metadata": {},
"source": [
"Call the view only verify method on the contract to verify the proof. Since it is a view function this is safe to use in production since you don't have to pass your private key."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "76f00d41",
"metadata": {},
"outputs": [],
"source": [
"# read the verifier address\n",
"addr_verifier = None\n",
"with open(addr_path_verifier, 'r') as f:\n",
" addr = f.read()\n",
"#read the data attestation address\n",
"addr_da = None\n",
"with open(addr_path_da, 'r') as f:\n",
" addr_da = f.read()\n",
"\n",
"res = ezkl.verify_evm(\n",
" addr,\n",
" RPC_URL,\n",
" proof_path,\n",
" addr_da,\n",
")"
]
},
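{
"cell_type": "markdown",
"metadata": {},
"source": [
"If you'd rather submit the transaction yourself, you can also encode the proof and its public instances into raw EVM calldata. The cell below is a minimal sketch assuming the `encode_evm_calldata` binding; the output path is illustrative."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# encode the proof + public instances into calldata bytes (sketch; output path is illustrative)\n",
"calldata = ezkl.encode_evm_calldata(proof_path, 'calldata.bytes')\n",
"print(f\"calldata length: {len(calldata)} bytes\")"
]
},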
{
"cell_type": "markdown",
"metadata": {},
"source": [
"As a sanity check lets plot the rotations of the unit vectors. We can see that the unit vectors rotate as expected by the output of the circuit. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"witness['outputs'][0][0]"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"settings = json.load(open(settings_path, 'r'))\n",
"out_scale = settings[\"model_output_scales\"][0]\n",
"\n",
"from matplotlib import pyplot\n",
"pyplot.figure(figsize=(3, 3))\n",
"pyplot.arrow(0, 0, 1, 0, width=0.02, alpha=0.5)\n",
"pyplot.arrow(0, 0, 0, 1, width=0.02, alpha=0.5)\n",
"\n",
"arrow_x = ezkl.felt_to_float(witness['outputs'][0][0], out_scale)\n",
"arrow_y = ezkl.felt_to_float(witness['outputs'][0][1], out_scale)\n",
"pyplot.arrow(0, 0, arrow_x, arrow_y, width=0.02)\n",
"arrow_x = ezkl.felt_to_float(witness['outputs'][0][2], out_scale)\n",
"arrow_y = ezkl.felt_to_float(witness['outputs'][0][3], out_scale)\n",
"pyplot.arrow(0, 0, arrow_x, arrow_y, width=0.02)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": ".env",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.9"
}
},
"nbformat": 4,
"nbformat_minor": 5
}

View File

@@ -160,30 +160,6 @@ def compile_circuit(model:str | os.PathLike | pathlib.Path,compiled_circuit:str
"""
...
def create_evm_data_attestation(input_data:str | os.PathLike | pathlib.Path,settings_path:str | os.PathLike | pathlib.Path,sol_code_path:str | os.PathLike | pathlib.Path,abi_path:str | os.PathLike | pathlib.Path,witness_path:typing.Optional[str | os.PathLike | pathlib.Path]) -> typing.Any:
r"""
Creates an EVM compatible data attestation verifier, you will need solc installed in your environment to run this
Arguments
---------
input_data: str
The path to the .json data file, which should contain the necessary calldata and account addresses needed to read from all the on-chain view functions that return the data that the network ingests as inputs
settings_path: str
The path to the settings file
sol_code_path: str
The path to the create the solidity verifier
abi_path: str
The path to create the ABI for the solidity verifier
Returns
-------
bool
"""
...
def create_evm_verifier(vk_path:str | os.PathLike | pathlib.Path,settings_path:str | os.PathLike | pathlib.Path,sol_code_path:str | os.PathLike | pathlib.Path,abi_path:str | os.PathLike | pathlib.Path,srs_path:typing.Optional[str | os.PathLike | pathlib.Path],reusable:bool) -> typing.Any:
r"""
Creates an EVM compatible verifier, you will need solc installed in your environment to run this
@@ -247,7 +223,7 @@ def create_evm_verifier_aggr(aggregation_settings:typing.Sequence[str | os.PathL
"""
...
def create_evm_vka(vk_path:str | os.PathLike | pathlib.Path,settings_path:str | os.PathLike | pathlib.Path,sol_code_path:str | os.PathLike | pathlib.Path,abi_path:str | os.PathLike | pathlib.Path,srs_path:typing.Optional[str | os.PathLike | pathlib.Path]) -> typing.Any:
def create_evm_vka(vk_path:str | os.PathLike | pathlib.Path,settings_path:str | os.PathLike | pathlib.Path,vka_path:str | os.PathLike | pathlib.Path,srs_path:typing.Optional[str | os.PathLike | pathlib.Path]) -> typing.Any:
r"""
Creates an Evm VK artifact. This command generates a VK with circuit-specific metadata encoded in memory for use by the reusable H2 verifier.
This is useful for deploying verifiers that were otherwise too big to fit on-chain and required aggregation.
@@ -260,8 +236,8 @@ def create_evm_vka(vk_path:str | os.PathLike | pathlib.Path,settings_path:str |
settings_path: str
The path to the settings file
sol_code_path: str
The path to the create the solidity verifying key.
vka_path: str
The path to create the VKA calldata.
abi_path: str
The path to create the ABI for the solidity verifier
@@ -275,12 +251,6 @@ def create_evm_vka(vk_path:str | os.PathLike | pathlib.Path,settings_path:str |
"""
...
def deploy_da_evm(addr_path:str | os.PathLike | pathlib.Path,input_data:str | os.PathLike | pathlib.Path,settings_path:str | os.PathLike | pathlib.Path,sol_code_path:str | os.PathLike | pathlib.Path,rpc_url:typing.Optional[str],optimizer_runs:int,private_key:typing.Optional[str]) -> typing.Any:
r"""
deploys the solidity da verifier
"""
...
def deploy_evm(addr_path:str | os.PathLike | pathlib.Path,sol_code_path:str | os.PathLike | pathlib.Path,rpc_url:typing.Optional[str],contract_type:str,optimizer_runs:int,private_key:typing.Optional[str]) -> typing.Any:
r"""
deploys the solidity verifier
@@ -706,35 +676,6 @@ def setup_aggregate(sample_snarks:typing.Sequence[str | os.PathLike | pathlib.Pa
"""
...
def setup_test_evm_data(data_path:str | os.PathLike | pathlib.Path,compiled_circuit_path:str | os.PathLike | pathlib.Path,test_data:str | os.PathLike | pathlib.Path,input_source:PyTestDataSource,output_source:PyTestDataSource,rpc_url:typing.Optional[str]) -> typing.Any:
r"""
Setup test evm data
Arguments
---------
data_path: str
The path to the .json data file, which should include both the network input (possibly private) and the network output (public input to the proof)
compiled_circuit_path: str
The path to the compiled model file (generated using the compile-circuit command)
test_data: str
For testing purposes only. The optional path to the .json data file that will be generated that contains the OnChain data storage information derived from the file information in the data .json file. Should include both the network input (possibly private) and the network output (public input to the proof)
input_sources: str
Where the input data comes from
output_source: str
Where the output data comes from
rpc_url: str
RPC URL for an EVM compatible node, if None, uses Anvil as a local RPC node
Returns
-------
bool
"""
...
def swap_proof_commitments(proof_path:str | os.PathLike | pathlib.Path,witness_path:str | os.PathLike | pathlib.Path) -> None:
r"""
@@ -823,7 +764,7 @@ def verify_aggr(proof_path:str | os.PathLike | pathlib.Path,vk_path:str | os.Pat
"""
...
def verify_evm(addr_verifier:str,proof_path:str | os.PathLike | pathlib.Path,rpc_url:typing.Optional[str],addr_da:typing.Optional[str],addr_vk:typing.Optional[str]) -> typing.Any:
def verify_evm(addr_verifier:str,proof_path:str | os.PathLike | pathlib.Path,rpc_url:typing.Optional[str],vka_path:typing.Optional[str]) -> typing.Any:
r"""
verifies an evm compatible proof, you will need solc installed in your environment to run this
@@ -838,11 +779,8 @@ def verify_evm(addr_verifier:str,proof_path:str | os.PathLike | pathlib.Path,rpc
rpc_url: str
RPC URL for an Ethereum node, if None will use Anvil but WON'T persist state
addr_da: str
does the verifier use data attestation ?
addr_vk: str
The addess of the separate VK contract (if the verifier key is rendered as a separate contract)
vka_path: str
The path to the VKA calldata bytes file (generated using the create_evm_vka command)
Returns
-------
bool

View File

@@ -1607,22 +1607,15 @@ fn verify_aggr(
#[pyfunction(signature = (
proof=PathBuf::from(DEFAULT_PROOF),
calldata=PathBuf::from(DEFAULT_CALLDATA),
addr_vk=None,
vka_path=None,
))]
#[gen_stub_pyfunction]
fn encode_evm_calldata<'a>(
proof: PathBuf,
calldata: PathBuf,
addr_vk: Option<&'a str>,
vka_path: Option<PathBuf>,
) -> Result<Vec<u8>, PyErr> {
let addr_vk = if let Some(addr_vk) = addr_vk {
let addr_vk = H160Flag::from(addr_vk);
Some(addr_vk)
} else {
None
};
crate::execute::encode_evm_calldata(proof, calldata, addr_vk).map_err(|e| {
crate::execute::encode_evm_calldata(proof, calldata, vka_path).map_err(|e| {
let err_str = format!("Failed to generate calldata: {}", e);
PyRuntimeError::new_err(err_str)
})
@@ -1702,15 +1695,15 @@ fn create_evm_verifier(
/// settings_path: str
/// The path to the settings file
///
/// sol_code_path: str
/// The path to the create the solidity verifying key.
///
/// abi_path: str
/// The path to create the ABI for the solidity verifier
/// vka_path: str
/// The path to the verification artifact calldata bytes file.
///
/// srs_path: str
/// The path to the SRS file
///
/// decimals: int
/// The number of decimals used for the rescaling of fixed point felt instances into on-chain floats.
///
/// Returns
/// -------
/// bool
@@ -1718,21 +1711,21 @@ fn create_evm_verifier(
#[pyfunction(signature = (
vk_path=PathBuf::from(DEFAULT_VK),
settings_path=PathBuf::from(DEFAULT_SETTINGS),
sol_code_path=PathBuf::from(DEFAULT_VK_SOL),
abi_path=PathBuf::from(DEFAULT_VERIFIER_ABI),
srs_path=None
vka_path=PathBuf::from(DEFAULT_VKA),
srs_path=None,
decimals=DEFAULT_DECIMALS.parse().unwrap(),
))]
#[gen_stub_pyfunction]
fn create_evm_vka(
py: Python,
vk_path: PathBuf,
settings_path: PathBuf,
sol_code_path: PathBuf,
abi_path: PathBuf,
vka_path: PathBuf,
srs_path: Option<PathBuf>,
decimals: usize,
) -> PyResult<Bound<'_, PyAny>> {
pyo3_async_runtimes::tokio::future_into_py(py, async move {
crate::execute::create_evm_vka(vk_path, srs_path, settings_path, sol_code_path, abi_path)
crate::execute::create_evm_vka(vk_path, srs_path, settings_path, vka_path, decimals)
.await
.map_err(|e| {
let err_str = format!("Failed to run create_evm_vka: {}", e);
@@ -1743,124 +1736,7 @@ fn create_evm_vka(
})
}
/// Creates an EVM compatible data attestation verifier, you will need solc installed in your environment to run this
///
/// Arguments
/// ---------
/// input_data: str
/// The path to the .json data file, which should contain the necessary calldata and account addresses needed to read from all the on-chain view functions that return the data that the network ingests as inputs
///
/// settings_path: str
/// The path to the settings file
///
/// sol_code_path: str
/// The path to the create the solidity verifier
///
/// abi_path: str
/// The path to create the ABI for the solidity verifier
///
/// Returns
/// -------
/// bool
///
#[pyfunction(signature = (
input_data=String::from(DEFAULT_DATA),
settings_path=PathBuf::from(DEFAULT_SETTINGS),
sol_code_path=PathBuf::from(DEFAULT_SOL_CODE_DA),
abi_path=PathBuf::from(DEFAULT_VERIFIER_DA_ABI),
witness_path=None,
))]
#[gen_stub_pyfunction]
fn create_evm_data_attestation(
py: Python,
input_data: String,
settings_path: PathBuf,
sol_code_path: PathBuf,
abi_path: PathBuf,
witness_path: Option<PathBuf>,
) -> PyResult<Bound<'_, PyAny>> {
pyo3_async_runtimes::tokio::future_into_py(py, async move {
crate::execute::create_evm_data_attestation(
settings_path,
sol_code_path,
abi_path,
input_data,
witness_path,
)
.await
.map_err(|e| {
let err_str = format!("Failed to run create_evm_data_attestation: {}", e);
PyRuntimeError::new_err(err_str)
})?;
Ok(true)
})
}
/// Setup test evm witness
///
/// Arguments
/// ---------
/// data_path: str
/// The path to the .json data file, which should include both the network input (possibly private) and the network output (public input to the proof)
///
/// compiled_circuit_path: str
/// The path to the compiled model file (generated using the compile-circuit command)
///
/// test_data: str
/// For testing purposes only. The optional path to the .json data file that will be generated that contains the OnChain data storage information derived from the file information in the data .json file. Should include both the network input (possibly private) and the network output (public input to the proof)
///
/// input_sources: str
/// Where the input data comes from
///
/// output_source: str
/// Where the output data comes from
///
/// rpc_url: str
/// RPC URL for an EVM compatible node, if None, uses Anvil as a local RPC node
///
/// Returns
/// -------
/// bool
///
#[pyfunction(signature = (
data_path,
compiled_circuit_path,
test_data,
input_source,
output_source,
rpc_url,
))]
#[gen_stub_pyfunction]
fn setup_test_evm_data(
py: Python,
data_path: String,
compiled_circuit_path: PathBuf,
test_data: PathBuf,
input_source: PyTestDataSource,
output_source: PyTestDataSource,
rpc_url: String,
) -> PyResult<Bound<'_, PyAny>> {
pyo3_async_runtimes::tokio::future_into_py(py, async move {
crate::execute::setup_test_evm_data(
data_path,
compiled_circuit_path,
test_data,
rpc_url,
input_source.into(),
output_source.into(),
)
.await
.map_err(|e| {
let err_str = format!("Failed to run setup_test_evm_data: {}", e);
PyRuntimeError::new_err(err_str)
})?;
Ok(true)
})
}
/// deploys the solidity verifier
/// Deploys the solidity verifier
#[pyfunction(signature = (
addr_path,
rpc_url,
@@ -1898,46 +1774,64 @@ fn deploy_evm(
})
}
/// deploys the solidity da verifier
/// Registers a VKA on the EZKL reusable verifier contract
///
/// Arguments
/// ---------
/// addr_verifier: str
/// The reusable verifier contract's address as a hex string
///
/// rpc_url: str
/// RPC URL for an Ethereum node, if None will use Anvil but WON'T persist state
///
/// vka_path: str
/// The path to the VKA calldata bytes file (generated using the create_evm_vka command)
///
/// vka_digest_path: str
/// The path to the VKA digest file, aka hash of the VKA calldata bytes file
///
/// private_key: str
/// The private key to use for signing the transaction. If None, will use the default private key
///
/// Returns
/// -------
/// bool
///
#[pyfunction(signature = (
addr_path,
input_data,
addr_verifier,
rpc_url,
settings_path=PathBuf::from(DEFAULT_SETTINGS),
sol_code_path=PathBuf::from(DEFAULT_SOL_CODE_DA),
optimizer_runs=DEFAULT_OPTIMIZER_RUNS.parse().unwrap(),
private_key=None
vka_path=PathBuf::from(DEFAULT_VKA),
vka_digest_path=PathBuf::from(DEFAULT_VKA_DIGEST),
private_key=None,
))]
#[gen_stub_pyfunction]
fn deploy_da_evm(
py: Python,
addr_path: PathBuf,
input_data: String,
fn register_vka<'a>(
py: Python<'a>,
addr_verifier: &'a str,
rpc_url: String,
settings_path: PathBuf,
sol_code_path: PathBuf,
optimizer_runs: usize,
vka_path: PathBuf,
vka_digest_path: PathBuf,
private_key: Option<String>,
) -> PyResult<Bound<'_, PyAny>> {
) -> PyResult<Bound<'a, PyAny>> {
let addr_verifier = H160Flag::from(addr_verifier);
pyo3_async_runtimes::tokio::future_into_py(py, async move {
crate::execute::deploy_da_evm(
input_data,
settings_path,
sol_code_path,
crate::execute::register_vka(
rpc_url,
addr_path,
optimizer_runs,
addr_verifier,
vka_path,
vka_digest_path,
private_key,
)
.await
.map_err(|e| {
let err_str = format!("Failed to run deploy_da_evm: {}", e);
let err_str = format!("Failed to run register_vka: {}", e);
PyRuntimeError::new_err(err_str)
})?;
Ok(true)
})
}
/// verifies an evm compatible proof, you will need solc installed in your environment to run this
///
/// Arguments
@@ -1951,11 +1845,8 @@ fn deploy_da_evm(
/// rpc_url: str
/// RPC URL for an Ethereum node, if None will use Anvil but WON'T persist state
///
/// addr_da: str
/// does the verifier use data attestation ?
///
/// addr_vk: str
/// The address of the separate VK contract (if the verifier key is rendered as a separate contract)
/// vka_path: str
/// The path to the VKA calldata bytes file (generated using the create_evm_vka command)
/// Returns
/// -------
/// bool
@@ -1964,8 +1855,7 @@ fn deploy_da_evm(
addr_verifier,
rpc_url,
proof_path=PathBuf::from(DEFAULT_PROOF),
addr_da = None,
addr_vk = None,
vka_path = None,
))]
#[gen_stub_pyfunction]
fn verify_evm<'a>(
@@ -1973,25 +1863,12 @@ fn verify_evm<'a>(
addr_verifier: &'a str,
rpc_url: String,
proof_path: PathBuf,
addr_da: Option<&'a str>,
addr_vk: Option<&'a str>,
vka_path: Option<PathBuf>,
) -> PyResult<Bound<'a, PyAny>> {
let addr_verifier = H160Flag::from(addr_verifier);
let addr_da = if let Some(addr_da) = addr_da {
let addr_da = H160Flag::from(addr_da);
Some(addr_da)
} else {
None
};
let addr_vk = if let Some(addr_vk) = addr_vk {
let addr_vk = H160Flag::from(addr_vk);
Some(addr_vk)
} else {
None
};
pyo3_async_runtimes::tokio::future_into_py(py, async move {
crate::execute::verify_evm(proof_path, addr_verifier, rpc_url, addr_da, addr_vk)
crate::execute::verify_evm(proof_path, addr_verifier, rpc_url, vka_path)
.await
.map_err(|e| {
let err_str = format!("Failed to run verify_evm: {}", e);
@@ -2115,12 +1992,10 @@ fn ezkl(m: &Bound<'_, PyModule>) -> PyResult<()> {
m.add_function(wrap_pyfunction!(create_evm_verifier, m)?)?;
m.add_function(wrap_pyfunction!(create_evm_vka, m)?)?;
m.add_function(wrap_pyfunction!(deploy_evm, m)?)?;
m.add_function(wrap_pyfunction!(deploy_da_evm, m)?)?;
m.add_function(wrap_pyfunction!(verify_evm, m)?)?;
m.add_function(wrap_pyfunction!(setup_test_evm_data, m)?)?;
m.add_function(wrap_pyfunction!(create_evm_verifier_aggr, m)?)?;
m.add_function(wrap_pyfunction!(create_evm_data_attestation, m)?)?;
m.add_function(wrap_pyfunction!(encode_evm_calldata, m)?)?;
m.add_function(wrap_pyfunction!(register_vka, m)?)?;
Ok(())
}
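
Taken together, these bindings replace the data-attestation and separate-VK-address flow with a VKA flow: create a VKA artifact from the verifying key, register it on a deployed reusable verifier, then verify proofs by passing the VKA calldata. A minimal Python sketch, assuming the signatures shown above (the paths, verifier address, and RPC URL are illustrative):

# create the VKA calldata artifact from the verifying key and settings (sketch)
await ezkl.create_evm_vka('test.vk', 'settings.json', 'vka.bytes')
# register the VKA on an already-deployed reusable verifier; its digest is written to vka.digest
await ezkl.register_vka('0xVERIFIER_ADDRESS', RPC_URL, 'vka.bytes', 'vka.digest')
# verify a proof against the reusable verifier using the VKA calldata
await ezkl.verify_evm('0xVERIFIER_ADDRESS', RPC_URL, 'test.pf', 'vka.bytes')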

View File

@@ -1,6 +1,7 @@
use halo2_proofs::{
plonk::*,
poly::{
VerificationStrategy,
commitment::{CommitmentScheme, ParamsProver},
ipa::{
commitment::{IPACommitmentScheme, ParamsIPA},
@@ -12,7 +13,6 @@ use halo2_proofs::{
multiopen::{ProverSHPLONK, VerifierSHPLONK},
strategy::SingleStrategy as KZGSingleStrategy,
},
VerificationStrategy,
},
};
use std::fmt::Display;
@@ -20,15 +20,15 @@ use std::io::BufReader;
use std::str::FromStr;
use crate::{
CheckMode, Commitments, EZKLError as InnerEZKLError,
circuit::region::RegionSettings,
graph::GraphSettings,
pfsys::{
create_proof_circuit,
TranscriptType, create_proof_circuit,
evm::aggregation_kzg::{AggregationCircuit, PoseidonTranscript},
verify_proof_circuit, TranscriptType,
verify_proof_circuit,
},
tensor::TensorType,
CheckMode, Commitments, EZKLError as InnerEZKLError,
};
use crate::graph::{GraphCircuit, GraphWitness};
@@ -66,26 +66,24 @@ impl From<InnerEZKLError> for EZKLError {
pub(crate) fn encode_verifier_calldata(
// TODO - should it be pub(crate) or pub or pub(super)?
proof: Vec<u8>,
vk_address: Option<Vec<u8>>,
vka: Option<Vec<u8>>,
) -> Result<Vec<u8>, EZKLError> {
let snark: crate::pfsys::Snark<Fr, G1Affine> =
serde_json::from_slice(&proof[..]).map_err(InnerEZKLError::from)?;
let vk_address: Option<[u8; 20]> = if let Some(vk_address) = vk_address {
let array: [u8; 20] =
serde_json::from_slice(&vk_address[..]).map_err(InnerEZKLError::from)?;
let vka_buf: Option<Vec<[u8; 32]>> = if let Some(vka) = vka {
let array: Vec<[u8; 32]> =
serde_json::from_slice(&vka[..]).map_err(InnerEZKLError::from)?;
Some(array)
} else {
None
};
let vka: Option<&[[u8; 32]]> = vka_buf.as_deref();
let flattened_instances = snark.instances.into_iter().flatten();
let encoded = encode_calldata(
vk_address,
&snark.proof,
&flattened_instances.collect::<Vec<_>>(),
);
let encoded = encode_calldata(vka, &snark.proof, &flattened_instances.collect::<Vec<_>>());
Ok(encoded)
}

View File

@@ -1,6 +1,7 @@
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
use alloy::primitives::Address as H160;
use clap::{Command, Parser, Subcommand};
use clap_complete::{Generator, Shell, generate};
use clap_complete::{generate, Generator, Shell};
#[cfg(feature = "python-bindings")]
use pyo3::{conversion::FromPyObject, exceptions::PyValueError, prelude::*};
use serde::{Deserialize, Serialize};
@@ -8,10 +9,9 @@ use std::path::PathBuf;
use std::str::FromStr;
use tosubcommand::{ToFlags, ToSubcommand};
use crate::{Commitments, RunArgs, pfsys::ProofType};
use crate::{pfsys::ProofType, Commitments, RunArgs};
use crate::circuit::CheckMode;
use crate::graph::TestDataSource;
use crate::pfsys::TranscriptType;
/// The default path to the .json data file
@@ -42,20 +42,14 @@ pub const DEFAULT_SPLIT: &str = "false";
pub const DEFAULT_VERIFIER_ABI: &str = "verifier_abi.json";
/// Default verifier abi for aggregated proofs
pub const DEFAULT_VERIFIER_AGGREGATED_ABI: &str = "verifier_aggr_abi.json";
/// Default verifier abi for data attestation
pub const DEFAULT_VERIFIER_DA_ABI: &str = "verifier_da_abi.json";
/// Default solidity code
pub const DEFAULT_SOL_CODE: &str = "evm_deploy.sol";
/// Default calldata path
pub const DEFAULT_CALLDATA: &str = "calldata.bytes";
/// Default solidity code for aggregated proofs
pub const DEFAULT_SOL_CODE_AGGREGATED: &str = "evm_deploy_aggr.sol";
/// Default solidity code for data attestation
pub const DEFAULT_SOL_CODE_DA: &str = "evm_deploy_da.sol";
/// Default contract address
pub const DEFAULT_CONTRACT_ADDRESS: &str = "contract.address";
/// Default contract address for data attestation
pub const DEFAULT_CONTRACT_ADDRESS_DA: &str = "contract_da.address";
/// Default contract address for vk
pub const DEFAULT_CONTRACT_ADDRESS_VK: &str = "contract_vk.address";
/// Default check mode
@@ -78,8 +72,8 @@ pub const DEFAULT_DISABLE_SELECTOR_COMPRESSION: &str = "false";
pub const DEFAULT_RENDER_REUSABLE: &str = "false";
/// Default contract deployment type
pub const DEFAULT_CONTRACT_DEPLOYMENT_TYPE: &str = "verifier";
/// Default VK sol path
pub const DEFAULT_VK_SOL: &str = "vk.sol";
/// Default VKA calldata path
pub const DEFAULT_VKA: &str = "vka.bytes";
/// Default VK abi path
pub const DEFAULT_VK_ABI: &str = "vk.abi";
/// Default scale rebase multipliers for calibration
@@ -92,6 +86,10 @@ pub const DEFAULT_ONLY_RANGE_CHECK_REBASE: &str = "false";
pub const DEFAULT_COMMITMENT: &str = "kzg";
/// Default seed used to generate random data
pub const DEFAULT_SEED: &str = "21242";
/// Default number of decimals for instances rescaling on-chain.
pub const DEFAULT_DECIMALS: &str = "18";
/// Default path for the vka digest file
pub const DEFAULT_VKA_DIGEST: &str = "vka.digest";
#[cfg(feature = "python-bindings")]
/// Converts TranscriptType into a PyObject (Required for TranscriptType to be compatible with Python)
@@ -187,8 +185,6 @@ pub enum ContractType {
/// Can also be used as an alternative to aggregation for verifiers that are otherwise too large to fit on-chain.
reusable: bool,
},
/// Deploys a verifying key artifact that the reusable verifier loads into memory during runtime. Encodes the circuit specific data that was otherwise hardcoded onto the stack.
VerifyingKeyArtifact,
}
impl Default for ContractType {
@@ -207,7 +203,6 @@ impl std::fmt::Display for ContractType {
"verifier/reusable".to_string()
}
ContractType::Verifier { reusable: false } => "verifier".to_string(),
ContractType::VerifyingKeyArtifact => "vka".to_string(),
}
)
}
@@ -224,7 +219,6 @@ impl From<&str> for ContractType {
match s {
"verifier" => ContractType::Verifier { reusable: false },
"verifier/reusable" => ContractType::Verifier { reusable: true },
"vka" => ContractType::VerifyingKeyArtifact,
_ => {
log::error!("Invalid value for ContractType");
log::warn!("Defaulting to verifier");
@@ -234,24 +228,25 @@ impl From<&str> for ContractType {
}
}
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
#[derive(Debug, Copy, Clone, Serialize, Deserialize, PartialEq, PartialOrd)]
/// wrapper for H160 to make it easy to parse into flag vals
pub struct H160Flag {
inner: H160,
}
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
impl From<H160Flag> for H160 {
fn from(val: H160Flag) -> H160 {
val.inner
}
}
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
impl ToFlags for H160Flag {
fn to_flags(&self) -> Vec<String> {
vec![format!("{:#x}", self.inner)]
}
}
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
impl From<&str> for H160Flag {
fn from(s: &str) -> Self {
Self {
@@ -299,7 +294,6 @@ impl IntoPy<PyObject> for ContractType {
match self {
ContractType::Verifier { reusable: true } => "verifier/reusable".to_object(py),
ContractType::Verifier { reusable: false } => "verifier".to_object(py),
ContractType::VerifyingKeyArtifact => "vka".to_object(py),
}
}
}
@@ -312,7 +306,6 @@ impl<'source> FromPyObject<'source> for ContractType {
match strval.to_lowercase().as_str() {
"verifier" => Ok(ContractType::Verifier { reusable: false }),
"verifier/reusable" => Ok(ContractType::Verifier { reusable: true }),
"vka" => Ok(ContractType::VerifyingKeyArtifact),
_ => Err(PyValueError::new_err("Invalid value for ContractType")),
}
}
@@ -671,30 +664,6 @@ pub enum Commands {
#[arg(long, default_value = DEFAULT_DISABLE_SELECTOR_COMPRESSION, action = clap::ArgAction::SetTrue)]
disable_selector_compression: Option<bool>,
},
/// Deploys a test contact that the data attester reads from and creates a data attestation formatted input.json file that contains call data information
#[command(arg_required_else_help = true)]
SetupTestEvmData {
/// The path to the .json data file, which should include both the network input (possibly private) and the network output (public input to the proof)
#[arg(short = 'D', long, value_hint = clap::ValueHint::FilePath)]
data: Option<String>,
/// The path to the compiled model file (generated using the compile-circuit command)
#[arg(short = 'M', long, value_hint = clap::ValueHint::FilePath)]
compiled_circuit: Option<PathBuf>,
/// For testing purposes only. The optional path to the .json data file that will be generated that contains the OnChain data storage information
/// derived from the file information in the data .json file.
/// Should include both the network input (possibly private) and the network output (public input to the proof)
#[arg(short = 'T', long, value_hint = clap::ValueHint::FilePath)]
test_data: PathBuf,
/// RPC URL for an Ethereum node
#[arg(short = 'U', long, value_hint = clap::ValueHint::Url)]
rpc_url: String,
/// where the input data come from
#[arg(long, default_value = "on-chain", value_hint = clap::ValueHint::Other)]
input_source: TestDataSource,
/// where the output data come from
#[arg(long, default_value = "on-chain", value_hint = clap::ValueHint::Other)]
output_source: TestDataSource,
},
/// Swaps the positions in the transcript that correspond to commitments
SwapProofCommitments {
/// The path to the proof file
@@ -737,6 +706,7 @@ pub enum Commands {
},
/// Encodes a proof into evm calldata
#[command(name = "encode-evm-calldata")]
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
EncodeEvmCalldata {
/// The path to the proof file (generated using the prove command)
#[arg(long, default_value = DEFAULT_PROOF, value_hint = clap::ValueHint::FilePath)]
@@ -744,12 +714,13 @@ pub enum Commands {
/// The path to save the calldata to
#[arg(long, default_value = DEFAULT_CALLDATA, value_hint = clap::ValueHint::FilePath)]
calldata_path: Option<PathBuf>,
/// The path to the verification key address (only used if the vk is rendered as a separate contract)
/// The path to the serialized VKA file
#[arg(long, value_hint = clap::ValueHint::Other)]
addr_vk: Option<H160Flag>,
vka_path: Option<PathBuf>,
},
/// Creates an Evm verifier for a single proof
#[command(name = "create-evm-verifier")]
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
CreateEvmVerifier {
/// The path to SRS, if None will use ~/.ezkl/srs/kzg{logrows}.srs
#[arg(long, value_hint = clap::ValueHint::FilePath)]
@@ -770,8 +741,9 @@ pub enum Commands {
#[arg(long, default_value = DEFAULT_RENDER_REUSABLE, action = clap::ArgAction::SetTrue)]
reusable: Option<bool>,
},
/// Creates an Evm verifier artifact for a single proof to be used by the reusable verifier
/// Creates an evm verifier artifact to be used by the reusable verifier
#[command(name = "create-evm-vka")]
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
CreateEvmVka {
/// The path to SRS, if None will use ~/.ezkl/srs/kzg{logrows}.srs
#[arg(long, value_hint = clap::ValueHint::FilePath)]
@@ -782,39 +754,18 @@ pub enum Commands {
/// The path to load the desired verification key file
#[arg(long, default_value = DEFAULT_VK, value_hint = clap::ValueHint::FilePath)]
vk_path: Option<PathBuf>,
/// The path to output the Solidity code
#[arg(long, default_value = DEFAULT_VK_SOL, value_hint = clap::ValueHint::FilePath)]
sol_code_path: Option<PathBuf>,
/// The path to output the Solidity verifier ABI
#[arg(long, default_value = DEFAULT_VK_ABI, value_hint = clap::ValueHint::FilePath)]
abi_path: Option<PathBuf>,
},
/// Creates an Evm verifier that attests to on-chain inputs for a single proof
#[command(name = "create-evm-da")]
CreateEvmDa {
/// The path to load circuit settings .json file from (generated using the gen-settings command)
#[arg(short = 'S', long, default_value = DEFAULT_SETTINGS, value_hint = clap::ValueHint::FilePath)]
settings_path: Option<PathBuf>,
/// The path to output the Solidity code
#[arg(long, default_value = DEFAULT_SOL_CODE_DA, value_hint = clap::ValueHint::FilePath)]
sol_code_path: Option<PathBuf>,
/// The path to output the Solidity verifier ABI
#[arg(long, default_value = DEFAULT_VERIFIER_DA_ABI, value_hint = clap::ValueHint::FilePath)]
abi_path: Option<PathBuf>,
/// The path to the .json data file, which should
/// contain the necessary calldata and account addresses
/// needed to read from all the on-chain
/// view functions that return the data that the network
/// ingests as inputs.
#[arg(short = 'D', long, default_value = DEFAULT_DATA, value_hint = clap::ValueHint::FilePath)]
data: Option<String>,
/// The path to the witness file. This is needed for proof swapping for kzg commitments.
#[arg(short = 'W', long, default_value = DEFAULT_WITNESS, value_hint = clap::ValueHint::FilePath)]
witness: Option<PathBuf>,
/// The path to output the vka calldata
#[arg(long, default_value = DEFAULT_VKA, value_hint = clap::ValueHint::FilePath)]
vka_path: Option<PathBuf>,
/// The number of decimals we want to use for the rescaling of the instances into on-chain floats
/// Default is 18, which is the number of decimals used by most ERC20 tokens
#[arg(long, default_value = DEFAULT_DECIMALS, value_hint = clap::ValueHint::Other)]
decimals: Option<usize>,
},
/// Creates an Evm verifier for an aggregate proof
#[command(name = "create-evm-verifier-aggr")]
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
CreateEvmVerifierAggr {
/// The path to SRS, if None will use ~/.ezkl/srs/kzg{logrows}.srs
#[arg(long, value_hint = clap::ValueHint::FilePath)]
@@ -878,6 +829,7 @@ pub enum Commands {
commitment: Option<Commitments>,
},
/// Deploys an evm contract (verifier, reusable verifier, or vk artifact) that is generated by ezkl
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
DeployEvm {
/// The path to the Solidity code (generated using the create-evm-verifier command)
#[arg(long, default_value = DEFAULT_SOL_CODE, value_hint = clap::ValueHint::FilePath)]
@@ -898,33 +850,9 @@ pub enum Commands {
#[arg(long = "contract-type", short = 'C', default_value = DEFAULT_CONTRACT_DEPLOYMENT_TYPE, value_hint = clap::ValueHint::Other)]
contract: ContractType,
},
/// Deploys an evm verifier that allows for data attestation
#[command(name = "deploy-evm-da")]
DeployEvmDa {
/// The path to the .json data file, which should include both the network input (possibly private) and the network output (public input to the proof)
#[arg(short = 'D', long, default_value = DEFAULT_DATA, value_hint = clap::ValueHint::FilePath)]
data: Option<String>,
/// The path to load circuit settings .json file from (generated using the gen-settings command)
#[arg(long, default_value = DEFAULT_SETTINGS, value_hint = clap::ValueHint::FilePath)]
settings_path: Option<PathBuf>,
/// The path to the Solidity code
#[arg(long, default_value = DEFAULT_SOL_CODE_DA, value_hint = clap::ValueHint::FilePath)]
sol_code_path: Option<PathBuf>,
/// RPC URL for an Ethereum node
#[arg(short = 'U', long, value_hint = clap::ValueHint::Url)]
rpc_url: String,
#[arg(long, default_value = DEFAULT_CONTRACT_ADDRESS_DA, value_hint = clap::ValueHint::FilePath)]
/// The path to output the contract address
addr_path: Option<PathBuf>,
/// The optimizer runs to set on the verifier. (Lower values optimize for deployment, while higher values optimize for execution)
#[arg(long, default_value = DEFAULT_OPTIMIZER_RUNS, value_hint = clap::ValueHint::Other)]
optimizer_runs: usize,
/// Private secp256K1 key in hex format, 64 chars, no 0x prefix, of the account signing transactions. If None the private key will be generated by Anvil
#[arg(short = 'P', long, value_hint = clap::ValueHint::Other)]
private_key: Option<String>,
},
/// Verifies a proof using a local Evm executor, returning accept or reject
#[command(name = "verify-evm")]
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
VerifyEvm {
/// The path to the proof file (generated using the prove command)
#[arg(long, default_value = DEFAULT_PROOF, value_hint = clap::ValueHint::FilePath)]
@@ -935,12 +863,29 @@ pub enum Commands {
/// RPC URL for an Ethereum node
#[arg(short = 'U', long, value_hint = clap::ValueHint::Url)]
rpc_url: String,
/// does the verifier use data attestation ?
#[arg(long, value_hint = clap::ValueHint::Other)]
addr_da: Option<H160Flag>,
// is the vk rendered seperately, if so specify an address
#[arg(long, value_hint = clap::ValueHint::Other)]
addr_vk: Option<H160Flag>,
/// The path to the serialized vka file
#[arg(long, default_value = DEFAULT_VKA, value_hint = clap::ValueHint::FilePath)]
vka_path: Option<PathBuf>,
},
/// Registers a VKA, returning its digest, which is used to identify it on-chain.
#[command(name = "register-vka")]
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
RegisterVka {
/// RPC URL for an Ethereum node, if None will use Anvil but WON'T persist state
#[arg(short = 'U', long, value_hint = clap::ValueHint::Url)]
rpc_url: String,
/// The reusable verifier contract's address
#[arg(long, default_value = DEFAULT_CONTRACT_ADDRESS, value_hint = clap::ValueHint::Other)]
addr_verifier: H160Flag,
/// The path to the serialized VKA file
#[arg(long, default_value = DEFAULT_VKA, value_hint = clap::ValueHint::FilePath)]
vka_path: Option<PathBuf>,
/// The path to output the VKA digest to
#[arg(long, default_value = DEFAULT_VKA_DIGEST, value_hint = clap::ValueHint::FilePath)]
vka_digest_path: Option<PathBuf>,
/// Private secp256K1 key in hex format, 64 chars, no 0x prefix, of the account signing transactions. If None the private key will be generated by Anvil
#[arg(short = 'P', long, value_hint = clap::ValueHint::Other)]
private_key: Option<String>,
},
#[cfg(not(feature = "no-update"))]
/// Updates ezkl binary to version specified (or latest if not specified)

View File

@@ -1,7 +1,4 @@
use crate::graph::DataSource;
use crate::graph::GraphSettings;
use crate::graph::input::{CallToAccount, CallsToAccount, FileSourceInner, GraphData};
use crate::graph::modules::POSEIDON_INSTANCES;
use crate::graph::input::FileSourceInner;
use crate::pfsys::Snark;
use crate::pfsys::evm::EvmVerificationError;
use alloy::contract::CallBuilder;
@@ -9,11 +6,10 @@ use alloy::core::primitives::Address as H160;
use alloy::core::primitives::Bytes;
use alloy::core::primitives::U256;
use alloy::dyn_abi::abi::TokenSeq;
use alloy::dyn_abi::abi::token::{DynSeqToken, PackedSeqToken, WordToken};
// use alloy::providers::Middleware;
use alloy::json_abi::JsonAbi;
use alloy::primitives::ruint::ParseError;
use alloy::primitives::{B256, I256, ParseSignedError};
use alloy::primitives::{I256, ParseSignedError};
use alloy::providers::ProviderBuilder;
use alloy::providers::fillers::{
ChainIdFiller, FillProvider, GasFiller, JoinFill, NonceFiller, SignerFiller,
@@ -30,10 +26,9 @@ use alloy::transports::{RpcError, TransportErrorKind};
use foundry_compilers::Solc;
use foundry_compilers::artifacts::Settings as SolcSettings;
use foundry_compilers::error::{SolcError, SolcIoError};
use halo2_solidity_verifier::encode_calldata;
use halo2_solidity_verifier::{encode_calldata, encode_register_vk_calldata};
use halo2curves::bn256::{Fr, G1Affine};
use halo2curves::group::ff::PrimeField;
use itertools::Itertools;
use log::{debug, info, warn};
use reqwest::Client;
use std::path::PathBuf;
@@ -229,9 +224,9 @@ abigen!(
bool neg;
if (instances[i] > uint128(type(int128).max)) {
x = int256(ORDER - instances[i]);
neg = true;
} else {
x = int256(instances[i]);
neg = true;
}
uint output = mulDiv(uint256(x), numerator, denominator);
if (mulmod(uint256(x), numerator, denominator) * 2 >= denominator) {
@@ -244,6 +239,16 @@ abigen!(
}
);
#[derive(Debug, thiserror::Error)]
pub enum RescaleCheckError {
#[error("rescaled instance #{idx} mismatch: expected {expected}, got {got}")]
Mismatch {
idx: usize,
expected: String,
got: String,
},
}
#[derive(Debug, thiserror::Error)]
pub enum EthError {
#[error("a transport error occurred: {0}")]
@@ -291,6 +296,10 @@ pub enum EthError {
Svm(String),
#[error("no contract output found")]
NoContractOutput,
#[error("failed to load vka data: {0}")]
VkaData(String),
#[error("rescaled instance mismatch: {0}")]
RescaleCheckError(#[from] RescaleCheckError),
}
// we have to generate these two contracts differently because they are generated dynamically, and hence the static compilation from above does not suit
@@ -369,206 +378,82 @@ pub async fn deploy_contract_via_solidity(
}
///
pub async fn deploy_da_verifier_via_solidity(
settings_path: PathBuf,
input: String,
sol_code_path: PathBuf,
pub async fn register_vka_via_rv(
rpc_url: &str,
runs: usize,
private_key: Option<&str>,
) -> Result<H160, EthError> {
let (client, client_address) = setup_eth_backend(rpc_url, private_key).await?;
rv_address: H160,
vka_words: &[[u8; 32]],
) -> Result<Vec<u8>, EthError> {
let (client, _) = setup_eth_backend(rpc_url, private_key).await?;
let input = GraphData::from_str(&input).map_err(|_| EthError::GraphData)?;
let encoded = encode_register_vk_calldata(vka_words);
let settings = GraphSettings::load(&settings_path).map_err(|_| EthError::GraphSettings)?;
debug!(
"encoded register vka calldata: {:#?}",
hex::encode(&encoded)
);
let mut scales: Vec<u32> = vec![];
// The data that will be stored in the test contracts that will eventually be read from.
let mut call_to_account = None;
let input: TransactionInput = encoded.into();
let mut instance_shapes = vec![];
let mut model_instance_offset = 0;
let tx = TransactionRequest::default().to(rv_address).input(input);
debug!("transaction {:#?}", tx);
if settings.run_args.input_visibility.is_hashed() {
instance_shapes.push(POSEIDON_INSTANCES)
} else if settings.run_args.input_visibility.is_public() {
for idx in 0..settings.model_input_scales.len() {
let shape = &settings.model_instance_shapes[idx];
instance_shapes.push(shape.iter().product::<usize>());
model_instance_offset += 1;
}
let result = client.call(&tx).await;
if let Err(e) = result {
return Err(EvmVerificationError::SolidityExecution(e.to_string()).into());
}
let result = result?;
debug!("result: {:#?}", result.to_vec());
// decode return bytes value into uint8
let output = result.to_vec();
let gas = client.estimate_gas(&tx).await?;
info!("estimated vka registration cost: {:#?}", gas);
// broadcast the transaction
let result = client.send_transaction(tx).await?;
result.watch().await?;
// if gas is greater than 30 million warn the user that the gas cost is above ethereum's 30 million block gas limit
if gas > 30_000_000_u128 {
warn!(
"Gas cost of verify transaction is greater than 30 million block gas limit. It will fail on mainnet."
);
} else if gas > 15_000_000_u128 {
warn!(
"Gas cost of verify transaction is greater than 15 million, the target block size for ethereum"
);
}
if settings.run_args.param_visibility.is_hashed() {
return Err(EvmVerificationError::InvalidVisibility.into());
}
if settings.run_args.output_visibility.is_hashed() {
instance_shapes.push(POSEIDON_INSTANCES)
} else if settings.run_args.output_visibility.is_public() {
for idx in model_instance_offset..model_instance_offset + settings.model_output_scales.len()
{
let shape = &settings.model_instance_shapes[idx];
instance_shapes.push(shape.iter().product::<usize>());
}
}
let mut instance_idx = 0;
let mut contract_instance_offset = 0;
if let DataSource::OnChain(source) = input.input_data {
if settings.run_args.input_visibility.is_hashed_public() {
// set scales 1.0
scales.extend(vec![0; instance_shapes[instance_idx]]);
instance_idx += 1;
} else {
let input_scales = settings.clone().model_input_scales;
// give each input a scale
for scale in input_scales {
scales.extend(vec![scale as u32; instance_shapes[instance_idx]]);
instance_idx += 1;
}
}
// match statement for enum type of source.call
call_to_account = Some(source.call);
} else if let DataSource::File(source) = input.input_data {
if settings.run_args.input_visibility.is_public() {
instance_idx += source.len();
for s in source {
contract_instance_offset += s.len();
}
}
}
if let Some(DataSource::OnChain(source)) = input.output_data {
if settings.run_args.output_visibility.is_hashed_public() {
// set scales 1.0
scales.extend(vec![0; instance_shapes[instance_idx]]);
} else {
let input_scales = settings.clone().model_output_scales;
// give each output a scale
for scale in input_scales {
scales.extend(vec![scale as u32; instance_shapes[instance_idx]]);
instance_idx += 1;
}
}
call_to_account = Some(source.call);
// match statement for enum type of source.calls
}
deploy_da_contract(
client,
contract_instance_offset,
client_address,
scales,
call_to_account,
sol_code_path,
runs,
&settings,
)
.await
}
async fn deploy_da_contract(
client: EthersClient,
contract_instance_offset: usize,
client_address: alloy::primitives::Address,
scales: Vec<u32>,
call_to_accounts: Option<CallToAccount>,
sol_code_path: PathBuf,
runs: usize,
settings: &GraphSettings,
) -> Result<H160, EthError> {
let (abi, bytecode, runtime_bytecode) =
get_contract_artifacts(sol_code_path, "DataAttestation", runs).await?;
let (contract_address, call_data, decimals) = if let Some(call_to_accounts) = call_to_accounts {
parse_call_to_account(call_to_accounts)?
} else {
// if calls to accounts is empty then we know need to check that atleast there kzg visibility in the settings file
let kzg_visibility = settings.run_args.input_visibility.is_polycommit()
|| settings.run_args.output_visibility.is_polycommit()
|| settings.run_args.param_visibility.is_polycommit();
if !kzg_visibility {
return Err(EthError::OnChainDataSource);
}
let factory =
get_sol_contract_factory(abi, bytecode, runtime_bytecode, client, None::<()>)?;
let contract = factory.deploy().await?;
return Ok(contract);
};
let factory = get_sol_contract_factory(
abi,
bytecode,
runtime_bytecode,
client,
Some((
// address _contractAddress,
WordToken(contract_address.into_word()),
// bytes memory _callData,
PackedSeqToken(call_data.as_ref()),
// uint256 [] _decimals,
DynSeqToken(
decimals
.iter()
.map(|i| WordToken(B256::from(*i)))
.collect_vec(),
),
// uint[] memory _bits,
DynSeqToken(
scales
.clone()
.into_iter()
.map(|i| WordToken(U256::from(i).into()))
.collect_vec(),
),
// uint8 _instanceOffset,
WordToken(U256::from(contract_instance_offset as u32).into()),
// address _admin
WordToken(client_address.into_word()),
)),
)?;
debug!("scales: {:#?}", scales);
debug!("call_data: {:#?}", call_data);
debug!("contract_addresses: {:#?}", contract_address);
debug!("decimals: {:#?}", decimals);
let contract = factory.deploy().await?;
Ok(contract)
}
type ParsedCallToAccount = (H160, Bytes, Vec<U256>);
fn parse_call_to_account(call_to_account: CallToAccount) -> Result<ParsedCallToAccount, EthError> {
let contract_address_bytes = hex::decode(&call_to_account.address)?;
let contract_address = H160::from_slice(&contract_address_bytes);
let call_data_bytes = hex::decode(&call_to_account.call_data)?;
let call_data = Bytes::from(call_data_bytes);
// Parse the decimals array as uint256 array for the contract.
// iterate through the decimals array and convert each element to a uint256
let mut decimals: Vec<U256> = vec![];
for decimal in &call_to_account.decimals {
decimals.push(I256::from_dec_str(&decimal.to_string())?.unsigned_abs());
}
// let decimal = I256::from_dec_str(&call_to_account.decimals.to_string())?.unsigned_abs();
Ok((contract_address, call_data, decimals))
Ok(output)
}
/// Verify a proof using a Solidity verifier contract
/// TODO: add param to pass vka_digest and use that to fetch the VKA by indexing the RegisteredVKA events on the RV
pub async fn verify_proof_via_solidity(
proof: Snark<Fr, G1Affine>,
addr: H160,
addr_vk: Option<H160>,
vka_path: Option<PathBuf>,
rpc_url: &str,
) -> Result<bool, EthError> {
let flattened_instances = proof.instances.into_iter().flatten();
let encoded = encode_calldata(
addr_vk.as_ref().map(|x| x.0).map(|x| x.0),
&proof.proof,
&flattened_instances.collect::<Vec<_>>(),
);
// Load the vka, which is bincode serialized, from the vka_path
let vka_buf: Option<Vec<[u8; 32]>> = match vka_path {
Some(path) => {
let bytes = std::fs::read(path)?;
Some(bincode::deserialize(&bytes).map_err(|e| EthError::VkaData(e.to_string()))?)
}
None => None,
};
let vka: Option<&[[u8; 32]]> = vka_buf.as_deref();
let encoded = encode_calldata(vka, &proof.proof, &flattened_instances.collect::<Vec<_>>());
debug!("encoded: {:#?}", hex::encode(&encoded));
@@ -585,8 +470,37 @@ pub async fn verify_proof_via_solidity(
}
let result = result?;
debug!("result: {:#?}", result.to_vec());
// if result is larger than 32
if result.to_vec().len() > 32 {
// From result[96..], iterate through 32 byte chunks converting them to U256
let rescaled_instances = result.to_vec()[96..]
.chunks_exact(32)
.map(|chunk| U256::from_be_slice(chunk).to_string())
.collect::<Vec<_>>();
if let Some(pretty) = &proof.pretty_public_inputs {
// 1. collect reference decimals --------------------------------------
let mut refs = pretty.rescaled_inputs.clone();
refs.extend(pretty.rescaled_outputs.clone()); // extend inputs with outputs
let reference: Vec<String> = refs.into_iter().flatten().collect();
// 2. compare elementwise -------------------------------------------
for (idx, (inst, exp)) in rescaled_instances.iter().zip(reference.iter()).enumerate() {
if !scaled_matches(inst, exp) {
return Err(EthError::RescaleCheckError(RescaleCheckError::Mismatch {
idx,
expected: exp.clone(),
got: to_decimal_18(inst),
}));
}
}
debug!("✅ all rescaled instances match their expected values");
}
}
// decode return bytes value into uint8
let result = result.to_vec().last().ok_or(EthError::NoContractOutput)? == &1u8;
let result = result.to_vec()[..32]
.last()
.ok_or(EthError::NoContractOutput)?
== &1u8;
if !result {
return Err(EvmVerificationError::InvalidProof.into());
}
@@ -655,170 +569,6 @@ pub async fn setup_test_contract<M: 'static + Provider<Http<Client>, Ethereum>>(
Ok((contract, decimals))
}
/// Verify a proof using a Solidity DataAttestation contract.
/// Used for testing purposes.
pub async fn verify_proof_with_data_attestation(
proof: Snark<Fr, G1Affine>,
addr_verifier: H160,
addr_da: H160,
addr_vk: Option<H160>,
rpc_url: &str,
) -> Result<bool, EthError> {
use ethabi::{Function, Param, ParamType, StateMutability, Token};
let mut public_inputs: Vec<U256> = vec![];
let flattened_instances = proof.instances.into_iter().flatten();
for val in flattened_instances.clone() {
let bytes = val.to_repr();
let u = U256::from_le_slice(bytes.inner().as_slice());
public_inputs.push(u);
}
let encoded_verifier = encode_calldata(
addr_vk.as_ref().map(|x| x.0).map(|x| x.0),
&proof.proof,
&flattened_instances.collect::<Vec<_>>(),
);
debug!("encoded: {:#?}", hex::encode(&encoded_verifier));
debug!("public_inputs: {:#?}", public_inputs);
debug!("proof: {:#?}", Bytes::from(proof.proof.to_vec()));
#[allow(deprecated)]
let func = Function {
name: "verifyWithDataAttestation".to_owned(),
inputs: vec![
Param {
name: "verifier".to_owned(),
kind: ParamType::Address,
internal_type: None,
},
Param {
name: "encoded".to_owned(),
kind: ParamType::Bytes,
internal_type: None,
},
],
outputs: vec![Param {
name: "success".to_owned(),
kind: ParamType::Bool,
internal_type: None,
}],
constant: None,
state_mutability: StateMutability::View,
};
let encoded = func.encode_input(&[
Token::Address(addr_verifier.0.0.into()),
Token::Bytes(encoded_verifier),
])?;
debug!("encoded: {:#?}", hex::encode(&encoded));
let encoded: TransactionInput = encoded.into();
let (client, _) = setup_eth_backend(rpc_url, None).await?;
let tx = TransactionRequest::default().to(addr_da).input(encoded);
debug!("transaction {:#?}", tx);
info!(
"estimated verify gas cost: {:#?}",
client.estimate_gas(&tx).await?
);
let result = client.call(&tx).await;
if let Err(e) = result {
return Err(EvmVerificationError::SolidityExecution(e.to_string()).into());
}
let result = result?;
debug!("result: {:#?}", result);
// decode return bytes value into uint8
let result = result.to_vec().last().ok_or(EthError::NoContractOutput)? == &1u8;
if !result {
return Err(EvmVerificationError::InvalidProof.into());
}
Ok(true)
}
/// Tests on-chain data storage by deploying a contract that stores the network input and or output
/// data in its storage. It does this by converting the floating point values to integers and storing the
/// the number of decimals of the floating point value on chain.
pub async fn test_on_chain_data<M: 'static + Provider<Http<Client>, Ethereum>>(
client: Arc<M>,
data: &[Vec<FileSourceInner>],
) -> Result<CallToAccount, EthError> {
let (contract, decimals) = setup_test_contract(client, data).await?;
// Get the encoded calldata for the input
let builder = contract.readAll();
let call = builder.calldata();
let call_to_account = CallToAccount {
call_data: hex::encode(call),
decimals,
address: hex::encode(contract.address().0.0),
};
info!("call_to_account: {:#?}", call_to_account);
Ok(call_to_account)
}
/// Reads on-chain inputs, returning the raw encoded data returned from making all the calls in on_chain_input_data
pub async fn read_on_chain_inputs_multi<M: 'static + Provider<Http<Client>, Ethereum>>(
client: Arc<M>,
address: H160,
data: &Vec<CallsToAccount>,
) -> Result<(Vec<Bytes>, Vec<u8>), EthError> {
// Iterate over all on-chain inputs
let mut fetched_inputs = vec![];
let mut decimals = vec![];
for on_chain_data in data {
// Construct the address
let contract_address_bytes = hex::decode(&on_chain_data.address)?;
let contract_address = H160::from_slice(&contract_address_bytes);
for (call_data, decimal) in &on_chain_data.call_data {
let call_data_bytes = hex::decode(call_data)?;
let input: TransactionInput = call_data_bytes.into();
let tx = TransactionRequest::default()
.to(contract_address)
.from(address)
.input(input);
debug!("transaction {:#?}", tx);
let result = client.call(&tx).await?;
debug!("return data {:#?}", result);
fetched_inputs.push(result);
decimals.push(*decimal);
}
}
Ok((fetched_inputs, decimals))
}
/// Reads on-chain inputs, returning the raw encoded data returned from making the single call in on_chain_input_data
/// that returns the array of input data we will attest to.
pub async fn read_on_chain_inputs<M: 'static + Provider<Http<Client>, Ethereum>>(
client: Arc<M>,
address: H160,
data: &CallToAccount,
) -> Result<Bytes, EthError> {
// Iterate over all on-chain inputs
let contract_address_bytes = hex::decode(&data.address)?;
let contract_address = H160::from_slice(&contract_address_bytes);
let call_data_bytes = hex::decode(&data.call_data)?;
let input: TransactionInput = call_data_bytes.into();
let tx = TransactionRequest::default()
.to(contract_address)
.from(address)
.input(input);
debug!("transaction {:#?}", tx);
let result = client.call(&tx).await?;
debug!("return data {:#?}", result);
Ok(result)
}
pub async fn evm_quantize<M: 'static + Provider<Http<Client>, Ethereum>>(
client: Arc<M>,
scales: Vec<crate::Scale>,
@@ -966,6 +716,74 @@ pub async fn get_contract_artifacts(
Ok((abi, bytecode, runtime_bytecode))
}
/// Convert a 1e18 fixedpoint **integer string** into a decimal string.
///
/// `"1541748046875000000"` → `"1.541748046875000000"`
/// `"273690402507781982"` → `"0.273690402507781982"`
fn to_decimal_18(s: &str) -> String {
let s = s.trim_start_matches('0');
if s.is_empty() {
return "0".into();
}
if s.len() <= 18 {
// pad on the left so we always have exactly 18 fraction digits
return format!("0.{:0>18}", s);
}
let split = s.len() - 18;
format!("{}.{}", &s[..split], &s[split..]) // ← correct slice here
}
/// “Bankers-round” comparison: compare the **decimal** produced
/// by `instance` to the reference string `expected`.
///
/// * All digits present in `expected` (integer part **and** fraction)
/// must match exactly.
/// * Excess digits in `instance` are ignored **unless** the very first
/// excess digit ≥ 5; in that case we round the last compared digit
/// and check again.
fn scaled_matches(instance: &str, expected: &str) -> bool {
let inst_dec = to_decimal_18(instance);
let (inst_int, inst_frac) = inst_dec.split_once('.').unwrap_or((&inst_dec, ""));
let (exp_int, exp_frac) = expected.split_once('.').unwrap_or((expected, ""));
// integer part must be identical
if inst_int != exp_int {
return false;
}
// fraction-part comparison with optional rounding
let cmp_len = exp_frac.len();
let inst_cmp = &inst_frac[..cmp_len.min(inst_frac.len())];
let trailing = inst_frac.chars().nth(cmp_len).unwrap_or('0');
if inst_cmp == exp_frac {
true // exact match
} else if trailing >= '5' {
// need to round
// round the inst_cmp (string) up by one ulp
let mut rounded = inst_cmp.chars().collect::<Vec<_>>();
let mut carry = true;
for d in rounded.iter_mut().rev() {
if !carry {
break;
}
let v = d.to_digit(10).unwrap() + 1;
*d = char::from_digit(v % 10, 10).unwrap();
carry = v == 10;
}
if carry {
// 0.999… → 1.000…
return exp_int
== &(num::BigUint::parse_bytes(exp_int.as_bytes(), 10).unwrap() + 1u32)
.to_string()
&& exp_frac.chars().all(|c| c == '0');
}
rounded.into_iter().collect::<String>() == exp_frac
} else {
false
}
}
/// Sets the constants stored in the da verifier
pub fn fix_da_sol(commitment_bytes: Option<Vec<u8>>, only_kzg: bool) -> Result<String, EthError> {
let mut contract = ATTESTDATA_SOL.to_string();

View File

@@ -1,30 +1,30 @@
use crate::EZKL_BUF_CAPACITY;
use crate::circuit::CheckMode;
use crate::circuit::region::RegionSettings;
use crate::circuit::CheckMode;
use crate::commands::CalibrationTarget;
use crate::eth::{deploy_contract_via_solidity, deploy_da_verifier_via_solidity, fix_da_sol};
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
use crate::eth::{deploy_contract_via_solidity, register_vka_via_rv};
#[allow(unused_imports)]
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
use crate::eth::{get_contract_artifacts, verify_proof_via_solidity};
use crate::graph::input::GraphData;
use crate::graph::{GraphCircuit, GraphSettings, GraphWitness, Model};
use crate::graph::{TestDataSource, TestSources};
use crate::pfsys::evm::aggregation_kzg::{AggregationCircuit, PoseidonTranscript};
use crate::pfsys::{
ProofSplitCommit, create_proof_circuit, swap_proof_commitments_polycommit, verify_proof_circuit,
create_keys, load_pk, load_vk, save_params, save_pk, Snark, StrategyType, TranscriptType,
};
use crate::pfsys::{
Snark, StrategyType, TranscriptType, create_keys, load_pk, load_vk, save_params, save_pk,
create_proof_circuit, swap_proof_commitments_polycommit, verify_proof_circuit, ProofSplitCommit,
};
use crate::pfsys::{save_vk, srs::*};
use crate::tensor::TensorError;
use crate::EZKL_BUF_CAPACITY;
use crate::{commands::*, EZKLError};
use crate::{Commitments, RunArgs};
use crate::{EZKLError, commands::*};
use colored::Colorize;
#[cfg(unix)]
use gag::Gag;
use halo2_proofs::dev::VerifyFailure;
use halo2_proofs::plonk::{self, Circuit};
use halo2_proofs::poly::VerificationStrategy;
use halo2_proofs::poly::commitment::{CommitmentScheme, Params};
use halo2_proofs::poly::commitment::{ParamsProver, Verifier};
use halo2_proofs::poly::ipa::commitment::{IPACommitmentScheme, ParamsIPA};
@@ -37,7 +37,9 @@ use halo2_proofs::poly::kzg::strategy::AccumulatorStrategy as KZGAccumulatorStra
use halo2_proofs::poly::kzg::{
commitment::ParamsKZG, strategy::SingleStrategy as KZGSingleStrategy,
};
use halo2_proofs::poly::VerificationStrategy;
use halo2_proofs::transcript::{EncodedChallenge, TranscriptReadBuffer};
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
use halo2_solidity_verifier;
use halo2curves::bn256::{Bn256, Fr, G1Affine};
use halo2curves::ff::{FromUniformBytes, WithSmallOrderMulGroup};
@@ -48,15 +50,18 @@ use itertools::Itertools;
use lazy_static::lazy_static;
use log::debug;
use log::{info, trace, warn};
use serde::Serialize;
use serde::de::DeserializeOwned;
use serde::Serialize;
use snark_verifier::loader::native::NativeLoader;
use snark_verifier::system::halo2::Config;
use snark_verifier::system::halo2::compile;
use snark_verifier::system::halo2::transcript::evm::EvmTranscript;
use snark_verifier::system::halo2::Config;
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
use std::fs::File;
use std::io::BufWriter;
use std::io::{Cursor, Write};
use std::io::Cursor;
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
use std::io::Write;
use std::path::Path;
use std::path::PathBuf;
use std::str::FromStr;
@@ -187,6 +192,7 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
model.unwrap_or(DEFAULT_MODEL.into()),
witness.unwrap_or(DEFAULT_WITNESS.into()),
),
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
Commands::CreateEvmVerifier {
vk_path,
srs_path,
@@ -205,48 +211,35 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
)
.await
}
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
Commands::EncodeEvmCalldata {
proof_path,
calldata_path,
addr_vk,
vka_path,
} => encode_evm_calldata(
proof_path.unwrap_or(DEFAULT_PROOF.into()),
calldata_path.unwrap_or(DEFAULT_CALLDATA.into()),
addr_vk,
vka_path,
)
.map(|e| serde_json::to_string(&e).unwrap()),
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
Commands::CreateEvmVka {
vk_path,
srs_path,
settings_path,
sol_code_path,
abi_path,
vka_path,
decimals,
} => {
create_evm_vka(
vk_path.unwrap_or(DEFAULT_VK.into()),
srs_path,
settings_path.unwrap_or(DEFAULT_SETTINGS.into()),
sol_code_path.unwrap_or(DEFAULT_VK_SOL.into()),
abi_path.unwrap_or(DEFAULT_VK_ABI.into()),
)
.await
}
Commands::CreateEvmDa {
settings_path,
sol_code_path,
abi_path,
data,
witness,
} => {
create_evm_data_attestation(
settings_path.unwrap_or(DEFAULT_SETTINGS.into()),
sol_code_path.unwrap_or(DEFAULT_SOL_CODE_DA.into()),
abi_path.unwrap_or(DEFAULT_VERIFIER_DA_ABI.into()),
data.unwrap_or(DEFAULT_DATA.into()),
witness,
vka_path.unwrap_or(DEFAULT_VKA.into()),
decimals.unwrap_or(DEFAULT_DECIMALS.parse().unwrap()),
)
.await
}
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
Commands::CreateEvmVerifierAggr {
vk_path,
srs_path,
@@ -292,24 +285,6 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
disable_selector_compression
.unwrap_or(DEFAULT_DISABLE_SELECTOR_COMPRESSION.parse().unwrap()),
),
Commands::SetupTestEvmData {
data,
compiled_circuit,
test_data,
rpc_url,
input_source,
output_source,
} => {
setup_test_evm_data(
data.unwrap_or(DEFAULT_DATA.into()),
compiled_circuit.unwrap_or(DEFAULT_COMPILED_CIRCUIT.into()),
test_data,
rpc_url,
input_source,
output_source,
)
.await
}
Commands::SwapProofCommitments {
proof_path,
witness_path,
@@ -418,6 +393,7 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
commitment.into(),
)
.map(|e| serde_json::to_string(&e).unwrap()),
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
Commands::DeployEvm {
sol_code_path,
rpc_url,
@@ -436,39 +412,35 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
)
.await
}
Commands::DeployEvmDa {
data,
settings_path,
sol_code_path,
rpc_url,
addr_path,
optimizer_runs,
private_key,
} => {
deploy_da_evm(
data.unwrap_or(DEFAULT_DATA.into()),
settings_path.unwrap_or(DEFAULT_SETTINGS.into()),
sol_code_path.unwrap_or(DEFAULT_SOL_CODE_DA.into()),
rpc_url,
addr_path.unwrap_or(DEFAULT_CONTRACT_ADDRESS_DA.into()),
optimizer_runs,
private_key,
)
.await
}
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
Commands::VerifyEvm {
proof_path,
addr_verifier,
rpc_url,
addr_da,
addr_vk,
vka_path,
} => {
verify_evm(
proof_path.unwrap_or(DEFAULT_PROOF.into()),
addr_verifier,
rpc_url,
addr_da,
addr_vk,
vka_path,
)
.await
}
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
Commands::RegisterVka {
addr_verifier,
vka_path,
rpc_url,
vka_digest_path,
private_key,
} => {
register_vka(
rpc_url,
addr_verifier,
vka_path.unwrap_or(DEFAULT_VKA.into()),
vka_digest_path.unwrap_or(DEFAULT_VKA_DIGEST.into()),
private_key,
)
.await
}
@@ -1444,6 +1416,7 @@ pub(crate) fn mock(
Ok(String::new())
}
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
pub(crate) async fn create_evm_verifier(
vk_path: PathBuf,
srs_path: Option<PathBuf>,
@@ -1461,7 +1434,9 @@ pub(crate) async fn create_evm_verifier(
)?;
let num_instance = settings.total_instances();
let num_instance: usize = num_instance.iter().sum::<usize>();
// create a scales array the same length as the number of instances, all zeros (no rescaling)
let scales = vec![0; num_instance.len()];
// let poseidon_instance = settings.module_sizes.num_instances().iter().sum::<usize>();
let vk = load_vk::<KZGCommitmentScheme<Bn256>, GraphCircuit>(vk_path, settings)?;
trace!("params computed");
@@ -1470,7 +1445,10 @@ pub(crate) async fn create_evm_verifier(
&params,
&vk,
halo2_solidity_verifier::BatchOpenScheme::Bdfg21,
num_instance,
&num_instance,
&scales,
0,
0,
);
let (verifier_solidity, name) = if reusable {
(generator.render_separately()?.0, "Halo2VerifierReusable") // ignore the rendered vk artifact for now and generate it in create_evm_vka
@@ -1488,12 +1466,13 @@ pub(crate) async fn create_evm_verifier(
Ok(String::new())
}
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
pub(crate) async fn create_evm_vka(
vk_path: PathBuf,
srs_path: Option<PathBuf>,
settings_path: PathBuf,
sol_code_path: PathBuf,
abi_path: PathBuf,
vka_path: PathBuf,
decimals: usize,
) -> Result<String, EZKLError> {
let settings = GraphSettings::load(&settings_path)?;
let commitment: Commitments = settings.run_args.commitment.into();
@@ -1503,133 +1482,52 @@ pub(crate) async fn create_evm_vka(
commitment,
)?;
let num_instance = settings.total_instances();
let num_instance: usize = num_instance.iter().sum::<usize>();
let num_poseidon_instance = settings.module_sizes.num_instances().iter().sum::<usize>();
let num_fixed_point_instance = settings
.model_instance_shapes
.iter()
.map(|x| x.iter().product::<usize>())
.collect_vec();
let scales = settings.get_model_instance_scales();
let vk = load_vk::<KZGCommitmentScheme<Bn256>, GraphCircuit>(vk_path, settings)?;
trace!("params computed");
// decimals must be at most 38 to prevent overflow when rescaling
if decimals > 38 {
return Err("decimals must be less than or equal to 38".into());
}
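// (For reference: 2^128 ≈ 3.4e38, so 10u128.checked_pow(38) is Some while
// 10u128.checked_pow(39) is None — 38 is presumably the largest exponent
// whose power of ten still fits in a 128-bit word.)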
let generator = halo2_solidity_verifier::SolidityGenerator::new(
&params,
&vk,
halo2_solidity_verifier::BatchOpenScheme::Bdfg21,
num_instance,
&num_fixed_point_instance,
&scales,
decimals,
num_poseidon_instance,
);
let vk_solidity = generator.render_separately()?.1;
let vka_words: Vec<[u8; 32]> = generator.render_separately_vka_words()?.1;
let serialized_vka_words = bincode::serialize(&vka_words)
.map_err(|e| EZKLError::from(format!("Failed to serialize vka words: {e}")))?;
File::create(sol_code_path.clone())?.write_all(vk_solidity.as_bytes())?;
File::create(vka_path.clone())?.write_all(&serialized_vka_words)?;
// fetch abi of the contract
let (abi, _, _) = get_contract_artifacts(sol_code_path, "Halo2VerifyingArtifact", 0).await?;
// save abi to file
serde_json::to_writer(std::fs::File::create(abi_path)?, &abi)?;
// Load the vka words back from disk, deserialize them, and check that they round-trip
let bytes = std::fs::read(vka_path)?;
let vka_buf: Vec<[u8; 32]> = bincode::deserialize(&bytes)
.map_err(|e| EZKLError::from(format!("Failed to deserialize vka words: {e}")))?;
if vka_buf != vka_words {
return Err("vka words do not match".into());
};
Ok(String::new())
}
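// A standalone sketch of the VKA round-trip performed above (requires the
// `bincode` crate; the [u8; 32] words stand in for the verifying-key artifact):
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let vka_words: Vec<[u8; 32]> = vec![[1u8; 32], [2u8; 32]];
    let bytes = bincode::serialize(&vka_words)?;
    let decoded: Vec<[u8; 32]> = bincode::deserialize(&bytes)?;
    assert_eq!(decoded, vka_words); // the same check create_evm_vka performs on disk
    Ok(())
}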
pub(crate) async fn create_evm_data_attestation(
settings_path: PathBuf,
sol_code_path: PathBuf,
abi_path: PathBuf,
input: String,
witness: Option<PathBuf>,
) -> Result<String, EZKLError> {
#[allow(unused_imports)]
use crate::graph::{DataSource, VarVisibility};
use crate::{graph::Visibility, pfsys::get_proof_commitments};
let settings = GraphSettings::load(&settings_path)?;
let visibility = VarVisibility::from_args(&settings.run_args)?;
trace!("params computed");
// if input is not provided, we just instantiate dummy input data
let data =
GraphData::from_str(&input).unwrap_or_else(|_| GraphData::new(DataSource::File(vec![])));
debug!("data attestation data: {:?}", data);
// The number of input and output instances we attest to for the single call data attestation
let mut input_len = None;
let mut output_len = None;
if let Some(DataSource::OnChain(source)) = data.output_data {
if visibility.output.is_private() {
return Err("private output data on chain is not supported on chain".into());
}
output_len = Some(source.call.decimals.len());
};
if let DataSource::OnChain(source) = data.input_data {
if visibility.input.is_private() {
return Err("private input data on chain is not supported on chain".into());
}
input_len = Some(source.call.decimals.len());
};
// Read the settings file and check whether run_args.input_visibility,
// run_args.output_visibility, or run_args.param_visibility is KZGCommit;
// if so, we need to load the witness to extract the polynomial commitments
let commitment_bytes = if settings.run_args.input_visibility == Visibility::KZGCommit
|| settings.run_args.output_visibility == Visibility::KZGCommit
|| settings.run_args.param_visibility == Visibility::KZGCommit
{
let witness = GraphWitness::from_path(witness.unwrap_or(DEFAULT_WITNESS.into()))?;
let commitments = witness.get_polycommitments();
let proof_first_bytes = get_proof_commitments::<
KZGCommitmentScheme<Bn256>,
_,
EvmTranscript<G1Affine, _, _, _>,
>(&commitments);
Some(proof_first_bytes.unwrap())
} else {
None
};
// only_kzg is true when there are no on-chain input or output sources, so
// the DA contract only attests the polynomial commitments
let output: String = fix_da_sol(
commitment_bytes,
input_len.is_none() && output_len.is_none(),
)?;
let mut f = File::create(sol_code_path.clone())?;
f.write_all(output.as_bytes())?;
// fetch abi of the contract
let (abi, _, _) = get_contract_artifacts(sol_code_path, "DataAttestation", 0).await?;
// save abi to file
serde_json::to_writer(std::fs::File::create(abi_path)?, &abi)?;
Ok(String::new())
}
pub(crate) async fn deploy_da_evm(
data: String,
settings_path: PathBuf,
sol_code_path: PathBuf,
rpc_url: String,
addr_path: PathBuf,
runs: usize,
private_key: Option<String>,
) -> Result<String, EZKLError> {
let contract_address = deploy_da_verifier_via_solidity(
settings_path,
data,
sol_code_path,
&rpc_url,
runs,
private_key.as_deref(),
)
.await?;
info!("Contract deployed at: {}", contract_address);
let mut f = File::create(addr_path)?;
write!(f, "{:#?}", contract_address)?;
Ok(String::new())
}
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
pub(crate) async fn deploy_evm(
sol_code_path: PathBuf,
rpc_url: String,
@@ -1641,7 +1539,6 @@ pub(crate) async fn deploy_evm(
let contract_name = match contract {
ContractType::Verifier { reusable: false } => "Halo2Verifier",
ContractType::Verifier { reusable: true } => "Halo2VerifierReusable",
ContractType::VerifyingKeyArtifact => "Halo2VerifyingArtifact",
};
let contract_address = deploy_contract_via_solidity(
sol_code_path,
@@ -1659,21 +1556,61 @@ pub(crate) async fn deploy_evm(
Ok(String::new())
}
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
pub(crate) async fn register_vka(
rpc_url: String,
rv_addr: H160Flag,
vka_path: PathBuf,
vka_digest_path: PathBuf,
private_key: Option<String>,
) -> Result<String, EZKLError> {
// Load the vka, which is bincode serialized, from the vka_path
let bytes = std::fs::read(vka_path)?;
let vka_buf: Vec<[u8; 32]> = bincode::deserialize(&bytes)
.map_err(|e| EZKLError::from(format!("Failed to deserialize vka words: {e}")))?;
let vka_digest = register_vka_via_rv(
rpc_url.as_ref(),
private_key.as_deref(),
rv_addr.into(),
&vka_buf,
)
.await?;
info!("VKA digest: {:#?}", vka_digest);
let mut f = File::create(vka_digest_path)?;
write!(f, "{:#?}", vka_digest)?;
Ok(String::new())
}
/// Encodes the calldata for the EVM verifier (both aggregated and single proof)
/// TODO: Add an "RV address" param which will query the "RegisteredVKA" events to fetch the
/// VKA from the vka_digest.
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
pub(crate) fn encode_evm_calldata(
proof_path: PathBuf,
calldata_path: PathBuf,
addr_vk: Option<H160Flag>,
vka_path: Option<PathBuf>,
) -> Result<Vec<u8>, EZKLError> {
let snark = Snark::load::<IPACommitmentScheme<G1Affine>>(&proof_path)?;
let flattened_instances = snark.instances.into_iter().flatten();
// Load the vka, which is bincode serialized, from the vka_path
let vka_buf: Option<Vec<[u8; 32]>> =
match vka_path {
Some(path) => {
let bytes = std::fs::read(path)?;
Some(bincode::deserialize(&bytes).map_err(|e| {
EZKLError::from(format!("Failed to deserialize vka words: {e}"))
})?)
}
None => None,
};
let vka: Option<&[[u8; 32]]> = vka_buf.as_deref();
let encoded = halo2_solidity_verifier::encode_calldata(
addr_vk
.as_ref()
.map(|x| alloy::primitives::Address::from(*x).0)
.map(|x| x.0),
vka,
&snark.proof,
&flattened_instances.collect::<Vec<_>>(),
);
@@ -1685,35 +1622,24 @@ pub(crate) fn encode_evm_calldata(
Ok(encoded)
}
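// Sketch of the borrow used above: Option::as_deref turns the owned
// Option<Vec<[u8; 32]>> into Option<&[[u8; 32]]> without cloning, which is
// the shape encode_calldata's optional-VKA parameter takes.
fn main() {
    let owned: Option<Vec<[u8; 32]>> = Some(vec![[0u8; 32]]);
    let borrowed: Option<&[[u8; 32]]> = owned.as_deref();
    assert_eq!(borrowed.map(|words| words.len()), Some(1));
}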
/// TODO: Add an optional vka_digest param that will allow us to fetch the associated VKA
/// from the RegisteredVKA events on the RV.
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
pub(crate) async fn verify_evm(
proof_path: PathBuf,
addr_verifier: H160Flag,
rpc_url: String,
addr_da: Option<H160Flag>,
addr_vk: Option<H160Flag>,
vka_path: Option<PathBuf>,
) -> Result<String, EZKLError> {
use crate::eth::verify_proof_with_data_attestation;
let proof = Snark::load::<KZGCommitmentScheme<Bn256>>(&proof_path)?;
let result = if let Some(addr_da) = addr_da {
verify_proof_with_data_attestation(
proof.clone(),
addr_verifier.into(),
addr_da.into(),
addr_vk.map(|s| s.into()),
&rpc_url,
)
.await?
} else {
verify_proof_via_solidity(
proof.clone(),
addr_verifier.into(),
addr_vk.map(|s| s.into()),
&rpc_url,
)
.await?
};
let result = verify_proof_via_solidity(
proof.clone(),
addr_verifier.into(),
vka_path.map(|s| s.into()),
rpc_url.as_ref(),
)
.await?;
info!("Solidity verification result: {}", result);
@@ -1724,6 +1650,7 @@ pub(crate) async fn verify_evm(
Ok(String::new())
}
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
pub(crate) async fn create_evm_aggregate_verifier(
vk_path: PathBuf,
srs_path: Option<PathBuf>,
@@ -1749,8 +1676,8 @@ pub(crate) async fn create_evm_aggregate_verifier(
.sum();
let num_instance = AggregationCircuit::num_instance(num_instance);
let scales = vec![0; num_instance.len()];
assert_eq!(num_instance.len(), 1);
let num_instance = num_instance[0];
let agg_vk = load_vk::<KZGCommitmentScheme<Bn256>, AggregationCircuit>(vk_path, ())?;
@@ -1758,7 +1685,10 @@ pub(crate) async fn create_evm_aggregate_verifier(
&params,
&agg_vk,
halo2_solidity_verifier::BatchOpenScheme::Bdfg21,
num_instance,
&num_instance,
&scales,
0,
0,
);
let acc_encoding = halo2_solidity_verifier::AccumulatorEncoding::new(
@@ -1847,41 +1777,6 @@ pub(crate) fn setup(
Ok(String::new())
}
pub(crate) async fn setup_test_evm_data(
data_path: String,
compiled_circuit_path: PathBuf,
test_data: PathBuf,
rpc_url: String,
input_source: TestDataSource,
output_source: TestDataSource,
) -> Result<String, EZKLError> {
use crate::graph::TestOnChainData;
let mut data = GraphData::from_str(&data_path)?;
let mut circuit = GraphCircuit::load(compiled_circuit_path)?;
// if both input and output are from files, fail
if matches!(input_source, TestDataSource::File) && matches!(output_source, TestDataSource::File)
{
return Err("Both input and output cannot be from files".into());
}
let test_on_chain_data = TestOnChainData {
data: test_data.clone(),
rpc: rpc_url,
data_sources: TestSources {
input: input_source,
output: output_source,
},
};
circuit
.populate_on_chain_test_data(&mut data, test_on_chain_data)
.await?;
Ok(String::new())
}
use crate::pfsys::ProofType;
#[allow(clippy::too_many_arguments)]

View File

@@ -104,6 +104,7 @@ pub enum GraphError {
not(all(target_arch = "wasm32", target_os = "unknown"))
))]
#[error("[eth] {0}")]
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
EthError(#[from] crate::eth::EthError),
/// Json error
#[error("[json] {0}")]

View File

@@ -182,46 +182,6 @@ impl OnChainSource {
pub fn new(call: CallToAccount, rpc: RPCUrl) -> Self {
OnChainSource { call, rpc }
}
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
/// Creates test data for the OnChain data source
/// Used for testing and development purposes
///
/// # Arguments
/// * `data` - Sample file data to use
/// * `scales` - Scaling factors for each input
/// * `shapes` - Shapes of the input tensors
/// * `rpc` - Optional RPC endpoint override
pub async fn test_from_file_data(
data: &FileSource,
scales: Vec<crate::Scale>,
mut shapes: Vec<Vec<usize>>,
rpc: &str,
) -> Result<Self, GraphError> {
use crate::eth::{read_on_chain_inputs, test_on_chain_data};
use log::debug;
// Set up local anvil instance for reading on-chain data
let (client, client_address) = crate::eth::setup_eth_backend(rpc, None).await?;
let mut scales = scales;
// set scales to 0 where the data is already a field element
for (idx, i) in data.iter().enumerate() {
if i.iter().all(|e| e.is_field()) {
scales[idx] = 0;
shapes[idx] = vec![i.len()];
}
}
let used_rpc = rpc.to_string();
let call_to_account = test_on_chain_data(client.clone(), data).await?;
debug!("Call to account: {:?}", call_to_account);
let inputs = read_on_chain_inputs(client.clone(), client_address, &call_to_account).await?;
debug!("Inputs: {:?}", inputs);
// Fill the input_data field of the GraphData struct
Ok(OnChainSource::new(call_to_account, used_rpc))
}
}
/// Specification for view-only calls to fetch on-chain data
@@ -253,8 +213,6 @@ pub struct CallToAccount {
pub enum DataSource {
/// Data from a JSON file containing arrays of values
File(FileSource),
/// Data fetched from blockchain contracts
OnChain(OnChainSource),
}
impl Default for DataSource {
@@ -289,12 +247,6 @@ impl From<Vec<Vec<f64>>> for DataSource {
}
}
impl From<OnChainSource> for DataSource {
fn from(data: OnChainSource) -> Self {
DataSource::OnChain(data)
}
}
// Note: Always use JSON serialization for untagged enums
impl<'de> Deserialize<'de> for DataSource {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
@@ -309,12 +261,6 @@ impl<'de> Deserialize<'de> for DataSource {
return Ok(DataSource::File(t));
}
// Try deserializing as OnChainSource
let second_try: Result<OnChainSource, _> = serde_json::from_str(this_json.get());
if let Ok(t) = second_try {
return Ok(DataSource::OnChain(t));
}
Err(serde::de::Error::custom("failed to deserialize DataSource"))
}
}
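// A minimal sketch of the try-each-variant pattern above (hypothetical
// two-variant enum; requires serde with derive, plus serde_json). Untagged
// enums carry no discriminant, so only a self-describing format like JSON can
// recover the variant — hence the "always use JSON serialization" note.
use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct OnChain { rpc: String }

#[derive(Debug)]
enum Source { File(Vec<Vec<f64>>), OnChain(OnChain) }

fn parse_source(raw: &str) -> Result<Source, serde_json::Error> {
    // try the File variant first, exactly as the impl above does
    if let Ok(f) = serde_json::from_str::<Vec<Vec<f64>>>(raw) {
        return Ok(Source::File(f));
    }
    serde_json::from_str::<OnChain>(raw).map(Source::OnChain)
}

fn main() {
    assert!(matches!(parse_source("[[1.0, 2.0]]"), Ok(Source::File(_))));
    assert!(matches!(
        parse_source(r#"{"rpc": "http://localhost:8545"}"#),
        Ok(Source::OnChain(_))
    ));
}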
@@ -360,12 +306,6 @@ impl GraphData {
}
}
}
_ => {
return Err(GraphError::InvalidDims(
0,
"non file data cannot be split into batches".to_string(),
));
}
}
Ok(inputs)
}
@@ -491,15 +431,6 @@ impl GraphData {
input_data: DataSource::File(data),
output_data: _,
} => data.clone(),
GraphData {
input_data: DataSource::OnChain(_),
output_data: _,
} => {
return Err(GraphError::InvalidDims(
0,
"on-chain data cannot be split into batches".to_string(),
));
}
};
// Process each input tensor according to its shape
@@ -627,13 +558,6 @@ impl ToPyObject for DataSource {
fn to_object(&self, py: Python) -> PyObject {
match self {
DataSource::File(data) => data.to_object(py),
DataSource::OnChain(source) => {
let dict = PyDict::new(py);
dict.set_item("rpc_url", &source.rpc).unwrap();
dict.set_item("calls_to_accounts", &source.call.to_object(py))
.unwrap();
dict.to_object(py)
}
}
}
}

View File

@@ -25,9 +25,11 @@ use itertools::Itertools;
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use tosubcommand::ToFlags;
#[cfg(any(not(feature = "ezkl"), target_arch = "wasm32"))]
use self::input::{FileSource, GraphData};
use self::errors::GraphError;
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use self::input::OnChainSource;
use self::input::{FileSource, GraphData};
use self::modules::{GraphModules, ModuleConfigs, ModuleForwardResult, ModuleSizes};
use crate::circuit::lookup::LookupOp;
@@ -538,16 +540,38 @@ impl GraphSettings {
/// calculate the total number of instances
pub fn total_instances(&self) -> Vec<usize> {
let mut instances: Vec<usize> = self
.model_instance_shapes
.iter()
.map(|x| x.iter().product())
.collect();
instances.extend(self.module_sizes.num_instances());
let mut instances: Vec<usize> = self.module_sizes.num_instances();
instances.extend(
self.model_instance_shapes
.iter()
.map(|x| x.iter().product::<usize>()),
);
instances
}
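// Worked example of the new ordering (hypothetical sizes): module instances
// now come first, then the flattened model instances — a standalone sketch:
fn main() {
    let module_instances: Vec<usize> = vec![1]; // e.g. one poseidon hash instance
    let model_shapes: Vec<Vec<usize>> = vec![vec![1, 3], vec![2]];
    let mut instances = module_instances;
    instances.extend(model_shapes.iter().map(|x| x.iter().product::<usize>()));
    assert_eq!(instances, vec![1, 3, 2]);
}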
/// Get the fixed-point scales for the public model instances
pub fn get_model_instance_scales(&self) -> Vec<crate::Scale> {
let mut scales = vec![];
if self.run_args.input_visibility.is_public() {
scales.extend(self.model_input_scales.iter().cloned());
}
if self.run_args.output_visibility.is_public() {
scales.extend(self.model_output_scales.iter().cloned());
}
scales
}
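// Sketch of the visibility gating above (hypothetical values): only public
// tensors contribute scales, one per model instance tensor, inputs first.
fn main() {
    let (input_public, output_public) = (true, false);
    let (input_scales, output_scales) = (vec![7i32], vec![7i32]);
    let mut scales: Vec<i32> = vec![];
    if input_public {
        scales.extend(input_scales.iter().copied());
    }
    if output_public {
        scales.extend(output_scales.iter().copied());
    }
    assert_eq!(scales, vec![7]); // the private output's scale is dropped
}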
/// calculate the log2 of the total number of instances
pub fn log2_total_instances(&self) -> u32 {
let sum = self.total_instances().iter().sum::<usize>();
@@ -937,6 +961,7 @@ impl GraphCircuit {
}
///
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
pub fn load_graph_from_file_exclusively(
&mut self,
data: &GraphData,
@@ -950,7 +975,6 @@ impl GraphCircuit {
DataSource::File(file_data) => {
self.load_file_data(file_data, &shapes, scales, input_types)
}
_ => Err(GraphError::OnChainDataSource),
}
}
@@ -982,7 +1006,6 @@ impl GraphCircuit {
DataSource::File(file_data) => {
self.load_file_data(file_data, &shapes, scales, input_types)
}
DataSource::OnChain(_) => Err(GraphError::OnChainDataSource),
}
}
@@ -996,45 +1019,12 @@ impl GraphCircuit {
input_types: Vec<InputType>,
) -> Result<Vec<Tensor<Fp>>, GraphError> {
match &data {
DataSource::OnChain(source) => {
let mut per_item_scale = vec![];
for (i, shape) in shapes.iter().enumerate() {
per_item_scale.extend(vec![scales[i]; shape.iter().product::<usize>()]);
}
self.load_on_chain_data(source.clone(), &shapes, per_item_scale)
.await
}
DataSource::File(file_data) => {
self.load_file_data(file_data, &shapes, scales, input_types)
}
}
}
/// Prepare on chain test data
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
pub async fn load_on_chain_data(
&mut self,
source: OnChainSource,
shapes: &Vec<Vec<usize>>,
scales: Vec<crate::Scale>,
) -> Result<Vec<Tensor<Fp>>, GraphError> {
use crate::eth::{evm_quantize, read_on_chain_inputs, setup_eth_backend};
let (client, client_address) = setup_eth_backend(&source.rpc, None).await?;
let input = read_on_chain_inputs(client.clone(), client_address, &source.call).await?;
let quantized_evm_inputs =
evm_quantize(client, scales, &input, &source.call.decimals).await?;
// on-chain data has already been quantized at this point. Just need to reshape it and push into tensor vector
let mut inputs: Vec<Tensor<Fp>> = vec![];
for (input, shape) in [quantized_evm_inputs].iter().zip(shapes) {
let mut t: Tensor<Fp> = input.iter().cloned().collect();
t.reshape(shape)?;
inputs.push(t);
}
Ok(inputs)
}
///
pub fn load_file_data(
&mut self,
@@ -1410,85 +1400,6 @@ impl GraphCircuit {
let model = Model::from_run_args(&params.run_args, model_path)?;
Self::new_from_settings(model, params.clone(), check_mode)
}
///
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
pub async fn populate_on_chain_test_data(
&mut self,
data: &mut GraphData,
test_on_chain_data: TestOnChainData,
) -> Result<(), GraphError> {
// Set up local anvil instance for reading on-chain data
let input_scales = self.model().graph.get_input_scales();
let output_scales = self.model().graph.get_output_scales()?;
let input_shapes = self.model().graph.input_shapes()?;
let output_shapes = self.model().graph.output_shapes()?;
let mut input_data = None;
let mut output_data = None;
if matches!(
test_on_chain_data.data_sources.input,
TestDataSource::OnChain
) {
// if not public then fail
if self.settings().run_args.input_visibility.is_private() {
return Err(GraphError::OnChainDataSource);
}
input_data = match &data.input_data {
DataSource::File(input_data) => Some(input_data),
_ => {
return Err(GraphError::MissingDataSource);
}
};
}
if matches!(
test_on_chain_data.data_sources.output,
TestDataSource::OnChain
) {
// if not public then fail
if self.settings().run_args.output_visibility.is_private() {
return Err(GraphError::OnChainDataSource);
}
output_data = match &data.output_data {
Some(DataSource::File(output_data)) => Some(output_data),
_ => return Err(GraphError::MissingDataSource),
};
}
// Merge the input and output data
let mut file_data: Vec<Vec<input::FileSourceInner>> = vec![];
let mut scales: Vec<crate::Scale> = vec![];
let mut shapes: Vec<Vec<usize>> = vec![];
if let Some(input_data) = input_data {
file_data.extend(input_data.clone());
scales.extend(input_scales.clone());
shapes.extend(input_shapes.clone());
}
if let Some(output_data) = output_data {
file_data.extend(output_data.clone());
scales.extend(output_scales.clone());
shapes.extend(output_shapes.clone());
};
// print file data
debug!("file data: {:?}", file_data);
let on_chain_data: OnChainSource =
OnChainSource::test_from_file_data(&file_data, scales, shapes, &test_on_chain_data.rpc)
.await?;
// Here we update the GraphData struct with the on-chain data
if input_data.is_some() {
data.input_data = on_chain_data.clone().into();
}
if output_data.is_some() {
data.output_data = Some(on_chain_data.into());
}
debug!("test on-chain data: {:?}", data);
// Save the updated GraphData struct to the data_path
data.save(test_on_chain_data.data)?;
Ok(())
}
}
#[derive(Clone, Debug, Default, Serialize, Deserialize)]

View File

@@ -44,6 +44,7 @@ pub enum EZKLError {
not(all(target_arch = "wasm32", target_os = "unknown"))
))]
#[error("[eth] {0}")]
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
EthError(#[from] eth::EthError),
#[error("[graph] {0}")]
GraphError(#[from] graph::errors::GraphError),
@@ -97,11 +98,11 @@ impl From<String> for EZKLError {
use std::str::FromStr;
use circuit::{CheckMode, table::Range};
use circuit::{table::Range, CheckMode};
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use clap::Args;
use fieldutils::IntegerRep;
use graph::{MAX_PUBLIC_SRS, Visibility};
use graph::{Visibility, MAX_PUBLIC_SRS};
use halo2_proofs::poly::{
ipa::commitment::IPACommitmentScheme, kzg::commitment::KZGCommitmentScheme,
};
@@ -134,7 +135,7 @@ pub mod circuit;
/// CLI commands.
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
pub mod commands;
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
// abigen doesn't generate docs for this module
#[allow(missing_docs)]
/// Utility functions for contracts

View File

@@ -3,10 +3,10 @@
mod native_tests {
// use ezkl::circuit::table::RESERVED_BLINDING_ROWS_PAD;
use ezkl::Commitments;
use ezkl::graph::input::{FileSource, FileSourceInner, GraphData};
use ezkl::graph::{DataSource, GraphSettings, GraphWitness};
use ezkl::graph::input::{FileSource, GraphData};
use ezkl::graph::{DataSource, GraphSettings};
use ezkl::pfsys::Snark;
use ezkl::Commitments;
use halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme;
use halo2curves::bn256::Bn256;
use lazy_static::lazy_static;
@@ -991,7 +991,6 @@ mod native_tests {
use crate::native_tests::kzg_evm_prove_and_verify;
use crate::native_tests::kzg_evm_prove_and_verify_reusable_verifier;
use crate::native_tests::kzg_evm_on_chain_input_prove_and_verify;
use crate::native_tests::kzg_evm_aggr_prove_and_verify;
use tempdir::TempDir;
use crate::native_tests::Hardfork;
@@ -1006,101 +1005,6 @@ mod native_tests {
}
/// Currently only on-chain inputs that return a non-negative value are supported.
const TESTS_ON_CHAIN_INPUT: [&str; 17] = [
"1l_mlp",
"1l_average",
"1l_reshape",
"1l_sigmoid",
"1l_div",
"1l_sqrt",
"1l_prelu",
"1l_var",
"1l_leakyrelu",
"1l_gelu_noappx",
"1l_relu",
"1l_tanh",
"2l_relu_sigmoid_small",
"2l_relu_small",
"2l_relu_fc",
"min",
"max"
];
seq!(N in 0..=16 {
#(#[test_case((TESTS_ON_CHAIN_INPUT[N],Hardfork::Latest))])*
#(#[test_case((TESTS_ON_CHAIN_INPUT[N],Hardfork::Paris))])*
#(#[test_case((TESTS_ON_CHAIN_INPUT[N],Hardfork::London))])*
#(#[test_case((TESTS_ON_CHAIN_INPUT[N],Hardfork::Shanghai))])*
fn kzg_evm_on_chain_input_prove_and_verify_(test: (&str,Hardfork)) {
let (test,hardfork) = test;
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
let _anvil_child = crate::native_tests::start_anvil(true, hardfork);
kzg_evm_on_chain_input_prove_and_verify(path, test.to_string(), "on-chain", "file", "public", "private", "private");
// test_dir.close().unwrap();
}
#(#[test_case(TESTS_ON_CHAIN_INPUT[N])])*
fn kzg_evm_on_chain_output_prove_and_verify_(test: &str) {
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
let _anvil_child = crate::native_tests::start_anvil(true, Hardfork::Latest);
kzg_evm_on_chain_input_prove_and_verify(path, test.to_string(), "file", "on-chain", "private", "public", "private");
// test_dir.close().unwrap();
}
#(#[test_case(TESTS_ON_CHAIN_INPUT[N])])*
fn kzg_evm_on_chain_input_output_prove_and_verify_(test: &str) {
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
let _anvil_child = crate::native_tests::start_anvil(true, Hardfork::Latest);
kzg_evm_on_chain_input_prove_and_verify(path, test.to_string(), "on-chain", "on-chain", "public", "public", "private");
test_dir.close().unwrap();
}
#(#[test_case(TESTS_ON_CHAIN_INPUT[N])])*
fn kzg_evm_on_chain_input_output_hashed_prove_and_verify_(test: &str) {
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
let _anvil_child = crate::native_tests::start_anvil(true, Hardfork::Latest);
kzg_evm_on_chain_input_prove_and_verify(path, test.to_string(), "on-chain", "on-chain", "hashed", "hashed", "private");
test_dir.close().unwrap();
}
#(#[test_case(TESTS_ON_CHAIN_INPUT[N])])*
fn kzg_evm_on_chain_input_kzg_output_kzg_params_prove_and_verify_(test: &str) {
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
let _anvil_child = crate::native_tests::start_anvil(true, Hardfork::Latest);
kzg_evm_on_chain_input_prove_and_verify(path, test.to_string(), "on-chain", "file", "public", "polycommit", "polycommit");
test_dir.close().unwrap();
}
#(#[test_case(TESTS_ON_CHAIN_INPUT[N])])*
fn kzg_evm_on_chain_output_kzg_input_kzg_params_prove_and_verify_(test: &str) {
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
let _anvil_child = crate::native_tests::start_anvil(true, Hardfork::Latest);
kzg_evm_on_chain_input_prove_and_verify(path, test.to_string(), "file", "on-chain", "polycommit", "public", "polycommit");
test_dir.close().unwrap();
}
#(#[test_case(TESTS_ON_CHAIN_INPUT[N])])*
fn kzg_evm_on_chain_all_kzg_params_prove_and_verify_(test: &str) {
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
let _anvil_child = crate::native_tests::start_anvil(true, Hardfork::Latest);
kzg_evm_on_chain_input_prove_and_verify(path, test.to_string(), "file", "file", "polycommit", "polycommit", "polycommit");
test_dir.close().unwrap();
}
});
seq!(N in 0..=17 {
// these take a particularly long time to run
#(#[test_case(TESTS_EVM_AGGR[N])])*
@@ -2192,15 +2096,14 @@ mod native_tests {
}
};
let addr_path_arg_vk = format!("--addr-path={}/{}/addr_vk.txt", test_dir, example_name);
let sol_arg_vk: String = format!("--sol-code-path={}/{}/vk.sol", test_dir, example_name);
let arg_vka: String = format!("--vka-path={}/{}/vka.bytes", test_dir, example_name);
// create the verifier
let args = vec![
"create-evm-vka",
"--vk-path",
&vk_arg,
&settings_arg,
&sol_arg_vk,
&arg_vka,
];
let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
@@ -2209,13 +2112,12 @@ mod native_tests {
.expect("failed to execute process");
assert!(status.success());
// deploy the vka
// register the vka
let args = vec![
"deploy-evm",
"register-vka",
rpc_arg.as_str(),
addr_path_arg_vk.as_str(),
sol_arg_vk.as_str(),
"-C=vka",
arg_vka.as_str(),
deployed_addr_arg.as_str(),
];
let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
@@ -2224,19 +2126,13 @@ mod native_tests {
.expect("failed to execute process");
assert!(status.success());
// read in the address
let addr_vk = std::fs::read_to_string(format!("{}/{}/addr_vk.txt", test_dir, example_name))
.expect("failed to read address file");
let deployed_addr_arg_vk = format!("--addr-vk={}", addr_vk);
// create encoded calldata
let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
.args([
"encode-evm-calldata",
"--proof-path",
&format!("{}/{}/proof.pf", test_dir, example_name),
&deployed_addr_arg_vk,
&arg_vka,
])
.status()
.expect("failed to execute process");
@@ -2251,7 +2147,7 @@ mod native_tests {
pf_arg.as_str(),
rpc_arg.as_str(),
deployed_addr_arg.as_str(),
deployed_addr_arg_vk.as_str(),
arg_vka.as_str(),
];
let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
@@ -2329,298 +2225,6 @@ mod native_tests {
assert!(status.success());
}
fn kzg_evm_on_chain_input_prove_and_verify(
test_dir: &str,
example_name: String,
input_source: &str,
output_source: &str,
input_visibility: &str,
output_visibility: &str,
param_visibility: &str,
) {
gen_circuit_settings_and_witness(
test_dir,
example_name.clone(),
input_visibility,
param_visibility,
output_visibility,
1,
"resources",
// we need the accuracy
Some(vec![4]),
1,
Commitments::KZG,
2,
false,
None,
None,
);
let model_path = format!("{}/{}/network.compiled", test_dir, example_name);
let settings_path = format!("{}/{}/settings.json", test_dir, example_name);
init_params(settings_path.clone().into());
let data_path = format!("{}/{}/input.json", test_dir, example_name);
let witness_path = format!("{}/{}/witness.json", test_dir, example_name);
let test_on_chain_data_path = format!("{}/{}/on_chain_input.json", test_dir, example_name);
let rpc_arg = format!("--rpc-url={}", LIMITLESS_ANVIL_URL.as_str());
let private_key = format!("--private-key={}", *ANVIL_DEFAULT_PRIVATE_KEY);
let test_input_source = format!("--input-source={}", input_source);
let test_output_source = format!("--output-source={}", output_source);
let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
.args([
"setup",
"-M",
&model_path,
"--pk-path",
&format!("{}/{}/key.pk", test_dir, example_name),
"--vk-path",
&format!("{}/{}/key.vk", test_dir, example_name),
])
.status()
.expect("failed to execute process");
assert!(status.success());
// generate the witness, passing the vk path to generate the necessary kzg commits
let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
.args([
"gen-witness",
"-D",
&data_path,
"-M",
&model_path,
"-O",
&witness_path,
"--vk-path",
&format!("{}/{}/key.vk", test_dir, example_name),
])
.status()
.expect("failed to execute process");
assert!(status.success());
// load witness
let witness: GraphWitness = GraphWitness::from_path(witness_path.clone().into()).unwrap();
// print out the witness
println!("WITNESS: {:?}", witness);
let mut input: GraphData = GraphData::from_path(data_path.clone().into()).unwrap();
if input_source != "file" || output_source != "file" {
println!("on chain input");
if input_visibility == "hashed" {
let hashes = witness.processed_inputs.unwrap().poseidon_hash.unwrap();
input.input_data = DataSource::File(
hashes
.iter()
.map(|h| vec![FileSourceInner::Field(*h)])
.collect(),
);
}
if output_visibility == "hashed" {
let hashes = witness.processed_outputs.unwrap().poseidon_hash.unwrap();
input.output_data = Some(DataSource::File(
hashes
.iter()
.map(|h| vec![FileSourceInner::Field(*h)])
.collect(),
));
} else {
input.output_data = Some(DataSource::File(
witness
.pretty_elements
.unwrap()
.rescaled_outputs
.iter()
.map(|o| {
o.iter()
.map(|f| FileSourceInner::Float(f.parse().unwrap()))
.collect()
})
.collect(),
));
}
input.save(data_path.clone().into()).unwrap();
let args = vec![
"setup-test-evm-data",
"-D",
data_path.as_str(),
"-M",
&model_path,
"--test-data",
test_on_chain_data_path.as_str(),
rpc_arg.as_str(),
test_input_source.as_str(),
test_output_source.as_str(),
];
let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
.args(args)
.status()
.expect("failed to execute process");
assert!(status.success());
// generate the witness, passing the vk path to generate the necessary kzg commits only
// if input visibility is NOT hashed
if input_visibility != "hashed" {
let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
.args([
"gen-witness",
"-D",
&test_on_chain_data_path,
"-M",
&model_path,
"-O",
&witness_path,
"--vk-path",
&format!("{}/{}/key.vk", test_dir, example_name),
])
.status()
.expect("failed to execute process");
assert!(status.success());
}
}
let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
.args([
"prove",
"-W",
&witness_path,
"-M",
&model_path,
"--proof-path",
&format!("{}/{}/proof.pf", test_dir, example_name),
"--pk-path",
&format!("{}/{}/key.pk", test_dir, example_name),
])
.status()
.expect("failed to execute process");
assert!(status.success());
let vk_arg = format!("{}/{}/key.vk", test_dir, example_name);
let settings_arg = format!("--settings-path={}", settings_path);
// create encoded calldata
let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
.args([
"encode-evm-calldata",
"--proof-path",
&format!("{}/{}/proof.pf", test_dir, example_name),
])
.status()
.expect("failed to execute process");
assert!(status.success());
// create the verifier
let mut args = vec!["create-evm-verifier", "--vk-path", &vk_arg, &settings_arg];
let sol_arg = format!("{}/{}/kzg.sol", test_dir, example_name);
args.push("--sol-code-path");
args.push(sol_arg.as_str());
let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
.args(&args)
.status()
.expect("failed to execute process");
assert!(status.success());
let addr_path_verifier_arg = format!(
"--addr-path={}/{}/addr_verifier.txt",
test_dir, example_name
);
// deploy the verifier
let mut args = vec![
"deploy-evm",
rpc_arg.as_str(),
addr_path_verifier_arg.as_str(),
];
args.push("--sol-code-path");
args.push(sol_arg.as_str());
let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
.args(&args)
.status()
.expect("failed to execute process");
assert!(status.success());
let sol_arg = format!("{}/{}/kzg.sol", test_dir, example_name);
let mut create_da_args = vec![
"create-evm-da",
&settings_arg,
"--sol-code-path",
sol_arg.as_str(),
"-W",
&witness_path,
];
// if there is an on-chain source we add the data
if input_source != "file" || output_source != "file" {
create_da_args.push("-D");
create_da_args.push(test_on_chain_data_path.as_str());
}
let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
.args(&create_da_args)
.status()
.expect("failed to execute process");
assert!(status.success());
let deploy_evm_data_path = if input_source != "file" || output_source != "file" {
test_on_chain_data_path.clone()
} else {
data_path.clone()
};
let addr_path_da_arg = format!("--addr-path={}/{}/addr_da.txt", test_dir, example_name);
let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
.args([
"deploy-evm-da",
format!("--settings-path={}", settings_path).as_str(),
"-D",
deploy_evm_data_path.as_str(),
"--sol-code-path",
sol_arg.as_str(),
rpc_arg.as_str(),
addr_path_da_arg.as_str(),
private_key.as_str(),
])
.status()
.expect("failed to execute process");
assert!(status.success());
let pf_arg = format!("{}/{}/proof.pf", test_dir, example_name);
// read in the verifier address
let addr_verifier =
std::fs::read_to_string(format!("{}/{}/addr_verifier.txt", test_dir, example_name))
.expect("failed to read address file");
let deployed_addr_verifier_arg = format!("--addr-verifier={}", addr_verifier);
// read in the da address
let addr_da = std::fs::read_to_string(format!("{}/{}/addr_da.txt", test_dir, example_name))
.expect("failed to read address file");
let deployed_addr_da_arg = format!("--addr-da={}", addr_da);
let args = vec![
"verify-evm",
"--proof-path",
pf_arg.as_str(),
deployed_addr_verifier_arg.as_str(),
deployed_addr_da_arg.as_str(),
rpc_arg.as_str(),
];
let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
.args(&args)
.status()
.expect("failed to execute process");
assert!(status.success());
}
fn build_ezkl() {
#[cfg(feature = "icicle")]
let args = [
@@ -2643,7 +2247,7 @@ mod native_tests {
// not macos-metal and not icicle
#[cfg(all(not(feature = "icicle"), not(feature = "macos-metal")))]
let args = ["build", "--profile=test-runs", "--bin", "ezkl"];
#[cfg(not(feature = "mv-lookup"))]
#[cfg(feature = "eth-original-lookup")]
let args = [
"build",
"--profile=test-runs",
@@ -2651,7 +2255,7 @@ mod native_tests {
"ezkl",
"--no-default-features",
"--features",
"ezkl",
"ezkl,solidity-verifier,eth",
];
let status = Command::new("cargo")

View File

@@ -146,7 +146,7 @@ mod py_tests {
}
}
const TESTS: [&str; 35] = [
const TESTS: [&str; 31] = [
"mnist_gan.ipynb", // 0
"ezkl_demo_batch.ipynb", // 1
"proof_splitting.ipynb", // 2
@@ -155,33 +155,29 @@ mod py_tests {
"mnist_gan_proof_splitting.ipynb", // 5
"hashed_vis.ipynb", // 6
"simple_demo_all_public.ipynb", // 7
"data_attest.ipynb", // 8
"little_transformer.ipynb", // 9
"simple_demo_aggregated_proofs.ipynb", // 10
"ezkl_demo.ipynb", // 11
"lstm.ipynb", // 12
"set_membership.ipynb", // 13
"decision_tree.ipynb", // 14
"random_forest.ipynb", // 15
"gradient_boosted_trees.ipynb", // 16
"xgboost.ipynb", // 17
"lightgbm.ipynb", // 18
"svm.ipynb", // 19
"simple_demo_public_input_output.ipynb", // 20
"simple_demo_public_network_output.ipynb", // 21
"gcn.ipynb", // 22
"linear_regression.ipynb", // 23
"stacked_regression.ipynb", // 24
"data_attest_hashed.ipynb", // 25
"kzg_vis.ipynb", // 26
"kmeans.ipynb", // 27
"solvency.ipynb", // 28
"sklearn_mlp.ipynb", // 29
"generalized_inverse.ipynb", // 30
"mnist_classifier.ipynb", // 31
"world_rotation.ipynb", // 32
"logistic_regression.ipynb", // 33
"univ3-da.ipynb", // 34
"little_transformer.ipynb", // 8
"simple_demo_aggregated_proofs.ipynb", // 9
"ezkl_demo.ipynb", // 10
"lstm.ipynb", // 11
"set_membership.ipynb", // 12
"decision_tree.ipynb", // 13
"random_forest.ipynb", // 14
"gradient_boosted_trees.ipynb", // 15
"xgboost.ipynb", // 16
"lightgbm.ipynb", // 17
"svm.ipynb", // 18
"simple_demo_public_input_output.ipynb", // 19
"simple_demo_public_network_output.ipynb", // 20
"gcn.ipynb", // 21
"linear_regression.ipynb", // 22
"stacked_regression.ipynb", // 23
"kzg_vis.ipynb", // 24
"kmeans.ipynb", // 25
"solvency.ipynb", // 26
"sklearn_mlp.ipynb", // 27
"generalized_inverse.ipynb", // 28
"mnist_classifier.ipynb", // 29
"logistic_regression.ipynb", // 30
];
macro_rules! test_func {
@@ -194,7 +190,7 @@ mod py_tests {
use super::*;
seq!(N in 0..=32 {
seq!(N in 0..=30 {
#(#[test_case(TESTS[N])])*
fn run_notebook_(test: &str) {

View File

@@ -430,7 +430,7 @@ async def test_create_evm_verifier_separate_vk():
vk_path = os.path.join(folder_path, 'test_evm.vk')
settings_path = os.path.join(folder_path, 'settings.json')
sol_code_path = os.path.join(folder_path, 'test_separate.sol')
vk_code_path = os.path.join(folder_path, 'test_vk.sol')
vka_path = os.path.join(folder_path, 'vka.calldata')
abi_path = os.path.join(folder_path, 'test_separate.abi')
abi_vk_path = os.path.join(folder_path, 'test_vk_separate.abi')
proof_path = os.path.join(folder_path, 'test_evm.pf')
@@ -455,9 +455,8 @@ async def test_create_evm_verifier_separate_vk():
res = await ezkl.create_evm_vka(
vk_path,
settings_path,
vk_code_path,
abi_vk_path,
srs_path=srs_path,
vka_path,
srs_path=srs_path
)
assert res == True
@@ -472,7 +471,7 @@ async def test_deploy_evm_reusable_and_vka():
addr_path_verifier = os.path.join(folder_path, 'address_separate.json')
addr_path_vk = os.path.join(folder_path, 'address_vk.json')
sol_code_path = os.path.join(folder_path, 'test_separate.sol')
vk_code_path = os.path.join(folder_path, 'test_vk.sol')
vka_path = os.path.join(folder_path, 'vka.calldata')
# TODO: without optimization there will be out of gas errors
# sol_code_path = os.path.join(folder_path, 'test.sol')
@@ -484,11 +483,14 @@ async def test_deploy_evm_reusable_and_vka():
"verifier/reusable",
)
res = await ezkl.deploy_evm(
addr_path_vk,
with open(addr_path_verifier, 'r') as file:
addr_verifier = file.read().rstrip()
# TODO fix: we need to call register vka instead of deploy evm
res = await ezkl.register_vka(
addr_verifier,
anvil_url,
vk_code_path,
"vka",
vka_path=vka_path,
)
assert res == True
@@ -579,7 +581,7 @@ async def test_verify_evm_separate_vk():
"""
proof_path = os.path.join(folder_path, 'test_evm.pf')
addr_path_verifier = os.path.join(folder_path, 'address_separate.json')
addr_path_vk = os.path.join(folder_path, 'address_vk.json')
vka_path = os.path.join(folder_path, 'vka.calldata')
proof_path = os.path.join(folder_path, 'test_evm.pf')
calldata_path = os.path.join(folder_path, 'calldata_separate.bytes')
@@ -588,13 +590,8 @@ async def test_verify_evm_separate_vk():
print(addr_verifier)
with open(addr_path_vk, 'r') as file:
addr_vk = file.read().rstrip()
print(addr_vk)
# res is now a vector of bytes
res = ezkl.encode_evm_calldata(proof_path, calldata_path, addr_vk=addr_vk)
res = ezkl.encode_evm_calldata(proof_path, calldata_path, vka_path=vka_path)
assert os.path.isfile(calldata_path)
assert len(res) > 0
@@ -606,7 +603,7 @@ async def test_verify_evm_separate_vk():
addr_verifier,
anvil_url,
proof_path,
addr_vk=addr_vk,
vka_path=vka_path,
# sol_code_path
# optimizer_runs
)