Compare commits

...

31 Commits

Author SHA1 Message Date
github-actions[bot]
d16fe8e928 ci: update version string in docs 2025-04-29 14:58:55 +00:00
dante
839030ce10 chore: rm halo2proofs patches (#976) 2025-04-29 10:58:35 -04:00
dante
cfccc5460c refactor: rm postgres (#977) 2025-04-29 08:59:14 -04:00
dante
0de0682bfa refactor: configurable div epsilon (#968) 2025-04-23 09:12:24 +01:00
dante
bf9cf14ab7 refactor!: rpc url should be required (#965)
BREAKING CHANGE: in python the order of arguments for evm related functions has changed
2025-04-22 12:45:36 +01:00
dante
6818962ac2 chore: pass in raw data for gen-witness from file (#964) 2025-04-06 14:08:11 -04:00
dante
70469e3bf9 chore: add min/max to gen-random-data (#960) 2025-03-25 19:32:15 +00:00
dante
52ff187e55 refactor: command struct names should match str (#959) 2025-03-24 12:54:43 +00:00
dante
4e57a5a486 docs: link to audit (#958)
---------

Co-authored-by: Jason Morton <jason.morton@gmail.com>
2025-03-23 21:12:44 +00:00
Ethan Cemer
fe978caa85 fix!: bug fixes (#956)
BREAKING CHANGE: DA verifier no longer backwards compatible
2025-03-18 22:08:29 +00:00
dante
1bef92407c fix: recip denom epsilon can induce non opt res (#957) 2025-03-17 14:46:33 +00:00
dante
5ff1c48ede refactor: allow for negative stride downsample (#955) 2025-03-14 12:07:37 -04:00
dante
ab4997d0c2 chore: update docs and panics (#952) 2025-03-10 11:32:28 -04:00
dante
701e69dd2f fix: handle [] shapes in sort (#954) 2025-03-08 13:17:45 -05:00
dante
f631445e26 docs: document arguments better (#950) 2025-03-05 16:10:50 -05:00
dante
fcbb27677f fix: empty dim len can be 1 (#949) 2025-02-28 23:56:19 -05:00
dante
bc26691bd5 chore: smaller cat dog example (#947) 2025-02-28 10:37:08 -05:00
dante
73c813a81d feat: pass data directly in cli (#939) 2025-02-13 12:35:13 -05:00
dante
ae076aef09 refactor: rm tolerance parameter (#937) 2025-02-11 12:57:18 -05:00
dante
a7544f4060 feat: generalize conv mem layout and ND (#935) 2025-02-10 09:11:58 -05:00
dante
c19fa5218a refactor: enforce max decomp base/legs in args (#936) 2025-02-09 16:15:40 -05:00
rebustron
eb205d0c73 chore: fix typos in comments and docs (#934) 2025-02-08 19:13:17 -05:00
dante
db498f8d7c docs: cat-dog example (#929) 2025-02-08 17:30:13 -05:00
Cypher Pepe
a363c91160 fix: broken links in polycommit.rs and poseidon.rs (#932) 2025-02-08 12:40:53 -05:00
dante
f7f04415fa chore!: add model input/output types to settings (#933)
BREAKING CHANGE: compiled model serialization is not backwards compatible
2025-02-07 16:05:59 -05:00
Jseam
de8d419e5d ci: change to sha hashes (#922) 2025-02-07 12:27:35 -05:00
dante
a38d318923 fix: pypi publication pipeline (#931) 2025-02-05 23:03:21 -05:00
dante
864990fe2d fix: publishing path 2025-02-05 19:57:13 -05:00
dante
29c3e4f977 fix: bump download artifact to v4 2025-02-05 19:05:29 -05:00
dante
0689115828 fix: ezkl-gpu name (#930) 2025-02-05 18:29:28 -05:00
dante
99f741304a Revert "fix: ezkl-gpu install"
This reverts commit 20ac99fdbf.
2025-02-05 18:03:46 -05:00
83 changed files with 6922 additions and 5258 deletions

View File

@@ -12,10 +12,10 @@ jobs:
contents: read
runs-on: self-hosted
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2023-06-27
override: true
@@ -29,10 +29,10 @@ jobs:
runs-on: self-hosted
needs: [bench_poseidon]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2023-06-27
override: true
@@ -46,10 +46,10 @@ jobs:
runs-on: self-hosted
needs: [bench_poseidon]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2023-06-27
override: true
@@ -63,10 +63,10 @@ jobs:
runs-on: self-hosted
needs: [bench_poseidon]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2023-06-27
override: true
@@ -80,10 +80,10 @@ jobs:
runs-on: self-hosted
needs: [bench_poseidon]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2023-06-27
override: true
@@ -97,10 +97,10 @@ jobs:
runs-on: self-hosted
needs: [bench_poseidon]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2023-06-27
override: true
@@ -114,10 +114,10 @@ jobs:
runs-on: self-hosted
needs: [bench_poseidon]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2023-06-27
override: true
@@ -131,10 +131,10 @@ jobs:
runs-on: self-hosted
needs: [bench_poseidon]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2023-06-27
override: true
@@ -148,10 +148,10 @@ jobs:
runs-on: self-hosted
needs: [bench_poseidon]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2023-06-27
override: true
@@ -165,10 +165,10 @@ jobs:
runs-on: self-hosted
needs: [bench_poseidon]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2023-06-27
override: true
@@ -182,10 +182,10 @@ jobs:
runs-on: self-hosted
needs: [bench_poseidon]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2023-06-27
override: true
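Every hunk in this file makes the same change: each third-party action is re-pinned from a mutable tag (for example @v4) to the full commit SHA of that release, with the human-readable tag kept as a trailing comment. A tag can be moved or compromised after the fact; a full SHA cannot, so pinning freezes exactly the code the workflow runs (the zizmor audit job later in this diff checks workflows for issues of this kind). As a hedged sketch, not part of the diff, one way to resolve a tag to the SHA to pin:

    # Resolve a tag to the commit SHA to pin (assumes git is installed).
    # For annotated tags, the peeled "^{}" entry carries the commit itself.
    git ls-remote https://github.com/actions/checkout 'refs/tags/v4.2.2*'
    # Pin the SHA from the left column (the peeled entry, if one is listed).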

View File

@@ -24,15 +24,15 @@ jobs:
runs-on: ubuntu-latest
if: startsWith(github.ref, 'refs/tags/')
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2024-07-18
toolchain: nightly-2025-02-17
override: true
components: rustfmt, clippy
- uses: jetli/wasm-pack-action@v0.4.0
- uses: jetli/wasm-pack-action@0d096b08b4e5a7de8c28de67e11e945404e9eefa #v0.4.0
with:
# Pin to version 0.12.1
version: 'v0.12.1'
@@ -40,7 +40,7 @@ jobs:
run: rustup target add wasm32-unknown-unknown
- name: Add rust-src
run: rustup component add rust-src --toolchain nightly-2024-07-18-x86_64-unknown-linux-gnu
run: rustup component add rust-src --toolchain nightly-2025-02-17-x86_64-unknown-linux-gnu
- name: Install binaryen
run: |
set -e
@@ -176,7 +176,7 @@ jobs:
curl -s "https://raw.githubusercontent.com/zkonduit/ezkljs-engine/main/README.md" > ./pkg/README.md
- name: Set up Node.js
uses: actions/setup-node@v3
uses: actions/setup-node@1a4442cacd436585916779262731d5b162bc6ec7 #v3.8.2
with:
node-version: "18.12.1"
registry-url: "https://registry.npmjs.org"
@@ -201,7 +201,7 @@ jobs:
RELEASE_TAG: ${{ github.ref_name }}
if: startsWith(github.ref, 'refs/tags/')
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Update version in package.json
@@ -230,13 +230,13 @@ jobs:
NR==30{$0=" specifier: \"" tag "\""}
NR==31{$0=" version: \"" tag "\""}
NR==400{$0=" /@ezkljs/engine@" tag ":"}
NR==401{$0=" resolution: {integrity: \"" integrity "\"}"} 1' in-browser-evm-verifier/pnpm-lock.yaml > temp.yaml && mv temp.yaml in-browser-evm-verifier/pnpm-lock.yaml
NR==401{$0=" resolution: {integrity: \"" integrity "\"}"} 1' in-browser-evm-verifier/pnpm-lock.yaml > temp.yaml && mv temp.yaml in-browser-evm-verifier/pnpm-lock.yaml
- name: Use pnpm 8
uses: pnpm/action-setup@v2
uses: pnpm/action-setup@eae0cfeb286e66ffb5155f1a79b90583a127a68b #v2.4.1
with:
version: 8
- name: Set up Node.js
uses: actions/setup-node@v3
uses: actions/setup-node@1a4442cacd436585916779262731d5b162bc6ec7 #v3.8.2
with:
node-version: "18.12.1"
registry-url: "https://registry.npmjs.org"
@@ -247,4 +247,4 @@ jobs:
pnpm run build
pnpm publish --no-git-checks
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
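The awk step above stamps the release tag and package integrity hash into in-browser-evm-verifier/pnpm-lock.yaml by absolute line number (NR==30, 31, 400, 401). A minimal sketch of the pattern with hypothetical values; note that matching on NR is brittle, since any drift in the lockfile layout silently mis-stamps the file:

    # Hypothetical tag and integrity values; the real job derives them from the release.
    tag="9.9.9"
    integrity="sha512-AAAA..."
    awk -v tag="$tag" -v integrity="$integrity" '
      NR==30 {$0="      specifier: \"" tag "\""}
      NR==31 {$0="      version: \"" tag "\""}
      1' in-browser-evm-verifier/pnpm-lock.yaml > temp.yaml \
      && mv temp.yaml in-browser-evm-verifier/pnpm-lock.yaml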

View File

@@ -10,12 +10,12 @@ jobs:
contents: read
runs-on: kaiju
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2024-07-18
toolchain: nightly-2025-02-17
override: true
components: rustfmt, clippy
- name: nanoGPT Mock

View File

@@ -28,34 +28,36 @@ jobs:
env:
RELEASE_TAG: ${{ github.ref_name }}
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions/setup-python@v4
- uses: actions/setup-python@b64ffcaf5b410884ad320a9cfac8866006a109aa #v4.8.0
with:
python-version: 3.12
architecture: x64
- name: Set pyproject.toml version to match github tag
- name: Set pyproject.toml version to match github tag and rename ezkl to ezkl-gpu
shell: bash
run: |
mv pyproject.toml pyproject.toml.orig
sed "s/ezkl/ezkl-gpu/" pyproject.toml.orig >pyproject.toml
sed "s/0\\.0\\.0/${RELEASE_TAG//v}/" pyproject.toml.orig >pyproject.toml
sed "s/ezkl/ezkl-gpu/" pyproject.toml.orig > pyproject.toml.tmp
sed "s/0\\.0\\.0/${RELEASE_TAG//v}/" pyproject.toml.tmp > pyproject.toml
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2023-06-27
override: true
components: rustfmt, clippy
- name: Set Cargo.toml version to match github tag
- name: Set Cargo.toml version to match github tag and rename ezkl to ezkl-gpu
shell: bash
# the ezkl substitution here looks for the first instance of name = "ezkl" and changes it to "ezkl-gpu"
run: |
mv Cargo.toml Cargo.toml.orig
sed "s/0\\.0\\.0/${RELEASE_TAG//v}/" Cargo.toml.orig >Cargo.toml
sed "0,/name = \"ezkl\"/s/name = \"ezkl\"/name = \"ezkl-gpu\"/" Cargo.toml.orig > Cargo.toml.tmp
sed "s/0\\.0\\.0/${RELEASE_TAG//v}/" Cargo.toml.tmp > Cargo.toml
mv Cargo.lock Cargo.lock.orig
sed "s/0\\.0\\.0/${RELEASE_TAG//v}/" Cargo.lock.orig >Cargo.lock
sed "s/0\\.0\\.0/${RELEASE_TAG//v}/" Cargo.lock.orig > Cargo.lock
- name: Install required libraries
shell: bash
@@ -63,7 +65,7 @@ jobs:
sudo apt-get update && sudo apt-get install -y openssl pkg-config libssl-dev
- name: Build wheels
uses: PyO3/maturin-action@v1
uses: PyO3/maturin-action@5f8a1b3b0aad13193f46c9131f9b9e663def8ce5 #v1.46.0
with:
target: ${{ matrix.target }}
manylinux: auto
@@ -73,10 +75,10 @@ jobs:
- name: Install built wheel
if: matrix.target == 'x86_64'
run: |
pip install ezkl --no-index --find-links dist --force-reinstall
pip install ezkl-gpu --no-index --find-links dist --force-reinstall
- name: Upload wheels
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 #v4.6.0
with:
name: wheels
path: dist
@@ -92,7 +94,7 @@ jobs:
# needs: [ macos, windows, linux, linux-cross, musllinux, musllinux-cross ]
needs: [linux]
steps:
- uses: actions/download-artifact@v3
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 #v4.1.8
with:
name: wheels
- name: List Files
@@ -104,14 +106,14 @@ jobs:
# publishes to PyPI
- name: Publish package distributions to PyPI
continue-on-error: true
uses: pypa/gh-action-pypi-publish@unstable/v1
uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc #v1.12.4
with:
packages-dir: ./
packages-dir: ./wheels
# publishes to TestPyPI
- name: Publish package distribution to TestPyPI
continue-on-error: true
uses: pypa/gh-action-pypi-publish@unstable/v1
uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc #v1.12.4
with:
repository-url: https://test.pypi.org/legacy/
packages-dir: ./
packages-dir: ./wheels
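The substantive fix in this file is in the two sed steps. Previously both substitutions read from the pristine *.orig backup and wrote to the same destination, so the second sed clobbered the first and the ezkl -> ezkl-gpu rename never survived. The new steps thread the edits through a .tmp file so both apply. Paraphrased in isolation, with RELEASE_TAG supplied by the workflow environment:

    mv pyproject.toml pyproject.toml.orig
    # 1) Rename the package first ...
    sed "s/ezkl/ezkl-gpu/" pyproject.toml.orig > pyproject.toml.tmp
    # 2) ... then stamp the version into the renamed copy.
    #    ${RELEASE_TAG//v} deletes every "v", turning a tag like v9.9.9 into 9.9.9.
    sed "s/0\.0\.0/${RELEASE_TAG//v}/" pyproject.toml.tmp > pyproject.toml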

View File

@@ -26,10 +26,10 @@ jobs:
env:
RELEASE_TAG: ${{ github.ref_name }}
steps:
- uses: actions/checkout@v4
with:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions/setup-python@v4
- uses: actions/setup-python@b64ffcaf5b410884ad320a9cfac8866006a109aa #v4.8.0
with:
python-version: 3.12
architecture: x64
@@ -48,21 +48,21 @@ jobs:
mv Cargo.lock Cargo.lock.orig
sed "s/0\\.0\\.0/${RELEASE_TAG//v}/" Cargo.lock.orig >Cargo.lock
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2024-07-18
toolchain: nightly-2025-02-17
override: true
components: rustfmt, clippy
- name: Build wheels
if: matrix.target == 'universal2-apple-darwin'
uses: PyO3/maturin-action@v1
uses: PyO3/maturin-action@5f8a1b3b0aad13193f46c9131f9b9e663def8ce5 #v1.46.0
with:
target: ${{ matrix.target }}
args: --release --out dist --features python-bindings
- name: Build wheels
if: matrix.target == 'x86_64'
uses: PyO3/maturin-action@v1
uses: PyO3/maturin-action@5f8a1b3b0aad13193f46c9131f9b9e663def8ce5 #v1.46.0
with:
target: ${{ matrix.target }}
args: --release --out dist --features python-bindings
@@ -73,7 +73,7 @@ jobs:
python -c "import ezkl"
- name: Upload wheels
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 #v4.6.0
with:
name: dist-macos-${{ matrix.target }}
path: dist
@@ -87,10 +87,10 @@ jobs:
matrix:
target: [x64, x86]
steps:
- uses: actions/checkout@v4
with:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions/setup-python@v4
- uses: actions/setup-python@b64ffcaf5b410884ad320a9cfac8866006a109aa #v4.8.0
with:
python-version: 3.12
architecture: ${{ matrix.target }}
@@ -113,14 +113,14 @@ jobs:
mv Cargo.lock Cargo.lock.orig
sed "s/0\\.0\\.0/${RELEASE_TAG//v}/" Cargo.lock.orig >Cargo.lock
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2024-07-18
toolchain: nightly-2025-02-17
override: true
components: rustfmt, clippy
- name: Build wheels
uses: PyO3/maturin-action@v1
uses: PyO3/maturin-action@5f8a1b3b0aad13193f46c9131f9b9e663def8ce5 #v1.46.0
with:
target: ${{ matrix.target }}
args: --release --out dist --features python-bindings
@@ -130,7 +130,7 @@ jobs:
python -c "import ezkl"
- name: Upload wheels
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 #v4.6.0
with:
name: dist-windows-${{ matrix.target }}
path: dist
@@ -144,10 +144,10 @@ jobs:
matrix:
target: [x86_64]
steps:
- uses: actions/checkout@v4
with:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions/setup-python@v4
- uses: actions/setup-python@b64ffcaf5b410884ad320a9cfac8866006a109aa #v4.8.0
with:
python-version: 3.12
architecture: x64
@@ -176,7 +176,7 @@ jobs:
sudo apt-get update && sudo apt-get install -y openssl pkg-config libssl-dev
- name: Build wheels
uses: PyO3/maturin-action@v1
uses: PyO3/maturin-action@5f8a1b3b0aad13193f46c9131f9b9e663def8ce5 #v1.46.0
with:
target: ${{ matrix.target }}
manylinux: auto
@@ -203,7 +203,7 @@ jobs:
python -c "import ezkl"
- name: Upload wheels
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 #v4.6.0
with:
name: dist-linux-${{ matrix.target }}
path: dist
@@ -218,10 +218,10 @@ jobs:
target:
- x86_64-unknown-linux-musl
steps:
- uses: actions/checkout@v4
with:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions/setup-python@v4
- uses: actions/setup-python@b64ffcaf5b410884ad320a9cfac8866006a109aa #v4.8.0
with:
python-version: 3.12
architecture: x64
@@ -250,7 +250,7 @@ jobs:
sudo apt-get update && sudo apt-get install -y pkg-config libssl-dev
- name: Build wheels
uses: PyO3/maturin-action@v1
uses: PyO3/maturin-action@5f8a1b3b0aad13193f46c9131f9b9e663def8ce5 #v1.46.0
with:
target: ${{ matrix.target }}
manylinux: musllinux_1_2
@@ -258,7 +258,7 @@ jobs:
- name: Install built wheel
if: matrix.target == 'x86_64-unknown-linux-musl'
uses: addnab/docker-run-action@v3
uses: addnab/docker-run-action@3e77f186b7a929ef010f183a9e24c0f9955ea609
with:
image: alpine:latest
options: -v ${{ github.workspace }}:/io -w /io
@@ -271,9 +271,9 @@ jobs:
python3 -c "import ezkl"
- name: Upload wheels
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 #v4.6.0
with:
name: name: dist-musllinux-${{ matrix.target }}
name: dist-musllinux-${{ matrix.target }}
path: dist
musllinux-cross:
@@ -287,10 +287,10 @@ jobs:
- target: aarch64-unknown-linux-musl
arch: aarch64
steps:
- uses: actions/checkout@v4
with:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions/setup-python@v4
- uses: actions/setup-python@b64ffcaf5b410884ad320a9cfac8866006a109aa #v4.8.0
with:
python-version: 3.12
@@ -313,13 +313,13 @@ jobs:
sed "s/0\\.0\\.0/${RELEASE_TAG//v}/" Cargo.lock.orig >Cargo.lock
- name: Build wheels
uses: PyO3/maturin-action@v1
uses: PyO3/maturin-action@5f8a1b3b0aad13193f46c9131f9b9e663def8ce5 #v1.46.0
with:
target: ${{ matrix.platform.target }}
manylinux: musllinux_1_2
args: --release --out dist --features python-bindings
- uses: uraimo/run-on-arch-action@v2.8.1
- uses: uraimo/run-on-arch-action@5397f9e30a9b62422f302092631c99ae1effcd9e #v2.8.1
name: Install built wheel
with:
arch: ${{ matrix.platform.arch }}
@@ -334,7 +334,7 @@ jobs:
python3 -c "import ezkl"
- name: Upload wheels
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 #v4.6.0
with:
name: dist-musllinux-${{ matrix.platform.target }}
path: dist
@@ -347,26 +347,26 @@ jobs:
if: "startsWith(github.ref, 'refs/tags/')"
needs: [macos, windows, linux, musllinux, musllinux-cross]
steps:
- uses: actions/download-artifact@v3
- uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 #v4.1.8
with:
pattern: dist-*
merge-multiple: true
path: dist
path: wheels
- name: List Files
run: ls -R
# # publishes to TestPyPI
# - name: Publish package distribution to TestPyPI
# uses: pypa/gh-action-pypi-publish@unstable/v1
# uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc #v1.12.4
# with:
# repository-url: https://test.pypi.org/legacy/
# packages-dir: ./
# publishes to PyPI
- name: Publish package distributions to PyPI
uses: pypa/gh-action-pypi-publish@unstable/v1
uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc #v1.12.4
with:
packages-dir: ./
packages-dir: ./wheels
doc-publish:
@@ -377,10 +377,10 @@ jobs:
needs: pypi-publish
steps:
- uses: actions/checkout@v4
with:
with:
persist-credentials: false
- name: Trigger RTDs build
uses: dfm/rtds-action@v1
uses: dfm/rtds-action@618148c547f4b56cdf4fa4dcf3a94c91ce025f2d
with:
webhook_url: ${{ secrets.RTDS_WEBHOOK_URL }}
webhook_token: ${{ secrets.RTDS_WEBHOOK_TOKEN }}

View File

@@ -30,7 +30,7 @@ jobs:
- name: Create Github Release
id: create-release
uses: softprops/action-gh-release@v1
uses: softprops/action-gh-release@c95fe1489396fe8a9eb87c0abf8aa5b2ef267fda #v2.2.1
with:
token: ${{ secrets.RELEASE_TOKEN }}
tag_name: ${{ env.EZKL_VERSION }}
@@ -49,14 +49,14 @@ jobs:
RUST_BACKTRACE: 1
PCRE2_SYS_STATIC: 1
steps:
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2024-07-18
toolchain: nightly-2025-02-17
override: true
components: rustfmt, clippy
- name: Checkout repo
uses: actions/checkout@v4
with:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
@@ -90,7 +90,7 @@ jobs:
echo "ASSET=build-artifacts/ezkl-linux-gpu.tar.gz" >> $GITHUB_ENV
- name: Upload release archive
uses: actions/upload-release-asset@v1.0.2
uses: actions/upload-release-asset@e8f9f06c4b078e705bd2ea027f0926603fc9b4d5 #v1.0.2
env:
GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}
with:
@@ -119,33 +119,33 @@ jobs:
include:
- build: windows-msvc
os: windows-latest
rust: nightly-2024-07-18
rust: nightly-2025-02-17
target: x86_64-pc-windows-msvc
- build: macos
os: macos-13
rust: nightly-2024-07-18
rust: nightly-2025-02-17
target: x86_64-apple-darwin
- build: macos-aarch64
os: macos-13
rust: nightly-2024-07-18
rust: nightly-2025-02-17
target: aarch64-apple-darwin
- build: linux-musl
os: ubuntu-22.04
rust: nightly-2024-07-18
rust: nightly-2025-02-17
target: x86_64-unknown-linux-musl
- build: linux-gnu
os: ubuntu-22.04
rust: nightly-2024-07-18
rust: nightly-2025-02-17
target: x86_64-unknown-linux-gnu
- build: linux-aarch64
os: ubuntu-22.04
rust: nightly-2024-07-18
rust: nightly-2025-02-17
target: aarch64-unknown-linux-gnu
steps:
- name: Checkout repo
uses: actions/checkout@v4
with:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Get release version from tag
@@ -170,7 +170,7 @@ jobs:
fi
- name: Install Rust
uses: dtolnay/rust-toolchain@nightly
uses: dtolnay/rust-toolchain@4f94fbe7e03939b0e674bcc9ca609a16088f63ff #nightly branch, TODO: update when required
with:
target: ${{ matrix.target }}
@@ -196,7 +196,7 @@ jobs:
echo "target flag is: ${{ env.TARGET_FLAGS }}"
echo "target dir is: ${{ env.TARGET_DIR }}"
- name: Build release binary (no asm or metal)
- name: Build release binary (no asm or metal)
if: matrix.build != 'linux-gnu' && matrix.build != 'macos-aarch64'
run: ${{ env.CARGO }} build --release ${{ env.TARGET_FLAGS }} -Z sparse-registry
@@ -233,7 +233,7 @@ jobs:
echo "ASSET=build-artifacts/ezkl-win.zip" >> $GITHUB_ENV
- name: Upload release archive
uses: actions/upload-release-asset@v1.0.2
uses: actions/upload-release-asset@e8f9f06c4b078e705bd2ea027f0926603fc9b4d5 #v1.0.2
env:
GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}
with:

View File

@@ -25,15 +25,15 @@ jobs:
contents: read
runs-on: large-self-hosted
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2024-07-18
toolchain: nightly-2025-02-17
override: true
components: rustfmt, clippy
- uses: baptiste0928/cargo-install@v1
- uses: baptiste0928/cargo-install@91c5da15570085bcde6f4d7aed98cb82d6769fd3
with:
crate: cargo-nextest
locked: true
@@ -45,12 +45,12 @@ jobs:
contents: read
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2024-07-18
toolchain: nightly-2025-02-17
override: true
components: rustfmt, clippy
- name: Build
@@ -61,12 +61,12 @@ jobs:
contents: read
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2024-07-18
toolchain: nightly-2025-02-17
override: true
components: rustfmt, clippy
- name: Docs
@@ -77,15 +77,15 @@ jobs:
contents: read
runs-on: ubuntu-latest-32-cores
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2024-07-18
toolchain: nightly-2025-02-17
override: true
components: rustfmt, clippy
- uses: baptiste0928/cargo-install@v1
- uses: baptiste0928/cargo-install@91c5da15570085bcde6f4d7aed98cb82d6769fd3 #v3.3.0
with:
crate: cargo-nextest
locked: true
@@ -102,25 +102,21 @@ jobs:
# env:
# ENABLE_ICICLE_GPU: true
# steps:
# - uses: actions/checkout@v4
# - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
# with:
# persist-credentials: false
# - uses: actions-rs/toolchain@v1
# - uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
# with:
# toolchain: nightly-2024-07-18
# toolchain: nightly-2025-02-17
# override: true
# components: rustfmt, clippy
# - uses: baptiste0928/cargo-install@v1
# - uses: baptiste0928/cargo-install@91c5da15570085bcde6f4d7aed98cb82d6769fd3 #v3.3.0
# with:
# crate: cargo-nextest
# locked: true
# - uses: mwilliamson/setup-wasmtime-action@v2
# - uses: mwilliamson/setup-wasmtime-action@bf814d7d8fc3c3a77dfe114bd9fb8a2c575f6ad6 #v2.0.0
# with:
# wasmtime-version: "3.0.1"
# - name: Install wasm32-wasi
# run: rustup target add wasm32-wasi
# - name: Install cargo-wasi
# run: cargo install cargo-wasi
# # - name: Matmul overflow (wasi)
# # run: cargo wasi test matmul_col_ultra_overflow -- --include-ignored --nocapture
# # - name: Conv overflow (wasi)
@@ -139,25 +135,21 @@ jobs:
contents: read
runs-on: non-gpu
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2024-07-18
toolchain: nightly-2025-02-17
override: true
components: rustfmt, clippy
- uses: baptiste0928/cargo-install@v1
- uses: baptiste0928/cargo-install@91c5da15570085bcde6f4d7aed98cb82d6769fd3 #v3.3.0
with:
crate: cargo-nextest
locked: true
- uses: mwilliamson/setup-wasmtime-action@v2
- uses: mwilliamson/setup-wasmtime-action@bf814d7d8fc3c3a77dfe114bd9fb8a2c575f6ad6 #v2.0.0
with:
wasmtime-version: "3.0.1"
- name: Install wasm32-wasi
run: rustup target add wasm32-wasi
- name: Install cargo-wasi
run: cargo install cargo-wasi
# - name: Matmul overflow (wasi)
# run: cargo wasi test matmul_col_ultra_overflow -- --include-ignored --nocapture
# - name: Conv overflow (wasi)
@@ -176,25 +168,21 @@ jobs:
contents: read
runs-on: non-gpu
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2024-07-18
toolchain: nightly-2025-02-17
override: true
components: rustfmt, clippy
- uses: baptiste0928/cargo-install@v1
- uses: baptiste0928/cargo-install@91c5da15570085bcde6f4d7aed98cb82d6769fd3 #v3.3.0
with:
crate: cargo-nextest
locked: true
- uses: mwilliamson/setup-wasmtime-action@v2
- uses: mwilliamson/setup-wasmtime-action@bf814d7d8fc3c3a77dfe114bd9fb8a2c575f6ad6 #v2.0.0
with:
wasmtime-version: "3.0.1"
- name: Install wasm32-wasi
run: rustup target add wasm32-wasi
- name: Install cargo-wasi
run: cargo install cargo-wasi
# - name: Matmul overflow (wasi)
# run: cargo wasi test matmul_col_ultra_overflow -- --include-ignored --nocapture
# - name: Conv overflow (wasi)
@@ -213,15 +201,15 @@ jobs:
contents: read
runs-on: ubuntu-latest-16-cores
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2024-07-18
toolchain: nightly-2025-02-17
override: true
components: rustfmt, clippy
- uses: baptiste0928/cargo-install@v1
- uses: baptiste0928/cargo-install@91c5da15570085bcde6f4d7aed98cb82d6769fd3 #v3.3.0
with:
crate: cargo-nextest
locked: true
@@ -233,49 +221,70 @@ jobs:
contents: read
runs-on: non-gpu
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2024-07-18
toolchain: nightly-2025-02-17
override: true
components: rustfmt, clippy
- uses: jetli/wasm-pack-action@v0.4.0
- uses: jetli/wasm-pack-action@0d096b08b4e5a7de8c28de67e11e945404e9eefa #v0.4.0
with:
# Pin to version 0.12.1
version: "v0.12.1"
- uses: nanasess/setup-chromedriver@v2
- uses: nanasess/setup-chromedriver@affb1ea8848cbb080be372c1e8d7a5c173e9298f #v2.3.0
# with:
# chromedriver-version: "115.0.5790.102"
- name: Install wasm32-unknown-unknown
run: rustup target add wasm32-unknown-unknown
- name: Add rust-src
run: rustup component add rust-src --toolchain nightly-2024-07-18-x86_64-unknown-linux-gnu
run: rustup component add rust-src --toolchain nightly-2025-02-17-x86_64-unknown-linux-gnu
- name: Run wasm verifier tests
# on mac:
# AR=/opt/homebrew/opt/llvm/bin/llvm-ar CC=/opt/homebrew/opt/llvm/bin/clang wasm-pack test --firefox --headless -- -Z build-std="panic_abort,std" --features web
run: wasm-pack test --chrome --headless -- -Z build-std="panic_abort,std" --features web
foudry-solidity-tests:
permissions:
contents: read
runs-on: non-gpu
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
persist-credentials: false
submodules: recursive
- name: Install Foundry
uses: foundry-rs/foundry-toolchain@3b74dacdda3c0b763089addb99ed86bc3800e68b
- name: Run tests
run: |
cd tests/foundry
forge install https://github.com/foundry-rs/forge-std --no-git --no-commit
forge test -vvvv --fuzz-runs 64
mock-proving-tests:
permissions:
contents: read
runs-on: non-gpu
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2024-07-18
toolchain: nightly-2025-02-17
override: true
components: rustfmt, clippy
- uses: baptiste0928/cargo-install@v1
- uses: baptiste0928/cargo-install@91c5da15570085bcde6f4d7aed98cb82d6769fd3 #v3.3.0
with:
crate: cargo-nextest
locked: true
# - name: The Worm Mock
# run: cargo nextest run --verbose tests::large_mock_::large_tests_5_expects -- --include-ignored
- name: Large 1D Conv Mock
run: cargo nextest run --verbose tests::large_mock_::large_tests_7_expects -- --include-ignored
- name: MNIST Gan Mock
run: cargo nextest run --verbose tests::large_mock_::large_tests_4_expects -- --include-ignored
- name: NanoGPT Mock
@@ -292,8 +301,6 @@ jobs:
run: cargo nextest run --verbose tests::mock_fixed_params_ --test-threads 32
- name: public outputs and bounded lookup log
run: cargo nextest run --verbose tests::mock_bounded_lookup_log --test-threads 32
- name: public outputs and tolerance > 0
run: cargo nextest run --verbose tests::mock_tolerance_public_outputs_ --test-threads 32
- name: public outputs + batch size == 10
run: cargo nextest run --verbose tests::mock_large_batch_public_outputs_ --test-threads 16
- name: kzg inputs
@@ -329,32 +336,32 @@ jobs:
runs-on: non-gpu
needs: [build, library-tests, docs, python-tests, python-integration-tests]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2024-07-18
toolchain: nightly-2025-02-17
override: true
components: rustfmt, clippy
- uses: baptiste0928/cargo-install@v1
- uses: baptiste0928/cargo-install@91c5da15570085bcde6f4d7aed98cb82d6769fd3 #v3.3.0
with:
crate: cargo-nextest
locked: true
- uses: actions/checkout@v3
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Use pnpm 8
uses: pnpm/action-setup@v2
uses: pnpm/action-setup@eae0cfeb286e66ffb5155f1a79b90583a127a68b #v2.4.1
with:
version: 8
- name: Use Node.js 18.12.1
uses: actions/setup-node@v3
uses: actions/setup-node@1a4442cacd436585916779262731d5b162bc6ec7 #v3.8.2
with:
node-version: "18.12.1"
cache: "pnpm"
- name: "Add rust-src"
run: rustup component add rust-src --toolchain nightly-2024-07-18-x86_64-unknown-linux-gnu
run: rustup component add rust-src --toolchain nightly-2025-02-17-x86_64-unknown-linux-gnu
- name: Install dependencies for js tests and in-browser-evm-verifier package
run: |
pnpm install --frozen-lockfile
@@ -414,28 +421,28 @@ jobs:
# runs-on: macos-13
# # needs: [build, library-tests, docs]
# steps:
# - uses: actions/checkout@v4
# - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
# with:
# persist-credentials: false
# - uses: actions-rs/toolchain@v1
# - uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
# with:
# toolchain: nightly-2024-07-18
# toolchain: nightly-2025-02-17
# override: true
# components: rustfmt, clippy
# - uses: jetli/wasm-pack-action@v0.4.0
# - uses: jetli/wasm-pack-action@0d096b08b4e5a7de8c28de67e11e945404e9eefa #v0.4.0
# with:
# # Pin to version 0.12.1
# version: 'v0.12.1'
# - name: Add rust-src
# run: rustup component add rust-src --toolchain nightly-2024-07-18
# - uses: actions/checkout@v3
# run: rustup component add rust-src --toolchain nightly-2025-02-17
# - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
# with:
# persist-credentials: false
# - name: Use pnpm 8
# uses: pnpm/action-setup@v2
# uses: pnpm/action-setup@eae0cfeb286e66ffb5155f1a79b90583a127a68b #v2.4.1
# with:
# version: 8
# - uses: baptiste0928/cargo-install@v1
# - uses: baptiste0928/cargo-install@91c5da15570085bcde6f4d7aed98cb82d6769fd3 #v3.3.0
# with:
# crate: cargo-nextest
# locked: true
@@ -448,15 +455,15 @@ jobs:
runs-on: non-gpu
needs: [build, library-tests, docs]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2024-07-18
toolchain: nightly-2025-02-17
override: true
components: rustfmt, clippy
- uses: jetli/wasm-pack-action@v0.4.0
- uses: jetli/wasm-pack-action@0d096b08b4e5a7de8c28de67e11e945404e9eefa #v0.4.0
with:
# Pin to version 0.12.1
version: "v0.12.1"
@@ -464,16 +471,16 @@ jobs:
run: rustup target add wasm32-unknown-unknown
- name: Add rust-src
run: rustup component add rust-src --toolchain nightly-2024-07-18-x86_64-unknown-linux-gnu
- uses: actions/checkout@v3
run: rustup component add rust-src --toolchain nightly-2025-02-17-x86_64-unknown-linux-gnu
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Use pnpm 8
uses: pnpm/action-setup@v2
uses: pnpm/action-setup@eae0cfeb286e66ffb5155f1a79b90583a127a68b #v2.4.1
with:
version: 8
- name: Use Node.js 18.12.1
uses: actions/setup-node@v3
uses: actions/setup-node@1a4442cacd436585916779262731d5b162bc6ec7 #v3.8.2
with:
node-version: "18.12.1"
cache: "pnpm"
@@ -483,7 +490,7 @@ jobs:
env:
CI: false
NODE_ENV: development
- uses: baptiste0928/cargo-install@v1
- uses: baptiste0928/cargo-install@91c5da15570085bcde6f4d7aed98cb82d6769fd3 #v3.3.0
with:
crate: cargo-nextest
locked: true
@@ -529,18 +536,18 @@ jobs:
# env:
# ENABLE_ICICLE_GPU: true
# steps:
# - uses: actions/checkout@v4
# - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
# with:
# persist-credentials: false
# - uses: actions-rs/toolchain@v1
# - uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
# with:
# toolchain: nightly-2024-07-18
# toolchain: nightly-2025-02-17
# override: true
# components: rustfmt, clippy
# - name: Add rust-src
# run: rustup component add rust-src --toolchain nightly-2024-07-18-x86_64-unknown-linux-gnu
# - uses: actions/checkout@v3
# - uses: baptiste0928/cargo-install@v1
# run: rustup component add rust-src --toolchain nightly-2025-02-17-x86_64-unknown-linux-gnu
# - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
# - uses: baptiste0928/cargo-install@91c5da15570085bcde6f4d7aed98cb82d6769fd3 #v3.3.0
# with:
# crate: cargo-nextest
# locked: true
@@ -567,15 +574,15 @@ jobs:
runs-on: self-hosted
needs: [build, library-tests, docs, python-tests, python-integration-tests]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: dtolnay/rust-toolchain@4f94fbe7e03939b0e674bcc9ca609a16088f63ff #nightly branch, TODO: update when required
with:
toolchain: nightly-2024-07-18
toolchain: nightly-2025-02-17
override: true
components: rustfmt, clippy
- uses: baptiste0928/cargo-install@v1
- uses: baptiste0928/cargo-install@91c5da15570085bcde6f4d7aed98cb82d6769fd3 #v3.3.0
with:
crate: cargo-nextest
locked: true
@@ -587,15 +594,15 @@ jobs:
# env:
# ENABLE_ICICLE_GPU: true
# steps:
# - uses: actions/checkout@v4
# - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
# with:
# persist-credentials: false
# - uses: actions-rs/toolchain@v1
# - uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
# with:
# toolchain: nightly-2024-07-18
# toolchain: nightly-2025-02-17
# override: true
# components: rustfmt, clippy
# - uses: baptiste0928/cargo-install@v1
# - uses: baptiste0928/cargo-install@91c5da15570085bcde6f4d7aed98cb82d6769fd3 #v3.3.0
# with:
# crate: cargo-nextest
# locked: true
@@ -608,15 +615,15 @@ jobs:
runs-on: large-self-hosted
needs: [build, library-tests, docs, python-tests, python-integration-tests]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2024-07-18
toolchain: nightly-2025-02-17
override: true
components: rustfmt, clippy
- uses: baptiste0928/cargo-install@v1
- uses: baptiste0928/cargo-install@91c5da15570085bcde6f4d7aed98cb82d6769fd3 #v3.3.0
with:
crate: cargo-nextest
locked: true
@@ -629,15 +636,15 @@ jobs:
runs-on: large-self-hosted
needs: [build, library-tests, docs, python-tests, python-integration-tests]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2024-07-18
toolchain: nightly-2025-02-17
override: true
components: rustfmt, clippy
- uses: baptiste0928/cargo-install@v1
- uses: baptiste0928/cargo-install@91c5da15570085bcde6f4d7aed98cb82d6769fd3 #v3.3.0
with:
crate: cargo-nextest
locked: true
@@ -654,15 +661,15 @@ jobs:
runs-on: ubuntu-latest-32-cores
needs: [build, library-tests, docs]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2024-07-18
toolchain: nightly-2025-02-17
override: true
components: rustfmt, clippy
- uses: baptiste0928/cargo-install@v1
- uses: baptiste0928/cargo-install@91c5da15570085bcde6f4d7aed98cb82d6769fd3 #v3.3.0
with:
crate: cargo-nextest
locked: true
@@ -675,15 +682,15 @@ jobs:
runs-on: non-gpu
needs: [build, library-tests, docs]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions/setup-python@v4
- uses: actions/setup-python@b64ffcaf5b410884ad320a9cfac8866006a109aa #v4.8.0
with:
python-version: "3.12"
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2024-07-18
toolchain: nightly-2025-02-17
override: true
components: rustfmt, clippy
- name: Install cmake
@@ -705,18 +712,18 @@ jobs:
runs-on: non-gpu
needs: [build, library-tests, docs, python-tests, python-integration-tests]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions/setup-python@v4
- uses: actions/setup-python@b64ffcaf5b410884ad320a9cfac8866006a109aa #v4.8.0
with:
python-version: "3.12"
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2024-07-18
toolchain: nightly-2025-02-17
override: true
components: rustfmt, clippy
- uses: baptiste0928/cargo-install@v1
- uses: baptiste0928/cargo-install@91c5da15570085bcde6f4d7aed98cb82d6769fd3 #v3.3.0
with:
crate: cargo-nextest
locked: true
@@ -756,18 +763,18 @@ jobs:
# Maps tcp port 5432 on service container to the host
- 5432:5432
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions/setup-python@v4
- uses: actions/setup-python@b64ffcaf5b410884ad320a9cfac8866006a109aa #v4.8.0
with:
python-version: "3.11"
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2024-07-18
toolchain: nightly-2025-02-17
override: true
components: rustfmt, clippy
- uses: baptiste0928/cargo-install@v1
- uses: baptiste0928/cargo-install@91c5da15570085bcde6f4d7aed98cb82d6769fd3 #v3.3.0
with:
crate: cargo-nextest
locked: true
@@ -781,6 +788,8 @@ jobs:
run: python -m venv .env --clear; source .env/bin/activate; pip install -r requirements.txt; python -m ensurepip --upgrade
- name: Build python ezkl
run: source .env/bin/activate; unset CONDA_PREFIX; maturin develop --features python-bindings --profile=test-runs
- name: Cat and Dog notebook
run: source .env/bin/activate; cargo nextest run py_tests::tests::cat_and_dog_notebook_
- name: All notebooks
run: source .env/bin/activate; cargo nextest run py_tests::tests::run_notebook_ --test-threads 1
- name: Voice tutorial
@@ -789,8 +798,6 @@ jobs:
run: source .env/bin/activate; cargo nextest run py_tests::tests::neural_bag_of_words_ --no-capture
- name: Felt conversion
run: source .env/bin/activate; cargo nextest run py_tests::tests::felt_conversion_test_ --no-capture
- name: Postgres tutorials
run: source .env/bin/activate; cargo nextest run py_tests::tests::postgres_ --no-capture
- name: Tictactoe tutorials
run: source .env/bin/activate; cargo nextest run py_tests::tests::tictactoe_ --test-threads 1
# - name: authenticate-kaggle-cli
@@ -812,20 +819,20 @@ jobs:
contents: read
runs-on: macos-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2024-07-18
toolchain: nightly-2025-02-17
override: true
components: rustfmt, clippy
- uses: baptiste0928/cargo-install@v1
- uses: baptiste0928/cargo-install@91c5da15570085bcde6f4d7aed98cb82d6769fd3 #v3.3.0
with:
crate: cargo-nextest
locked: true
- name: Run ios tests
run: CARGO_BUILD_TARGET=aarch64-apple-darwin RUSTUP_TOOLCHAIN=nightly-2024-07-18-aarch64-apple-darwin cargo test --test ios_integration_tests --features ios-bindings-test --no-default-features
run: CARGO_BUILD_TARGET=aarch64-apple-darwin RUSTUP_TOOLCHAIN=nightly-2025-02-17-aarch64-apple-darwin cargo test --test ios_integration_tests --features ios-bindings-test --no-default-features
swift-package-tests:
permissions:
@@ -834,12 +841,12 @@ jobs:
needs: [ios-integration-tests]
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2024-07-18
toolchain: nightly-2025-02-17
override: true
components: rustfmt, clippy
- name: Build EzklCoreBindings
@@ -885,4 +892,4 @@ jobs:
-destination 'platform=iOS Simulator,name=iPhone 15 Pro,OS=17.5' \
-parallel-testing-enabled NO \
-resultBundlePath ../../exampleTestResults \
-skip-testing:EzklAppUITests/EzklAppUITests/testButtonClicksInOrder
-skip-testing:EzklAppUITests/EzklAppUITests/testButtonClicksInOrder

View File

@@ -12,12 +12,12 @@ jobs:
contents: read
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- uses: actions-rs/toolchain@v1
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2024-07-18
toolchain: nightly-2025-02-17
override: true
components: rustfmt, clippy
@@ -30,4 +30,3 @@ jobs:
run: zizmor .
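For context: zizmor is a static analyzer for GitHub Actions workflows; among the issues it reports are actions referenced by mutable tags rather than commit SHAs, which is what the pinning throughout this diff addresses. A hedged sketch of running the same audit locally (the install path is an assumption, not taken from this diff; assumes a Rust toolchain):

    cargo install zizmor   # zizmor is distributed on crates.io
    zizmor .               # audit the repository's workflows, as the CI step above does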

View File

@@ -19,8 +19,8 @@ jobs:
steps:
- name: Checkout EZKL
uses: actions/checkout@v3
with:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Extract TAG from github.ref_name
@@ -34,7 +34,7 @@ jobs:
echo "TAG=$NEW_TAG" >> $GITHUB_ENV
- name: Install Rust (nightly)
uses: actions-rs/toolchain@v1
uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly
override: true

View File

@@ -11,12 +11,12 @@ jobs:
contents: write
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Bump version and push tag
id: tag_version
uses: mathieudutour/github-tag-action@v6.2
uses: mathieudutour/github-tag-action@a22cf08638b34d5badda920f9daf6e72c477b07b #v6.2
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
@@ -46,7 +46,7 @@ jobs:
git tag $RELEASE_TAG
- name: Push changes
uses: ad-m/github-push-action@master
uses: ad-m/github-push-action@77c5b412c50b723d2a4fbc6d71fb5723bcd439aa #master
env:
RELEASE_TAG: ${{ steps.tag_version.outputs.new_tag }}
with:

3
.gitignore vendored
View File

@@ -9,6 +9,7 @@ pkg
!AttestData.sol
!VerifierBase.sol
!LoadInstances.sol
!AttestData.t.sol
*.pf
*.vk
*.pk
@@ -49,3 +50,5 @@ timingData.json
!tests/assets/vk.key
docs/python/build
!tests/assets/vk_aggr.key
cache
out

2552
Cargo.lock generated

File diff suppressed because it is too large

View File

@@ -69,20 +69,18 @@ reqwest = { version = "0.12.4", default-features = false, features = [
"stream",
], optional = true }
openssl = { version = "0.10.55", features = ["vendored"], optional = true }
tokio-postgres = { version = "0.7.10", optional = true }
pg_bigdecimal = { version = "0.1.5", optional = true }
lazy_static = { version = "1.4.0", optional = true }
colored_json = { version = "3.0.1", default-features = false, optional = true }
tokio = { version = "1.35.0", default-features = false, features = [
"macros",
"rt-multi-thread",
], optional = true }
pyo3 = { version = "0.23.2", features = [
pyo3 = { version = "0.24.2", features = [
"extension-module",
"abi3-py37",
"macros",
], default-features = false, optional = true }
pyo3-async-runtimes = { git = "https://github.com/PyO3/pyo3-async-runtimes", version = "0.23.0", features = [
pyo3-async-runtimes = { git = "https://github.com/PyO3/pyo3-async-runtimes", version = "0.24.0", features = [
"attributes",
"tokio-runtime",
], default-features = false, optional = true }
@@ -242,8 +240,6 @@ ezkl = [
"dep:indicatif",
"dep:gag",
"dep:reqwest",
"dep:tokio-postgres",
"dep:pg_bigdecimal",
"dep:lazy_static",
"dep:tokio",
"dep:openssl",
@@ -275,12 +271,6 @@ no-update = []
macos-metal = ["halo2_proofs/macos"]
ios-metal = ["halo2_proofs/ios"]
[patch.'https://github.com/zkonduit/halo2']
halo2_proofs = { git = "https://github.com/zkonduit/halo2#f441c920be45f8f05d2c06a173d82e8885a5ed4d", package = "halo2_proofs" }
[patch.'https://github.com/zkonduit/halo2#0654e92bdf725fd44d849bfef3643870a8c7d50b']
halo2_proofs = { git = "https://github.com/zkonduit/halo2#f441c920be45f8f05d2c06a173d82e8885a5ed4d", package = "halo2_proofs" }
[patch.crates-io]
uniffi_testing = { git = "https://github.com/ElusAegis/uniffi-rs", branch = "feat/testing-feature-build-fix" }
@@ -289,7 +279,7 @@ uniffi_testing = { git = "https://github.com/ElusAegis/uniffi-rs", branch = "fea
rustflags = ["-C", "relocation-model=pic"]
lto = "fat"
codegen-units = 1
#panic = "abort"
# panic = "abort"
[profile.test-runs]
@@ -297,8 +287,4 @@ inherits = "dev"
opt-level = 3
[package.metadata.wasm-pack.profile.release]
wasm-opt = [
"-O4",
"--flexible-inline-max-function-size",
"4294967295",
]
wasm-opt = ["-O4", "--flexible-inline-max-function-size", "4294967295"]

View File

@@ -43,7 +43,7 @@ The generated proofs can then be verified with much less computational resources
----------------------
### getting started ⚙️
### Getting Started ⚙️
The easiest way to get started is to try out a notebook.
@@ -76,12 +76,12 @@ For more details visit the [docs](https://docs.ezkl.xyz). The CLI is faster than
Build the auto-generated rust documentation and open the docs in your browser locally. `cargo doc --open`
#### In-browser EVM verifier
#### In-browser EVM Verifier
As an alternative to running the native Halo2 verifier as a WASM binding in the browser, you can use the in-browser EVM verifier. The source code of which you can find in the `in-browser-evm-verifier` directory and a README with instructions on how to use it.
### building the project 🔨
### Building the Project 🔨
#### Rust CLI
@@ -96,7 +96,7 @@ cargo install --locked --path .
#### building python bindings
#### Building Python Bindings
Python bindings exists and can be built using `maturin`. You will need `rust` and `cargo` to be installed.
```bash
@@ -126,7 +126,7 @@ unset ENABLE_ICICLE_GPU
**NOTE:** Even with the above environment variable set, icicle is disabled for circuits where k <= 8. To change the value of `k` where icicle is enabled, you can set the environment variable `ICICLE_SMALL_K`.
### contributing 🌎
### Contributing 🌎
If you're interested in contributing and are unsure where to start, reach out to one of the maintainers:
@@ -144,20 +144,21 @@ More broadly:
Any contribution intentionally submitted for inclusion in the work by you shall be licensed to Zkonduit Inc. under the terms and conditions specified in the [CLA](https://github.com/zkonduit/ezkl/blob/main/cla.md), which you agree to by intentionally submitting a contribution. In particular, you have the right to submit the contribution and we can distribute it, among other terms and conditions.
### no security guarantees
Ezkl is unaudited, beta software undergoing rapid development. There may be bugs. No guarantees of security are made and it should not be relied on in production.
### Audits & Security
> NOTE: Because operations are quantized when they are converted from an onnx file to a zk-circuit, outputs in python and ezkl may differ slightly.
[v21.0.0](https://github.com/zkonduit/ezkl/releases/tag/v21.0.0) has been audited by Trail of Bits, the report can be found [here](https://github.com/trailofbits/publications/blob/master/reviews/2025-03-zkonduit-ezkl-securityreview.pdf).
> NOTE: Because operations are quantized when they are converted from an onnx file to a zk-circuit, outputs in python and ezkl may differ slightly.
### Advanced security topics
Check out `docs/advanced_security` for more advanced information on potential threat vectors.
Check out `docs/advanced_security` for more advanced information on potential threat vectors that are specific to zero-knowledge inference, quantization, and to machine learning models generally.
### No Warranty
### no warranty
Copyright (c) 2024 Zkonduit Inc. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Copyright (c) 2025 Zkonduit Inc.

312
abis/DataAttestation.json Normal file
View File

@@ -0,0 +1,312 @@
[
{
"inputs": [
{
"internalType": "address",
"name": "_contractAddresses",
"type": "address"
},
{
"internalType": "bytes",
"name": "_callData",
"type": "bytes"
},
{
"internalType": "uint256[]",
"name": "_decimals",
"type": "uint256[]"
},
{
"internalType": "uint256[]",
"name": "_bits",
"type": "uint256[]"
},
{
"internalType": "uint8",
"name": "_instanceOffset",
"type": "uint8"
}
],
"stateMutability": "nonpayable",
"type": "constructor"
},
{
"inputs": [],
"name": "HALF_ORDER",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "ORDER",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint256[]",
"name": "instances",
"type": "uint256[]"
}
],
"name": "attestData",
"outputs": [],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "callData",
"outputs": [
{
"internalType": "bytes",
"name": "",
"type": "bytes"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "contractAddress",
"outputs": [
{
"internalType": "address",
"name": "",
"type": "address"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "bytes",
"name": "encoded",
"type": "bytes"
}
],
"name": "getInstancesCalldata",
"outputs": [
{
"internalType": "uint256[]",
"name": "instances",
"type": "uint256[]"
}
],
"stateMutability": "pure",
"type": "function"
},
{
"inputs": [
{
"internalType": "bytes",
"name": "encoded",
"type": "bytes"
}
],
"name": "getInstancesMemory",
"outputs": [
{
"internalType": "uint256[]",
"name": "instances",
"type": "uint256[]"
}
],
"stateMutability": "pure",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint256",
"name": "index",
"type": "uint256"
}
],
"name": "getScalars",
"outputs": [
{
"components": [
{
"internalType": "uint256",
"name": "decimals",
"type": "uint256"
},
{
"internalType": "uint256",
"name": "bits",
"type": "uint256"
}
],
"internalType": "struct DataAttestation.Scalars",
"name": "",
"type": "tuple"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "instanceOffset",
"outputs": [
{
"internalType": "uint8",
"name": "",
"type": "uint8"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint256",
"name": "x",
"type": "uint256"
},
{
"internalType": "uint256",
"name": "y",
"type": "uint256"
},
{
"internalType": "uint256",
"name": "denominator",
"type": "uint256"
}
],
"name": "mulDiv",
"outputs": [
{
"internalType": "uint256",
"name": "result",
"type": "uint256"
}
],
"stateMutability": "pure",
"type": "function"
},
{
"inputs": [
{
"internalType": "int256",
"name": "x",
"type": "int256"
},
{
"components": [
{
"internalType": "uint256",
"name": "decimals",
"type": "uint256"
},
{
"internalType": "uint256",
"name": "bits",
"type": "uint256"
}
],
"internalType": "struct DataAttestation.Scalars",
"name": "_scalars",
"type": "tuple"
}
],
"name": "quantizeData",
"outputs": [
{
"internalType": "int256",
"name": "quantized_data",
"type": "int256"
}
],
"stateMutability": "pure",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "target",
"type": "address"
},
{
"internalType": "bytes",
"name": "data",
"type": "bytes"
}
],
"name": "staticCall",
"outputs": [
{
"internalType": "bytes",
"name": "",
"type": "bytes"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "int256",
"name": "x",
"type": "int256"
}
],
"name": "toFieldElement",
"outputs": [
{
"internalType": "uint256",
"name": "field_element",
"type": "uint256"
}
],
"stateMutability": "pure",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "verifier",
"type": "address"
},
{
"internalType": "bytes",
"name": "encoded",
"type": "bytes"
}
],
"name": "verifyWithDataAttestation",
"outputs": [
{
"internalType": "bool",
"name": "",
"type": "bool"
}
],
"stateMutability": "view",
"type": "function"
}
]
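For orientation, here is a minimal sketch of driving this new single-call contract from Python with web3.py (v6 API). The RPC URL, contract addresses, and ABI path are placeholders, not values from the repo:

```python
# Hypothetical driver for the DataAttestation ABI above (web3.py v6).
# All addresses, paths, and the RPC URL are placeholders.
import json
from web3 import Web3

def attest_and_verify(rpc_url: str, da_addr: str, verifier_addr: str, encoded: bytes) -> bool:
    w3 = Web3(Web3.HTTPProvider(rpc_url))
    abi = json.load(open("abis/DataAttestation.json"))
    da = w3.eth.contract(address=Web3.to_checksum_address(da_addr), abi=abi)

    # pull the public instances out of the verifier calldata ...
    instances = da.functions.getInstancesCalldata(encoded).call()
    # ... attest to them (reverts with "Public input does not match" on mismatch) ...
    da.functions.attestData(instances).call()
    # ... or do both plus proof verification in one view call
    return da.functions.verifyWithDataAttestation(
        Web3.to_checksum_address(verifier_addr), encoded
    ).call()
```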

View File

@@ -1,167 +0,0 @@
[
{
"inputs": [
{
"internalType": "address[]",
"name": "_contractAddresses",
"type": "address[]"
},
{
"internalType": "bytes[][]",
"name": "_callData",
"type": "bytes[][]"
},
{
"internalType": "uint256[][]",
"name": "_decimals",
"type": "uint256[][]"
},
{
"internalType": "uint256[]",
"name": "_scales",
"type": "uint256[]"
},
{
"internalType": "uint8",
"name": "_instanceOffset",
"type": "uint8"
},
{
"internalType": "address",
"name": "_admin",
"type": "address"
}
],
"stateMutability": "nonpayable",
"type": "constructor"
},
{
"inputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"name": "accountCalls",
"outputs": [
{
"internalType": "address",
"name": "contractAddress",
"type": "address"
},
{
"internalType": "uint256",
"name": "callCount",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "admin",
"outputs": [
{
"internalType": "address",
"name": "",
"type": "address"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "instanceOffset",
"outputs": [
{
"internalType": "uint8",
"name": "",
"type": "uint8"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"name": "scales",
"outputs": [
{
"internalType": "uint256",
"name": "",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "address[]",
"name": "_contractAddresses",
"type": "address[]"
},
{
"internalType": "bytes[][]",
"name": "_callData",
"type": "bytes[][]"
},
{
"internalType": "uint256[][]",
"name": "_decimals",
"type": "uint256[][]"
}
],
"name": "updateAccountCalls",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "_admin",
"type": "address"
}
],
"name": "updateAdmin",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "verifier",
"type": "address"
},
{
"internalType": "bytes",
"name": "encoded",
"type": "bytes"
}
],
"name": "verifyWithDataAttestation",
"outputs": [
{
"internalType": "bool",
"name": "",
"type": "bool"
}
],
"stateMutability": "view",
"type": "function"
}
]

View File

@@ -1,147 +0,0 @@
[
{
"inputs": [
{
"internalType": "address",
"name": "_contractAddresses",
"type": "address"
},
{
"internalType": "bytes",
"name": "_callData",
"type": "bytes"
},
{
"internalType": "uint256",
"name": "_decimals",
"type": "uint256"
},
{
"internalType": "uint256[]",
"name": "_scales",
"type": "uint256[]"
},
{
"internalType": "uint8",
"name": "_instanceOffset",
"type": "uint8"
},
{
"internalType": "address",
"name": "_admin",
"type": "address"
}
],
"stateMutability": "nonpayable",
"type": "constructor"
},
{
"inputs": [],
"name": "accountCall",
"outputs": [
{
"internalType": "address",
"name": "contractAddress",
"type": "address"
},
{
"internalType": "bytes",
"name": "callData",
"type": "bytes"
},
{
"internalType": "uint256",
"name": "decimals",
"type": "uint256"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "admin",
"outputs": [
{
"internalType": "address",
"name": "",
"type": "address"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "instanceOffset",
"outputs": [
{
"internalType": "uint8",
"name": "",
"type": "uint8"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "_contractAddresses",
"type": "address"
},
{
"internalType": "bytes",
"name": "_callData",
"type": "bytes"
},
{
"internalType": "uint256",
"name": "_decimals",
"type": "uint256"
}
],
"name": "updateAccountCalls",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "_admin",
"type": "address"
}
],
"name": "updateAdmin",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "verifier",
"type": "address"
},
{
"internalType": "bytes",
"name": "encoded",
"type": "bytes"
}
],
"name": "verifyWithDataAttestation",
"outputs": [
{
"internalType": "bool",
"name": "",
"type": "bool"
}
],
"stateMutability": "view",
"type": "function"
}
]

View File

@@ -73,6 +73,8 @@ impl Circuit<Fr> for MyCircuit {
padding: vec![(0, 0)],
stride: vec![1; 2],
group: 1,
data_format: DataFormat::NCHW,
kernel_format: KernelFormat::OIHW,
}),
)
.unwrap();
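For readers unfamiliar with the new fields, a short note on the layout conventions they name (standard tensor-layout terminology, not repo code):

```python
# Shape conventions referenced by the new config fields:
# DataFormat::NCHW   -> activations laid out as (batch, channels, height, width)
# KernelFormat::OIHW -> weights laid out as (out_channels, in_channels, kH, kW)
import numpy as np

x = np.zeros((1, 3, 32, 32))  # NCHW activation: 1 image, 3 channels, 32x32
w = np.zeros((8, 3, 5, 5))    # OIHW kernel: 8 filters over 3 channels, 5x5
```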

View File

@@ -69,6 +69,7 @@ impl Circuit<Fr> for MyCircuit {
stride: vec![1, 1],
kernel_shape: vec![2, 2],
normalized: false,
data_format: DataFormat::NCHW,
}),
)
.unwrap();

View File

@@ -23,8 +23,6 @@ use halo2curves::bn256::{Bn256, Fr};
use rand::rngs::OsRng;
use snark_verifier::system::halo2::transcript::evm::EvmTranscript;
const L: usize = 10;
#[derive(Clone, Debug)]
struct MyCircuit {
image: ValTensor<Fr>,
@@ -40,7 +38,7 @@ impl Circuit<Fr> for MyCircuit {
}
fn configure(cs: &mut ConstraintSystem<Fr>) -> Self::Config {
PoseidonChip::<PoseidonSpec, POSEIDON_WIDTH, POSEIDON_RATE, 10>::configure(cs, ())
PoseidonChip::<PoseidonSpec, POSEIDON_WIDTH, POSEIDON_RATE>::configure(cs, ())
}
fn synthesize(
@@ -48,7 +46,7 @@ impl Circuit<Fr> for MyCircuit {
config: Self::Config,
mut layouter: impl Layouter<Fr>,
) -> Result<(), Error> {
let chip: PoseidonChip<PoseidonSpec, POSEIDON_WIDTH, POSEIDON_RATE, L> =
let chip: PoseidonChip<PoseidonSpec, POSEIDON_WIDTH, POSEIDON_RATE> =
PoseidonChip::new(config);
chip.layout(&mut layouter, &[self.image.clone()], 0, &mut HashMap::new())?;
Ok(())
@@ -59,7 +57,7 @@ fn runposeidon(c: &mut Criterion) {
let mut group = c.benchmark_group("poseidon");
for size in [64, 784, 2352, 12288].iter() {
let k = (PoseidonChip::<PoseidonSpec, POSEIDON_WIDTH, POSEIDON_RATE, L>::num_rows(*size)
let k = (PoseidonChip::<PoseidonSpec, POSEIDON_WIDTH, POSEIDON_RATE>::num_rows(*size)
as f32)
.log2()
.ceil() as u32;
@@ -67,7 +65,7 @@ fn runposeidon(c: &mut Criterion) {
let message = (0..*size).map(|_| Fr::random(OsRng)).collect::<Vec<_>>();
let _output =
PoseidonChip::<PoseidonSpec, POSEIDON_WIDTH, POSEIDON_RATE, L>::run(message.to_vec())
PoseidonChip::<PoseidonSpec, POSEIDON_WIDTH, POSEIDON_RATE>::run(message.to_vec())
.unwrap();
let mut image = Tensor::from(message.into_iter().map(Value::known));

View File

@@ -22,7 +22,7 @@ fn generate_test_data(size: usize, zero_probability: f64) -> Vec<ValType> {
let mut rng = rand::thread_rng();
(0..size)
.map(|_i| {
if rng.gen::<f64>() < zero_probability {
if rng.r#gen::<f64>() < zero_probability {
ValType::Constant(F::ZERO)
} else {
ValType::Constant(F::ONE) // Or some other non-zero value

View File

@@ -8,21 +8,27 @@ contract LoadInstances {
*/
function getInstancesMemory(
bytes memory encoded
) internal pure returns (uint256[] memory instances) {
) public pure returns (uint256[] memory instances) {
bytes4 funcSig;
uint256 instances_offset;
uint256 instances_length;
assembly {
// fetch function sig. Either `verifyProof(bytes,uint256[])` or `verifyProof(address,bytes,uint256[])`
funcSig := mload(add(encoded, 0x20))
}
if (funcSig == 0xaf83a18d) {
instances_offset = 0x64;
} else if (funcSig == 0x1e8e1e13) {
instances_offset = 0x44;
} else {
revert("Invalid function signature");
}
assembly {
// Fetch instances offset which is 4 + 32 + 32 bytes away from
// start of encoded for `verifyProof(bytes,uint256[])`,
// and 4 + 32 + 32 +32 away for `verifyProof(address,bytes,uint256[])`
instances_offset := mload(
add(encoded, add(0x44, mul(0x20, eq(funcSig, 0xaf83a18d))))
)
instances_offset := mload(add(encoded, instances_offset))
instances_length := mload(add(add(encoded, 0x24), instances_offset))
}
@@ -41,6 +47,10 @@ contract LoadInstances {
)
}
}
require(
funcSig == 0xaf83a18d || funcSig == 0x1e8e1e13,
"Invalid function signature"
);
}
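The selector check above pins down where the `uint256[]` head word sits in the encoded calldata. As a cross-check, a short Python sketch (using the `eth_abi` package, assumed available) recovers the same instances by decoding the argument tail against the two signatures the contract accepts:

```python
# Sketch: recover the instances array from verifier calldata in Python,
# using eth-abi (v4+) instead of hand-rolled offset arithmetic.
from eth_abi import decode

def get_instances(encoded: bytes) -> list[int]:
    selector, args = encoded[:4], encoded[4:]
    if selector == bytes.fromhex("af83a18d"):    # verifyProof(address,bytes,uint256[])
        _, _, instances = decode(["address", "bytes", "uint256[]"], args)
    elif selector == bytes.fromhex("1e8e1e13"):  # verifyProof(bytes,uint256[])
        _, instances = decode(["bytes", "uint256[]"], args)
    else:
        raise ValueError("Invalid function signature")
    return list(instances)
```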
/**
* @dev Parse the instances array from the Halo2Verifier encoded calldata.
@@ -49,23 +59,31 @@ contract LoadInstances {
*/
function getInstancesCalldata(
bytes calldata encoded
) internal pure returns (uint256[] memory instances) {
) public pure returns (uint256[] memory instances) {
bytes4 funcSig;
uint256 instances_offset;
uint256 instances_length;
assembly {
// fetch function sig. Either `verifyProof(bytes,uint256[])` or `verifyProof(address,bytes,uint256[])`
funcSig := calldataload(encoded.offset)
}
if (funcSig == 0xaf83a18d) {
instances_offset = 0x44;
} else if (funcSig == 0x1e8e1e13) {
instances_offset = 0x24;
} else {
revert("Invalid function signature");
}
// We need to create a new assembly block in order for solidity
// to cast the funcSig to a bytes4 type. Otherwise it will load the entire first 32 bytes of the calldata
// within the block
assembly {
// Fetch instances offset which is 4 + 32 + 32 bytes away from
// start of encoded for `verifyProof(bytes,uint256[])`,
// and 4 + 32 + 32 +32 away for `verifyProof(address,bytes,uint256[])`
instances_offset := calldataload(
add(
encoded.offset,
add(0x24, mul(0x20, eq(funcSig, 0xaf83a18d)))
)
add(encoded.offset, instances_offset)
)
instances_length := calldataload(
@@ -96,7 +114,7 @@ contract LoadInstances {
// The kzg commitments of a given model, all aggregated into a single bytes array.
// At solidity generation time, the commitments are hardcoded into the contract via the COMMITMENT_KZG constant.
// It will be used to check that the proof commitments match the expected commitments.
bytes constant COMMITMENT_KZG = hex"";
bytes constant COMMITMENT_KZG = hex"1234";
contract SwapProofCommitments {
/**
@@ -113,17 +131,20 @@ contract SwapProofCommitments {
assembly {
// fetch function sig. Either `verifyProof(bytes,uint256[])` or `verifyProof(address,bytes,uint256[])`
funcSig := calldataload(encoded.offset)
}
if (funcSig == 0xaf83a18d) {
proof_offset = 0x24;
} else if (funcSig == 0x1e8e1e13) {
proof_offset = 0x04;
} else {
revert("Invalid function signature");
}
assembly {
// Fetch proof offset which is 4 + 32 bytes away from
// start of encoded for `verifyProof(bytes,uint256[])`,
// and 4 + 32 + 32 away for `verifyProof(address,bytes,uint256[])`
proof_offset := calldataload(
add(
encoded.offset,
add(0x04, mul(0x20, eq(funcSig, 0xaf83a18d)))
)
)
proof_offset := calldataload(add(encoded.offset, proof_offset))
proof_length := calldataload(
add(add(encoded.offset, 0x04), proof_offset)
@@ -154,7 +175,7 @@ contract SwapProofCommitments {
let wordCommitment := mload(add(commitment, i))
equal := eq(wordProof, wordCommitment)
if eq(equal, 0) {
return(0, 0)
break
}
}
}
@@ -163,36 +184,38 @@ contract SwapProofCommitments {
} /// end checkKzgCommits
}
contract DataAttestationSingle is LoadInstances, SwapProofCommitments {
/**
* @notice Struct used to make view only call to account to fetch the data that EZKL reads from.
* @param the address of the account to make calls to
* @param the abi encoded function calls to make to the `contractAddress`
*/
struct AccountCall {
address contractAddress;
bytes callData;
contract DataAttestation is LoadInstances, SwapProofCommitments {
// the address of the account to make calls to
address public immutable contractAddress;
// the abi encoded function calls to make to the `contractAddress` that returns the attested to data
bytes public callData;
struct Scalars {
// The number of base 10 decimals to scale the data by.
// For most ERC20 tokens this is 1e18
uint256 decimals;
// The number of fractional bits of the fixed point EZKL data points.
uint256 bits;
}
AccountCall public accountCall;
uint[] scales;
Scalars[] private scalars;
address public admin;
function getScalars(uint256 index) public view returns (Scalars memory) {
return scalars[index];
}
/**
* @notice EZKL P value
* @dev In order to prevent the verifier from accepting two versions of the same pubInput, n and the quantity (n + P), where n + P <= 2^256, we require that all instances are strictly less than P.
* @dev The reason for this is that the assembly code of the verifier performs all arithmetic operations modulo P and as a consequence can't distinguish between n and n + P.
*/
uint256 constant ORDER =
uint256 public constant ORDER =
uint256(
0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000001
);
uint256 constant INPUT_LEN = 0;
uint256 constant OUTPUT_LEN = 0;
uint256 public constant HALF_ORDER = ORDER >> 1;
uint8 public instanceOffset;
@@ -204,53 +227,27 @@ contract DataAttestationSingle is LoadInstances, SwapProofCommitments {
constructor(
address _contractAddresses,
bytes memory _callData,
uint256 _decimals,
uint[] memory _scales,
uint8 _instanceOffset,
address _admin
uint256[] memory _decimals,
uint[] memory _bits,
uint8 _instanceOffset
) {
admin = _admin;
for (uint i; i < _scales.length; i++) {
scales.push(1 << _scales[i]);
require(
_bits.length == _decimals.length,
"Invalid scalar array lengths"
);
for (uint i; i < _bits.length; i++) {
scalars.push(Scalars(10 ** _decimals[i], 1 << _bits[i]));
}
populateAccountCalls(_contractAddresses, _callData, _decimals);
contractAddress = _contractAddresses;
callData = _callData;
instanceOffset = _instanceOffset;
}
function updateAdmin(address _admin) external {
require(msg.sender == admin, "Only admin can update admin");
if (_admin == address(0)) {
revert();
}
admin = _admin;
}
function updateAccountCalls(
address _contractAddresses,
bytes memory _callData,
uint256 _decimals
) external {
require(msg.sender == admin, "Only admin can update account calls");
populateAccountCalls(_contractAddresses, _callData, _decimals);
}
function populateAccountCalls(
address _contractAddresses,
bytes memory _callData,
uint256 _decimals
) internal {
AccountCall memory _accountCall = accountCall;
_accountCall.contractAddress = _contractAddresses;
_accountCall.callData = _callData;
_accountCall.decimals = 10 ** _decimals;
accountCall = _accountCall;
}
function mulDiv(
uint256 x,
uint256 y,
uint256 denominator
) internal pure returns (uint256 result) {
) public pure returns (uint256 result) {
unchecked {
uint256 prod0;
uint256 prod1;
@@ -298,21 +295,28 @@ contract DataAttestationSingle is LoadInstances, SwapProofCommitments {
/**
* @dev Quantize the data returned from the account calls to the scale used by the EZKL model.
* @param x - One of the elements of the data returned from the account calls
* @param _decimals - Number of base 10 decimals to scale the data by.
* @param _scale - The base 2 scale used to convert the floating point value into a fixed point value.
* @param _scalars - The scaling factors for the data returned from the account calls.
*
*/
function quantizeData(
int x,
uint256 _decimals,
uint256 _scale
) internal pure returns (int256 quantized_data) {
Scalars memory _scalars
) public pure returns (int256 quantized_data) {
if (_scalars.bits == 1 && _scalars.decimals == 1) {
return x;
}
bool neg = x < 0;
if (neg) x = -x;
uint output = mulDiv(uint256(x), _scale, _decimals);
if (mulmod(uint256(x), _scale, _decimals) * 2 >= _decimals) {
uint output = mulDiv(uint256(x), _scalars.bits, _scalars.decimals);
if (
mulmod(uint256(x), _scalars.bits, _scalars.decimals) * 2 >=
_scalars.decimals
) {
output += 1;
}
if (output > HALF_ORDER) {
revert("Overflow field modulus");
}
quantized_data = neg ? -int256(output) : int256(output);
}
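In other words, with `decimals = 10**d` and `bits = 1 << b` as built in the constructor, the function computes `round(x * 2**b / 10**d)` with half-away-from-zero rounding. A sketch of the same rule in Python, with a worked value:

```python
# Python mirror of quantizeData, assuming Scalars holds decimals = 10**d
# and bits = 1 << b exactly as the constructor stores them.
def quantize(x: int, decimals: int, bits: int) -> int:
    if bits == 1 and decimals == 1:
        return x
    neg = x < 0
    if neg:
        x = -x
    out = (x * bits) // decimals
    if (x * bits) % decimals * 2 >= decimals:  # round half away from zero
        out += 1
    # the contract additionally reverts if out > HALF_ORDER
    return -out if neg else out

# 1.5 tokens with 18 decimals at 13 fractional bits -> 1.5 * 2**13
assert quantize(1_500_000_000_000_000_000, 10**18, 1 << 13) == 12288
```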
/**
@@ -324,7 +328,7 @@ contract DataAttestationSingle is LoadInstances, SwapProofCommitments {
function staticCall(
address target,
bytes memory data
) internal view returns (bytes memory) {
) public view returns (bytes memory) {
(bool success, bytes memory returndata) = target.staticcall(data);
if (success) {
if (returndata.length == 0) {
@@ -345,7 +349,7 @@ contract DataAttestationSingle is LoadInstances, SwapProofCommitments {
*/
function toFieldElement(
int256 x
) internal pure returns (uint256 field_element) {
) public pure returns (uint256 field_element) {
// The casting down to uint256 is safe because the order is about 2^254, and the value
// of x ranges from -2^127 to 2^127, so x + int(ORDER) is always positive.
return uint256(x + int(ORDER)) % ORDER;
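The same mapping in Python, for reference (ORDER copied from the constant above):

```python
# Sketch of toFieldElement: shift a signed quantized value into [0, ORDER).
ORDER = 0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000001

def to_field_element(x: int) -> int:
    # safe for |x| far below ORDER, matching the contract's range assumption
    return (x + ORDER) % ORDER

assert to_field_element(-1) == ORDER - 1
```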
@@ -355,315 +359,16 @@ contract DataAttestationSingle is LoadInstances, SwapProofCommitments {
* @dev Make the account calls to fetch the data that EZKL reads from and attest to the data.
* @param instances - The public instances to the proof (the data in the proof that is publicly accessible to the verifier).
*/
function attestData(uint256[] memory instances) internal view {
require(
instances.length >= INPUT_LEN + OUTPUT_LEN,
"Invalid public inputs length"
);
AccountCall memory _accountCall = accountCall;
uint[] memory _scales = scales;
bytes memory returnData = staticCall(
_accountCall.contractAddress,
_accountCall.callData
);
function attestData(uint256[] memory instances) public view {
bytes memory returnData = staticCall(contractAddress, callData);
int256[] memory x = abi.decode(returnData, (int256[]));
uint _offset;
int output = quantizeData(x[0], _accountCall.decimals, _scales[0]);
uint field_element = toFieldElement(output);
int output;
uint fieldElement;
for (uint i = 0; i < x.length; i++) {
if (field_element != instances[i + instanceOffset]) {
_offset += 1;
} else {
break;
}
}
uint length = x.length - _offset;
for (uint i = 1; i < length; i++) {
output = quantizeData(x[i], _accountCall.decimals, _scales[i]);
field_element = toFieldElement(output);
require(
field_element == instances[i + instanceOffset + _offset],
"Public input does not match"
);
}
}
/**
* @dev Verify the proof with the data attestation.
* @param verifier - The address of the verifier contract.
* @param encoded - The verifier calldata.
*/
function verifyWithDataAttestation(
address verifier,
bytes calldata encoded
) public view returns (bool) {
require(verifier.code.length > 0, "Address: call to non-contract");
attestData(getInstancesCalldata(encoded));
// static call the verifier contract to verify the proof
(bool success, bytes memory returndata) = verifier.staticcall(encoded);
if (success) {
return abi.decode(returndata, (bool));
} else {
revert("low-level call to verifier failed");
}
}
}
// This contract serves as a Data Attestation Verifier for the EZKL model.
// It is designed to read and attest to instances of proofs generated from a specified circuit.
// It is particularly constructed to read only int256 data from specified on-chain contracts' view functions.
// Overview of the contract functionality:
// 1. Initialization: Through the constructor, it sets up the contract calls that the EZKL model will read from.
// 2. Data Quantization: Quantizes the returned data into a scaled fixed-point representation. See the `quantizeData` method for details.
// 3. Static Calls: Makes static calls to fetch data from other contracts. See the `staticCall` method.
// 4. Field Element Conversion: The fixed-point representation is then converted into a field element modulo P using the `toFieldElement` method.
// 5. Data Attestation: The `attestData` method validates that the public instances match the data fetched and processed by the contract.
// 6. Proof Verification: The `verifyWithDataAttestationMulti` method parses the instances out of the encoded calldata, calls the `attestData` method to validate the public instances, and then calls the `verifyProof` method to verify the proof on the verifier.
// 6b. Optional KZG Commitment Verification: It also checks the KZG commitments in the proof against the expected commitments using the `checkKzgCommits` method.
contract DataAttestationMulti is LoadInstances, SwapProofCommitments {
/**
* @notice Struct used to make view only calls to accounts to fetch the data that EZKL reads from.
* @param the address of the account to make calls to
* @param the abi encoded function calls to make to the `contractAddress`
*/
struct AccountCall {
address contractAddress;
mapping(uint256 => bytes) callData;
mapping(uint256 => uint256) decimals;
uint callCount;
}
AccountCall[] public accountCalls;
uint[] public scales;
address public admin;
/**
* @notice EZKL P value
* @dev In order to prevent the verifier from accepting two versions of the same pubInput, n and the quantity (n + P), where n + P <= 2^256, we require that all instances are strictly less than P.
* @dev The reason for this is that the assembly code of the verifier performs all arithmetic operations modulo P and as a consequence can't distinguish between n and n + P.
*/
uint256 constant ORDER =
uint256(
0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000001
);
uint256 constant INPUT_CALLS = 0;
uint256 constant OUTPUT_CALLS = 0;
uint8 public instanceOffset;
/**
* @dev Initialize the contract with account calls the EZKL model will read from.
* @param _contractAddresses - The calls to all the contracts EZKL reads storage from.
* @param _callData - The abi encoded function calls to make to the `contractAddress` that EZKL reads storage from.
*/
constructor(
address[] memory _contractAddresses,
bytes[][] memory _callData,
uint256[][] memory _decimals,
uint[] memory _scales,
uint8 _instanceOffset,
address _admin
) {
admin = _admin;
for (uint i; i < _scales.length; i++) {
scales.push(1 << _scales[i]);
}
populateAccountCalls(_contractAddresses, _callData, _decimals);
instanceOffset = _instanceOffset;
}
function updateAdmin(address _admin) external {
require(msg.sender == admin, "Only admin can update admin");
if (_admin == address(0)) {
revert();
}
admin = _admin;
}
function updateAccountCalls(
address[] memory _contractAddresses,
bytes[][] memory _callData,
uint256[][] memory _decimals
) external {
require(msg.sender == admin, "Only admin can update account calls");
populateAccountCalls(_contractAddresses, _callData, _decimals);
}
function populateAccountCalls(
address[] memory _contractAddresses,
bytes[][] memory _callData,
uint256[][] memory _decimals
) internal {
require(
_contractAddresses.length == _callData.length &&
accountCalls.length == _contractAddresses.length,
"Invalid input length"
);
require(
_decimals.length == _contractAddresses.length,
"Invalid number of decimals"
);
// fill in the accountCalls storage array
uint counter = 0;
for (uint256 i = 0; i < _contractAddresses.length; i++) {
AccountCall storage accountCall = accountCalls[i];
accountCall.contractAddress = _contractAddresses[i];
accountCall.callCount = _callData[i].length;
for (uint256 j = 0; j < _callData[i].length; j++) {
accountCall.callData[j] = _callData[i][j];
accountCall.decimals[j] = 10 ** _decimals[i][j];
}
// count the total number of storage reads across all of the accounts
counter += _callData[i].length;
}
require(
counter == INPUT_CALLS + OUTPUT_CALLS,
"Invalid number of calls"
);
}
function mulDiv(
uint256 x,
uint256 y,
uint256 denominator
) internal pure returns (uint256 result) {
unchecked {
uint256 prod0;
uint256 prod1;
assembly {
let mm := mulmod(x, y, not(0))
prod0 := mul(x, y)
prod1 := sub(sub(mm, prod0), lt(mm, prod0))
}
if (prod1 == 0) {
return prod0 / denominator;
}
require(denominator > prod1, "Math: mulDiv overflow");
uint256 remainder;
assembly {
remainder := mulmod(x, y, denominator)
prod1 := sub(prod1, gt(remainder, prod0))
prod0 := sub(prod0, remainder)
}
uint256 twos = denominator & (~denominator + 1);
assembly {
denominator := div(denominator, twos)
prod0 := div(prod0, twos)
twos := add(div(sub(0, twos), twos), 1)
}
prod0 |= prod1 * twos;
uint256 inverse = (3 * denominator) ^ 2;
inverse *= 2 - denominator * inverse;
inverse *= 2 - denominator * inverse;
inverse *= 2 - denominator * inverse;
inverse *= 2 - denominator * inverse;
inverse *= 2 - denominator * inverse;
inverse *= 2 - denominator * inverse;
result = prod0 * inverse;
return result;
}
}
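Since Python integers are unbounded, the intended semantics of this routine — a full 512-bit intermediate product followed by exact floor division — can be stated in a few lines, which is handy when sanity-checking the assembly:

```python
# Reference semantics for mulDiv: floor(x * y / denominator) computed
# exactly, failing only if the final result exceeds 256 bits.
def mul_div(x: int, y: int, denominator: int) -> int:
    assert denominator > 0
    result = (x * y) // denominator
    assert result < 2**256, "Math: mulDiv overflow"
    return result

# the intermediate product here overflows 256 bits, but the result does not
assert mul_div(2**200, 2**100, 2**50) == 2**250
```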
/**
* @dev Quantize the data returned from the account calls to the scale used by the EZKL model.
* @param data - The data returned from the account calls.
* @param decimals - The number of decimals the data returned from the account calls has (for floating point representation).
* @param scale - The scale used to convert the floating point value into a fixed point value.
*/
function quantizeData(
bytes memory data,
uint256 decimals,
uint256 scale
) internal pure returns (int256 quantized_data) {
int x = abi.decode(data, (int256));
bool neg = x < 0;
if (neg) x = -x;
uint output = mulDiv(uint256(x), scale, decimals);
if (mulmod(uint256(x), scale, decimals) * 2 >= decimals) {
output += 1;
}
quantized_data = neg ? -int256(output) : int256(output);
}
/**
* @dev Make a static call to the account to fetch the data that EZKL reads from.
* @param target - The address of the account to make calls to.
* @param data - The abi encoded function calls to make to the `contractAddress` that EZKL reads storage from.
* @return The data returned from the account calls. (Must come from either a view or pure function. Will throw an error otherwise)
*/
function staticCall(
address target,
bytes memory data
) internal view returns (bytes memory) {
(bool success, bytes memory returndata) = target.staticcall(data);
if (success) {
if (returndata.length == 0) {
require(
target.code.length > 0,
"Address: call to non-contract"
);
}
return returndata;
} else {
revert("Address: low-level call failed");
}
}
/**
* @dev Convert the fixed point quantized data into a field element.
* @param x - The quantized data.
* @return field_element - The field element.
*/
function toFieldElement(
int256 x
) internal pure returns (uint256 field_element) {
// The casting down to uint256 is safe because the order is about 2^254, and the value
// of x ranges from -2^127 to 2^127, so x + int(ORDER) is always positive.
return uint256(x + int(ORDER)) % ORDER;
}
/**
* @dev Make the account calls to fetch the data that EZKL reads from and attest to the data.
* @param instances - The public instances to the proof (the data in the proof that is publicly accessible to the verifier).
*/
function attestData(uint256[] memory instances) internal view {
require(
instances.length >= INPUT_CALLS + OUTPUT_CALLS,
"Invalid public inputs length"
);
uint256 _accountCount = accountCalls.length;
uint counter = 0;
for (uint8 i = 0; i < _accountCount; ++i) {
address account = accountCalls[i].contractAddress;
for (uint8 j = 0; j < accountCalls[i].callCount; j++) {
bytes memory returnData = staticCall(
account,
accountCalls[i].callData[j]
);
uint256 scale = scales[counter];
int256 quantized_data = quantizeData(
returnData,
accountCalls[i].decimals[j],
scale
);
uint256 field_element = toFieldElement(quantized_data);
require(
field_element == instances[counter + instanceOffset],
"Public input does not match"
);
counter++;
output = quantizeData(x[i], scalars[i]);
fieldElement = toFieldElement(output);
if (fieldElement != instances[i]) {
revert("Public input does not match");
}
}
}

View File

@@ -1,20 +1,52 @@
# EZKL Security Note: Quantization-Induced Model Backdoors
# EZKL Security Note: Quantization-Activated Model Backdoors
> Note: this only affects a situation where a party separate from an application's developer has access to the model's weights and can modify them. This is a common scenario in adversarial machine learning research, but can be less common in real-world applications. If you're building your models in house and deploying them yourself, this is less of a concern. If you're building a permissionless system where anyone can submit models, this is more of a concern.
## Model backdoors and provenance
Models processed through EZKL's quantization step can harbor backdoors that are dormant in the original full-precision model but activate during quantization. These backdoors force specific outputs when triggered, with impact varying by application.
Machine learning models inherently suffer from robustness issues, which can lead to various
kinds of attacks, from backdoors to evasion attacks. These vulnerabilities are a direct byproduct of how machine learning models learn and cannot be remediated.
Key Factors:
We say a model has a backdoor whenever a specific attacker-chosen trigger in the input leads
to the model misbehaving. For instance, if we have an image classifier discriminating cats from dogs, the ability to turn any image of a cat into an image classified as a dog by changing a specific pixel pattern constitutes a backdoor.
- Larger models increase attack feasibility through more parameter capacity
- Smaller quantization scales facilitate attacks by allowing greater weight modifications
- Rebase ratio of 1 enables exploitation of convolutional layer consistency
Backdoors can be introduced using many different vectors. An attacker can introduce a
backdoor using traditional security vulnerabilities. For instance, they could directly alter the file containing model weights or dynamically hack the Python code of the model. In addition, backdoors can be introduced by the training data through a process known as poisoning. In this case, an attacker adds malicious data points to the dataset before the model is trained so that the model learns to associate the backdoor trigger with the intended misbehavior.
Limitations:
All these vectors constitute a whole range of provenance challenges, as any component of an
AI system can virtually be an entrypoint for a backdoor. Although provenance is already a
concern with traditional code, the issue is exacerbated with AI, as retraining a model is
cost-prohibitive. It is thus impractical to translate the “recompile it yourself” thinking to AI.
- Attack effectiveness depends on calibration settings and internal rescaling operations.
## Quantization activated backdoors
Backdoors are a generic concern in AI that is outside the scope of EZKL. However, EZKL may
activate a specific subset of backdoors. Several academic papers have demonstrated the
possibility, both in theory and in practice, of implanting undetectable and inactive backdoors in a full precision model that can be reactivated by quantization.
An external attacker may trick the user of an application running EZKL into loading a model
containing a quantization backdoor. This backdoor is active in the resulting model and circuit but not in the full-precision model supplied to EZKL, compromising the integrity of the target application and the resulting proof.
### When is this a concern for me as a user?
Any untrusted component in your AI stack may be a backdoor vector. In practice, the most
sensitive parts include:
- Datasets downloaded from the web or containing crowdsourced data
- Models downloaded from the web even after finetuning
- Untrusted software dependencies (well-known frameworks such as PyTorch can typically
be considered trusted)
- Any component loaded through an unsafe serialization format, such as Pickle.
Because backdoors are inherent to ML and cannot be eliminated, reviewing the provenance of
these sensitive components is especially important.
### Responsibilities of the user and EZKL
As EZKL cannot prevent backdoored models from being used, it is the responsibility of the user to review the provenance of all the components in their AI stack to ensure that no backdoor could have been implanted. EZKL shall not be held responsible for misleading prediction proofs resulting from using a backdoored model or for any harm caused to a system or its users due to a misbehaving model.
### Limitations:
- Attack effectiveness depends on calibration settings and internal rescaling operations.
- Further research needed on backdoor persistence through witness/proof stages.
- Can be mitigated by evaluating the quantized model (using `ezkl gen-witness`), rather than relying on the evaluation of the original model.
- Can be mitigated by evaluating the quantized model (using `ezkl gen-witness`) rather than relying on the evaluation of the original model in pytorch or onnx-runtime, as differences in evaluation could reveal a backdoor (see the sketch below).
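A hedged sketch of that mitigation, assuming the ezkl Python API's `gen_witness`/`felt_to_float` helpers and the `model_output_scales` settings field; paths, shapes, and the tolerance threshold are placeholders:

```python
# Compare the quantized circuit's outputs against the full-precision ONNX
# model; a large divergence is a signal worth investigating.
import json
import numpy as np
import onnxruntime as ort
import ezkl

async def divergence(data_path, compiled_path, onnx_path, settings_path):
    witness = await ezkl.gen_witness(data_path, compiled_path, "witness.json")
    scale = json.load(open(settings_path))["model_output_scales"][0]
    quantized = [ezkl.felt_to_float(f, scale) for f in witness["outputs"][0]]

    # assumes a single flat input tensor, as in the repo's notebook examples
    x = np.array(json.load(open(data_path))["input_data"][0], dtype=np.float32)
    sess = ort.InferenceSession(onnx_path)
    full = sess.run(None, {sess.get_inputs()[0].name: x[None, :]})[0].flatten()

    return max(abs(q - f) for q, f in zip(quantized, full))
```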
References:

View File

@@ -1,7 +1,7 @@
import ezkl
project = 'ezkl'
release = '0.0.0'
release = '22.0.3'
version = release

View File

@@ -32,6 +32,7 @@ use mnist::*;
use rand::rngs::OsRng;
use std::marker::PhantomData;
mod params;
const K: usize = 20;
@@ -208,6 +209,8 @@ where
padding: vec![(PADDING, PADDING); 2],
stride: vec![STRIDE; 2],
group: 1,
data_format: DataFormat::NCHW,
kernel_format: KernelFormat::OIHW,
};
let x = config
.layer_config

File diff suppressed because it is too large

View File

@@ -0,0 +1,13 @@
# download test data
# check if the first argument has been set
if [ ! -z "$1" ]; then
DATA_DIR=$1
else
DATA_DIR=data
fi
echo "Downloading data to $DATA_DIR"
if [ ! -d "$DATA_DIR/CATDOG" ]; then
kaggle datasets download tongpython/cat-and-dog -p "$DATA_DIR/CATDOG" --unzip
fi

View File

@@ -272,33 +272,21 @@
"\n",
"- For file data sources, the raw floating point values that eventually get quantized, converted into field elements and stored in `witness.json` to be consumed by the circuit are stored. The output data contains the expected floating point values returned as outputs from running your vanilla pytorch model on the given inputs.\n",
"- For on chain data sources, the input_data field contains all the data necessary to read and format the on chain data into something digestable by EZKL (aka field elements :-D). \n",
"Here is what the schema for an on-chain data source graph input file should look like:\n",
"Here is what the schema for an on-chain data source graph input file should look like for a single call data source:\n",
" \n",
"```json\n",
"{\n",
" \"input_data\": {\n",
" \"rpc\": \"http://localhost:3030\", // The rpc endpoint of the chain you are deploying your verifier to\n",
" \"calls\": [\n",
" {\n",
" \"call_data\": [\n",
" [\n",
" \"71e5ee5f0000000000000000000000000000000000000000000000000000000000000000\", // The abi encoded call data to a view function that returns a single on-chain data point (we only support uint256 returns for now)\n",
" 7 // The number of decimal places of the large uint256 value. This is our way of representing large wei values as floating points on chain, since the evm only natively supports integer values.\n",
" ],\n",
" [\n",
" \"71e5ee5f0000000000000000000000000000000000000000000000000000000000000001\",\n",
" 5\n",
" ],\n",
" [\n",
" \"71e5ee5f0000000000000000000000000000000000000000000000000000000000000002\",\n",
" 5\n",
" ]\n",
" ],\n",
" \"address\": \"5fbdb2315678afecb367f032d93f642f64180aa3\" // The address of the contract that we are calling to get the data. \n",
" }\n",
" ]\n",
" }\n",
"}"
" \"input_data\": {\n",
" \"rpc\": \"http://localhost:3030\", // The rpc endpoint of the chain you are deploying your verifier to\n",
" \"calls\": {\n",
" \"call_data\": \"1f3be514000000000000000000000000c6962004f452be9203591991d15f6b388e09e8d00000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000000b000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000009000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000070000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000500000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000\", // The abi encoded call data to a view function that returns an array of on-chain data points we are attesting to. \n",
" \"decimals\": 0, // The number of decimal places of the large uint256 value. This is our way of representing large wei values as floating points on chain, since the evm only natively supports integer values.\n",
" \"address\": \"9A213F53334279C128C37DA962E5472eCD90554f\", // The address of the contract that we are calling to get the data. \n",
" \"len\": 12 // The number of data points returned by the view function (the length of the array)\n",
" }\n",
" }\n",
"}\n",
"```"
]
},
{
@@ -307,7 +295,7 @@
"metadata": {},
"outputs": [],
"source": [
"await ezkl.setup_test_evm_witness(\n",
"await ezkl.setup_test_evm_data(\n",
" data_path,\n",
" compiled_model_path,\n",
" # we write the call data to the same file as the input data\n",
@@ -484,8 +472,8 @@
"\n",
"res = await ezkl.deploy_evm(\n",
" addr_path_verifier,\n",
" 'http://127.0.0.1:3030',\n",
" sol_code_path,\n",
" 'http://127.0.0.1:3030'\n",
")\n",
"\n",
"assert res == True"
@@ -538,9 +526,9 @@
"res = await ezkl.deploy_da_evm(\n",
" addr_path_da,\n",
" input_path,\n",
" RPC_URL,\n",
" settings_path,\n",
" sol_code_path,\n",
" RPC_URL,\n",
" )\n"
]
},
@@ -569,8 +557,8 @@
"\n",
"res = await ezkl.verify_evm(\n",
" addr,\n",
" proof_path,\n",
" RPC_URL,\n",
" proof_path,\n",
" addr_da,\n",
")"
]
@@ -578,7 +566,7 @@
],
"metadata": {
"kernelspec": {
"display_name": "ezkl",
"display_name": ".env",
"language": "python",
"name": "python3"
},
@@ -592,7 +580,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.5"
"version": "3.12.9"
},
"orig_nbformat": 4
},

View File

@@ -337,6 +337,7 @@
"w3 = Web3(HTTPProvider(RPC_URL))\n",
"\n",
"def test_on_chain_data(res):\n",
" print(f'poseidon_hash: {res[\"processed_outputs\"][\"poseidon_hash\"]}')\n",
" # Step 0: Convert the tensor to a flat list\n",
" data = [int(ezkl.felt_to_big_endian(res['processed_outputs']['poseidon_hash'][0]), 0)]\n",
"\n",
@@ -356,6 +357,9 @@
" arr.push(_numbers[i]);\n",
" }\n",
" }\n",
" function getArr() public view returns (uint[] memory) {\n",
" return arr;\n",
" }\n",
" }\n",
" '''\n",
"\n",
@@ -382,31 +386,30 @@
" contract = w3.eth.contract(address=tx_receipt['contractAddress'], abi=abi)\n",
"\n",
" # Step 4: Interact with the contract\n",
" calldata = []\n",
" for i, _ in enumerate(data):\n",
" call = contract.functions.arr(i).build_transaction()\n",
" calldata.append((call['data'][2:], 0))\n",
" calldata = contract.functions.getArr().build_transaction()['data'][2:]\n",
"\n",
" # Prepare the calls_to_account object\n",
" # If you were calling view functions across multiple contracts,\n",
" # you would have multiple entries in the calls_to_account array,\n",
" # one for each contract.\n",
" calls_to_account = [{\n",
" decimals = [0] * len(data)\n",
" call_to_account = {\n",
" 'call_data': calldata,\n",
" 'decimals': decimals,\n",
" 'address': contract.address[2:], # remove the '0x' prefix\n",
" }]\n",
" }\n",
"\n",
" print(f'calls_to_account: {calls_to_account}')\n",
" print(f'call_to_account: {call_to_account}')\n",
"\n",
" return calls_to_account\n",
" return call_to_account\n",
"\n",
"# Now let's start the Anvil process. You don't need to do this if you are deploying to a non-local chain.\n",
"start_anvil()\n",
"\n",
"# Now let's call our function, passing in the same input tensor we used to export the model 2 cells above.\n",
"calls_to_account = test_on_chain_data(res)\n",
"call_to_account = test_on_chain_data(res)\n",
"\n",
"data = dict(input_data = [data_array], output_data = {'rpc': RPC_URL, 'calls': calls_to_account })\n",
"data = dict(input_data = [data_array], output_data = {'rpc': RPC_URL, 'call': call_to_account })\n",
"\n",
"# Serialize on-chain data into file:\n",
"json.dump(data, open(\"input.json\", 'w'))\n",
@@ -540,8 +543,8 @@
"\n",
"res = await ezkl.deploy_evm(\n",
" addr_path_verifier,\n",
" 'http://127.0.0.1:3030',\n",
" sol_code_path,\n",
" 'http://127.0.0.1:3030'\n",
")\n",
"\n",
"assert res == True"
@@ -594,9 +597,9 @@
"res = await ezkl.deploy_da_evm(\n",
" addr_path_da,\n",
" input_path,\n",
" RPC_URL,\n",
" settings_path,\n",
" sol_code_path,\n",
" RPC_URL,\n",
" )\n"
]
},
@@ -625,8 +628,8 @@
"\n",
"res = await ezkl.verify_evm(\n",
" addr,\n",
" proof_path,\n",
" RPC_URL,\n",
" proof_path,\n",
" addr_da,\n",
")"
]
@@ -634,7 +637,7 @@
],
"metadata": {
"kernelspec": {
"display_name": "ezkl",
"display_name": ".env",
"language": "python",
"name": "python3"
},
@@ -648,7 +651,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.7"
"version": "3.12.9"
},
"orig_nbformat": 4
},

View File

@@ -276,33 +276,21 @@
"\n",
"- For file data sources, the raw floating point values that eventually get quantized, converted into field elements and stored in `witness.json` to be consumed by the circuit are stored. The output data contains the expected floating point values returned as outputs from running your vanilla pytorch model on the given inputs.\n",
"- For on chain data sources, the input_data field contains all the data necessary to read and format the on chain data into something digestable by EZKL (aka field elements :-D). \n",
"Here is what the schema for an on-chain data source graph input file should look like:\n",
"Here is what the schema for an on-chain data source graph input file should look like for a single call data source:\n",
" \n",
"```json\n",
"{\n",
" \"input_data\": {\n",
" \"rpc\": \"http://localhost:3030\", // The rpc endpoint of the chain you are deploying your verifier to\n",
" \"calls\": [\n",
" {\n",
" \"call_data\": [\n",
" [\n",
" \"71e5ee5f0000000000000000000000000000000000000000000000000000000000000000\", // The abi encoded call data to a view function that returns a single on-chain data point (we only support uint256 returns for now)\n",
" 7 // The number of decimal places of the large uint256 value. This is our way of representing large wei values as floating points on chain, since the evm only natively supports integer values.\n",
" ],\n",
" [\n",
" \"71e5ee5f0000000000000000000000000000000000000000000000000000000000000001\",\n",
" 5\n",
" ],\n",
" [\n",
" \"71e5ee5f0000000000000000000000000000000000000000000000000000000000000002\",\n",
" 5\n",
" ]\n",
" ],\n",
" \"address\": \"5fbdb2315678afecb367f032d93f642f64180aa3\" // The address of the contract that we are calling to get the data. \n",
" }\n",
" ]\n",
" }\n",
"}"
" \"input_data\": {\n",
" \"rpc\": \"http://localhost:3030\", // The rpc endpoint of the chain you are deploying your verifier to\n",
" \"calls\": {\n",
" \"call_data\": \"1f3be514000000000000000000000000c6962004f452be9203591991d15f6b388e09e8d00000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000000b000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000009000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000070000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000500000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000\", // The abi encoded call data to a view function that returns an array of on-chain data points we are attesting to. \n",
" \"decimals\": 0, // The number of decimal places of the large uint256 value. This is our way of representing large wei values as floating points on chain, since the evm only natively supports integer values.\n",
" \"address\": \"9A213F53334279C128C37DA962E5472eCD90554f\", // The address of the contract that we are calling to get the data. \n",
" \"len\": 3 // The number of data points returned by the view function (the length of the array)\n",
" }\n",
" }\n",
"}\n",
"```"
]
},
{
@@ -311,7 +299,7 @@
"metadata": {},
"outputs": [],
"source": [
"await ezkl.setup_test_evm_witness(\n",
"await ezkl.setup_test_evm_data(\n",
" data_path,\n",
" compiled_model_path,\n",
" # we write the call data to the same file as the input data\n",
@@ -337,7 +325,7 @@
"metadata": {},
"outputs": [],
"source": [
"res = await ezkl.get_srs( settings_path)\n"
"res = await ezkl.get_srs( settings_path)"
]
},
{
@@ -348,27 +336,6 @@
"We now need to generate the circuit witness. These are the model outputs (and any hashes) that are generated when feeding the previously generated `input.json` through the circuit / model. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"!export RUST_BACKTRACE=1\n",
"\n",
"witness_path = \"witness.json\"\n",
"\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path, vk_path)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Here we setup verifying and proving keys for the circuit. As the name suggests the proving key is needed for ... proving and the verifying key is needed for ... verifying. "
]
},
{
"cell_type": "code",
"execution_count": null,
@@ -391,6 +358,27 @@
"assert os.path.isfile(settings_path)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"!export RUST_BACKTRACE=1\n",
"\n",
"witness_path = \"witness.json\"\n",
"\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path, vk_path)"
]
},
{
"attachments": {},
"cell_type": "markdown",
"metadata": {},
"source": [
"Here we setup verifying and proving keys for the circuit. As the name suggests the proving key is needed for ... proving and the verifying key is needed for ... verifying. "
]
},
{
"attachments": {},
"cell_type": "markdown",
@@ -486,8 +474,8 @@
"\n",
"res = await ezkl.deploy_evm(\n",
" addr_path_verifier,\n",
" 'http://127.0.0.1:3030',\n",
" sol_code_path,\n",
" 'http://127.0.0.1:3030'\n",
")\n",
"\n",
"assert res == True"
@@ -541,9 +529,9 @@
"res = await ezkl.deploy_da_evm(\n",
" addr_path_da,\n",
" input_path,\n",
" RPC_URL,\n",
" settings_path,\n",
" sol_code_path,\n",
" RPC_URL,\n",
" )\n"
]
},
@@ -572,8 +560,8 @@
"\n",
"res = await ezkl.verify_evm(\n",
" addr,\n",
" proof_path,\n",
" RPC_URL,\n",
" proof_path,\n",
" addr_da,\n",
")"
]
@@ -581,7 +569,7 @@
],
"metadata": {
"kernelspec": {
"display_name": "ezkl",
"display_name": ".env",
"language": "python",
"name": "python3"
},
@@ -595,7 +583,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.13"
"version": "3.11.5"
},
"orig_nbformat": 4
},

View File

@@ -453,8 +453,8 @@
"\n",
"res = await ezkl.deploy_evm(\n",
" address_path,\n",
" 'http://127.0.0.1:3030',\n",
" sol_code_path,\n",
" 'http://127.0.0.1:3030'\n",
")\n",
"\n",
"assert res == True\n",
@@ -474,8 +474,8 @@
"\n",
"res = await ezkl.verify_evm(\n",
" addr,\n",
" \"http://127.0.0.1:3030\",\n",
" proof_path,\n",
" \"http://127.0.0.1:3030\"\n",
")\n",
"assert res == True"
]
@@ -510,4 +510,4 @@
},
"nbformat": 4,
"nbformat_minor": 2
}
}

View File

@@ -462,8 +462,8 @@
"\n",
"res = await ezkl.deploy_evm(\n",
" address_path,\n",
" 'http://127.0.0.1:3030',\n",
" sol_code_path,\n",
" 'http://127.0.0.1:3030'\n",
")\n",
"\n",
"assert res == True\n",
@@ -483,8 +483,8 @@
"\n",
"res = await ezkl.verify_evm(\n",
" addr,\n",
" \"http://127.0.0.1:3030\",\n",
" proof_path,\n",
" \"http://127.0.0.1:3030\"\n",
")\n",
"assert res == True"
]
@@ -512,4 +512,4 @@
},
"nbformat": 4,
"nbformat_minor": 2
}
}

View File

@@ -1,462 +0,0 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Mean of ERC20 transfer amounts\n",
"\n",
"This notebook shows how to calculate the mean of ERC20 transfer amounts, pulling data in from a Postgres database. First we install and get the necessary libraries running. \n",
"The first of which is [shovel](https://indexsupply.com/shovel/docs/#getting-started), which is a library that allows us to pull data from the Ethereum blockchain into a Postgres database.\n",
"\n",
"Make sure you install postgres if needed https://indexsupply.com/shovel/docs/#getting-started. \n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"import getpass\n",
"import json\n",
"import time\n",
"import subprocess\n",
"\n",
"# swap out for the relevant linux/amd64, darwin/arm64, darwin/amd64, windows/amd64\n",
"os.system(\"curl -LO https://indexsupply.net/bin/1.0/linux/amd64/shovel\")\n",
"os.system(\"chmod +x shovel\")\n",
"\n",
"\n",
"os.environ[\"PG_URL\"] = \"postgres://\" + getpass.getuser() + \":@localhost:5432/shovel\"\n",
"\n",
"# create a config.json file with the following contents\n",
"config = {\n",
" \"pg_url\": \"$PG_URL\",\n",
" \"eth_sources\": [\n",
" {\"name\": \"mainnet\", \"chain_id\": 1, \"url\": \"https://ethereum-rpc.publicnode.com\"},\n",
" {\"name\": \"base\", \"chain_id\": 8453, \"url\": \"https://base-rpc.publicnode.com\"}\n",
" ],\n",
" \"integrations\": [{\n",
" \"name\": \"usdc_transfer\",\n",
" \"enabled\": True,\n",
" \"sources\": [{\"name\": \"mainnet\"}, {\"name\": \"base\"}],\n",
" \"table\": {\n",
" \"name\": \"usdc\",\n",
" \"columns\": [\n",
" {\"name\": \"log_addr\", \"type\": \"bytea\"},\n",
" {\"name\": \"block_num\", \"type\": \"numeric\"},\n",
" {\"name\": \"f\", \"type\": \"bytea\"},\n",
" {\"name\": \"t\", \"type\": \"bytea\"},\n",
" {\"name\": \"v\", \"type\": \"numeric\"}\n",
" ]\n",
" },\n",
" \"block\": [\n",
" {\"name\": \"block_num\", \"column\": \"block_num\"},\n",
" {\n",
" \"name\": \"log_addr\",\n",
" \"column\": \"log_addr\",\n",
" \"filter_op\": \"contains\",\n",
" \"filter_arg\": [\n",
" \"a0b86991c6218b36c1d19d4a2e9eb0ce3606eb48\",\n",
" \"833589fCD6eDb6E08f4c7C32D4f71b54bdA02913\"\n",
" ]\n",
" }\n",
" ],\n",
" \"event\": {\n",
" \"name\": \"Transfer\",\n",
" \"type\": \"event\",\n",
" \"anonymous\": False,\n",
" \"inputs\": [\n",
" {\"indexed\": True, \"name\": \"from\", \"type\": \"address\", \"column\": \"f\"},\n",
" {\"indexed\": True, \"name\": \"to\", \"type\": \"address\", \"column\": \"t\"},\n",
" {\"indexed\": False, \"name\": \"value\", \"type\": \"uint256\", \"column\": \"v\"}\n",
" ]\n",
" }\n",
" }]\n",
"}\n",
"\n",
"# write the config to a file\n",
"with open(\"config.json\", \"w\") as f:\n",
" f.write(json.dumps(config))\n",
"\n",
"\n",
"# print the two env variables\n",
"os.system(\"echo $PG_URL\")\n",
"\n",
"os.system(\"createdb -h localhost -p 5432 shovel\")\n",
"\n",
"os.system(\"echo shovel is now installed. starting:\")\n",
"\n",
"command = [\"./shovel\", \"-config\", \"config.json\"]\n",
"proc = subprocess.Popen(command)\n",
"\n",
"os.system(\"echo shovel started.\")\n",
"\n",
"time.sleep(10)\n",
"\n",
"# after we've fetched some data -- kill the process\n",
"proc.terminate()\n",
"\n"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "2wIAHwqH2_mo"
},
"source": [
"**Import Dependencies**"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "9Byiv2Nc2MsK"
},
"outputs": [],
"source": [
"# check if notebook is in colab\n",
"try:\n",
" # install ezkl\n",
" import google.colab\n",
" import subprocess\n",
" import sys\n",
" subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"ezkl\"])\n",
" subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"onnx\"])\n",
"\n",
"# rely on local installation of ezkl if the notebook is not in colab\n",
"except:\n",
" pass\n",
"\n",
"import ezkl\n",
"import torch\n",
"import datetime\n",
"import pandas as pd\n",
"import requests\n",
"import json\n",
"import os\n",
"\n",
"import logging\n",
"# # uncomment for more descriptive logging \n",
"FORMAT = '%(levelname)s %(name)s %(asctime)-15s %(filename)s:%(lineno)d %(message)s'\n",
"logging.basicConfig(format=FORMAT)\n",
"logging.getLogger().setLevel(logging.DEBUG)\n",
"\n",
"print(\"ezkl version: \", ezkl.__version__)"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "osjj-0Ta3E8O"
},
"source": [
"**Create Computational Graph**"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "x1vl9ZXF3EEW",
"outputId": "bda21d02-fe5f-4fb2-8106-f51a8e2e67aa"
},
"outputs": [],
"source": [
"from torch import nn\n",
"import torch\n",
"\n",
"\n",
"class Model(nn.Module):\n",
" def __init__(self):\n",
" super(Model, self).__init__()\n",
"\n",
" # x is a time series \n",
" def forward(self, x):\n",
" return [torch.mean(x)]\n",
"\n",
"\n",
"\n",
"\n",
"circuit = Model()\n",
"\n",
"\n",
"\n",
"\n",
"x = 0.1*torch.rand(1,*[1,5], requires_grad=True)\n",
"\n",
"# # print(torch.__version__)\n",
"device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n",
"\n",
"print(device)\n",
"\n",
"circuit.to(device)\n",
"\n",
"# Flips the neural net into inference mode\n",
"circuit.eval()\n",
"\n",
"# Export the model\n",
"torch.onnx.export(circuit, # model being run\n",
" x, # model input (or a tuple for multiple inputs)\n",
" \"lol.onnx\", # where to save the model (can be a file or file-like object)\n",
" export_params=True, # store the trained parameter weights inside the model file\n",
" opset_version=11, # the ONNX version to export the model to\n",
" do_constant_folding=True, # whether to execute constant folding for optimization\n",
" input_names = ['input'], # the model's input names\n",
" output_names = ['output'], # the model's output names\n",
" dynamic_axes={'input' : {0 : 'batch_size'}, # variable length axes\n",
" 'output' : {0 : 'batch_size'}})\n",
"\n",
"# export(circuit, input_shape=[1, 20])\n",
"\n"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "E3qCeX-X5xqd"
},
"source": [
"**Set Data Source and Get Data**"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "6RAMplxk5xPk",
"outputId": "bd2158fe-0c00-44fd-e632-6a3f70cdb7c9"
},
"outputs": [],
"source": [
"import getpass\n",
"# make an input.json file from the df above\n",
"input_filename = os.path.join('input.json')\n",
"\n",
"pg_input_file = dict(input_data = {\n",
" \"host\": \"localhost\",\n",
" # make sure you replace this with your own username\n",
" \"user\": getpass.getuser(),\n",
" \"dbname\": \"shovel\",\n",
" \"password\": \"\",\n",
" \"query\": \"SELECT v FROM usdc ORDER BY block_num DESC LIMIT 5\",\n",
" \"port\": \"5432\",\n",
"})\n",
"\n",
"json_formatted_str = json.dumps(pg_input_file, indent=2)\n",
"print(json_formatted_str)\n",
"\n",
"\n",
" # Serialize data into file:\n",
"json.dump(pg_input_file, open(input_filename, 'w' ))\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# this corresponds to 4 batches\n",
"calibration_filename = os.path.join('calibration.json')\n",
"\n",
"pg_cal_file = dict(input_data = {\n",
" \"host\": \"localhost\",\n",
" # make sure you replace this with your own username\n",
" \"user\": getpass.getuser(),\n",
" \"dbname\": \"shovel\",\n",
" \"password\": \"\",\n",
" \"query\": \"SELECT v FROM usdc ORDER BY block_num DESC LIMIT 20\",\n",
" \"port\": \"5432\",\n",
"})\n",
"\n",
" # Serialize data into file:\n",
"json.dump( pg_cal_file, open(calibration_filename, 'w' ))"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "eLJ7oirQ_HQR"
},
"source": [
"**EZKL Workflow**"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "rNw0C9QL6W88"
},
"outputs": [],
"source": [
"import subprocess\n",
"import os\n",
"\n",
"onnx_filename = os.path.join('lol.onnx')\n",
"compiled_filename = os.path.join('lol.compiled')\n",
"settings_filename = os.path.join('settings.json')\n",
"\n",
"run_args = ezkl.PyRunArgs()\n",
"run_args.decomp_legs = 4\n",
"\n",
"# Generate settings using ezkl\n",
"res = ezkl.gen_settings(onnx_filename, settings_filename, py_run_args=run_args)\n",
"\n",
"assert res == True\n",
"\n",
"res = await ezkl.calibrate_settings(input_filename, onnx_filename, settings_filename, \"resources\")\n",
"\n",
"assert res == True\n",
"\n",
"await ezkl.get_srs(settings_filename)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"\n",
"ezkl.compile_circuit(onnx_filename, compiled_filename, settings_filename)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "4MmE9SX66_Il",
"outputId": "16403639-66a4-4280-ac7f-6966b75de5a3"
},
"outputs": [],
"source": [
"# generate settings\n",
"\n",
"\n",
"# show the settings.json\n",
"with open(\"settings.json\") as f:\n",
" data = json.load(f)\n",
" json_formatted_str = json.dumps(data, indent=2)\n",
"\n",
" print(json_formatted_str)\n",
"\n",
"assert os.path.exists(\"settings.json\")\n",
"assert os.path.exists(\"input.json\")\n",
"assert os.path.exists(\"lol.onnx\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "fULvvnK7_CMb"
},
"outputs": [],
"source": [
"pk_path = os.path.join('test.pk')\n",
"vk_path = os.path.join('test.vk')\n",
"\n",
"\n",
"# setup the proof\n",
"res = ezkl.setup(\n",
" compiled_filename,\n",
" vk_path,\n",
" pk_path\n",
" )\n",
"\n",
"assert res == True\n",
"assert os.path.isfile(vk_path)\n",
"assert os.path.isfile(pk_path)\n",
"assert os.path.isfile(settings_filename)\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"witness_path = \"witness.json\"\n",
"\n",
"# generate the witness\n",
"res = await ezkl.gen_witness(\n",
" input_filename,\n",
" compiled_filename,\n",
" witness_path\n",
" )\n"
]
},
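  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Quick look at the witness: a minimal sketch. The exact schema of\n",
    "# witness.json can vary between ezkl versions, so we only print the\n",
    "# top-level keys and the quantized outputs if present.\n",
    "with open(witness_path) as f:\n",
    "    witness = json.load(f)\n",
    "\n",
    "print(list(witness.keys()))\n",
    "print(witness.get(\"outputs\"))"
   ]
  },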
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "Oog3j6Kd-Wed",
"outputId": "5839d0c1-5b43-476e-c2f8-6707de562260"
},
"outputs": [],
"source": [
"# prove the zk circuit\n",
"# GENERATE A PROOF\n",
"proof_path = os.path.join('test.pf')\n",
"\n",
"\n",
"proof = ezkl.prove(\n",
" witness_path,\n",
" compiled_filename,\n",
" pk_path,\n",
" proof_path,\n",
" \"single\"\n",
" )\n",
"\n",
"\n",
"print(\"proved\")\n",
"\n",
"assert os.path.isfile(proof_path)\n",
"\n"
]
}
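  ,
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Verify the proof locally: a minimal sketch, assuming the usual\n",
    "# ezkl.verify(proof_path, settings_path, vk_path) signature. Check your\n",
    "# installed version if the argument order differs.\n",
    "res = ezkl.verify(\n",
    "    proof_path,\n",
    "    settings_filename,\n",
    "    vk_path\n",
    ")\n",
    "\n",
    "assert res == True\n",
    "print(\"verified\")"
   ]
  }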
],
"metadata": {
"colab": {
"provenance": []
},
"kernelspec": {
"display_name": ".env",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.7"
}
},
"nbformat": 4,
"nbformat_minor": 0
}

View File

@@ -504,8 +504,8 @@
"\n",
"res = await ezkl.deploy_evm(\n",
" address_path,\n",
" 'http://127.0.0.1:3030',\n",
" sol_code_path,\n",
" 'http://127.0.0.1:3030'\n",
")\n",
"\n",
"assert res == True\n",
@@ -527,8 +527,8 @@
"\n",
"res = await ezkl.verify_evm(\n",
" addr,\n",
" proof_path,\n",
" \"http://127.0.0.1:3030\"\n",
" \"http://127.0.0.1:3030\",\n",
" proof_path\n",
")\n",
"assert res == True"
]
@@ -558,4 +558,4 @@
},
"nbformat": 4,
"nbformat_minor": 0
}
}

View File

@@ -220,15 +220,6 @@
"Check that the generated verifiers are identical for all models."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"start_anvil()"
]
},
{
"cell_type": "code",
"execution_count": null,
@@ -270,8 +261,8 @@
"\n",
"res = await ezkl.deploy_evm(\n",
" addr_path_verifier,\n",
" sol_code_path,\n",
" 'http://127.0.0.1:3030',\n",
" sol_code_path,\n",
" \"verifier/reusable\"\n",
")\n",
"\n",
@@ -297,7 +288,7 @@
"for name in names:\n",
" addr_path_vk = \"addr_vk.txt\"\n",
" sol_key_code_path = os.path.join(name, 'test_key.sol')\n",
" res = await ezkl.deploy_evm(addr_path_vk, sol_key_code_path, 'http://127.0.0.1:3030', \"vka\")\n",
" res = await ezkl.deploy_evm(addr_path_vk, 'http://127.0.0.1:3030', sol_key_code_path, \"vka\")\n",
" assert res == True\n",
"\n",
" with open(addr_path_vk, 'r') as file:\n",
@@ -307,8 +298,8 @@
" sol_code_path = os.path.join(name, 'vk.sol')\n",
" res = await ezkl.verify_evm(\n",
" addr,\n",
" proof_path,\n",
" \"http://127.0.0.1:3030\",\n",
" proof_path,\n",
" addr_vk = addr_vk\n",
" )\n",
" assert res == True"

View File

@@ -20,7 +20,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
@@ -60,7 +60,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
@@ -94,7 +94,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
@@ -134,7 +134,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
@@ -183,7 +183,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
@@ -201,7 +201,7 @@
"run_args.input_visibility = \"public\"\n",
"run_args.param_visibility = \"private\"\n",
"run_args.output_visibility = \"public\"\n",
"run_args.decomp_legs=6\n",
"run_args.decomp_legs=5\n",
"run_args.num_inner_cols = 1\n",
"run_args.variables = [(\"batch_size\", 1)]"
]
@@ -270,7 +270,7 @@
"{\n",
" \"input_data\": {\n",
" \"rpc\": \"http://localhost:3030\", // The rpc endpoint of the chain you are deploying your verifier to\n",
" \"calls\": {\n",
" \"call\": {\n",
" \"call_data\": \"1f3be514000000000000000000000000c6962004f452be9203591991d15f6b388e09e8d00000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000c000000000000000000000000000000000000000000000000000000000000000b000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000009000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000070000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000500000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000003000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000\", // The abi encoded call data to a view function that returns an array of on-chain data points we are attesting to. \n",
" \"decimals\": 0, // The number of decimal places of the large uint256 value. This is our way of representing large wei values as floating points on chain, since the evm only natively supports integer values.\n",
" \"address\": \"9A213F53334279C128C37DA962E5472eCD90554f\", // The address of the contract that we are calling to get the data. \n",
@@ -295,7 +295,6 @@
"import torch\n",
"import requests\n",
"\n",
"# This function counts the decimal places of a floating point number\n",
"def count_decimal_places(num):\n",
" num_str = str(num)\n",
" if '.' in num_str:\n",
@@ -303,69 +302,28 @@
" else:\n",
" return 0\n",
"\n",
"# setup web3 instance\n",
"w3 = Web3(HTTPProvider(RPC_URL)) \n",
"\n",
"def set_next_block_timestamp(anvil_url, timestamp):\n",
" # Send the JSON-RPC request to Anvil\n",
" payload = {\n",
" \"jsonrpc\": \"2.0\",\n",
" \"id\": 1,\n",
" \"method\": \"evm_setNextBlockTimestamp\",\n",
" \"params\": [timestamp]\n",
" }\n",
" response = requests.post(anvil_url, json=payload)\n",
" if response.status_code == 200:\n",
" print(f\"Next block timestamp set to: {timestamp}\")\n",
" else:\n",
" print(f\"Failed to set next block timestamp: {response.text}\")\n",
"\n",
"def on_chain_data(tensor):\n",
" # Step 0: Convert the tensor to a flat list\n",
" data = tensor.view(-1).tolist()\n",
"\n",
" # Step 1: Prepare the calldata\n",
" secondsAgo = [len(data) - 1 - i for i in range(len(data))]\n",
"\n",
" # Step 2: Prepare and compile the contract UniTickAttestor contract\n",
" contract_source_code = '''\n",
" // SPDX-License-Identifier: MIT\n",
" pragma solidity ^0.8.20;\n",
"\n",
" /// @title Pool state that is not stored\n",
" /// @notice Contains view functions to provide information about the pool that is computed rather than stored on the\n",
" /// blockchain. The functions here may have variable gas costs.\n",
" interface IUniswapV3PoolDerivedState {\n",
" /// @notice Returns the cumulative tick and liquidity as of each timestamp `secondsAgo` from the current block timestamp\n",
" /// @dev To get a time weighted average tick or liquidity-in-range, you must call this with two values, one representing\n",
" /// the beginning of the period and another for the end of the period. E.g., to get the last hour time-weighted average tick,\n",
" /// you must call it with secondsAgos = [3600, 0].\n",
" /// log base sqrt(1.0001) of token1 / token0. The TickMath library can be used to go from a tick value to a ratio.\n",
" /// @dev The time weighted average tick represents the geometric time weighted average price of the pool, in\n",
" /// @param secondsAgos From how long ago each cumulative tick and liquidity value should be returned\n",
" /// @return tickCumulatives Cumulative tick values as of each `secondsAgos` from the current block timestamp\n",
" /// @return secondsPerLiquidityCumulativeX128s Cumulative seconds per liquidity-in-range value as of each `secondsAgos` from the current block\n",
" /// timestamp\n",
" function observe(\n",
" uint32[] calldata secondsAgos\n",
" )\n",
" external\n",
" view\n",
" returns (\n",
" int56[] memory tickCumulatives,\n",
" uint160[] memory secondsPerLiquidityCumulativeX128s\n",
" );\n",
" ) external view returns (\n",
" int56[] memory tickCumulatives,\n",
" uint160[] memory secondsPerLiquidityCumulativeX128s\n",
" );\n",
" }\n",
"\n",
" /// @title Uniswap Wrapper around `pool.observe` that stores the parameters for fetching and then attesting to historical data\n",
" /// @notice Provides functions to integrate with V3 pool oracle\n",
" contract UniTickAttestor {\n",
" /**\n",
" * @notice Calculates time-weighted means of tick and liquidity for a given Uniswap V3 pool\n",
" * @param pool Address of the pool that we want to observe\n",
" * @param secondsAgo Number of seconds in the past from which to calculate the time-weighted means\n",
" * @return tickCumulatives The cumulative tick values as of each `secondsAgo` from the current block timestamp\n",
" */\n",
" int256[] private cachedTicks;\n",
"\n",
" function consult(\n",
" IUniswapV3PoolDerivedState pool,\n",
" uint32[] memory secondsAgo\n",
@@ -376,6 +334,21 @@
" tickCumulatives[i] = int256(_ticks[i]);\n",
" }\n",
" }\n",
"\n",
" function cache_price(\n",
" IUniswapV3PoolDerivedState pool,\n",
" uint32[] memory secondsAgo\n",
" ) public {\n",
" (int56[] memory _ticks,) = pool.observe(secondsAgo);\n",
" cachedTicks = new int256[](_ticks.length);\n",
" for (uint256 i = 0; i < _ticks.length; i++) {\n",
" cachedTicks[i] = int256(_ticks[i]);\n",
" }\n",
" }\n",
"\n",
" function readPriceCache() public view returns (int256[] memory) {\n",
" return cachedTicks;\n",
" }\n",
" }\n",
" '''\n",
"\n",
@@ -385,69 +358,44 @@
" \"settings\": {\"outputSelection\": {\"*\": {\"*\": [\"metadata\", \"evm.bytecode\", \"abi\"]}}}\n",
" })\n",
"\n",
" # Get bytecode\n",
" bytecode = compiled_sol['contracts']['UniTickAttestor.sol']['UniTickAttestor']['evm']['bytecode']['object']\n",
"\n",
" # Get ABI\n",
" # In production if you are reading from really large contracts you can just use\n",
" # a stripped down version of the ABI of the contract you are calling, containing only the view functions you will fetch data from.\n",
" abi = json.loads(compiled_sol['contracts']['UniTickAttestor.sol']['UniTickAttestor']['metadata'])['output']['abi']\n",
"\n",
" # Step 3: Deploy the contract\n",
" # Deploy contract\n",
" UniTickAttestor = w3.eth.contract(abi=abi, bytecode=bytecode)\n",
" tx_hash = UniTickAttestor.constructor().transact()\n",
" tx_receipt = w3.eth.wait_for_transaction_receipt(tx_hash)\n",
" # If you are deploying to production you can skip the 3 lines of code above and just instantiate the contract like this,\n",
" # passing the address and abi of the contract you are fetching data from.\n",
" contract = w3.eth.contract(address=tx_receipt['contractAddress'], abi=abi)\n",
"\n",
" # Step 4: Interact with the contract\n",
" call = contract.functions.consult(\n",
" # Address of the UniV3 usdc-weth pool 0.005 fee\n",
" # Step 4: Store data via cache_price transaction\n",
" tx_hash = contract.functions.cache_price(\n",
" \"0xC6962004f452bE9203591991D15f6b388e09E8D0\",\n",
" secondsAgo\n",
" ).build_transaction()\n",
" result = contract.functions.consult(\n",
" # Address of the UniV3 usdc-weth pool 0.005 fee\n",
" \"0xC6962004f452bE9203591991D15f6b388e09E8D0\",\n",
" secondsAgo\n",
" ).call()\n",
" \n",
" print(f'result: {result}')\n",
" ).transact()\n",
" tx_receipt = w3.eth.wait_for_transaction_receipt(tx_hash)\n",
"\n",
" # Step 5: Prepare calldata for readPriceCache\n",
" call = contract.functions.readPriceCache().build_transaction()\n",
" calldata = call['data'][2:]\n",
"\n",
" time_stamp = w3.eth.get_block('latest')['timestamp']\n",
" # Get stored data\n",
" result = contract.functions.readPriceCache().call()\n",
" print(f'Cached ticks: {result}')\n",
"\n",
" print(f'time_stamp: {time_stamp}')\n",
" decimals = [0] * len(data)\n",
"\n",
" # Set the next block timestamp using the fetched time_stamp\n",
" set_next_block_timestamp(RPC_URL, time_stamp)\n",
"\n",
"\n",
" # Prepare the calls_to_account object\n",
" # If you were calling view functions across multiple contracts,\n",
" # you would have multiple entries in the calls_to_account array,\n",
" # one for each contract.\n",
" call_to_account = {\n",
" 'call_data': calldata,\n",
" 'decimals': 0,\n",
" 'address': contract.address[2:], # remove the '0x' prefix\n",
" 'len': len(data),\n",
" 'decimals': decimals,\n",
" 'address': contract.address[2:],\n",
" }\n",
"\n",
" print(f'call_to_account: {call_to_account}')\n",
"\n",
" return call_to_account\n",
"\n",
"# Now let's start the Anvil process. You don't need to do this if you are deploying to a non-local chain.\n",
"start_anvil()\n",
"call_to_account = on_chain_data(x)\n",
"\n",
"# Now let's call our function, passing in the same input tensor we used to export the model 2 cells above.\n",
"calls_to_account = on_chain_data(x)\n",
"\n",
"data = dict(input_data = {'rpc': RPC_URL, 'calls': calls_to_account })\n",
"\n",
"# Serialize on-chain data into file:\n",
"data = dict(input_data = {'rpc': RPC_URL, 'call': call_to_account })\n",
"json.dump(data, open(\"input.json\", 'w'))"
]
},
@@ -614,8 +562,8 @@
"\n",
"res = await ezkl.deploy_evm(\n",
" addr_path_verifier,\n",
" 'http://127.0.0.1:3030',\n",
" sol_code_path,\n",
" 'http://127.0.0.1:3030'\n",
")\n",
"\n",
"assert res == True"
@@ -668,9 +616,9 @@
"res = await ezkl.deploy_da_evm(\n",
" addr_path_da,\n",
" input_path,\n",
" RPC_URL,\n",
" settings_path,\n",
" sol_code_path,\n",
" RPC_URL,\n",
" )\n"
]
},
@@ -692,34 +640,7 @@
"source": [
"# !export RUST_BACKTRACE=1\n",
"\n",
"calls_to_account = on_chain_data(x)\n",
"\n",
"data = dict(input_data = {'rpc': RPC_URL, 'calls': calls_to_account })\n",
"\n",
"# Serialize on-chain data into file:\n",
"json.dump(data, open(\"input.json\", 'w'))\n",
"\n",
"# setup web3 instance\n",
"w3 = Web3(HTTPProvider(RPC_URL)) \n",
"\n",
"time_stamp = w3.eth.get_block('latest')['timestamp']\n",
"\n",
"print(f'time_stamp: {time_stamp}')\n",
"\n",
"\n",
"witness_path = \"witness.json\"\n",
"\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"\n",
"res = ezkl.prove(\n",
" witness_path,\n",
" compiled_model_path,\n",
" pk_path,\n",
" proof_path,\n",
" \"single\",\n",
" )\n",
"\n",
"print(res)\n",
"# print(res)\n",
"assert os.path.isfile(proof_path)\n",
"# read the verifier address\n",
"addr_verifier = None\n",
@@ -732,8 +653,8 @@
"\n",
"res = await ezkl.verify_evm(\n",
" addr,\n",
" proof_path,\n",
" RPC_URL,\n",
" proof_path,\n",
" addr_da,\n",
")"
]

View File

@@ -666,7 +666,7 @@
},
{
"cell_type": "code",
"execution_count": 11,
"execution_count": null,
"metadata": {},
"outputs": [
{
@@ -689,8 +689,8 @@
"# await\n",
"res = await ezkl.deploy_evm(\n",
" address_path,\n",
" 'http://127.0.0.1:3030',\n",
" sol_code_path,\n",
" 'http://127.0.0.1:3030'\n",
")\n",
"\n",
"assert res == True\n",
@@ -701,7 +701,7 @@
},
{
"cell_type": "code",
"execution_count": 12,
"execution_count": null,
"metadata": {},
"outputs": [
{
@@ -722,8 +722,8 @@
"\n",
"res = await ezkl.verify_evm(\n",
" addr,\n",
" \"http://127.0.0.1:3030\",\n",
" proof_path,\n",
" \"http://127.0.0.1:3030\"\n",
")\n",
"assert res == True"
]
@@ -743,7 +743,8 @@
"provenance": []
},
"kernelspec": {
"display_name": "Python 3",
"display_name": ".env",
"language": "python",
"name": "python3"
},
"language_info": {
@@ -756,7 +757,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.2"
"version": "3.12.9"
}
},
"nbformat": 4,

View File

@@ -849,8 +849,8 @@
"\n",
"res = await ezkl.deploy_evm(\n",
" address_path,\n",
" 'http://127.0.0.1:3030',\n",
" sol_code_path,\n",
" 'http://127.0.0.1:3030'\n",
")\n",
"\n",
"assert res == True\n",
@@ -870,8 +870,8 @@
"\n",
"res = await ezkl.verify_evm(\n",
" addr,\n",
" proof_path,\n",
" \"http://127.0.0.1:3030\"\n",
" \"http://127.0.0.1:3030\",\n",
" proof_path\n",
")\n",
"assert res == True"
]
@@ -905,4 +905,4 @@
},
"nbformat": 4,
"nbformat_minor": 2
}
}

View File

@@ -246,7 +246,7 @@
"metadata": {},
"outputs": [],
"source": [
"ezkl.setup_test_evm_witness(\n",
"ezkl.setup_test_evm_data(\n",
" data_path,\n",
" compiled_model_path,\n",
" # we write the call data to the same file as the input data\n",
@@ -358,8 +358,8 @@
"\n",
"res = await ezkl.deploy_evm(\n",
" addr_path_verifier,\n",
" 'http://127.0.0.1:3030',\n",
" sol_code_path,\n",
" 'http://127.0.0.1:3030'\n",
")\n",
"\n",
"assert res == True"
@@ -374,14 +374,6 @@
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "cc888848",
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
@@ -413,9 +405,9 @@
"res = await ezkl.deploy_da_evm(\n",
" addr_path_da,\n",
" input_path,\n",
" RPC_URL,\n",
" settings_path,\n",
" sol_code_path,\n",
" RPC_URL,\n",
" )"
]
},
@@ -478,8 +470,8 @@
"\n",
"res = ezkl.verify_evm(\n",
" addr,\n",
" proof_path,\n",
" RPC_URL,\n",
" proof_path,\n",
" addr_da,\n",
")"
]
@@ -525,7 +517,7 @@
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"display_name": ".env",
"language": "python",
"name": "python3"
},
@@ -539,7 +531,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.2"
"version": "3.12.9"
}
},
"nbformat": 4,

View File

@@ -0,0 +1,106 @@
{
"input_data": [
[
8761,
7654,
8501,
2404,
6929,
8858,
5946,
3673,
4131,
3854,
8137,
8239,
9038,
6299,
1118,
9737,
208,
7954,
3691,
610,
3468,
3314,
8658,
8366,
2850,
477,
6114,
232,
4601,
7420,
5713,
2936,
6061,
2870,
8421,
177,
7107,
7382,
6115,
5487,
8502,
2559,
1875,
129,
8533,
8201,
8414,
4775,
9817,
3127,
8761,
7654,
8501,
2404,
6929,
8858,
5946,
3673,
4131,
3854,
8137,
8239,
9038,
6299,
1118,
9737,
208,
7954,
3691,
610,
3468,
3314,
8658,
8366,
2850,
477,
6114,
232,
4601,
7420,
5713,
2936,
6061,
2870,
8421,
177,
7107,
7382,
6115,
5487,
8502,
2559,
1875,
129,
8533,
8201,
8414,
4775,
9817,
3127
]
]
}

Binary file not shown.

View File

@@ -0,0 +1 @@
{"run_args":{"input_scale":7,"param_scale":7,"scale_rebase_multiplier":1,"lookup_range":[-32768,32768],"logrows":17,"num_inner_cols":2,"variables":[["batch_size",1]],"input_visibility":"Private","output_visibility":"Public","param_visibility":"Private","rebase_frac_zero_constants":false,"check_mode":"UNSAFE","commitment":"KZG","decomp_base":16384,"decomp_legs":2,"bounded_log_lookup":false,"ignore_range_check_inputs_outputs":false},"num_rows":54,"total_assignments":109,"total_const_size":4,"total_dynamic_col_size":0,"max_dynamic_input_len":0,"num_dynamic_lookups":0,"num_shuffles":0,"total_shuffle_col_size":0,"model_instance_shapes":[[1,1]],"model_output_scales":[7],"model_input_scales":[7],"module_sizes":{"polycommit":[],"poseidon":[0,[0]]},"required_lookups":[],"required_range_checks":[[-1,1],[0,16383]],"check_mode":"UNSAFE","version":"0.0.0","num_blinding_factors":null,"timestamp":1739396322131,"input_types":["F32"],"output_types":["F32"]}

File diff suppressed because one or more lines are too long

Binary file not shown.

View File

@@ -706,9 +706,9 @@ def setup_aggregate(sample_snarks:typing.Sequence[str | os.PathLike | pathlib.Pa
"""
...
def setup_test_evm_witness(data_path:str | os.PathLike | pathlib.Path,compiled_circuit_path:str | os.PathLike | pathlib.Path,test_data:str | os.PathLike | pathlib.Path,input_source:PyTestDataSource,output_source:PyTestDataSource,rpc_url:typing.Optional[str]) -> typing.Any:
def setup_test_evm_data(data_path:str | os.PathLike | pathlib.Path,compiled_circuit_path:str | os.PathLike | pathlib.Path,test_data:str | os.PathLike | pathlib.Path,input_source:PyTestDataSource,output_source:PyTestDataSource,rpc_url:typing.Optional[str]) -> typing.Any:
r"""
Setup test evm witness
Setup test evm data
Arguments
---------

View File

@@ -1,3 +1,3 @@
[toolchain]
channel = "nightly-2024-07-18"
channel = "nightly-2025-02-17"
components = ["rustfmt", "clippy"]

View File

@@ -28,6 +28,8 @@ use std::env;
#[tokio::main(flavor = "current_thread")]
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
pub async fn main() {
use log::debug;
let args = Cli::parse();
if let Some(generator) = args.generator {
@@ -42,7 +44,7 @@ pub async fn main() {
} else {
info!("Running with CPU");
}
info!(
debug!(
"command: \n {}",
&command.as_json().to_colored_json_auto().unwrap()
);

View File

@@ -1,34 +1,34 @@
use crate::Commitments;
use crate::RunArgs;
use crate::circuit::CheckMode;
use crate::circuit::InputType;
use crate::circuit::modules::Module;
use crate::circuit::modules::polycommit::PolyCommitChip;
use crate::circuit::modules::poseidon::{
spec::{PoseidonSpec, POSEIDON_RATE, POSEIDON_WIDTH},
PoseidonChip,
spec::{POSEIDON_RATE, POSEIDON_WIDTH, PoseidonSpec},
};
use crate::circuit::modules::Module;
use crate::circuit::InputType;
use crate::circuit::{CheckMode, Tolerance};
use crate::commands::*;
use crate::fieldutils::{felt_to_integer_rep, integer_rep_to_felt, IntegerRep};
use crate::fieldutils::{IntegerRep, felt_to_integer_rep, integer_rep_to_felt};
use crate::graph::TestDataSource;
use crate::graph::{
quantize_float, scale_to_multiplier, GraphCircuit, GraphSettings, Model, Visibility,
GraphCircuit, GraphSettings, Model, Visibility, quantize_float, scale_to_multiplier,
};
use crate::pfsys::evm::aggregation_kzg::AggregationCircuit;
use crate::pfsys::{
load_pk, load_vk, save_params, save_vk, srs::gen_srs as ezkl_gen_srs, srs::load_srs_prover,
ProofType, TranscriptType,
ProofType, TranscriptType, load_pk, load_vk, save_params, save_vk,
srs::gen_srs as ezkl_gen_srs, srs::load_srs_prover,
};
use crate::Commitments;
use crate::RunArgs;
use halo2_proofs::poly::ipa::commitment::IPACommitmentScheme;
use halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme;
use halo2curves::bn256::{Bn256, Fq, Fr, G1Affine, G1};
use halo2curves::bn256::{Bn256, Fq, Fr, G1, G1Affine};
use pyo3::exceptions::{PyIOError, PyRuntimeError};
use pyo3::prelude::*;
use pyo3::wrap_pyfunction;
use pyo3_log;
use pyo3_stub_gen::{
define_stub_info_gatherer, derive::gen_stub_pyclass, derive::gen_stub_pyclass_enum,
derive::gen_stub_pyfunction, TypeInfo,
TypeInfo, define_stub_info_gatherer, derive::gen_stub_pyclass, derive::gen_stub_pyclass_enum,
derive::gen_stub_pyfunction,
};
use snark_verifier::util::arithmetic::PrimeField;
use std::collections::HashSet;
@@ -155,9 +155,6 @@ impl pyo3::ToPyObject for PyG1Affine {
#[derive(Clone)]
#[gen_stub_pyclass]
struct PyRunArgs {
#[pyo3(get, set)]
/// float: The tolerance for error on model outputs
pub tolerance: f32,
#[pyo3(get, set)]
/// int: The denominator in the fixed point representation used when quantizing inputs
pub input_scale: crate::Scale,
@@ -209,6 +206,9 @@ struct PyRunArgs {
/// bool: Should the circuit use range checks for inputs and outputs (set to false if the input is a felt)
#[pyo3(get, set)]
pub ignore_range_check_inputs_outputs: bool,
/// float: epsilon used for arguments that use division
#[pyo3(get, set)]
pub epsilon: f64,
}
/// default instantiation of PyRunArgs
@@ -225,7 +225,6 @@ impl From<PyRunArgs> for RunArgs {
fn from(py_run_args: PyRunArgs) -> Self {
RunArgs {
bounded_log_lookup: py_run_args.bounded_log_lookup,
tolerance: Tolerance::from(py_run_args.tolerance),
input_scale: py_run_args.input_scale,
param_scale: py_run_args.param_scale,
num_inner_cols: py_run_args.num_inner_cols,
@@ -242,15 +241,16 @@ impl From<PyRunArgs> for RunArgs {
decomp_base: py_run_args.decomp_base,
decomp_legs: py_run_args.decomp_legs,
ignore_range_check_inputs_outputs: py_run_args.ignore_range_check_inputs_outputs,
epsilon: Some(py_run_args.epsilon),
}
}
}
impl Into<PyRunArgs> for RunArgs {
fn into(self) -> PyRunArgs {
let eps = self.get_epsilon();
PyRunArgs {
bounded_log_lookup: self.bounded_log_lookup,
tolerance: self.tolerance.val,
input_scale: self.input_scale,
param_scale: self.param_scale,
num_inner_cols: self.num_inner_cols,
@@ -267,6 +267,7 @@ impl Into<PyRunArgs> for RunArgs {
decomp_base: self.decomp_base,
decomp_legs: self.decomp_legs,
ignore_range_check_inputs_outputs: self.ignore_range_check_inputs_outputs,
epsilon: eps,
}
}
}
@@ -337,6 +338,8 @@ enum PyInputType {
Int,
///
TDim,
///
Unknown,
}
impl From<InputType> for PyInputType {
@@ -348,6 +351,7 @@ impl From<InputType> for PyInputType {
InputType::F64 => PyInputType::F64,
InputType::Int => PyInputType::Int,
InputType::TDim => PyInputType::TDim,
InputType::Unknown => PyInputType::Unknown,
}
}
}
@@ -361,6 +365,7 @@ impl From<PyInputType> for InputType {
PyInputType::F64 => InputType::F64,
PyInputType::Int => InputType::Int,
PyInputType::TDim => InputType::TDim,
PyInputType::Unknown => InputType::Unknown,
}
}
}
@@ -375,6 +380,7 @@ impl FromStr for PyInputType {
"f64" => Ok(PyInputType::F64),
"int" => Ok(PyInputType::Int),
"tdim" => Ok(PyInputType::TDim),
"unknown" => Ok(PyInputType::Unknown),
_ => Err("Invalid value for InputType".to_string()),
}
}
@@ -592,7 +598,7 @@ fn poseidon_hash(message: Vec<PyFelt>) -> PyResult<Vec<PyFelt>> {
/// Arguments
/// -------
/// message: list[str]
/// List of field elements represnted as strings
/// List of field elements represented as strings
///
/// vk_path: str
/// Path to the verification key
@@ -651,7 +657,7 @@ fn kzg_commit(
/// Arguments
/// -------
/// message: list[str]
/// List of field elements represnted as strings
/// List of field elements represented as strings
///
/// vk_path: str
/// Path to the verification key
@@ -962,6 +968,8 @@ fn gen_settings(
output=PathBuf::from(DEFAULT_SETTINGS),
variables=Vec::from([("batch_size".to_string(), 1)]),
seed=DEFAULT_SEED.parse().unwrap(),
min=None,
max=None
))]
#[gen_stub_pyfunction]
fn gen_random_data(
@@ -969,8 +977,10 @@ fn gen_random_data(
output: PathBuf,
variables: Vec<(String, usize)>,
seed: u64,
min: Option<f32>,
max: Option<f32>,
) -> Result<bool, PyErr> {
crate::execute::gen_random_data(model, output, variables, seed).map_err(|e| {
crate::execute::gen_random_data(model, output, variables, seed, min, max).map_err(|e| {
let err_str = format!("Failed to generate settings: {}", e);
PyRuntimeError::new_err(err_str)
})?;
@@ -1009,7 +1019,7 @@ fn gen_random_data(
/// bool
///
#[pyfunction(signature = (
data = PathBuf::from(DEFAULT_CALIBRATION_FILE),
data = String::from(DEFAULT_CALIBRATION_FILE),
model = PathBuf::from(DEFAULT_MODEL),
settings = PathBuf::from(DEFAULT_SETTINGS),
target = CalibrationTarget::default(), // default is "resources
@@ -1021,7 +1031,7 @@ fn gen_random_data(
#[gen_stub_pyfunction]
fn calibrate_settings(
py: Python,
data: PathBuf,
data: String,
model: PathBuf,
settings: PathBuf,
target: CalibrationTarget,
@@ -1076,7 +1086,7 @@ fn calibrate_settings(
/// Python object containing the witness values
///
#[pyfunction(signature = (
data=PathBuf::from(DEFAULT_DATA),
data=String::from(DEFAULT_DATA),
model=PathBuf::from(DEFAULT_COMPILED_CIRCUIT),
output=PathBuf::from(DEFAULT_WITNESS),
vk_path=None,
@@ -1085,7 +1095,7 @@ fn calibrate_settings(
#[gen_stub_pyfunction]
fn gen_witness(
py: Python,
data: PathBuf,
data: String,
model: PathBuf,
output: Option<PathBuf>,
vk_path: Option<PathBuf>,
@@ -1754,7 +1764,7 @@ fn create_evm_vka(
/// bool
///
#[pyfunction(signature = (
input_data=PathBuf::from(DEFAULT_DATA),
input_data=String::from(DEFAULT_DATA),
settings_path=PathBuf::from(DEFAULT_SETTINGS),
sol_code_path=PathBuf::from(DEFAULT_SOL_CODE_DA),
abi_path=PathBuf::from(DEFAULT_VERIFIER_DA_ABI),
@@ -1763,7 +1773,7 @@ fn create_evm_vka(
#[gen_stub_pyfunction]
fn create_evm_data_attestation(
py: Python,
input_data: PathBuf,
input_data: String,
settings_path: PathBuf,
sol_code_path: PathBuf,
abi_path: PathBuf,
@@ -1819,20 +1829,20 @@ fn create_evm_data_attestation(
test_data,
input_source,
output_source,
rpc_url=None,
rpc_url,
))]
#[gen_stub_pyfunction]
fn setup_test_evm_witness(
fn setup_test_evm_data(
py: Python,
data_path: PathBuf,
data_path: String,
compiled_circuit_path: PathBuf,
test_data: PathBuf,
input_source: PyTestDataSource,
output_source: PyTestDataSource,
rpc_url: Option<String>,
rpc_url: String,
) -> PyResult<Bound<'_, PyAny>> {
pyo3_async_runtimes::tokio::future_into_py(py, async move {
crate::execute::setup_test_evm_witness(
crate::execute::setup_test_evm_data(
data_path,
compiled_circuit_path,
test_data,
@@ -1842,7 +1852,7 @@ fn setup_test_evm_witness(
)
.await
.map_err(|e| {
let err_str = format!("Failed to run setup_test_evm_witness: {}", e);
let err_str = format!("Failed to run setup_test_evm_data: {}", e);
PyRuntimeError::new_err(err_str)
})?;
@@ -1853,8 +1863,8 @@ fn setup_test_evm_witness(
/// deploys the solidity verifier
#[pyfunction(signature = (
addr_path,
rpc_url,
sol_code_path=PathBuf::from(DEFAULT_SOL_CODE),
rpc_url=None,
contract_type=ContractType::default(),
optimizer_runs=DEFAULT_OPTIMIZER_RUNS.parse().unwrap(),
private_key=None,
@@ -1863,8 +1873,8 @@ fn setup_test_evm_witness(
fn deploy_evm(
py: Python,
addr_path: PathBuf,
rpc_url: String,
sol_code_path: PathBuf,
rpc_url: Option<String>,
contract_type: ContractType,
optimizer_runs: usize,
private_key: Option<String>,
@@ -1892,9 +1902,9 @@ fn deploy_evm(
#[pyfunction(signature = (
addr_path,
input_data,
rpc_url,
settings_path=PathBuf::from(DEFAULT_SETTINGS),
sol_code_path=PathBuf::from(DEFAULT_SOL_CODE_DA),
rpc_url=None,
optimizer_runs=DEFAULT_OPTIMIZER_RUNS.parse().unwrap(),
private_key=None
))]
@@ -1902,10 +1912,10 @@ fn deploy_evm(
fn deploy_da_evm(
py: Python,
addr_path: PathBuf,
input_data: PathBuf,
input_data: String,
rpc_url: String,
settings_path: PathBuf,
sol_code_path: PathBuf,
rpc_url: Option<String>,
optimizer_runs: usize,
private_key: Option<String>,
) -> PyResult<Bound<'_, PyAny>> {
@@ -1945,15 +1955,15 @@ fn deploy_da_evm(
/// does the verifier use data attestation ?
///
/// addr_vk: str
/// The addess of the separate VK contract (if the verifier key is rendered as a separate contract)
/// The address of the separate VK contract (if the verifier key is rendered as a separate contract)
/// Returns
/// -------
/// bool
///
#[pyfunction(signature = (
addr_verifier,
rpc_url,
proof_path=PathBuf::from(DEFAULT_PROOF),
rpc_url=None,
addr_da = None,
addr_vk = None,
))]
@@ -1961,8 +1971,8 @@ fn deploy_da_evm(
fn verify_evm<'a>(
py: Python<'a>,
addr_verifier: &'a str,
rpc_url: String,
proof_path: PathBuf,
rpc_url: Option<String>,
addr_da: Option<&'a str>,
addr_vk: Option<&'a str>,
) -> PyResult<Bound<'a, PyAny>> {
@@ -2107,7 +2117,7 @@ fn ezkl(m: &Bound<'_, PyModule>) -> PyResult<()> {
m.add_function(wrap_pyfunction!(deploy_evm, m)?)?;
m.add_function(wrap_pyfunction!(deploy_da_evm, m)?)?;
m.add_function(wrap_pyfunction!(verify_evm, m)?)?;
m.add_function(wrap_pyfunction!(setup_test_evm_witness, m)?)?;
m.add_function(wrap_pyfunction!(setup_test_evm_data, m)?)?;
m.add_function(wrap_pyfunction!(create_evm_verifier_aggr, m)?)?;
m.add_function(wrap_pyfunction!(create_evm_data_attestation, m)?)?;
m.add_function(wrap_pyfunction!(encode_evm_calldata, m)?)?;

View File

@@ -1,7 +1,7 @@
/*
An easy-to-use implementation of the Poseidon Hash in the form of a Halo2 Chip. While the Poseidon Hash function
is already implemented in halo2_gadgets, there is no wrapper chip that makes it easy to use in other circuits.
Thanks to https://github.com/summa-dev/summa-solvency/blob/master/src/chips/poseidon/hash.rs for the inspiration (and also helping us understand how to use this).
Thanks to https://github.com/summa-dev/summa-solvency/blob/master/zk_prover/src/chips/poseidon/hash.rs for the inspiration (and also helping us understand how to use this).
*/
use std::collections::HashMap;

View File

@@ -1,7 +1,7 @@
/*
An easy-to-use implementation of the Poseidon Hash in the form of a Halo2 Chip. While the Poseidon Hash function
is already implemented in halo2_gadgets, there is no wrapper chip that makes it easy to use in other circuits.
Thanks to https://github.com/summa-dev/summa-solvency/blob/master/src/chips/poseidon/hash.rs for the inspiration (and also helping us understand how to use this).
Thanks to https://github.com/summa-dev/summa-solvency/blob/master/zk_prover/src/chips/poseidon/hash.rs for the inspiration (and also helping us understand how to use this).
*/
pub mod poseidon_params;

View File

@@ -21,7 +21,10 @@ pub enum BaseOp {
/// Matches a [BaseOp] to an operation over inputs
impl BaseOp {
/// forward func
/// forward func for non-accumulating operations
/// # Panics
/// Panics if called on an accumulating operation
/// # Examples
pub fn nonaccum_f<
T: TensorType + Add<Output = T> + Sub<Output = T> + Mul<Output = T> + Neg<Output = T>,
>(
@@ -37,7 +40,9 @@ impl BaseOp {
}
}
/// forward func
/// forward func for accumulating operations
/// # Panics
/// Panics if called on a non-accumulating operation
pub fn accum_f<
T: TensorType + Add<Output = T> + Sub<Output = T> + Mul<Output = T> + Neg<Output = T>,
>(

View File

@@ -20,7 +20,6 @@ use crate::{
circuit::{
ops::base::BaseOp,
table::{Range, RangeCheck, Table},
utils,
},
tensor::{Tensor, TensorType, ValTensor, VarTensor},
};
@@ -85,55 +84,6 @@ impl CheckMode {
}
}
#[allow(missing_docs)]
/// An enum representing the tolerance we can accept for the accumulated arguments, either absolute or percentage
#[derive(Clone, Default, Debug, PartialEq, PartialOrd, Serialize, Deserialize, Copy)]
pub struct Tolerance {
pub val: f32,
pub scale: utils::F32,
}
impl std::fmt::Display for Tolerance {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{:.2}", self.val)
}
}
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
impl ToFlags for Tolerance {
/// Convert the struct to a subcommand string
fn to_flags(&self) -> Vec<String> {
vec![format!("{}", self)]
}
}
impl FromStr for Tolerance {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
if let Ok(val) = s.parse::<f32>() {
Ok(Tolerance {
val,
scale: utils::F32(1.0),
})
} else {
Err(
"Invalid tolerance value provided. It should expressed as a percentage (f32)."
.to_string(),
)
}
}
}
impl From<f32> for Tolerance {
fn from(value: f32) -> Self {
Tolerance {
val: value,
scale: utils::F32(1.0),
}
}
}
#[cfg(feature = "python-bindings")]
/// Converts CheckMode into a PyObject (Required for CheckMode to be compatible with Python)
impl IntoPy<PyObject> for CheckMode {
@@ -158,29 +108,6 @@ impl<'source> FromPyObject<'source> for CheckMode {
}
}
#[cfg(feature = "python-bindings")]
/// Converts Tolerance into a PyObject (Required for Tolerance to be compatible with Python)
impl IntoPy<PyObject> for Tolerance {
fn into_py(self, py: Python) -> PyObject {
(self.val, self.scale.0).to_object(py)
}
}
#[cfg(feature = "python-bindings")]
/// Obtains Tolerance from PyObject (Required for Tolerance to be compatible with Python)
impl<'source> FromPyObject<'source> for Tolerance {
fn extract_bound(ob: &pyo3::Bound<'source, pyo3::PyAny>) -> PyResult<Self> {
if let Ok((val, scale)) = <(f32, f32)>::extract_bound(ob) {
Ok(Tolerance {
val,
scale: utils::F32(scale),
})
} else {
Err(PyValueError::new_err("Invalid tolerance value provided. "))
}
}
}
/// A struct representing the selectors for the dynamic lookup tables
#[derive(Clone, Debug, Default)]
pub struct DynamicLookups {

View File

@@ -1,9 +1,9 @@
use super::*;
use crate::{
circuit::{layouts, utils, Tolerance},
fieldutils::{integer_rep_to_felt, IntegerRep},
circuit::{layouts, utils},
fieldutils::{IntegerRep, integer_rep_to_felt},
graph::multiplier_to_scale,
tensor::{self, Tensor, TensorType, ValTensor},
tensor::{self, DataFormat, Tensor, TensorType, ValTensor},
};
use halo2curves::ff::PrimeField;
use serde::{Deserialize, Serialize};
@@ -15,10 +15,12 @@ use serde::{Deserialize, Serialize};
pub enum HybridOp {
Ln {
scale: utils::F32,
eps: f64,
},
Rsqrt {
input_scale: utils::F32,
output_scale: utils::F32,
eps: f64,
},
Sqrt {
scale: utils::F32,
@@ -42,6 +44,7 @@ pub enum HybridOp {
Recip {
input_scale: utils::F32,
output_scale: utils::F32,
eps: f64,
},
Div {
denom: utils::F32,
@@ -57,11 +60,13 @@ pub enum HybridOp {
stride: Vec<usize>,
kernel_shape: Vec<usize>,
normalized: bool,
data_format: DataFormat,
},
MaxPool {
padding: Vec<(usize, usize)>,
stride: Vec<usize>,
pool_dims: Vec<usize>,
data_format: DataFormat,
},
ReduceMin {
axes: Vec<usize>,
@@ -75,9 +80,9 @@ pub enum HybridOp {
input_scale: utils::F32,
output_scale: utils::F32,
axes: Vec<usize>,
eps: f64,
},
Output {
tol: Tolerance,
decomp: bool,
},
Greater,
@@ -127,12 +132,13 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Op<F> for Hybrid
HybridOp::Rsqrt {
input_scale,
output_scale,
eps,
} => format!(
"RSQRT (input_scale={}, output_scale={})",
input_scale, output_scale
"RSQRT (input_scale={}, output_scale={}, eps={})",
input_scale, output_scale, eps
),
HybridOp::Sqrt { scale } => format!("SQRT(scale={})", scale),
HybridOp::Ln { scale } => format!("LN(scale={})", scale),
HybridOp::Ln { scale, eps } => format!("LN(scale={}, eps={})", scale, eps),
HybridOp::RoundHalfToEven { scale, legs } => {
format!("ROUND_HALF_TO_EVEN(scale={}, legs={})", scale, legs)
}
@@ -145,9 +151,10 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Op<F> for Hybrid
HybridOp::Recip {
input_scale,
output_scale,
eps,
} => format!(
"RECIP (input_scale={}, output_scale={})",
input_scale, output_scale
"RECIP (input_scale={}, output_scale={}, eps={})",
input_scale, output_scale, eps
),
HybridOp::Div { denom } => format!("DIV (denom={})", denom),
HybridOp::SumPool {
@@ -155,9 +162,10 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Op<F> for Hybrid
stride,
kernel_shape,
normalized,
data_format,
} => format!(
"SUMPOOL (padding={:?}, stride={:?}, kernel_shape={:?}, normalized={})",
padding, stride, kernel_shape, normalized
"SUMPOOL (padding={:?}, stride={:?}, kernel_shape={:?}, normalized={}, data_format={:?})",
padding, stride, kernel_shape, normalized, data_format
),
HybridOp::ReduceMax { axes } => format!("REDUCEMAX (axes={:?})", axes),
HybridOp::ReduceArgMax { dim } => format!("REDUCEARGMAX (dim={})", dim),
@@ -165,9 +173,10 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Op<F> for Hybrid
padding,
stride,
pool_dims,
data_format,
} => format!(
"MaxPool (padding={:?}, stride={:?}, pool_dims={:?})",
padding, stride, pool_dims
"MaxPool (padding={:?}, stride={:?}, pool_dims={:?}, data_format={:?})",
padding, stride, pool_dims, data_format
),
HybridOp::ReduceMin { axes } => format!("REDUCEMIN (axes={:?})", axes),
HybridOp::ReduceArgMin { dim } => format!("REDUCEARGMIN (dim={})", dim),
@@ -175,14 +184,15 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Op<F> for Hybrid
input_scale,
output_scale,
axes,
eps,
} => {
format!(
"SOFTMAX (input_scale={}, output_scale={}, axes={:?})",
input_scale, output_scale, axes
"SOFTMAX (input_scale={}, output_scale={}, axes={:?}, eps={})",
input_scale, output_scale, axes, eps
)
}
HybridOp::Output { tol, decomp } => {
format!("OUTPUT (tol={:?}, decomp={})", tol, decomp)
HybridOp::Output { decomp } => {
format!("OUTPUT (decomp={})", decomp)
}
HybridOp::Greater => "GREATER".to_string(),
HybridOp::GreaterEqual => "GREATEREQUAL".to_string(),
@@ -209,17 +219,21 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Op<F> for Hybrid
HybridOp::Rsqrt {
input_scale,
output_scale,
eps,
} => layouts::rsqrt(
config,
region,
values[..].try_into()?,
*input_scale,
*output_scale,
*eps,
)?,
HybridOp::Sqrt { scale } => {
layouts::sqrt(config, region, values[..].try_into()?, *scale)?
}
HybridOp::Ln { scale } => layouts::ln(config, region, values[..].try_into()?, *scale)?,
HybridOp::Ln { scale, eps } => {
layouts::ln(config, region, values[..].try_into()?, *scale, *eps)?
}
HybridOp::RoundHalfToEven { scale, legs } => {
layouts::round_half_to_even(config, region, values[..].try_into()?, *scale, *legs)?
}
@@ -239,6 +253,7 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Op<F> for Hybrid
stride,
kernel_shape,
normalized,
data_format,
} => layouts::sumpool(
config,
region,
@@ -247,16 +262,19 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Op<F> for Hybrid
stride,
kernel_shape,
*normalized,
*data_format,
)?,
HybridOp::Recip {
input_scale,
output_scale,
eps,
} => layouts::recip(
config,
region,
values[..].try_into()?,
integer_rep_to_felt(input_scale.0 as IntegerRep),
integer_rep_to_felt(output_scale.0 as IntegerRep),
*eps,
)?,
HybridOp::Div { denom, .. } => {
if denom.0.fract() == 0.0 {
@@ -287,6 +305,7 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Op<F> for Hybrid
padding,
stride,
pool_dims,
data_format,
} => layouts::max_pool(
config,
region,
@@ -294,6 +313,7 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Op<F> for Hybrid
padding,
stride,
pool_dims,
*data_format,
)?,
HybridOp::ReduceMax { axes } => {
layouts::max_axes(config, region, values[..].try_into()?, axes)?
@@ -311,6 +331,7 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Op<F> for Hybrid
input_scale,
output_scale,
axes,
eps,
} => layouts::softmax_axes(
config,
region,
@@ -318,15 +339,11 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Op<F> for Hybrid
*input_scale,
*output_scale,
axes,
*eps,
)?,
HybridOp::Output { tol, decomp } => layouts::output(
config,
region,
values[..].try_into()?,
tol.scale,
tol.val,
*decomp,
)?,
HybridOp::Output { decomp } => {
layouts::output(config, region, values[..].try_into()?, *decomp)?
}
HybridOp::Greater => layouts::greater(config, region, values[..].try_into()?)?,
HybridOp::GreaterEqual => {
layouts::greater_equal(config, region, values[..].try_into()?)?
@@ -363,6 +380,7 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Op<F> for Hybrid
} => multiplier_to_scale((output_scale.0 * input_scale.0) as f64),
HybridOp::Ln {
scale: output_scale,
eps: _,
} => 4 * multiplier_to_scale(output_scale.0 as f64),
_ => in_scales[0],
};

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,8 @@
use std::any::Any;
use serde::{Deserialize, Serialize};
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use tract_onnx::prelude::DatumType;
use crate::{
graph::quantize_tensor,
@@ -96,6 +98,8 @@ pub enum InputType {
Int,
///
TDim,
///
Unknown,
}
impl InputType {
@@ -132,6 +136,7 @@ impl InputType {
let int_input = input.clone().to_i64().unwrap();
*input = T::from_i64(int_input).unwrap();
}
InputType::Unknown => {}
}
}
}
@@ -152,6 +157,30 @@ impl std::str::FromStr for InputType {
}
}
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
impl From<DatumType> for InputType {
/// # Panics
/// Panics if the datum type is not supported
fn from(datum_type: DatumType) -> Self {
match datum_type {
DatumType::Bool => InputType::Bool,
DatumType::F16 => InputType::F16,
DatumType::F32 => InputType::F32,
DatumType::F64 => InputType::F64,
DatumType::I8 => InputType::Int,
DatumType::I16 => InputType::Int,
DatumType::I32 => InputType::Int,
DatumType::I64 => InputType::Int,
DatumType::U8 => InputType::Int,
DatumType::U16 => InputType::Int,
DatumType::U32 => InputType::Int,
DatumType::U64 => InputType::Int,
DatumType::TDim => InputType::TDim,
_ => unimplemented!(),
}
}
}
///
#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize)]
pub struct Input {
@@ -290,13 +319,8 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Constant<F> {
}
impl<
F: PrimeField
+ TensorType
+ PartialOrd
+ std::hash::Hash
+ Serialize
+ for<'de> Deserialize<'de>,
> Op<F> for Constant<F>
F: PrimeField + TensorType + PartialOrd + std::hash::Hash + Serialize + for<'de> Deserialize<'de>,
> Op<F> for Constant<F>
{
fn as_any(&self) -> &dyn Any {
self

View File

@@ -4,6 +4,7 @@ use crate::{
utils::{self, F32},
},
tensor::{self, Tensor, TensorError},
tensor::{DataFormat, KernelFormat},
};
use super::{base::BaseOp, *};
@@ -43,10 +44,12 @@ pub enum PolyOp {
padding: Vec<(usize, usize)>,
stride: Vec<usize>,
group: usize,
data_format: DataFormat,
kernel_format: KernelFormat,
},
Downsample {
axis: usize,
stride: usize,
stride: isize,
modulo: usize,
},
DeConv {
@@ -54,6 +57,8 @@ pub enum PolyOp {
output_padding: Vec<usize>,
stride: Vec<usize>,
group: usize,
data_format: DataFormat,
kernel_format: KernelFormat,
},
Add,
Sub,
@@ -103,13 +108,8 @@ pub enum PolyOp {
}
impl<
F: PrimeField
+ TensorType
+ PartialOrd
+ std::hash::Hash
+ Serialize
+ for<'de> Deserialize<'de>,
> Op<F> for PolyOp
F: PrimeField + TensorType + PartialOrd + std::hash::Hash + Serialize + for<'de> Deserialize<'de>,
> Op<F> for PolyOp
{
/// Returns a reference to the Any trait.
fn as_any(&self) -> &dyn Any {
@@ -165,10 +165,12 @@ impl<
stride,
padding,
group,
data_format,
kernel_format,
} => {
format!(
"CONV (stride={:?}, padding={:?}, group={})",
stride, padding, group
"CONV (stride={:?}, padding={:?}, group={}, data_format={:?}, kernel_format={:?})",
stride, padding, group, data_format, kernel_format
)
}
PolyOp::DeConv {
@@ -176,10 +178,12 @@ impl<
padding,
output_padding,
group,
data_format,
kernel_format,
} => {
format!(
"DECONV (stride={:?}, padding={:?}, output_padding={:?}, group={})",
stride, padding, output_padding, group
"DECONV (stride={:?}, padding={:?}, output_padding={:?}, group={}, data_format={:?}, kernel_format={:?})",
stride, padding, output_padding, group, data_format, kernel_format
)
}
PolyOp::Concat { axis } => format!("CONCAT (axis={})", axis),
@@ -242,6 +246,8 @@ impl<
padding,
stride,
group,
data_format,
kernel_format,
} => layouts::conv(
config,
region,
@@ -249,6 +255,8 @@ impl<
padding,
stride,
*group,
*data_format,
*kernel_format,
)?,
PolyOp::GatherElements { dim, constant_idx } => {
if let Some(idx) = constant_idx {
@@ -309,6 +317,8 @@ impl<
output_padding,
stride,
group,
data_format,
kernel_format,
} => layouts::deconv(
config,
region,
@@ -317,6 +327,8 @@ impl<
output_padding,
stride,
*group,
*data_format,
*kernel_format,
)?,
PolyOp::Add => layouts::pairwise(config, region, values[..].try_into()?, BaseOp::Add)?,
PolyOp::Sub => layouts::pairwise(config, region, values[..].try_into()?, BaseOp::Sub)?,

View File

@@ -1,5 +1,6 @@
use crate::circuit::ops::poly::PolyOp;
use crate::circuit::*;
use crate::tensor::{DataFormat, KernelFormat};
use crate::tensor::{Tensor, TensorType, ValTensor, VarTensor};
use halo2_proofs::{
circuit::{Layouter, SimpleFloorPlanner, Value},
@@ -1065,6 +1066,8 @@ mod conv {
padding: vec![(1, 1); 2],
stride: vec![2; 2],
group: 1,
data_format: DataFormat::default(),
kernel_format: KernelFormat::default(),
}),
)
.map_err(|_| Error::Synthesis)
@@ -1220,6 +1223,8 @@ mod conv_col_ultra_overflow {
padding: vec![(1, 1); 2],
stride: vec![2; 2],
group: 1,
data_format: DataFormat::default(),
kernel_format: KernelFormat::default(),
}),
)
.map_err(|_| Error::Synthesis)
@@ -1377,6 +1382,8 @@ mod conv_relu_col_ultra_overflow {
padding: vec![(1, 1); 2],
stride: vec![2; 2],
group: 1,
data_format: DataFormat::default(),
kernel_format: KernelFormat::default(),
}),
)
.map_err(|_| Error::Synthesis);

View File

@@ -1,6 +1,6 @@
use alloy::primitives::Address as H160;
use clap::{Command, Parser, Subcommand};
use clap_complete::{generate, Generator, Shell};
use clap_complete::{Generator, Shell, generate};
#[cfg(feature = "python-bindings")]
use pyo3::{conversion::FromPyObject, exceptions::PyValueError, prelude::*};
use serde::{Deserialize, Serialize};
@@ -8,7 +8,7 @@ use std::path::PathBuf;
use std::str::FromStr;
use tosubcommand::{ToFlags, ToSubcommand};
use crate::{pfsys::ProofType, Commitments, RunArgs};
use crate::{Commitments, RunArgs, pfsys::ProofType};
use crate::circuit::CheckMode;
use crate::graph::TestDataSource;
@@ -360,8 +360,13 @@ pub fn get_styles() -> clap::builder::Styles {
}
/// Print completions for the given generator
pub fn print_completions<G: Generator>(gen: G, cmd: &mut Command) {
generate(gen, cmd, cmd.get_name().to_string(), &mut std::io::stdout());
pub fn print_completions<G: Generator>(r#gen: G, cmd: &mut Command) {
generate(
r#gen,
cmd,
cmd.get_name().to_string(),
&mut std::io::stdout(),
);
}
#[allow(missing_docs)]
@@ -377,6 +382,44 @@ pub struct Cli {
pub command: Option<Commands>,
}
/// Custom parser for data field that handles both direct JSON strings and file paths with '@' prefix
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, PartialOrd)]
pub struct DataField(pub String);
impl FromStr for DataField {
type Err = String;
fn from_str(s: &str) -> Result<Self, Self::Err> {
// Check if the input starts with '@'
if s.starts_with('@') {
// Extract the file path (remove the '@' prefix)
let file_path = &s[1..];
// Read the file content
let content = std::fs::read_to_string(file_path)
.map_err(|e| format!("Failed to read data file '{}': {}", file_path, e))?;
// Return the file content as the data field value
Ok(DataField(content))
} else {
// Use the input string directly
Ok(DataField(s.to_string()))
}
}
}
impl ToFlags for DataField {
fn to_flags(&self) -> Vec<String> {
vec![self.0.clone()]
}
}
impl std::fmt::Display for DataField {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.0)
}
}
#[allow(missing_docs)]
#[derive(Debug, Subcommand, Clone, Deserialize, Serialize, PartialEq, PartialOrd, ToSubcommand)]
pub enum Commands {
@@ -395,9 +438,9 @@ pub enum Commands {
/// Generates the witness from an input file.
GenWitness {
/// The path to the .json data file
#[arg(short = 'D', long, default_value = DEFAULT_DATA, value_hint = clap::ValueHint::FilePath)]
data: Option<PathBuf>,
/// The path to the .json data file (with @ prefix) or a raw data string of the form '{"input_data": [[1, 2, 3]]}'
#[arg(short = 'D', long, default_value = DEFAULT_DATA, value_parser = DataField::from_str)]
data: Option<DataField>,
/// The path to the compiled model file (generated using the compile-circuit command)
#[arg(short = 'M', long, default_value = DEFAULT_COMPILED_CIRCUIT, value_hint = clap::ValueHint::FilePath)]
compiled_circuit: Option<PathBuf>,
@@ -438,12 +481,18 @@ pub enum Commands {
/// random seed for reproducibility (optional)
#[arg(long, value_hint = clap::ValueHint::Other, default_value = DEFAULT_SEED)]
seed: u64,
/// min value for random data
#[arg(long, value_hint = clap::ValueHint::Other)]
min: Option<f32>,
/// max value for random data
#[arg(long, value_hint = clap::ValueHint::Other)]
max: Option<f32>,
},
/// Calibrates the proving scale, lookup bits and logrows from a circuit settings file.
CalibrateSettings {
/// The path to the .json calibration data file.
#[arg(short = 'D', long, default_value = DEFAULT_CALIBRATION_FILE, value_hint = clap::ValueHint::FilePath)]
data: Option<PathBuf>,
data: Option<String>,
/// The path to the .onnx model file
#[arg(short = 'M', long, default_value = DEFAULT_MODEL, value_hint = clap::ValueHint::FilePath)]
model: Option<PathBuf>,
@@ -627,7 +676,7 @@ pub enum Commands {
SetupTestEvmData {
/// The path to the .json data file, which should include both the network input (possibly private) and the network output (public input to the proof)
#[arg(short = 'D', long, value_hint = clap::ValueHint::FilePath)]
data: Option<PathBuf>,
data: Option<String>,
/// The path to the compiled model file (generated using the compile-circuit command)
#[arg(short = 'M', long, value_hint = clap::ValueHint::FilePath)]
compiled_circuit: Option<PathBuf>,
@@ -636,9 +685,9 @@ pub enum Commands {
/// Should include both the network input (possibly private) and the network output (public input to the proof)
#[arg(short = 'T', long, value_hint = clap::ValueHint::FilePath)]
test_data: PathBuf,
/// RPC URL for an Ethereum node, if None will use Anvil but WON'T persist state
/// RPC URL for an Ethereum node
#[arg(short = 'U', long, value_hint = clap::ValueHint::Url)]
rpc_url: Option<String>,
rpc_url: String,
/// where the input data come from
#[arg(long, default_value = "on-chain", value_hint = clap::ValueHint::Other)]
input_source: TestDataSource,
@@ -646,19 +695,6 @@ pub enum Commands {
#[arg(long, default_value = "on-chain", value_hint = clap::ValueHint::Other)]
output_source: TestDataSource,
},
/// The Data Attestation Verifier contract stores the account calls to fetch data to feed into ezkl. This call data can be updated by an admin account. This tests that admin account is able to update this call data.
#[command(arg_required_else_help = true)]
TestUpdateAccountCalls {
/// The path to the verifier contract's address
#[arg(long, value_hint = clap::ValueHint::Other)]
addr: H160Flag,
/// The path to the .json data file.
#[arg(short = 'D', long, value_hint = clap::ValueHint::FilePath)]
data: Option<PathBuf>,
/// RPC URL for an Ethereum node, if None will use Anvil but WON'T persist state
#[arg(short = 'U', long, value_hint = clap::ValueHint::Url)]
rpc_url: Option<String>,
},
/// Swaps the positions in the transcript that correspond to commitments
SwapProofCommitments {
/// The path to the proof file
@@ -736,7 +772,7 @@ pub enum Commands {
},
/// Creates an Evm verifier artifact for a single proof to be used by the reusable verifier
#[command(name = "create-evm-vka")]
CreateEvmVKArtifact {
CreateEvmVka {
/// The path to SRS, if None will use ~/.ezkl/srs/kzg{logrows}.srs
#[arg(long, value_hint = clap::ValueHint::FilePath)]
srs_path: Option<PathBuf>,
@@ -755,7 +791,7 @@ pub enum Commands {
},
/// Creates an Evm verifier that attests to on-chain inputs for a single proof
#[command(name = "create-evm-da")]
CreateEvmDataAttestation {
CreateEvmDa {
/// The path to load circuit settings .json file from (generated using the gen-settings command)
#[arg(short = 'S', long, default_value = DEFAULT_SETTINGS, value_hint = clap::ValueHint::FilePath)]
settings_path: Option<PathBuf>,
@@ -771,7 +807,7 @@ pub enum Commands {
/// view functions that return the data that the network
/// ingests as inputs.
#[arg(short = 'D', long, default_value = DEFAULT_DATA, value_hint = clap::ValueHint::FilePath)]
data: Option<PathBuf>,
data: Option<String>,
/// The path to the witness file. This is needed for proof swapping for kzg commitments.
#[arg(short = 'W', long, default_value = DEFAULT_WITNESS, value_hint = clap::ValueHint::FilePath)]
witness: Option<PathBuf>,
@@ -846,9 +882,9 @@ pub enum Commands {
/// The path to the Solidity code (generated using the create-evm-verifier command)
#[arg(long, default_value = DEFAULT_SOL_CODE, value_hint = clap::ValueHint::FilePath)]
sol_code_path: Option<PathBuf>,
/// RPC URL for an Ethereum node, if None will use Anvil but WON'T persist state
#[arg(short = 'U', long, value_hint = clap::ValueHint::Url)]
rpc_url: Option<String>,
/// RPC URL for an Ethereum node
#[arg(short = 'U', long, default_value = DEFAULT_CONTRACT_ADDRESS, value_hint = clap::ValueHint::Url)]
rpc_url: String,
#[arg(long, default_value = DEFAULT_CONTRACT_ADDRESS, value_hint = clap::ValueHint::Other)]
/// The path to output the contract address
addr_path: Option<PathBuf>,
@@ -864,19 +900,19 @@ pub enum Commands {
},
/// Deploys an evm verifier that allows for data attestation
#[command(name = "deploy-evm-da")]
DeployEvmDataAttestation {
DeployEvmDa {
/// The path to the .json data file, which should include both the network input (possibly private) and the network output (public input to the proof)
#[arg(short = 'D', long, default_value = DEFAULT_DATA, value_hint = clap::ValueHint::FilePath)]
data: Option<PathBuf>,
data: Option<String>,
/// The path to load circuit settings .json file from (generated using the gen-settings command)
#[arg(long, default_value = DEFAULT_SETTINGS, value_hint = clap::ValueHint::FilePath)]
settings_path: Option<PathBuf>,
/// The path to the Solidity code
#[arg(long, default_value = DEFAULT_SOL_CODE_DA, value_hint = clap::ValueHint::FilePath)]
sol_code_path: Option<PathBuf>,
/// RPC URL for an Ethereum node, if None will use Anvil but WON'T persist state
/// RPC URL for an Ethereum node
#[arg(short = 'U', long, value_hint = clap::ValueHint::Url)]
rpc_url: Option<String>,
rpc_url: String,
#[arg(long, default_value = DEFAULT_CONTRACT_ADDRESS_DA, value_hint = clap::ValueHint::FilePath)]
/// The path to output the contract address
addr_path: Option<PathBuf>,
@@ -896,9 +932,9 @@ pub enum Commands {
/// The path to the verifier contract's address
#[arg(long, default_value = DEFAULT_CONTRACT_ADDRESS, value_hint = clap::ValueHint::Other)]
addr_verifier: H160Flag,
/// RPC URL for an Ethereum node, if None will use Anvil but WON'T persist state
/// RPC URL for an Ethereum node
#[arg(short = 'U', long, value_hint = clap::ValueHint::Url)]
rpc_url: Option<String>,
rpc_url: String,
/// Does the verifier use data attestation?
#[arg(long, value_hint = clap::ValueHint::Other)]
addr_da: Option<H160Flag>,
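
A hedged call-site sketch of this breaking change (the signature is taken from the verify_evm hunk further down; the URL value is illustrative): callers now pass a concrete RPC URL instead of relying on the implicit Anvil fallback.

// Before: rpc_url: Option<String>, where None meant "spin up Anvil".
// After: the URL is mandatory and passed through as &rpc_url.
verify_evm(
    proof_path,
    addr_verifier,
    "http://127.0.0.1:8545".to_string(), // was Option<String>, now String
    None, // addr_da: no data-attestation contract
    None, // addr_vk: no separately deployed VK artifact
)
.await?;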

File diff suppressed because one or more lines are too long

View File

@@ -1,32 +1,30 @@
use crate::circuit::region::RegionSettings;
use crate::EZKL_BUF_CAPACITY;
use crate::circuit::CheckMode;
use crate::circuit::region::RegionSettings;
use crate::commands::CalibrationTarget;
use crate::eth::{
deploy_contract_via_solidity, deploy_da_verifier_via_solidity, fix_da_multi_sol,
fix_da_single_sol,
};
use crate::eth::{deploy_contract_via_solidity, deploy_da_verifier_via_solidity, fix_da_sol};
#[allow(unused_imports)]
use crate::eth::{get_contract_artifacts, verify_proof_via_solidity};
use crate::graph::input::{Calls, GraphData};
use crate::graph::input::GraphData;
use crate::graph::{GraphCircuit, GraphSettings, GraphWitness, Model};
use crate::graph::{TestDataSource, TestSources};
use crate::pfsys::evm::aggregation_kzg::{AggregationCircuit, PoseidonTranscript};
use crate::pfsys::{
create_keys, load_pk, load_vk, save_params, save_pk, Snark, StrategyType, TranscriptType,
ProofSplitCommit, create_proof_circuit, swap_proof_commitments_polycommit, verify_proof_circuit,
};
use crate::pfsys::{
create_proof_circuit, swap_proof_commitments_polycommit, verify_proof_circuit, ProofSplitCommit,
Snark, StrategyType, TranscriptType, create_keys, load_pk, load_vk, save_params, save_pk,
};
use crate::pfsys::{save_vk, srs::*};
use crate::tensor::TensorError;
use crate::EZKL_BUF_CAPACITY;
use crate::{commands::*, EZKLError};
use crate::{Commitments, RunArgs};
use crate::{EZKLError, commands::*};
use colored::Colorize;
#[cfg(unix)]
use gag::Gag;
use halo2_proofs::dev::VerifyFailure;
use halo2_proofs::plonk::{self, Circuit};
use halo2_proofs::poly::VerificationStrategy;
use halo2_proofs::poly::commitment::{CommitmentScheme, Params};
use halo2_proofs::poly::commitment::{ParamsProver, Verifier};
use halo2_proofs::poly::ipa::commitment::{IPACommitmentScheme, ParamsIPA};
@@ -39,7 +37,6 @@ use halo2_proofs::poly::kzg::strategy::AccumulatorStrategy as KZGAccumulatorStra
use halo2_proofs::poly::kzg::{
commitment::ParamsKZG, strategy::SingleStrategy as KZGSingleStrategy,
};
use halo2_proofs::poly::VerificationStrategy;
use halo2_proofs::transcript::{EncodedChallenge, TranscriptReadBuffer};
use halo2_solidity_verifier;
use halo2curves::bn256::{Bn256, Fr, G1Affine};
@@ -48,14 +45,15 @@ use halo2curves::serde::SerdeObject;
use indicatif::{ProgressBar, ProgressStyle};
use instant::Instant;
use itertools::Itertools;
use lazy_static::lazy_static;
use log::debug;
use log::{info, trace, warn};
use serde::de::DeserializeOwned;
use serde::Serialize;
use serde::de::DeserializeOwned;
use snark_verifier::loader::native::NativeLoader;
use snark_verifier::system::halo2::Config;
use snark_verifier::system::halo2::compile;
use snark_verifier::system::halo2::transcript::evm::EvmTranscript;
use snark_verifier::system::halo2::Config;
use std::fs::File;
use std::io::BufWriter;
use std::io::{Cursor, Write};
@@ -68,8 +66,6 @@ use thiserror::Error;
use tract_onnx::prelude::IntoTensor;
use tract_onnx::prelude::Tensor as TractTensor;
use lazy_static::lazy_static;
lazy_static! {
#[derive(Debug)]
/// The path to the ezkl related data.
@@ -141,11 +137,15 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
data,
variables,
seed,
min,
max,
} => gen_random_data(
model.unwrap_or(DEFAULT_MODEL.into()),
data.unwrap_or(DEFAULT_DATA.into()),
variables,
seed,
min,
max,
),
Commands::CalibrateSettings {
model,
@@ -176,7 +176,7 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
srs_path,
} => gen_witness(
compiled_circuit.unwrap_or(DEFAULT_COMPILED_CIRCUIT.into()),
data.unwrap_or(DEFAULT_DATA.into()),
data.unwrap_or(DataField(DEFAULT_DATA.into())).to_string(),
Some(output.unwrap_or(DEFAULT_WITNESS.into())),
vk_path,
srs_path,
@@ -215,8 +215,7 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
addr_vk,
)
.map(|e| serde_json::to_string(&e).unwrap()),
Commands::CreateEvmVKArtifact {
Commands::CreateEvmVka {
vk_path,
srs_path,
settings_path,
@@ -232,7 +231,7 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
)
.await
}
Commands::CreateEvmDataAttestation {
Commands::CreateEvmDa {
settings_path,
sol_code_path,
abi_path,
@@ -301,7 +300,7 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
input_source,
output_source,
} => {
setup_test_evm_witness(
setup_test_evm_data(
data.unwrap_or(DEFAULT_DATA.into()),
compiled_circuit.unwrap_or(DEFAULT_COMPILED_CIRCUIT.into()),
test_data,
@@ -311,11 +310,6 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
)
.await
}
Commands::TestUpdateAccountCalls {
addr,
data,
rpc_url,
} => test_update_account_calls(addr, data.unwrap_or(DEFAULT_DATA.into()), rpc_url).await,
Commands::SwapProofCommitments {
proof_path,
witness_path,
@@ -442,7 +436,7 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
)
.await
}
Commands::DeployEvmDataAttestation {
Commands::DeployEvmDa {
data,
settings_path,
sol_code_path,
@@ -516,7 +510,9 @@ fn update_ezkl_binary(version: &Option<String>) -> Result<String, EZKLError> {
.status()
.is_err()
{
log::warn!("bash is not installed on this system, trying to run the install script with sh (may fail)");
log::warn!(
"bash is not installed on this system, trying to run the install script with sh (may fail)"
);
"sh"
} else {
"bash"
@@ -725,7 +721,7 @@ pub(crate) fn table(model: PathBuf, run_args: RunArgs) -> Result<String, EZKLErr
pub(crate) async fn gen_witness(
compiled_circuit_path: PathBuf,
data: PathBuf,
data: String,
output: Option<PathBuf>,
vk_path: Option<PathBuf>,
srs_path: Option<PathBuf>,
@@ -733,7 +729,7 @@ pub(crate) async fn gen_witness(
// these aren't real values so the sanity checks are mostly meaningless
let mut circuit = GraphCircuit::load(compiled_circuit_path)?;
let data: GraphData = GraphData::from_path(data)?;
let data = GraphData::from_str(&data)?;
let settings = circuit.settings().clone();
let vk = if let Some(vk) = vk_path {
@@ -847,6 +843,8 @@ pub(crate) fn gen_random_data(
data_path: PathBuf,
variables: Vec<(String, usize)>,
seed: u64,
min: Option<f32>,
max: Option<f32>,
) -> Result<String, EZKLError> {
let mut file = std::fs::File::open(&model_path).map_err(|e| {
crate::graph::errors::GraphError::ReadWriteFileError(
@@ -865,22 +863,32 @@ pub(crate) fn gen_random_data(
.collect::<tract_onnx::prelude::TractResult<Vec<_>>>()
.map_err(|e| EZKLError::from(e.to_string()))?;
let min = min.unwrap_or(0.0);
let max = max.unwrap_or(1.0);
/// Generates a random tensor of a given size and type.
fn random(
sizes: &[usize],
datum_type: tract_onnx::prelude::DatumType,
seed: u64,
min: f32,
max: f32,
) -> TractTensor {
use rand::{Rng, SeedableRng};
let mut rng = rand::rngs::StdRng::seed_from_u64(seed);
let mut tensor = TractTensor::zero::<f32>(sizes).unwrap();
let slice = tensor.as_slice_mut::<f32>().unwrap();
slice.iter_mut().for_each(|x| *x = rng.gen());
slice.iter_mut().for_each(|x| *x = rng.gen_range(min..max));
tensor.cast_to_dt(datum_type).unwrap().into_owned()
}
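
For intuition, a minimal standalone sketch of the new bounded sampling (values illustrative, not part of the diff): the seeded generator is what keeps gen-random-data reproducible, and rand's gen_range panics on an empty range, so min must stay strictly below max.

use rand::{Rng, SeedableRng};

// Same seed and bounds => identical samples, hence reproducible test data.
let mut a = rand::rngs::StdRng::seed_from_u64(42);
let mut b = rand::rngs::StdRng::seed_from_u64(42);
assert_eq!(a.gen_range(0.0_f32..1.0), b.gen_range(0.0_f32..1.0));
// gen_range(min..max) panics if min >= max, so the defaults (0.0, 1.0) are safe.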
fn tensor_for_fact(fact: &tract_onnx::prelude::TypedFact, seed: u64) -> TractTensor {
fn tensor_for_fact(
fact: &tract_onnx::prelude::TypedFact,
seed: u64,
min: f32,
max: f32,
) -> TractTensor {
if let Some(value) = &fact.konst {
return value.clone().into_tensor();
}
@@ -891,12 +899,14 @@ pub(crate) fn gen_random_data(
.expect("Expected concrete shape, found: {fact:?}"),
fact.datum_type,
seed,
min,
max,
)
}
let generated = input_facts
.iter()
.map(|v| tensor_for_fact(v, seed))
.map(|v| tensor_for_fact(v, seed, min, max))
.collect_vec();
let data = GraphData::from_tract_data(&generated)?;
@@ -1044,7 +1054,7 @@ impl AccuracyResults {
#[allow(clippy::too_many_arguments)]
pub(crate) async fn calibrate(
model_path: PathBuf,
data: PathBuf,
data: String,
settings_path: PathBuf,
target: CalibrationTarget,
lookup_safety_margin: f64,
@@ -1058,7 +1068,7 @@ pub(crate) async fn calibrate(
use crate::fieldutils::IntegerRep;
let data = GraphData::from_path(data)?;
let data = GraphData::from_str(&data)?;
// load the pre-generated settings
let settings = GraphSettings::load(&settings_path)?;
// now retrieve the run args
@@ -1522,7 +1532,7 @@ pub(crate) async fn create_evm_data_attestation(
settings_path: PathBuf,
sol_code_path: PathBuf,
abi_path: PathBuf,
input: PathBuf,
input: String,
witness: Option<PathBuf>,
) -> Result<String, EZKLError> {
#[allow(unused_imports)]
@@ -1536,52 +1546,30 @@ pub(crate) async fn create_evm_data_attestation(
// if input is not provided, we just instantiate dummy input data
let data =
GraphData::from_path(input).unwrap_or_else(|_| GraphData::new(DataSource::File(vec![])));
GraphData::from_str(&input).unwrap_or_else(|_| GraphData::new(DataSource::File(vec![])));
debug!("data attestation data: {:?}", data);
// The number of input and output instances we attest to for the single call data attestation
let mut input_len = None;
let mut output_len = None;
let output_data = if let Some(DataSource::OnChain(source)) = data.output_data {
if let Some(DataSource::OnChain(source)) = data.output_data {
if visibility.output.is_private() {
return Err("private output data on chain is not supported on chain".into());
}
let mut on_chain_output_data = vec![];
match source.calls {
Calls::Multiple(calls) => {
for call in calls {
on_chain_output_data.push(call);
}
}
Calls::Single(call) => {
output_len = Some(call.len);
}
}
Some(on_chain_output_data)
} else {
None
output_len = Some(source.call.decimals.len());
};
let input_data = if let DataSource::OnChain(source) = data.input_data {
if let DataSource::OnChain(source) = data.input_data {
if visibility.input.is_private() {
return Err("private input data on chain is not supported on chain".into());
}
let mut on_chain_input_data = vec![];
match source.calls {
Calls::Multiple(calls) => {
for call in calls {
on_chain_input_data.push(call);
}
}
Calls::Single(call) => {
input_len = Some(call.len);
}
}
Some(on_chain_input_data)
} else {
None
input_len = Some(source.call.decimals.len());
};
// Read the settings file and check whether run_args.input_visibility,
// run_args.output_visibility, or run_args.param_visibility is KZGCommit;
// if so, we need to load the witness
@@ -1602,33 +1590,25 @@ pub(crate) async fn create_evm_data_attestation(
None
};
// if either input_len or output_len is Some then we are in the single call data attestation mode
if input_len.is_some() || output_len.is_some() {
let output = fix_da_single_sol(input_len, output_len)?;
let mut f = File::create(sol_code_path.clone())?;
let _ = f.write(output.as_bytes());
// fetch abi of the contract
let (abi, _, _) = get_contract_artifacts(sol_code_path, "DataAttestationSingle", 0).await?;
// save abi to file
serde_json::to_writer(std::fs::File::create(abi_path)?, &abi)?;
} else {
let output = fix_da_multi_sol(input_data, output_data, commitment_bytes)?;
let mut f = File::create(sol_code_path.clone())?;
let _ = f.write(output.as_bytes());
// fetch abi of the contract
let (abi, _, _) = get_contract_artifacts(sol_code_path, "DataAttestationMulti", 0).await?;
// save abi to file
serde_json::to_writer(std::fs::File::create(abi_path)?, &abi)?;
}
let output: String = fix_da_sol(
commitment_bytes,
input_len.is_none() && output_len.is_none(),
)?;
let mut f = File::create(sol_code_path.clone())?;
let _ = f.write(output.as_bytes());
// fetch abi of the contract
let (abi, _, _) = get_contract_artifacts(sol_code_path, "DataAttestation", 0).await?;
// save abi to file
serde_json::to_writer(std::fs::File::create(abi_path)?, &abi)?;
Ok(String::new())
}
pub(crate) async fn deploy_da_evm(
data: PathBuf,
data: String,
settings_path: PathBuf,
sol_code_path: PathBuf,
rpc_url: Option<String>,
rpc_url: String,
addr_path: PathBuf,
runs: usize,
private_key: Option<String>,
@@ -1637,7 +1617,7 @@ pub(crate) async fn deploy_da_evm(
settings_path,
data,
sol_code_path,
rpc_url.as_deref(),
&rpc_url,
runs,
private_key.as_deref(),
)
@@ -1652,7 +1632,7 @@ pub(crate) async fn deploy_da_evm(
pub(crate) async fn deploy_evm(
sol_code_path: PathBuf,
rpc_url: Option<String>,
rpc_url: String,
addr_path: PathBuf,
runs: usize,
private_key: Option<String>,
@@ -1665,7 +1645,7 @@ pub(crate) async fn deploy_evm(
};
let contract_address = deploy_contract_via_solidity(
sol_code_path,
rpc_url.as_deref(),
&rpc_url,
runs,
private_key.as_deref(),
contract_name,
@@ -1708,7 +1688,7 @@ pub(crate) fn encode_evm_calldata(
pub(crate) async fn verify_evm(
proof_path: PathBuf,
addr_verifier: H160Flag,
rpc_url: Option<String>,
rpc_url: String,
addr_da: Option<H160Flag>,
addr_vk: Option<H160Flag>,
) -> Result<String, EZKLError> {
@@ -1722,7 +1702,7 @@ pub(crate) async fn verify_evm(
addr_verifier.into(),
addr_da.into(),
addr_vk.map(|s| s.into()),
rpc_url.as_deref(),
&rpc_url,
)
.await?
} else {
@@ -1730,7 +1710,7 @@ pub(crate) async fn verify_evm(
proof.clone(),
addr_verifier.into(),
addr_vk.map(|s| s.into()),
rpc_url.as_deref(),
&rpc_url,
)
.await?
};
@@ -1867,17 +1847,17 @@ pub(crate) fn setup(
Ok(String::new())
}
pub(crate) async fn setup_test_evm_witness(
data_path: PathBuf,
pub(crate) async fn setup_test_evm_data(
data_path: String,
compiled_circuit_path: PathBuf,
test_data: PathBuf,
rpc_url: Option<String>,
rpc_url: String,
input_source: TestDataSource,
output_source: TestDataSource,
) -> Result<String, EZKLError> {
use crate::graph::TestOnChainData;
let mut data = GraphData::from_path(data_path)?;
let mut data = GraphData::from_str(&data_path)?;
let mut circuit = GraphCircuit::load(compiled_circuit_path)?;
// if both input and output are from files fail
@@ -1903,17 +1883,6 @@ pub(crate) async fn setup_test_evm_witness(
}
use crate::pfsys::ProofType;
pub(crate) async fn test_update_account_calls(
addr: H160Flag,
data: PathBuf,
rpc_url: Option<String>,
) -> Result<String, EZKLError> {
use crate::eth::update_account_calls;
update_account_calls(addr.into(), data, rpc_url.as_deref()).await?;
Ok(String::new())
}
#[allow(clippy::too_many_arguments)]
pub(crate) fn prove(

View File

@@ -33,7 +33,7 @@ pub enum GraphError {
#[error("a node is missing required params: {0}")]
MissingParams(String),
/// A node has misformed parameters
#[error("a node is has misformed params: {0}")]
#[error("a node has misformed params: {0}")]
MisformedParams(String),
/// Error in the configuration of the visibility of variables
#[error("there should be at least one set of public variables")]
@@ -98,8 +98,6 @@ pub enum GraphError {
feature = "ezkl",
not(all(target_arch = "wasm32", target_os = "unknown"))
))]
#[error("[tokio postgres] {0}")]
TokioPostgresError(#[from] tokio_postgres::Error),
/// Eth error
#[cfg(all(
feature = "ezkl",
@@ -141,7 +139,9 @@ pub enum GraphError {
#[error("range check {0} is too large")]
RangeCheckTooLarge(usize),
/// Cannot use on-chain data source as private data
#[error("cannot use on-chain data source as 1) output for on-chain test 2) as private data 3) as input when using wasm.")]
#[error(
"cannot use on-chain data source as 1) output for on-chain test 2) as private data 3) as input when using wasm."
)]
OnChainDataSource,
/// Missing data source
#[error("missing data source")]

View File

@@ -1,19 +1,15 @@
use super::errors::GraphError;
use super::quantize_float;
use crate::EZKL_BUF_CAPACITY;
use crate::circuit::InputType;
use crate::fieldutils::integer_rep_to_felt;
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use crate::graph::postgres::Client;
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use crate::tensor::Tensor;
use crate::EZKL_BUF_CAPACITY;
use halo2curves::bn256::Fr as Fp;
#[cfg(feature = "python-bindings")]
use pyo3::ToPyObject;
#[cfg(feature = "python-bindings")]
use pyo3::prelude::*;
#[cfg(feature = "python-bindings")]
use pyo3::types::PyDict;
#[cfg(feature = "python-bindings")]
use pyo3::ToPyObject;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::io::BufReader;
use std::io::BufWriter;
@@ -21,13 +17,10 @@ use std::io::Read;
use std::panic::UnwindSafe;
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use tract_onnx::tract_core::{
tract_data::{prelude::Tensor as TractTensor, TVec},
tract_data::{TVec, prelude::Tensor as TractTensor},
value::TValue,
};
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use tract_onnx::tract_hir::tract_num_traits::ToPrimitive;
type Decimals = u8;
type Call = String;
type RPCUrl = String;
@@ -168,85 +161,26 @@ impl<'de> Deserialize<'de> for FileSourceInner {
/// Organized as a vector of vectors where each inner vector represents a row/entry
pub type FileSource = Vec<Vec<FileSourceInner>>;
/// Represents different types of calls for fetching on-chain data
#[derive(Clone, Debug, PartialOrd, PartialEq)]
pub enum Calls {
/// Multiple calls to different accounts, each returning individual values
Multiple(Vec<CallsToAccount>),
/// Single call returning an array of values
Single(CallToAccount),
}
/// Represents which parts of the model (input/output) are attested to on-chain
pub type InputOutput = (bool, bool);
impl Default for Calls {
fn default() -> Self {
Calls::Multiple(Vec::new())
}
}
impl Serialize for Calls {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
match self {
Calls::Single(data) => data.serialize(serializer),
Calls::Multiple(data) => data.serialize(serializer),
}
}
}
// !!! ALWAYS USE JSON SERIALIZATION FOR GRAPH INPUT
// UNTAGGED ENUMS WONT WORK :( as highlighted here:
impl<'de> Deserialize<'de> for Calls {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let this_json: Box<serde_json::value::RawValue> = Deserialize::deserialize(deserializer)?;
let multiple_try: Result<Vec<CallsToAccount>, _> = serde_json::from_str(this_json.get());
if let Ok(t) = multiple_try {
return Ok(Calls::Multiple(t));
}
let single_try: Result<CallToAccount, _> = serde_json::from_str(this_json.get());
if let Ok(t) = single_try {
return Ok(Calls::Single(t));
}
Err(serde::de::Error::custom("failed to deserialize Calls"))
}
}
/// Configuration for accessing on-chain data sources
#[derive(Clone, Debug, Deserialize, Serialize, Default, PartialOrd, PartialEq)]
pub struct OnChainSource {
/// Call specifications for fetching data
pub calls: Calls,
pub call: CallToAccount,
/// RPC endpoint URL for accessing the chain
pub rpc: RPCUrl,
}
impl OnChainSource {
/// Creates a new OnChainSource with multiple calls
///
/// # Arguments
/// * `calls` - Vector of call specifications
/// * `rpc` - RPC endpoint URL
pub fn new_multiple(calls: Vec<CallsToAccount>, rpc: RPCUrl) -> Self {
OnChainSource {
calls: Calls::Multiple(calls),
rpc,
}
}
/// Creates a new OnChainSource with a single call
/// Creates a new OnChainSource
///
/// # Arguments
/// * `call` - Call specification
/// * `rpc` - RPC endpoint URL
pub fn new_single(call: CallToAccount, rpc: RPCUrl) -> Self {
OnChainSource {
calls: Calls::Single(call),
rpc,
}
pub fn new(call: CallToAccount, rpc: RPCUrl) -> Self {
OnChainSource { call, rpc }
}
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
@@ -262,12 +196,9 @@ impl OnChainSource {
data: &FileSource,
scales: Vec<crate::Scale>,
mut shapes: Vec<Vec<usize>>,
rpc: Option<&str>,
) -> Result<(Vec<Tensor<Fp>>, Self), GraphError> {
use crate::eth::{
evm_quantize_multi, read_on_chain_inputs_multi, test_on_chain_data,
DEFAULT_ANVIL_ENDPOINT,
};
rpc: &str,
) -> Result<Self, GraphError> {
use crate::eth::{read_on_chain_inputs, test_on_chain_data};
use log::debug;
// Set up local anvil instance for reading on-chain data
@@ -281,46 +212,15 @@ impl OnChainSource {
shapes[idx] = vec![i.len()];
}
}
let used_rpc = rpc.to_string();
let calls_to_accounts = test_on_chain_data(client.clone(), data).await?;
debug!("Calls to accounts: {:?}", calls_to_accounts);
let inputs =
read_on_chain_inputs_multi(client.clone(), client_address, &calls_to_accounts).await?;
let call_to_account = test_on_chain_data(client.clone(), data).await?;
debug!("Call to account: {:?}", call_to_account);
let inputs = read_on_chain_inputs(client.clone(), client_address, &call_to_account).await?;
debug!("Inputs: {:?}", inputs);
let mut quantized_evm_inputs = vec![];
let mut prev = 0;
for (idx, i) in data.iter().enumerate() {
quantized_evm_inputs.extend(
evm_quantize_multi(
client.clone(),
vec![scales[idx]; i.len()],
&(
inputs.0[prev..i.len()].to_vec(),
inputs.1[prev..i.len()].to_vec(),
),
)
.await?,
);
prev += i.len();
}
// on-chain data has already been quantized at this point. Just need to reshape it and push into tensor vector
let mut inputs: Vec<Tensor<Fp>> = vec![];
for (input, shape) in [quantized_evm_inputs].iter().zip(shapes) {
let mut t: Tensor<Fp> = input.iter().cloned().collect();
t.reshape(&shape)?;
inputs.push(t);
}
let used_rpc = rpc.unwrap_or(DEFAULT_ANVIL_ENDPOINT).to_string();
// Fill the input_data field of the GraphData struct
Ok((
inputs,
OnChainSource::new_multiple(calls_to_accounts.clone(), used_rpc),
))
Ok(OnChainSource::new(call_to_account, used_rpc))
}
}
@@ -342,11 +242,9 @@ pub struct CallToAccount {
/// ABI-encoded function call data
pub call_data: Call,
/// Number of decimal places for float conversion
pub decimals: Decimals,
pub decimals: Vec<Decimals>,
/// Contract address to call
pub address: String,
/// Expected length of returned array
pub len: usize,
}
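
A hedged construction sketch of the new shape (field values illustrative, and only the fields visible in this hunk are shown): the decimals vector's length now doubles as the number of returned values, which is why the separate len field could be dropped.

// Illustrative only: three returned values, each with its own decimals.
let call = CallToAccount {
    call_data: "0xdeadbeef".to_string(),
    decimals: vec![18, 18, 6], // length 3 => the call returns three values
    address: "0x0000000000000000000000000000000000000000".to_string(),
};
let source = OnChainSource::new(call, "http://127.0.0.1:8545".to_string());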
/// Represents different sources of input/output data for the EZKL model
@@ -357,9 +255,6 @@ pub enum DataSource {
File(FileSource),
/// Data fetched from blockchain contracts
OnChain(OnChainSource),
/// Data from a PostgreSQL database
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
DB(PostgresSource),
}
impl Default for DataSource {
@@ -420,15 +315,6 @@ impl<'de> Deserialize<'de> for DataSource {
return Ok(DataSource::OnChain(t));
}
// Try deserializing as PostgresSource if feature enabled
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
{
let third_try: Result<PostgresSource, _> = serde_json::from_str(this_json.get());
if let Ok(t) = third_try {
return Ok(DataSource::DB(t));
}
}
Err(serde::de::Error::custom("failed to deserialize DataSource"))
}
}
@@ -478,7 +364,7 @@ impl GraphData {
return Err(GraphError::InvalidDims(
0,
"non file data cannot be split into batches".to_string(),
))
));
}
}
Ok(inputs)
@@ -528,6 +414,27 @@ impl GraphData {
}
}
/// Loads graph input data from a string, which may be either inline JSON or a file path.
/// The string is first parsed as JSON data; if that fails, it is treated as a
/// file path and the data is loaded from the file.
///
/// # Arguments
/// * `data` - String containing the input data
/// # Returns
/// A new GraphData instance containing the loaded data
pub fn from_str(data: &str) -> Result<Self, GraphError> {
match serde_json::from_str(data) {
Ok(graph_input) => Ok(graph_input),
Err(_) => GraphData::from_path(std::path::PathBuf::from(data)),
}
}
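
A minimal usage sketch (assuming input.json holds the same JSON object, and that the {"input_data": ...} shape follows ezkl's usual input file format): both invocations should yield the same GraphData, since inline JSON is tried first and a path is the fallback.

// Illustrative: inline JSON, then a path to a file with identical contents.
let from_json = GraphData::from_str(r#"{"input_data": [[0.5, 0.1]]}"#)?;
let from_file = GraphData::from_str("input.json")?;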
/// Loads graph input data from a file
///
/// # Arguments
@@ -591,13 +498,8 @@ impl GraphData {
return Err(GraphError::InvalidDims(
0,
"on-chain data cannot be split into batches".to_string(),
))
));
}
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
GraphData {
input_data: DataSource::DB(data),
output_data: _,
} => data.fetch_and_format_as_file().await?,
};
// Process each input tensor according to its shape
@@ -609,8 +511,11 @@ impl GraphData {
if input.len() % input_size != 0 {
return Err(GraphError::InvalidDims(
0,
"calibration data length must be evenly divisible by the original input_size"
.to_string(),
format!(
"calibration data length (={}) must be evenly divisible by the original input_size(={})",
input.len(),
input_size
),
));
}
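
A quick worked instance of the check (numbers illustrative): with an original input_size of 4, a calibration vector of length 12 splits into three clean batches, while a length of 10 now triggers the more informative error above.

// Illustrative arithmetic behind the divisibility guard.
assert_eq!(12 % 4, 0); // ok: three batches
assert_eq!(10 % 4, 2); // rejected: InvalidDims reports both lengths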
@@ -658,45 +563,12 @@ impl GraphData {
}
}
#[cfg(feature = "python-bindings")]
impl ToPyObject for CallsToAccount {
/// Converts CallsToAccount to Python object
fn to_object(&self, py: Python) -> PyObject {
let dict = PyDict::new(py);
dict.set_item("account", &self.address).unwrap();
dict.set_item("call_data", &self.call_data).unwrap();
dict.to_object(py)
}
}
// Additional Python bindings for various types...
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_postgres_source_new() {
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
{
let source = PostgresSource::new(
"localhost".to_string(),
"5432".to_string(),
"user".to_string(),
"SELECT * FROM table".to_string(),
"database".to_string(),
"password".to_string(),
);
assert_eq!(source.host, "localhost");
assert_eq!(source.port, "5432");
assert_eq!(source.user, "user");
assert_eq!(source.query, "SELECT * FROM table");
assert_eq!(source.dbname, "database");
assert_eq!(source.password, "password");
}
}
#[test]
fn test_data_source_serialization_round_trip() {
// Test backwards compatibility with old format
@@ -739,95 +611,6 @@ mod tests {
}
}
/// Source data from a PostgreSQL database
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
#[derive(Clone, Debug, Deserialize, Serialize, Default, PartialOrd, PartialEq)]
pub struct PostgresSource {
/// Database host address
pub host: RPCUrl,
/// Database user name
pub user: String,
/// Database password
pub password: String,
/// SQL query to execute
pub query: String,
/// Database name
pub dbname: String,
/// Database port
pub port: String,
}
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
impl PostgresSource {
/// Creates a new PostgreSQL data source
pub fn new(
host: RPCUrl,
port: String,
user: String,
query: String,
dbname: String,
password: String,
) -> Self {
PostgresSource {
host,
user,
password,
query,
dbname,
port,
}
}
/// Fetches data from the PostgreSQL database
pub async fn fetch(&self) -> Result<Vec<Vec<pg_bigdecimal::PgNumeric>>, GraphError> {
// Configuration string
let config = if self.password.is_empty() {
format!(
"host={} user={} dbname={} port={}",
self.host, self.user, self.dbname, self.port
)
} else {
format!(
"host={} user={} dbname={} port={} password={}",
self.host, self.user, self.dbname, self.port, self.password
)
};
let mut client = Client::connect(&config).await?;
let mut res: Vec<pg_bigdecimal::PgNumeric> = Vec::new();
// Extract rows from query
for row in client.query(&self.query, &[]).await? {
for i in 0..row.len() {
res.push(row.get(i));
}
}
Ok(vec![res])
}
/// Fetches and formats data as FileSource
pub async fn fetch_and_format_as_file(&self) -> Result<Vec<Vec<FileSourceInner>>, GraphError> {
Ok(self
.fetch()
.await?
.iter()
.map(|d| {
d.iter()
.map(|d| {
FileSourceInner::Float(
d.n.as_ref()
.unwrap()
.to_f64()
.ok_or("could not convert decimal to f64")
.unwrap(),
)
})
.collect()
})
.collect())
}
}
#[cfg(feature = "python-bindings")]
impl ToPyObject for CallToAccount {
fn to_object(&self, py: Python) -> PyObject {
@@ -835,21 +618,10 @@ impl ToPyObject for CallToAccount {
dict.set_item("account", &self.address).unwrap();
dict.set_item("call_data", &self.call_data).unwrap();
dict.set_item("decimals", &self.decimals).unwrap();
dict.set_item("len", &self.len).unwrap();
dict.to_object(py)
}
}
#[cfg(feature = "python-bindings")]
impl ToPyObject for Calls {
fn to_object(&self, py: Python) -> PyObject {
match self {
Calls::Multiple(calls) => calls.to_object(py),
Calls::Single(call) => call.to_object(py),
}
}
}
#[cfg(feature = "python-bindings")]
impl ToPyObject for DataSource {
fn to_object(&self, py: Python) -> PyObject {
@@ -858,18 +630,10 @@ impl ToPyObject for DataSource {
DataSource::OnChain(source) => {
let dict = PyDict::new(py);
dict.set_item("rpc_url", &source.rpc).unwrap();
dict.set_item("calls_to_accounts", &source.calls.to_object(py))
dict.set_item("calls_to_accounts", &source.call.to_object(py))
.unwrap();
dict.to_object(py)
}
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
DataSource::DB(source) => {
let dict = PyDict::new(py);
dict.set_item("host", &source.host).unwrap();
dict.set_item("user", &source.user).unwrap();
dict.set_item("query", &source.query).unwrap();
dict.to_object(py)
}
}
}
}

View File

@@ -6,9 +6,6 @@ pub mod model;
pub mod modules;
/// Inner elements of a computational graph that represent a single operation / constraints.
pub mod node;
/// postgres helper functions
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
pub mod postgres;
/// Helper functions
pub mod utilities;
/// Representations of a computational graph's variables.
@@ -36,12 +33,12 @@ use self::modules::{GraphModules, ModuleConfigs, ModuleForwardResult, ModuleSize
use crate::circuit::lookup::LookupOp;
use crate::circuit::modules::ModulePlanner;
use crate::circuit::region::{ConstantsMap, RegionSettings};
use crate::circuit::table::{num_cols_required, Range, Table, RESERVED_BLINDING_ROWS_PAD};
use crate::circuit::table::{RESERVED_BLINDING_ROWS_PAD, Range, Table, num_cols_required};
use crate::circuit::{CheckMode, InputType};
use crate::fieldutils::{felt_to_f64, IntegerRep};
use crate::fieldutils::{IntegerRep, felt_to_f64};
use crate::pfsys::PrettyElements;
use crate::tensor::{Tensor, ValTensor};
use crate::{RunArgs, EZKL_BUF_CAPACITY};
use crate::{EZKL_BUF_CAPACITY, RunArgs};
use halo2_proofs::{
circuit::Layouter,
@@ -56,13 +53,13 @@ use maybe_rayon::prelude::{IntoParallelRefIterator, ParallelIterator};
pub use model::*;
pub use node::*;
#[cfg(feature = "python-bindings")]
use pyo3::ToPyObject;
#[cfg(feature = "python-bindings")]
use pyo3::prelude::*;
#[cfg(feature = "python-bindings")]
use pyo3::types::PyDict;
#[cfg(feature = "python-bindings")]
use pyo3::types::PyDictMethods;
#[cfg(feature = "python-bindings")]
use pyo3::ToPyObject;
use serde::{Deserialize, Serialize};
use std::ops::Deref;
@@ -455,6 +452,10 @@ pub struct GraphSettings {
pub num_blinding_factors: Option<usize>,
/// unix time timestamp
pub timestamp: Option<u128>,
/// Model inputs types (if any)
pub input_types: Option<Vec<InputType>>,
/// Model outputs types (if any)
pub output_types: Option<Vec<InputType>>,
}
impl GraphSettings {
@@ -760,7 +761,7 @@ pub struct TestOnChainData {
/// The path to the test witness
pub data: std::path::PathBuf,
/// rpc endpoint
pub rpc: Option<String>,
pub rpc: String,
/// data sources for the on chain data
pub data_sources: TestSources,
}
@@ -1007,10 +1008,6 @@ impl GraphCircuit {
DataSource::File(file_data) => {
self.load_file_data(file_data, &shapes, scales, input_types)
}
DataSource::DB(pg) => {
let data = pg.fetch_and_format_as_file().await?;
self.load_file_data(&data, &shapes, scales, input_types)
}
}
}
@@ -1022,24 +1019,11 @@ impl GraphCircuit {
shapes: &Vec<Vec<usize>>,
scales: Vec<crate::Scale>,
) -> Result<Vec<Tensor<Fp>>, GraphError> {
use crate::eth::{
evm_quantize_multi, evm_quantize_single, read_on_chain_inputs_multi,
read_on_chain_inputs_single, setup_eth_backend,
};
let (client, client_address) = setup_eth_backend(Some(&source.rpc), None).await?;
let quantized_evm_inputs = match source.calls {
input::Calls::Single(call) => {
let (inputs, decimals) =
read_on_chain_inputs_single(client.clone(), client_address, call).await?;
evm_quantize_single(client, scales, &inputs, decimals).await?
}
input::Calls::Multiple(calls) => {
let inputs =
read_on_chain_inputs_multi(client.clone(), client_address, &calls).await?;
evm_quantize_multi(client, scales, &inputs).await?
}
};
use crate::eth::{evm_quantize, read_on_chain_inputs, setup_eth_backend};
let (client, client_address) = setup_eth_backend(&source.rpc, None).await?;
let input = read_on_chain_inputs(client.clone(), client_address, &source.call).await?;
let quantized_evm_inputs =
evm_quantize(client, scales, &input, &source.call.decimals).await?;
// on-chain data has already been quantized at this point. Just need to reshape it and push into tensor vector
let mut inputs: Vec<Tensor<Fp>> = vec![];
for (input, shape) in [quantized_evm_inputs].iter().zip(shapes) {
@@ -1440,6 +1424,8 @@ impl GraphCircuit {
let output_scales = self.model().graph.get_output_scales()?;
let input_shapes = self.model().graph.input_shapes()?;
let output_shapes = self.model().graph.output_shapes()?;
let mut input_data = None;
let mut output_data = None;
if matches!(
test_on_chain_data.data_sources.input,
@@ -1450,23 +1436,12 @@ impl GraphCircuit {
return Err(GraphError::OnChainDataSource);
}
let input_data = match &data.input_data {
DataSource::File(input_data) => input_data,
input_data = match &data.input_data {
DataSource::File(input_data) => Some(input_data),
_ => {
return Err(GraphError::OnChainDataSource);
return Err(GraphError::MissingDataSource);
}
};
// Get the flatten length of input_data
// if the input source is a field then set scale to 0
let datam: (Vec<Tensor<Fp>>, OnChainSource) = OnChainSource::test_from_file_data(
input_data,
input_scales,
input_shapes,
test_on_chain_data.rpc.as_deref(),
)
.await?;
data.input_data = datam.1.into();
}
if matches!(
test_on_chain_data.data_sources.output,
@@ -1477,20 +1452,39 @@ impl GraphCircuit {
return Err(GraphError::OnChainDataSource);
}
let output_data = match &data.output_data {
Some(DataSource::File(output_data)) => output_data,
Some(DataSource::OnChain(_)) => return Err(GraphError::OnChainDataSource),
output_data = match &data.output_data {
Some(DataSource::File(output_data)) => Some(output_data),
_ => return Err(GraphError::MissingDataSource),
};
let datum: (Vec<Tensor<Fp>>, OnChainSource) = OnChainSource::test_from_file_data(
output_data,
output_scales,
output_shapes,
test_on_chain_data.rpc.as_deref(),
)
.await?;
data.output_data = Some(datum.1.into());
}
// Merge the input and output data
let mut file_data: Vec<Vec<input::FileSourceInner>> = vec![];
let mut scales: Vec<crate::Scale> = vec![];
let mut shapes: Vec<Vec<usize>> = vec![];
if let Some(input_data) = input_data {
file_data.extend(input_data.clone());
scales.extend(input_scales.clone());
shapes.extend(input_shapes.clone());
}
if let Some(output_data) = output_data {
file_data.extend(output_data.clone());
scales.extend(output_scales.clone());
shapes.extend(output_shapes.clone());
};
// print file data
debug!("file data: {:?}", file_data);
let on_chain_data: OnChainSource =
OnChainSource::test_from_file_data(&file_data, scales, shapes, &test_on_chain_data.rpc)
.await?;
// Here we update the GraphData struct with the on-chain data
if input_data.is_some() {
data.input_data = on_chain_data.clone().into();
}
if output_data.is_some() {
data.output_data = Some(on_chain_data.into());
}
debug!("test on-chain data: {:?}", data);
// Save the updated GraphData struct to the data_path
data.save(test_on_chain_data.data)?;
Ok(())

View File

@@ -1,7 +1,6 @@
use super::errors::GraphError;
use super::extract_const_quantized_values;
use super::node::*;
use super::scale_to_multiplier;
use super::vars::*;
use super::GraphSettings;
use crate::circuit::hybrid::HybridOp;
@@ -379,9 +378,15 @@ pub struct ParsedNodes {
pub nodes: BTreeMap<usize, NodeType>,
inputs: Vec<usize>,
outputs: Vec<Outlet>,
output_types: Vec<InputType>,
}
impl ParsedNodes {
/// Returns the output types of the computational graph.
pub fn get_output_types(&self) -> Vec<InputType> {
self.output_types.clone()
}
/// Returns the number of the computational graph's inputs
pub fn num_inputs(&self) -> usize {
self.inputs.len()
@@ -491,6 +496,16 @@ impl Model {
Ok(om)
}
/// Gets the input types from the parsed nodes
pub fn get_input_types(&self) -> Result<Vec<InputType>, GraphError> {
self.graph.get_input_types()
}
/// Gets the output types from the parsed nodes
pub fn get_output_types(&self) -> Vec<InputType> {
self.graph.get_output_types()
}
/// Saves the model to the given path
pub fn save(&self, path: PathBuf) -> Result<(), GraphError> {
let f = std::fs::File::create(&path).map_err(|e| {
@@ -574,6 +589,11 @@ impl Model {
required_range_checks: res.range_checks.into_iter().collect(),
model_output_scales: self.graph.get_output_scales()?,
model_input_scales: self.graph.get_input_scales(),
input_types: self.get_input_types().ok(),
output_types: Some(self.get_output_types()),
num_dynamic_lookups: res.num_dynamic_lookups,
total_dynamic_col_size: res.dynamic_lookup_col_coord,
num_shuffles: res.num_shuffles,
@@ -704,6 +724,11 @@ impl Model {
nodes,
inputs: model.inputs.iter().map(|o| o.node).collect(),
outputs: model.outputs.iter().map(|o| (o.node, o.slot)).collect(),
output_types: model
.outputs
.iter()
.map(|o| Ok::<InputType, GraphError>(model.outlet_fact(*o)?.datum_type.into()))
.collect::<Result<Vec<_>, GraphError>>()?,
};
let duration = start_time.elapsed();
@@ -862,6 +887,15 @@ impl Model {
nodes: subgraph_nodes,
inputs: model.inputs.iter().map(|o| o.node).collect(),
outputs: model.outputs.iter().map(|o| (o.node, o.slot)).collect(),
output_types: model
.outputs
.iter()
.map(|o| {
Ok::<InputType, GraphError>(
model.outlet_fact(*o)?.datum_type.into(),
)
})
.collect::<Result<Vec<_>, GraphError>>()?,
};
let om = Model {
@@ -1138,17 +1172,10 @@ impl Model {
})?;
if run_args.output_visibility.is_public() || run_args.output_visibility.is_fixed() {
let output_scales = self.graph.get_output_scales().map_err(|e| {
error!("{}", e);
halo2_proofs::plonk::Error::Synthesis
})?;
let res = outputs
.iter()
.enumerate()
.map(|(i, output)| {
let mut tol: crate::circuit::Tolerance = run_args.tolerance;
tol.scale = scale_to_multiplier(output_scales[i]).into();
let comparators = if run_args.output_visibility == Visibility::Public {
let res = vars
.instance
@@ -1171,7 +1198,6 @@ impl Model {
&mut thread_safe_region,
&[output.clone(), comparators],
Box::new(HybridOp::Output {
tol,
decomp: !run_args.ignore_range_check_inputs_outputs,
}),
)
@@ -1433,11 +1459,9 @@ impl Model {
let outputs = self.layout_nodes(&mut model_config, &mut region, &mut results)?;
if self.visibility.output.is_public() || self.visibility.output.is_fixed() {
let output_scales = self.graph.get_output_scales()?;
let res = outputs
.iter()
.enumerate()
.map(|(i, output)| {
.map(|output| {
let mut comparator: ValTensor<Fp> = (0..output.len())
.map(|_| {
if !self.visibility.output.is_fixed() {
@@ -1450,14 +1474,10 @@ impl Model {
.into();
comparator.reshape(output.dims())?;
let mut tol = run_args.tolerance;
tol.scale = scale_to_multiplier(output_scales[i]).into();
dummy_config.layout(
&mut region,
&[output.clone(), comparator],
Box::new(HybridOp::Output {
tol,
decomp: !run_args.ignore_range_check_inputs_outputs,
}),
)
@@ -1579,4 +1599,16 @@ impl Model {
}
Ok(instance_shapes)
}
/// Input types of the computational graph's public inputs (if any)
pub fn instance_types(&self) -> Result<Vec<InputType>, GraphError> {
let mut instance_types = vec![];
if self.visibility.input.is_public() {
instance_types.extend(self.graph.get_input_types()?);
}
if self.visibility.output.is_public() {
instance_types.extend(self.graph.get_output_types());
}
Ok(instance_types)
}
}

View File

@@ -1,493 +0,0 @@
use log::{debug, error, info};
use std::fmt::Debug;
use std::net::IpAddr;
#[cfg(all(not(not(feature = "ezkl")), unix))]
use std::path::Path;
use std::str::FromStr;
use std::sync::Arc;
use std::time::Duration;
use std::{fmt, pin::Pin};
use tokio::task::JoinHandle;
#[doc(inline)]
pub use tokio_postgres::config::{
ChannelBinding, Host, LoadBalanceHosts, SslMode, TargetSessionAttrs,
};
use tokio_postgres::tls::NoTlsStream;
use tokio_postgres::NoTls;
use tokio_postgres::{error::DbError, types::ToSql, Error, Row, Socket, ToStatement};
/// Connection configuration.
///
/// Configuration can be parsed from libpq-style connection strings. These strings come in two formats:
///
///
#[derive(Clone)]
pub struct Config {
config: tokio_postgres::Config,
notice_callback: Arc<dyn Fn(DbError) + Send + Sync>,
}
impl fmt::Debug for Config {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt.debug_struct("Config")
.field("config", &self.config)
.finish()
}
}
impl Default for Config {
fn default() -> Config {
Config::new()
}
}
impl Config {
/// Creates a new configuration.
pub fn new() -> Config {
tokio_postgres::Config::new().into()
}
/// Sets the user to authenticate with.
///
/// If the user is not set, then this defaults to the user executing this process.
pub fn user(&mut self, user: &str) -> &mut Config {
self.config.user(user);
self
}
/// Gets the user to authenticate with, if one has been configured with
/// the `user` method.
pub fn get_user(&self) -> Option<&str> {
self.config.get_user()
}
/// Sets the password to authenticate with.
pub fn password<T>(&mut self, password: T) -> &mut Config
where
T: AsRef<[u8]>,
{
self.config.password(password);
self
}
/// Gets the password to authenticate with, if one has been configured with
/// the `password` method.
pub fn get_password(&self) -> Option<&[u8]> {
self.config.get_password()
}
/// Sets the name of the database to connect to.
///
/// Defaults to the user.
pub fn dbname(&mut self, dbname: &str) -> &mut Config {
self.config.dbname(dbname);
self
}
/// Gets the name of the database to connect to, if one has been configured
/// with the `dbname` method.
pub fn get_dbname(&self) -> Option<&str> {
self.config.get_dbname()
}
/// Sets command line options used to configure the server.
pub fn options(&mut self, options: &str) -> &mut Config {
self.config.options(options);
self
}
/// Gets the command line options used to configure the server, if the
/// options have been set with the `options` method.
pub fn get_options(&self) -> Option<&str> {
self.config.get_options()
}
/// Sets the value of the `application_name` runtime parameter.
pub fn application_name(&mut self, application_name: &str) -> &mut Config {
self.config.application_name(application_name);
self
}
/// Gets the value of the `application_name` runtime parameter, if it has
/// been set with the `application_name` method.
pub fn get_application_name(&self) -> Option<&str> {
self.config.get_application_name()
}
/// Sets the SSL configuration.
///
/// Defaults to `prefer`.
pub fn ssl_mode(&mut self, ssl_mode: SslMode) -> &mut Config {
self.config.ssl_mode(ssl_mode);
self
}
/// Gets the SSL configuration.
pub fn get_ssl_mode(&self) -> SslMode {
self.config.get_ssl_mode()
}
/// Adds a host to the configuration.
///
/// Multiple hosts can be specified by calling this method multiple times, and each will be tried in order. On Unix
/// systems, a host starting with a `/` is interpreted as a path to a directory containing Unix domain sockets.
/// There must be either no hosts, or the same number of hosts as hostaddrs.
pub fn host(&mut self, host: &str) -> &mut Config {
self.config.host(host);
self
}
/// Gets the hosts that have been added to the configuration with `host`.
pub fn get_hosts(&self) -> &[Host] {
self.config.get_hosts()
}
/// Gets the hostaddrs that have been added to the configuration with `hostaddr`.
pub fn get_hostaddrs(&self) -> &[IpAddr] {
self.config.get_hostaddrs()
}
/// Adds a Unix socket host to the configuration.
///
/// Unlike `host`, this method allows non-UTF8 paths.
#[cfg(all(not(not(feature = "ezkl")), unix))]
pub fn host_path<T>(&mut self, host: T) -> &mut Config
where
T: AsRef<Path>,
{
self.config.host_path(host);
self
}
/// Adds a hostaddr to the configuration.
///
/// Multiple hostaddrs can be specified by calling this method multiple times, and each will be tried in order.
/// There must be either no hostaddrs, or the same number of hostaddrs as hosts.
pub fn hostaddr(&mut self, hostaddr: IpAddr) -> &mut Config {
self.config.hostaddr(hostaddr);
self
}
/// Adds a port to the configuration.
///
/// Multiple ports can be specified by calling this method multiple times. There must either be no ports, in which
/// case the default of 5432 is used, a single port, in which it is used for all hosts, or the same number of ports
/// as hosts.
pub fn port(&mut self, port: u16) -> &mut Config {
self.config.port(port);
self
}
/// Gets the ports that have been added to the configuration with `port`.
pub fn get_ports(&self) -> &[u16] {
self.config.get_ports()
}
/// Sets the timeout applied to socket-level connection attempts.
///
/// Note that hostnames can resolve to multiple IP addresses, and this timeout will apply to each address of each
/// host separately. Defaults to no limit.
pub fn connect_timeout(&mut self, connect_timeout: Duration) -> &mut Config {
self.config.connect_timeout(connect_timeout);
self
}
/// Gets the connection timeout, if one has been set with the
/// `connect_timeout` method.
pub fn get_connect_timeout(&self) -> Option<&Duration> {
self.config.get_connect_timeout()
}
/// Sets the TCP user timeout.
///
/// This is ignored for Unix domain socket connections. It is only supported on systems where
/// TCP_USER_TIMEOUT is available and will default to the system default if omitted or set to 0;
/// on other systems, it has no effect.
pub fn tcp_user_timeout(&mut self, tcp_user_timeout: Duration) -> &mut Config {
self.config.tcp_user_timeout(tcp_user_timeout);
self
}
/// Gets the TCP user timeout, if one has been set with the
/// `user_timeout` method.
pub fn get_tcp_user_timeout(&self) -> Option<&Duration> {
self.config.get_tcp_user_timeout()
}
/// Controls the use of TCP keepalive.
///
/// This is ignored for Unix domain socket connections. Defaults to `true`.
pub fn keepalives(&mut self, keepalives: bool) -> &mut Config {
self.config.keepalives(keepalives);
self
}
/// Reports whether TCP keepalives will be used.
pub fn get_keepalives(&self) -> bool {
self.config.get_keepalives()
}
/// Sets the amount of idle time before a keepalive packet is sent on the connection.
///
/// This is ignored for Unix domain sockets, or if the `keepalives` option is disabled. Defaults to 2 hours.
pub fn keepalives_idle(&mut self, keepalives_idle: Duration) -> &mut Config {
self.config.keepalives_idle(keepalives_idle);
self
}
/// Gets the configured amount of idle time before a keepalive packet will
/// be sent on the connection.
pub fn get_keepalives_idle(&self) -> Duration {
self.config.get_keepalives_idle()
}
/// Sets the time interval between TCP keepalive probes.
/// On Windows, this sets the value of the tcp_keepalive structs keepaliveinterval field.
///
/// This is ignored for Unix domain sockets, or if the `keepalives` option is disabled.
pub fn keepalives_interval(&mut self, keepalives_interval: Duration) -> &mut Config {
self.config.keepalives_interval(keepalives_interval);
self
}
/// Gets the time interval between TCP keepalive probes.
pub fn get_keepalives_interval(&self) -> Option<Duration> {
self.config.get_keepalives_interval()
}
/// Sets the maximum number of TCP keepalive probes that will be sent before dropping a connection.
///
/// This is ignored for Unix domain sockets, or if the `keepalives` option is disabled.
pub fn keepalives_retries(&mut self, keepalives_retries: u32) -> &mut Config {
self.config.keepalives_retries(keepalives_retries);
self
}
/// Gets the maximum number of TCP keepalive probes that will be sent before dropping a connection.
pub fn get_keepalives_retries(&self) -> Option<u32> {
self.config.get_keepalives_retries()
}
/// Sets the requirements of the session.
///
/// This can be used to connect to the primary server in a clustered database rather than one of the read-only
/// secondary servers. Defaults to `Any`.
pub fn target_session_attrs(
&mut self,
target_session_attrs: TargetSessionAttrs,
) -> &mut Config {
self.config.target_session_attrs(target_session_attrs);
self
}
/// Gets the requirements of the session.
pub fn get_target_session_attrs(&self) -> TargetSessionAttrs {
self.config.get_target_session_attrs()
}
/// Sets the channel binding behavior.
///
/// Defaults to `prefer`.
pub fn channel_binding(&mut self, channel_binding: ChannelBinding) -> &mut Config {
self.config.channel_binding(channel_binding);
self
}
/// Gets the channel binding behavior.
pub fn get_channel_binding(&self) -> ChannelBinding {
self.config.get_channel_binding()
}
/// Sets the host load balancing behavior.
///
/// Defaults to `disable`.
pub fn load_balance_hosts(&mut self, load_balance_hosts: LoadBalanceHosts) -> &mut Config {
self.config.load_balance_hosts(load_balance_hosts);
self
}
/// Gets the host load balancing behavior.
pub fn get_load_balance_hosts(&self) -> LoadBalanceHosts {
self.config.get_load_balance_hosts()
}
/// Sets the notice callback.
///
/// This callback will be invoked with the contents of every
/// [`AsyncMessage::Notice`] that is received by the connection. Notices use
/// the same structure as errors, but they are not "errors" per-se.
///
/// Notices are distinct from notifications, which are instead accessible
/// via the [`Notifications`] API.
///
/// [`AsyncMessage::Notice`]: tokio_postgres::AsyncMessage::Notice
/// [`Notifications`]: crate::Notifications
pub fn notice_callback<F>(&mut self, f: F) -> &mut Config
where
F: Fn(DbError) + Send + Sync + 'static,
{
self.notice_callback = Arc::new(f);
self
}
/// Opens a connection to a PostgreSQL database.
pub async fn connect(&self) -> Result<Client, Error> {
let (client, connection) = self.config.connect(NoTls).await?;
let connection = Connection::new(connection);
Ok(Client::new(client, connection))
}
}
impl FromStr for Config {
type Err = Error;
fn from_str(s: &str) -> Result<Config, Error> {
s.parse::<tokio_postgres::Config>().map(Config::from)
}
}
impl From<tokio_postgres::Config> for Config {
fn from(config: tokio_postgres::Config) -> Config {
Config {
config,
notice_callback: Arc::new(|notice| {
info!("{}: {}", notice.severity(), notice.message())
}),
}
}
}
#[allow(missing_debug_implementations, dead_code)]
/// An asynchronous PostgreSQL connection. We use this to keep the connection alive / keep it pinned so that it doesn't
/// get dropped.
pub struct Connection {
/// The underlying connection stream.
connection: Pin<Box<tokio_postgres::Connection<Socket, NoTlsStream>>>,
}
impl Connection {
/// Creates a new connection.
pub fn new(connection: tokio_postgres::Connection<Socket, NoTlsStream>) -> Self {
Connection {
connection: Box::pin(connection),
}
}
/// start the connection
pub async fn start(self) {
if let Err(e) = self.connection.await {
error!("connection error: {}", e);
}
}
}
#[allow(missing_debug_implementations, dead_code)]
/// An asynchronous PostgreSQL client.
pub struct Client {
connection: JoinHandle<()>,
client: tokio_postgres::Client,
}
impl Drop for Client {
fn drop(&mut self) {
let _ = self.close_inner();
}
}
impl Client {
pub(crate) fn new(client: tokio_postgres::Client, connection: Connection) -> Client {
// The connection object performs the actual communication with the database,
// so spawn it off to run on its own.
let thread = tokio::spawn(async move {
connection.start().await;
});
Client {
client,
connection: thread,
}
}
/// A convenience function which parses a configuration string into a `Config` and then connects to the database.
///
/// See the documentation for [`Config`] for information about the connection syntax.
///
/// [`Config`]: config/struct.Config.html
pub async fn connect(params: &str) -> Result<Client, Error> {
debug!("Connecting to database with params: {}", params);
params.parse::<Config>()?.connect().await
}
/// Returns a new `Config` object which can be used to configure and connect to a database.
pub fn configure() -> Config {
Config::new()
}
/// Executes a statement, returning the number of rows modified.
///
/// A statement may contain parameters, specified by `$n`, where `n` is the index of the parameter of the list
/// provided, 1-indexed.
///
/// If the statement does not modify any rows (e.g. `SELECT`), 0 is returned.
///
/// The `query` argument can either be a `Statement`, or a raw query string. If the same statement will be
/// repeatedly executed (perhaps with different query parameters), consider preparing the statement up front
/// with the `prepare` method.
///
pub async fn execute<T>(
&mut self,
query: &T,
params: &[&(dyn ToSql + Sync)],
) -> Result<u64, Error>
where
T: ?Sized + ToStatement + Debug,
{
debug!("Executing query: {:?}", query);
self.client.execute(query, params).await
}
/// Executes a statement, returning the resulting rows.
///
/// A statement may contain parameters, specified by `$n`, where `n` is the index of the parameter of the list
/// provided, 1-indexed.
///
/// The `query` argument can either be a `Statement`, or a raw query string. If the same statement will be
/// repeatedly executed (perhaps with different query parameters), consider preparing the statement up front
/// with the `prepare` method.
///
/// # Examples
///
pub async fn query<T>(
&mut self,
query: &T,
params: &[&(dyn ToSql + Sync)],
) -> Result<Vec<Row>, Error>
where
T: ?Sized + ToStatement + Debug,
{
debug!("Executing query: {:?}", query);
self.client.query(query, params).await
}
/// Determines if the client's connection has already closed.
///
/// If this returns `true`, the client is no longer usable.
pub fn is_closed(&self) -> bool {
self.client.is_closed()
}
/// Closes the client's connection to the server.
///
/// This is equivalent to `Client`'s `Drop` implementation, except that it returns any error encountered to the
/// caller.
pub fn close(mut self) -> Result<(), Error> {
self.close_inner()
}
fn close_inner(&mut self) -> Result<(), Error> {
self.client.__private_api_close();
Ok(())
}
}

View File

@@ -1,14 +1,14 @@
use super::errors::GraphError;
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use super::VarScales;
use super::errors::GraphError;
use super::{Rescaled, SupportedOp, Visibility};
use crate::circuit::Op;
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use crate::circuit::hybrid::HybridOp;
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use crate::circuit::lookup::LookupOp;
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use crate::circuit::poly::PolyOp;
use crate::circuit::Op;
use crate::fieldutils::IntegerRep;
use crate::tensor::{Tensor, TensorError, TensorType};
use halo2curves::bn256::Fr as Fp;
@@ -22,6 +22,7 @@ use std::sync::Arc;
use tract_onnx::prelude::{DatumType, Node as OnnxNode, TypedFact, TypedOp};
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use tract_onnx::tract_core::ops::{
Downsample,
array::{
Gather, GatherElements, GatherNd, MultiBroadcastTo, OneHot, ScatterElements, ScatterNd,
Slice, Topk,
@@ -31,7 +32,6 @@ use tract_onnx::tract_core::ops::{
einsum::EinSum,
element_wise::ElementWiseOp,
nn::{LeakyRelu, Reduce, Softmax},
Downsample,
};
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use tract_onnx::tract_hir::{
@@ -39,9 +39,8 @@ use tract_onnx::tract_hir::{
ops::array::{Pad, PadMode, TypedConcat},
ops::cnn::PoolSpec,
ops::konst::Const,
ops::nn::DataFormat,
tract_core::ops::cast::Cast,
tract_core::ops::cnn::{conv::KernelFormat, MaxPool, SumPool},
tract_core::ops::cnn::{MaxPool, SumPool},
};
/// Quantizes an iterable of f64 to a [Tensor] of IntegerRep using a fixed point representation.
@@ -859,6 +858,7 @@ pub fn new_op_from_onnx(
SupportedOp::Hybrid(HybridOp::Recip {
input_scale: (scale_to_multiplier(in_scale) as f32).into(),
output_scale: (scale_to_multiplier(max_scale) as f32).into(),
eps: run_args.get_epsilon(),
})
}
@@ -904,6 +904,7 @@ pub fn new_op_from_onnx(
SupportedOp::Hybrid(HybridOp::Rsqrt {
input_scale: (scale_to_multiplier(in_scale) as f32).into(),
output_scale: (scale_to_multiplier(max_scale) as f32).into(),
eps: run_args.get_epsilon(),
})
}
"Exp" => SupportedOp::Nonlinear(LookupOp::Exp {
@@ -914,6 +915,7 @@ pub fn new_op_from_onnx(
if run_args.bounded_log_lookup {
SupportedOp::Hybrid(HybridOp::Ln {
scale: scale_to_multiplier(input_scales[0]).into(),
eps: run_args.get_epsilon(),
})
} else {
SupportedOp::Nonlinear(LookupOp::Ln {
@@ -1132,6 +1134,7 @@ pub fn new_op_from_onnx(
input_scale: scale_to_multiplier(in_scale).into(),
output_scale: scale_to_multiplier(max_scale).into(),
axes: softmax_op.axes.to_vec(),
eps: run_args.get_epsilon(),
})
}
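
For context, a hedged floating-point analogue of why these ops now take an epsilon (the actual circuit arithmetic lives in the hybrid ops, not here): the epsilon keeps denominators away from exact zero in reciprocal-style computations.

// Illustrative only: a guarded reciprocal never divides by exactly zero.
fn recip_guarded(x: f64, eps: f64) -> f64 {
    1.0 / (x + eps)
}
assert!(recip_guarded(0.0, f64::EPSILON).is_finite());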
"MaxPool" => {
@@ -1146,13 +1149,6 @@ pub fn new_op_from_onnx(
let pool_spec: &PoolSpec = &sumpool_node.pool_spec;
// only support pytorch type formatting for now
if pool_spec.data_format != DataFormat::NCHW {
return Err(GraphError::MissingParams(
"data in wrong format".to_string(),
));
}
let stride = extract_strides(pool_spec)?;
let padding = extract_padding(pool_spec, &input_dims[0])?;
let kernel_shape = &pool_spec.kernel_shape;
@@ -1161,6 +1157,7 @@ pub fn new_op_from_onnx(
padding,
stride: stride.to_vec(),
pool_dims: kernel_shape.to_vec(),
data_format: pool_spec.data_format.into(),
})
}
"Ceil" => {
@@ -1314,15 +1311,6 @@ pub fn new_op_from_onnx(
}
}
if ((conv_node.pool_spec.data_format != DataFormat::NCHW)
&& (conv_node.pool_spec.data_format != DataFormat::CHW))
|| (conv_node.kernel_fmt != KernelFormat::OIHW)
{
return Err(GraphError::MisformedParams(
"data or kernel in wrong format".to_string(),
));
}
let pool_spec = &conv_node.pool_spec;
let stride = extract_strides(pool_spec)?;
@@ -1350,6 +1338,8 @@ pub fn new_op_from_onnx(
padding,
stride,
group,
data_format: conv_node.pool_spec.data_format.into(),
kernel_format: conv_node.kernel_fmt.into(),
})
}
"Not" => SupportedOp::Linear(PolyOp::Not),
@@ -1373,14 +1363,6 @@ pub fn new_op_from_onnx(
}
}
if (deconv_node.pool_spec.data_format != DataFormat::NCHW)
|| (deconv_node.kernel_format != KernelFormat::OIHW)
{
return Err(GraphError::MisformedParams(
"data or kernel in wrong format".to_string(),
));
}
let pool_spec = &deconv_node.pool_spec;
let stride = extract_strides(pool_spec)?;
@@ -1406,6 +1388,8 @@ pub fn new_op_from_onnx(
output_padding: deconv_node.adjustments.to_vec(),
stride,
group: deconv_node.group,
data_format: deconv_node.pool_spec.data_format.into(),
kernel_format: deconv_node.kernel_format.into(),
})
}
"Downsample" => {
@@ -1418,7 +1402,7 @@ pub fn new_op_from_onnx(
SupportedOp::Linear(PolyOp::Downsample {
axis: downsample_node.axis,
stride: downsample_node.stride as usize,
stride: downsample_node.stride,
modulo: downsample_node.modulo,
})
}
@@ -1489,13 +1473,6 @@ pub fn new_op_from_onnx(
let pool_spec: &PoolSpec = &sumpool_node.pool_spec;
// only support pytorch type formatting for now
if pool_spec.data_format != DataFormat::NCHW {
return Err(GraphError::MissingParams(
"data in wrong format".to_string(),
));
}
let stride = extract_strides(pool_spec)?;
let padding = extract_padding(pool_spec, &input_dims[0])?;
@@ -1504,6 +1481,7 @@ pub fn new_op_from_onnx(
stride: stride.to_vec(),
kernel_shape: pool_spec.kernel_shape.to_vec(),
normalized: sumpool_node.normalize,
data_format: pool_spec.data_format.into(),
})
}
"Pad" => {

View File

@@ -97,12 +97,11 @@ impl From<String> for EZKLError {
use std::str::FromStr;
use circuit::{table::Range, CheckMode, Tolerance};
use circuit::{CheckMode, table::Range};
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use clap::Args;
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use fieldutils::IntegerRep;
use graph::{Visibility, MAX_PUBLIC_SRS};
use graph::{MAX_PUBLIC_SRS, Visibility};
use halo2_proofs::poly::{
ipa::commitment::IPACommitmentScheme, kzg::commitment::KZGCommitmentScheme,
};
@@ -275,10 +274,6 @@ impl From<String> for Commitments {
derive(Args, ToFlags)
)]
pub struct RunArgs {
/// Error tolerance for model outputs
/// Only applicable when outputs are public
#[cfg_attr(all(feature = "ezkl", not(target_arch = "wasm32")), arg(short = 'T', long, default_value = "0", value_hint = clap::ValueHint::Other))]
pub tolerance: Tolerance,
/// Fixed point scaling factor for quantizing inputs
/// Higher values provide more precision but increase circuit complexity
#[cfg_attr(all(feature = "ezkl", not(target_arch = "wasm32")), arg(short = 'S', long, default_value = "7", value_hint = clap::ValueHint::Other))]
@@ -355,6 +350,16 @@ pub struct RunArgs {
arg(long, default_value = "false")
)]
pub ignore_range_check_inputs_outputs: bool,
/// Optional override for the epsilon value used to guard divisions by near-zero denominators
#[cfg_attr(all(feature = "ezkl", not(target_arch = "wasm32")), arg(long))]
pub epsilon: Option<f64>,
}
impl RunArgs {
/// Returns the epsilon value, falling back to `f64::EPSILON` when no override is set
pub fn get_epsilon(&self) -> f64 {
self.epsilon.unwrap_or(f64::EPSILON)
}
}
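Taken together with the op changes above, the plumbing is straightforward: the flag is optional on the CLI, and each op that adds an epsilon to a denominator calls `get_epsilon()` to obtain either the override or `f64::EPSILON`. A minimal sketch of just the fallback, using a hypothetical standalone struct rather than ezkl's `RunArgs`:

```rust
struct Args {
    epsilon: Option<f64>,
}

impl Args {
    fn get_epsilon(&self) -> f64 {
        // No override supplied: fall back to machine epsilon (~2.22e-16)
        self.epsilon.unwrap_or(f64::EPSILON)
    }
}

fn main() {
    assert_eq!(Args { epsilon: None }.get_epsilon(), f64::EPSILON);
    assert_eq!(Args { epsilon: Some(1e-6) }.get_epsilon(), 1e-6);
}
```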
impl Default for RunArgs {
@@ -365,7 +370,6 @@ impl Default for RunArgs {
fn default() -> Self {
Self {
bounded_log_lookup: false,
tolerance: Tolerance::default(),
input_scale: 7,
param_scale: 7,
scale_rebase_multiplier: 1,
@@ -382,6 +386,7 @@ impl Default for RunArgs {
decomp_base: 16384,
decomp_legs: 2,
ignore_range_check_inputs_outputs: false,
epsilon: None,
}
}
}
@@ -399,6 +404,16 @@ impl RunArgs {
pub fn validate(&self) -> Result<(), String> {
let mut errors = Vec::new();
// check if the largest represented integer in the decomposed form overflows IntegerRep
// try it with the largest possible value
let max_decomp = (self.decomp_base as IntegerRep).checked_pow(self.decomp_legs as u32);
if max_decomp.is_none() {
errors.push(format!(
"decomp_base^decomp_legs overflows IntegerRep: {}^{}",
self.decomp_base, self.decomp_legs
));
}
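The new guard rejects configurations whose largest decomposed value cannot be represented. A worked check of the same arithmetic, under the assumption that `IntegerRep` is a 128-bit signed integer:

```rust
type IntegerRep = i128; // assumption for this sketch

fn main() {
    // The defaults (decomp_base = 16384, decomp_legs = 2): 16384^2 = 2^28 fits easily.
    assert!((16384 as IntegerRep).checked_pow(2).is_some());
    // A base of 2^63 with three legs would need 189 bits, so checked_pow
    // returns None and validate() pushes the overflow error.
    assert!((9_223_372_036_854_775_808 as IntegerRep).checked_pow(3).is_none());
}
```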
// Visibility validations
if self.param_visibility == Visibility::Public {
errors.push(
@@ -407,10 +422,6 @@ impl RunArgs {
);
}
if self.tolerance.val > 0.0 && self.output_visibility != Visibility::Public {
errors.push("Non-zero tolerance requires output_visibility to be public".to_string());
}
// Scale validations
if self.scale_rebase_multiplier < 1 {
errors.push("scale_rebase_multiplier must be >= 1".to_string());
@@ -459,11 +470,6 @@ impl RunArgs {
warn!("logrows exceeds maximum public SRS size");
}
// Validate tolerance is non-negative
if self.tolerance.val < 0.0 {
errors.push("tolerance cannot be negative".to_string());
}
// Performance warnings
if self.input_scale > 20 || self.param_scale > 20 {
warn!("High scale values (>20) may impact performance");
@@ -610,23 +616,6 @@ mod tests {
assert!(err.contains("num_inner_cols must be >= 1"));
}
#[test]
fn test_invalid_tolerance() {
let mut args = RunArgs::default();
args.tolerance.val = 1.0;
args.output_visibility = Visibility::Private;
let err = args.validate().unwrap_err();
assert!(err.contains("Non-zero tolerance requires output_visibility to be public"));
}
#[test]
fn test_negative_tolerance() {
let mut args = RunArgs::default();
args.tolerance.val = -1.0;
let err = args.validate().unwrap_err();
assert!(err.contains("tolerance cannot be negative"));
}
#[test]
fn test_zero_batch_size() {
let mut args = RunArgs::default();

View File

@@ -17,16 +17,16 @@ use crate::{Commitments, EZKL_BUF_CAPACITY, EZKL_KEY_FORMAT};
use clap::ValueEnum;
use halo2_proofs::circuit::Value;
use halo2_proofs::plonk::{
create_proof, keygen_pk, keygen_vk_custom, verify_proof, Circuit, ProvingKey, VerifyingKey,
Circuit, ProvingKey, VerifyingKey, create_proof, keygen_pk, keygen_vk_custom, verify_proof,
};
use halo2_proofs::poly::VerificationStrategy;
use halo2_proofs::poly::commitment::{CommitmentScheme, Params, ParamsProver, Prover, Verifier};
use halo2_proofs::poly::ipa::commitment::IPACommitmentScheme;
use halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme;
use halo2_proofs::poly::VerificationStrategy;
use halo2_proofs::transcript::{EncodedChallenge, TranscriptReadBuffer, TranscriptWriterBuffer};
use halo2curves::CurveAffine;
use halo2curves::ff::{FromUniformBytes, PrimeField, WithSmallOrderMulGroup};
use halo2curves::serde::SerdeObject;
use halo2curves::CurveAffine;
use instant::Instant;
use log::{debug, info, trace};
#[cfg(not(feature = "det-prove"))]
@@ -51,6 +51,9 @@ use pyo3::types::PyDictMethods;
use halo2curves::bn256::{Bn256, Fr, G1Affine};
/// Converts a string to a `SerdeFormat`.
/// # Panics
/// Panics if the provided `s` is not a valid `SerdeFormat` (i.e. not one of "processed", "raw-bytes-unchecked", or "raw-bytes").
fn serde_format_from_str(s: &str) -> halo2_proofs::SerdeFormat {
match s {
"processed" => halo2_proofs::SerdeFormat::Processed,
@@ -321,7 +324,7 @@ where
}
#[cfg(feature = "python-bindings")]
use pyo3::{types::PyDict, PyObject, Python, ToPyObject};
use pyo3::{PyObject, Python, ToPyObject, types::PyDict};
#[cfg(feature = "python-bindings")]
impl<F: PrimeField + SerdeObject + Serialize, C: CurveAffine + Serialize> ToPyObject for Snark<F, C>
where
@@ -345,9 +348,9 @@ where
}
impl<
F: PrimeField + SerdeObject + Serialize + FromUniformBytes<64> + DeserializeOwned,
C: CurveAffine + Serialize + DeserializeOwned,
> Snark<F, C>
F: PrimeField + SerdeObject + Serialize + FromUniformBytes<64> + DeserializeOwned,
C: CurveAffine + Serialize + DeserializeOwned,
> Snark<F, C>
where
C::Scalar: Serialize + DeserializeOwned,
C::ScalarExt: Serialize + DeserializeOwned,

View File

@@ -1,6 +1,6 @@
use thiserror::Error;
use super::ops::DecompositionError;
use super::{ops::DecompositionError, DataFormat};
/// A wrapper for tensor related errors.
#[derive(Debug, Error)]
@@ -44,4 +44,7 @@ pub enum TensorError {
/// Index out of bounds
#[error("index {0} out of bounds for dimension {1}")]
IndexOutOfBounds(usize, usize),
/// Invalid data conversion
#[error("invalid data conversion from format {0} to {1}")]
InvalidDataConversion(DataFormat, DataFormat),
}

View File

@@ -9,6 +9,7 @@ pub mod var;
pub use errors::TensorError;
use core::hash::Hash;
use halo2curves::ff::PrimeField;
use maybe_rayon::{
prelude::{
@@ -26,7 +27,7 @@ pub use var::*;
use crate::{
circuit::utils,
fieldutils::{integer_rep_to_felt, IntegerRep},
fieldutils::{IntegerRep, integer_rep_to_felt},
graph::Visibility,
};
@@ -61,7 +62,7 @@ pub trait TensorType: Clone + Debug + 'static {
}
macro_rules! tensor_type {
($rust_type:ty, $tensor_type:ident, $zero:expr, $one:expr) => {
($rust_type:ty, $tensor_type:ident, $zero:expr_2021, $one:expr_2021) => {
impl TensorType for $rust_type {
fn zero() -> Option<Self> {
Some($zero)
@@ -414,7 +415,7 @@ impl<T: Clone + TensorType + PrimeField> Tensor<T> {
Err(_) => {
return Err(TensorError::FileLoadError(
"Failed to read tensor".to_string(),
))
));
}
}
}
@@ -925,6 +926,9 @@ impl<T: Clone + TensorType> Tensor<T> {
));
}
self.dims = vec![];
}
if self.dims() == &[0] && new_dims.iter().product::<usize>() == 1 {
self.dims = Vec::from(new_dims);
} else {
let product = if new_dims != [0] {
new_dims.iter().product::<usize>()
@@ -1103,6 +1107,10 @@ impl<T: Clone + TensorType> Tensor<T> {
let mut output = self.clone();
output.reshape(shape)?;
return Ok(output);
} else if self.dims() == &[0] && shape.iter().product::<usize>() == 1 {
let mut output = self.clone();
output.reshape(shape)?;
return Ok(output);
}
if self.dims().len() > shape.len() {
@@ -1253,7 +1261,7 @@ impl<T: Clone + TensorType> Tensor<T> {
None => {
return Err(TensorError::DimError(
"Cannot get last element of empty tensor".to_string(),
))
));
}
};
@@ -1278,7 +1286,7 @@ impl<T: Clone + TensorType> Tensor<T> {
None => {
return Err(TensorError::DimError(
"Cannot get first element of empty tensor".to_string(),
))
));
}
};
@@ -1691,8 +1699,8 @@ impl<T: TensorType + Rem<Output = T> + std::marker::Send + std::marker::Sync + P
lhs.par_iter_mut()
.zip(rhs)
.map(|(o, r)| {
if let Some(zero) = T::zero() {
.map(|(o, r)| match T::zero() {
Some(zero) => {
if r != zero {
*o = o.clone() % r;
Ok(())
@@ -1701,11 +1709,10 @@ impl<T: TensorType + Rem<Output = T> + std::marker::Send + std::marker::Sync + P
"Cannot divide by zero in remainder".to_string(),
))
}
} else {
Err(TensorError::InvalidArgument(
"Undefined zero value".to_string(),
))
}
_ => Err(TensorError::InvalidArgument(
"Undefined zero value".to_string(),
)),
})
.collect::<Result<Vec<_>, _>>()?;
@@ -1767,6 +1774,229 @@ pub fn get_broadcasted_shape(
}
}
////////////////////////
///
/// The memory layout (data format) of inputs for some operations
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Default, Copy)]
pub enum DataFormat {
/// NCHW
#[default]
NCHW,
/// NHWC
NHWC,
/// CHW
CHW,
/// HWC
HWC,
}
// string representation
impl core::fmt::Display for DataFormat {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
DataFormat::NCHW => write!(f, "NCHW"),
DataFormat::NHWC => write!(f, "NHWC"),
DataFormat::CHW => write!(f, "CHW"),
DataFormat::HWC => write!(f, "HWC"),
}
}
}
impl DataFormat {
/// Get the format's canonical form
pub fn canonical(&self) -> DataFormat {
match self {
DataFormat::NHWC => DataFormat::NCHW,
DataFormat::HWC => DataFormat::CHW,
_ => self.clone(),
}
}
/// Returns true if the format has no batch dimension
pub fn has_no_batch(&self) -> bool {
match self {
DataFormat::CHW | DataFormat::HWC => true,
_ => false,
}
}
/// Convert tensor to canonical format (NCHW or CHW)
pub fn to_canonical<F: PrimeField + TensorType + PartialOrd + Hash>(
&self,
tensor: &mut ValTensor<F>,
) -> Result<(), TensorError> {
match self {
DataFormat::NHWC => {
// For ND: Move channels from last axis to position after batch
let ndims = tensor.dims().len();
if ndims > 2 {
tensor.move_axis(ndims - 1, 1)?;
}
}
DataFormat::HWC => {
// For ND: Move channels from last axis to first position
let ndims = tensor.dims().len();
if ndims > 1 {
tensor.move_axis(ndims - 1, 0)?;
}
}
_ => {} // NCHW/CHW are already in canonical format
}
Ok(())
}
/// Convert tensor from canonical format to target format
pub fn from_canonical<F: PrimeField + TensorType + PartialOrd + Hash>(
&self,
tensor: &mut ValTensor<F>,
) -> Result<(), TensorError> {
match self {
DataFormat::NHWC => {
// Move channels from position 1 to end
let ndims = tensor.dims().len();
if ndims > 2 {
tensor.move_axis(1, ndims - 1)?;
}
}
DataFormat::HWC => {
// Move channels from position 0 to end
let ndims = tensor.dims().len();
if ndims > 1 {
tensor.move_axis(0, ndims - 1)?;
}
}
_ => {} // NCHW/CHW don't need conversion
}
Ok(())
}
/// Get the position of the channel dimension
pub fn get_channel_dim(&self, ndims: usize) -> usize {
match self {
DataFormat::NCHW => 1,
DataFormat::NHWC => ndims - 1,
DataFormat::CHW => 0,
DataFormat::HWC => ndims - 1,
}
}
}
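Canonicalization here is purely an axis permutation: NHWC moves channels from the last axis to axis 1, HWC moves them to axis 0. For the common 4-D case the effect on shapes is easy to state; a plain-shape sketch (hypothetical helper, not ezkl's `move_axis`):

```rust
// Hypothetical helper: the shape effect of DataFormat::NHWC.to_canonical on 4-D input.
fn nhwc_to_nchw(dims: &[usize; 4]) -> [usize; 4] {
    let [n, h, w, c] = *dims;
    [n, c, h, w]
}

fn main() {
    // A batch of 28x28 RGB images: channels move from the last axis to axis 1.
    assert_eq!(nhwc_to_nchw(&[1, 28, 28, 3]), [1, 3, 28, 28]);
}
```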
/// The memory layout (kernel format) of weights for some operations
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Default, Copy)]
pub enum KernelFormat {
/// HWIO
HWIO,
/// OIHW
#[default]
OIHW,
/// OHWI
OHWI,
}
impl core::fmt::Display for KernelFormat {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
KernelFormat::HWIO => write!(f, "HWIO"),
KernelFormat::OIHW => write!(f, "OIHW"),
KernelFormat::OHWI => write!(f, "OHWI"),
}
}
}
impl KernelFormat {
/// Get the format's canonical form
pub fn canonical(&self) -> KernelFormat {
match self {
KernelFormat::HWIO => KernelFormat::OIHW,
KernelFormat::OHWI => KernelFormat::OIHW,
_ => self.clone(),
}
}
/// Convert kernel to canonical format (OIHW)
pub fn to_canonical<F: PrimeField + TensorType + PartialOrd + Hash>(
&self,
kernel: &mut ValTensor<F>,
) -> Result<(), TensorError> {
match self {
KernelFormat::HWIO => {
let kdims = kernel.dims().len();
// Move output channels from last to first
kernel.move_axis(kdims - 1, 0)?;
// Move input channels from new last to second position
kernel.move_axis(kdims - 1, 1)?;
}
KernelFormat::OHWI => {
let kdims = kernel.dims().len();
// Move input channels from last to second position
kernel.move_axis(kdims - 1, 1)?;
}
_ => {} // OIHW is already canonical
}
Ok(())
}
/// Convert kernel from canonical format to target format
pub fn from_canonical<F: PrimeField + TensorType + PartialOrd + Hash>(
&self,
kernel: &mut ValTensor<F>,
) -> Result<(), TensorError> {
match self {
KernelFormat::HWIO => {
let kdims = kernel.dims().len();
// Move input channels from second position to last
kernel.move_axis(1, kdims - 1)?;
// Move output channels from first to last
kernel.move_axis(0, kdims - 1)?;
}
KernelFormat::OHWI => {
let kdims = kernel.dims().len();
// Move input channels from second position to last
kernel.move_axis(1, kdims - 1)?;
}
_ => {} // OIHW doesn't need conversion
}
Ok(())
}
/// Get the position of input and output channel dimensions
pub fn get_channel_dims(&self, ndims: usize) -> (usize, usize) {
// (input_ch, output_ch)
match self {
KernelFormat::OIHW => (1, 0),
KernelFormat::HWIO => (ndims - 2, ndims - 1),
KernelFormat::OHWI => (ndims - 1, 0),
}
}
}
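As a quick sanity check on `get_channel_dims`, the same axis bookkeeping for a 4-D kernel, spelled out with plain strings (a sketch mirroring the match arms above, not the library enum):

```rust
fn channel_dims(fmt: &str, ndims: usize) -> (usize, usize) {
    // (input_ch, output_ch), as in KernelFormat::get_channel_dims
    match fmt {
        "OIHW" => (1, 0),
        "HWIO" => (ndims - 2, ndims - 1),
        "OHWI" => (ndims - 1, 0),
        _ => unreachable!("sketch covers only the three supported formats"),
    }
}

fn main() {
    assert_eq!(channel_dims("HWIO", 4), (2, 3)); // axes are H, W, I, O
    assert_eq!(channel_dims("OHWI", 4), (3, 0)); // axes are O, H, W, I
}
```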
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
impl From<tract_onnx::tract_hir::ops::nn::DataFormat> for DataFormat {
fn from(fmt: tract_onnx::tract_hir::ops::nn::DataFormat) -> Self {
match fmt {
tract_onnx::tract_hir::ops::nn::DataFormat::NCHW => DataFormat::NCHW,
tract_onnx::tract_hir::ops::nn::DataFormat::NHWC => DataFormat::NHWC,
tract_onnx::tract_hir::ops::nn::DataFormat::CHW => DataFormat::CHW,
tract_onnx::tract_hir::ops::nn::DataFormat::HWC => DataFormat::HWC,
}
}
}
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
impl From<tract_onnx::tract_hir::tract_core::ops::cnn::conv::KernelFormat> for KernelFormat {
fn from(fmt: tract_onnx::tract_hir::tract_core::ops::cnn::conv::KernelFormat) -> Self {
match fmt {
tract_onnx::tract_hir::tract_core::ops::cnn::conv::KernelFormat::HWIO => {
KernelFormat::HWIO
}
tract_onnx::tract_hir::tract_core::ops::cnn::conv::KernelFormat::OIHW => {
KernelFormat::OIHW
}
tract_onnx::tract_hir::tract_core::ops::cnn::conv::KernelFormat::OHWI => {
KernelFormat::OHWI
}
}
}
}
#[cfg(test)]
mod tests {

View File

@@ -160,7 +160,7 @@ pub fn decompose(
///
/// let result = trilu(&a, 0, false).unwrap();
/// let expected = Tensor::<IntegerRep>::new(Some(&[1, 0, 3, 4, 5, 6]), &[1, 3, 2]).unwrap();
/// assert_eq!(result, expected);
///
/// let result = trilu(&a, -1, true).unwrap();
/// let expected = Tensor::<IntegerRep>::new(Some(&[1, 2, 3, 4, 0, 6]), &[1, 3, 2]).unwrap();
@@ -168,7 +168,7 @@ pub fn decompose(
///
/// let result = trilu(&a, -1, false).unwrap();
/// let expected = Tensor::<IntegerRep>::new(Some(&[0, 0, 3, 0, 5, 6]), &[1, 3, 2]).unwrap();
/// assert_eq!(result, expected);
///
/// let a = Tensor::<IntegerRep>::new(
/// Some(&[1, 2, 3, 4, 5, 6]),
@@ -188,7 +188,7 @@ pub fn decompose(
///
/// let result = trilu(&a, 0, false).unwrap();
/// let expected = Tensor::<IntegerRep>::new(Some(&[1, 0, 0, 4, 5, 0]), &[1, 2, 3]).unwrap();
/// assert_eq!(result, expected);
///
/// let result = trilu(&a, -1, true).unwrap();
/// let expected = Tensor::<IntegerRep>::new(Some(&[1, 2, 3, 4, 5, 6]), &[1, 2, 3]).unwrap();
@@ -196,7 +196,7 @@ pub fn decompose(
///
/// let result = trilu(&a, -1, false).unwrap();
/// let expected = Tensor::<IntegerRep>::new(Some(&[0, 0, 0, 4, 0, 0]), &[1, 2, 3]).unwrap();
/// assert_eq!(result, expected);
///
/// let a = Tensor::<IntegerRep>::new(
/// Some(&[1, 2, 3, 4, 5, 6, 7, 8, 9]),
@@ -216,7 +216,7 @@ pub fn decompose(
///
/// let result = trilu(&a, 0, false).unwrap();
/// let expected = Tensor::<IntegerRep>::new(Some(&[1, 0, 0, 4, 5, 0, 7, 8, 9]), &[1, 3, 3]).unwrap();
/// assert_eq!(result, expected);
///
/// let result = trilu(&a, -1, true).unwrap();
/// let expected = Tensor::<IntegerRep>::new(Some(&[1, 2, 3, 4, 5, 6, 0, 8, 9]), &[1, 3, 3]).unwrap();
@@ -224,7 +224,7 @@ pub fn decompose(
///
/// let result = trilu(&a, -1, false).unwrap();
/// let expected = Tensor::<IntegerRep>::new(Some(&[0, 0, 0, 4, 0, 0, 7, 8, 0]), &[1, 3, 3]).unwrap();
/// assert_eq!(result, expected);
/// ```
pub fn trilu<T: TensorType + std::marker::Send + std::marker::Sync>(
a: &Tensor<T>,
@@ -387,7 +387,7 @@ pub fn add<T: TensorType + Add<Output = T> + std::marker::Send + std::marker::Sy
) -> Result<Tensor<T>, TensorError> {
if t.len() == 1 {
return Ok(t[0].clone());
} else if t.len() == 0 {
} else if t.is_empty() {
return Err(TensorError::DimMismatch("add".to_string()));
}
@@ -441,7 +441,7 @@ pub fn sub<T: TensorType + Sub<Output = T> + std::marker::Send + std::marker::Sy
) -> Result<Tensor<T>, TensorError> {
if t.len() == 1 {
return Ok(t[0].clone());
} else if t.len() == 0 {
} else if t.is_empty() {
return Err(TensorError::DimMismatch("sub".to_string()));
}
// calculate value of output
@@ -492,7 +492,7 @@ pub fn mult<T: TensorType + Mul<Output = T> + std::marker::Send + std::marker::S
) -> Result<Tensor<T>, TensorError> {
if t.len() == 1 {
return Ok(t[0].clone());
} else if t.len() == 0 {
} else if t.is_empty() {
return Err(TensorError::DimMismatch("mult".to_string()));
}
// calculate value of output
@@ -535,30 +535,101 @@ pub fn mult<T: TensorType + Mul<Output = T> + std::marker::Send + std::marker::S
/// let result = downsample(&x, 1, 2, 2).unwrap();
/// let expected = Tensor::<IntegerRep>::new(Some(&[3, 6]), &[2, 1]).unwrap();
/// assert_eq!(result, expected);
/// let x = Tensor::<IntegerRep>::new(
/// Some(&[1, 2, 3, 4, 5, 6]),
/// &[2, 3],
/// ).unwrap();
///
/// // Test case 1: Negative stride along dimension 0
/// // This should flip the order along dimension 0
/// let result = downsample(&x, 0, -1, 0).unwrap();
/// let expected = Tensor::<IntegerRep>::new(
/// Some(&[4, 5, 6, 1, 2, 3]), // Flipped order of rows
/// &[2, 3]
/// ).unwrap();
/// assert_eq!(result, expected);
///
/// // Test case 2: Negative stride along dimension 1
/// // This should flip the order along dimension 1
/// let result = downsample(&x, 1, -1, 0).unwrap();
/// let expected = Tensor::<IntegerRep>::new(
/// Some(&[3, 2, 1, 6, 5, 4]), // Flipped order of columns
/// &[2, 3]
/// ).unwrap();
/// assert_eq!(result, expected);
///
/// // Test case 3: Negative stride with stride magnitude > 1
/// // This should both skip and flip
/// let result = downsample(&x, 1, -2, 0).unwrap();
/// let expected = Tensor::<IntegerRep>::new(
/// Some(&[3, 1, 6, 4]), // Take every 2nd element in reverse
/// &[2, 2]
/// ).unwrap();
/// assert_eq!(result, expected);
///
/// // Test case 4: Negative stride with non-zero modulo
/// // This should start at (size - 1 - modulo) and reverse
/// let result = downsample(&x, 1, -2, 1).unwrap();
/// let expected = Tensor::<IntegerRep>::new(
/// Some(&[2, 5]), // Start at second element from end, take every 2nd in reverse
/// &[2, 1]
/// ).unwrap();
/// assert_eq!(result, expected);
///
/// // Create a larger test case for more complex downsampling
/// let y = Tensor::<IntegerRep>::new(
/// Some(&[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]),
/// &[3, 4],
/// ).unwrap();
///
/// // Test case 5: Negative stride with modulo on larger tensor
/// let result = downsample(&y, 1, -2, 1).unwrap();
/// let expected = Tensor::<IntegerRep>::new(
/// Some(&[3, 1, 7, 5, 11, 9]), // Start at one after reverse, take every 2nd
/// &[3, 2]
/// ).unwrap();
/// assert_eq!(result, expected);
/// ```
pub fn downsample<T: TensorType + Send + Sync>(
input: &Tensor<T>,
dim: usize,
stride: usize,
stride: isize, // Changed from usize to isize to support negative strides
modulo: usize,
) -> Result<Tensor<T>, TensorError> {
let mut output_shape = input.dims().to_vec();
// now downsample along axis dim offset by modulo, rounding up (+1 if the remainder is non-zero)
let remainder = (input.dims()[dim] - modulo) % stride;
let div = (input.dims()[dim] - modulo) / stride;
output_shape[dim] = div + (remainder > 0) as usize;
let mut output = Tensor::<T>::new(None, &output_shape)?;
// Handle negative stride case
if stride == 0 {
return Err(TensorError::DimMismatch(
"downsample stride cannot be zero".to_string(),
));
}
if modulo > input.dims()[dim] {
let stride_abs = stride.unsigned_abs();
let mut output_shape = input.dims().to_vec();
if modulo >= input.dims()[dim] {
return Err(TensorError::DimMismatch("downsample".to_string()));
}
// now downsample along axis dim offset by modulo
// Calculate output shape based on the absolute value of stride
let remainder = (input.dims()[dim] - modulo) % stride_abs;
let div = (input.dims()[dim] - modulo) / stride_abs;
output_shape[dim] = div + (remainder > 0) as usize;
let mut output = Tensor::<T>::new(None, &output_shape)?;
// Calculate indices based on stride direction
let indices = (0..output_shape.len())
.map(|i| {
if i == dim {
let mut index = vec![0; output_shape[i]];
for (i, idx) in index.iter_mut().enumerate() {
*idx = i * stride + modulo;
for (j, idx) in index.iter_mut().enumerate() {
if stride > 0 {
// Positive stride: move forward from modulo
*idx = j * stride_abs + modulo;
} else {
// Negative stride: move backward from (size - 1 - modulo)
*idx = (input.dims()[dim] - 1 - modulo) - j * stride_abs;
}
}
index
} else {
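The negative-stride branch walks backwards from index `size - 1 - modulo`. A worked check of the index arithmetic for doc test 5 above (shape `[3, 4]`, stride `-2`, modulo `1`):

```rust
fn main() {
    let (size, stride_abs, modulo) = (4usize, 2usize, 1usize);
    let rem = (size - modulo) % stride_abs;
    let out_len = (size - modulo) / stride_abs + (rem > 0) as usize;
    let indices: Vec<usize> = (0..out_len)
        .map(|j| (size - 1 - modulo) - j * stride_abs) // backwards from index 2
        .collect();
    // Columns 2 and 0 are selected, so each row [a, b, c, d] yields [c, a].
    assert_eq!(indices, vec![2, 0]);
}
```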
@@ -1326,7 +1397,6 @@ pub fn pad<T: TensorType>(
///
/// # Errors
/// Returns a TensorError if the tensors in `inputs` have incompatible dimensions for concatenation along the specified `axis`.
pub fn concat<T: TensorType + Send + Sync>(
inputs: &[&Tensor<T>],
axis: usize,
@@ -1789,14 +1859,14 @@ pub mod nonlinearities {
/// Some(&[4, 25, 8, 1, 1, 1]),
/// &[2, 3],
/// ).unwrap();
/// let result = rsqrt(&x, 1.0);
/// let result = rsqrt(&x, 1.0, f64::EPSILON);
/// let expected = Tensor::<IntegerRep>::new(Some(&[1, 0, 0, 1, 1, 1]), &[2, 3]).unwrap();
/// assert_eq!(result, expected);
/// ```
pub fn rsqrt(a: &Tensor<IntegerRep>, scale_input: f64) -> Tensor<IntegerRep> {
pub fn rsqrt(a: &Tensor<IntegerRep>, scale_input: f64, eps: f64) -> Tensor<IntegerRep> {
a.par_enum_map(|_, a_i| {
let kix = (a_i as f64) / scale_input;
let fout = scale_input / (kix.sqrt() + f64::EPSILON);
let fout = scale_input / (kix.sqrt() + eps);
let rounded = fout.round();
Ok::<_, TensorError>(rounded as IntegerRep)
})
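Here the epsilon is added unconditionally, since the square root of a nonnegative input can be exactly zero. A scalar sketch of the rule (hypothetical free function; `i128` standing in for `IntegerRep`):

```rust
fn rsqrt_scalar(a_i: i128, scale: f64, eps: f64) -> i128 {
    let kix = a_i as f64 / scale;
    (scale / (kix.sqrt() + eps)).round() as i128
}

fn main() {
    assert_eq!(rsqrt_scalar(4, 1.0, f64::EPSILON), 1);  // 1/2 rounds to 1
    assert_eq!(rsqrt_scalar(25, 1.0, f64::EPSILON), 0); // 1/5 rounds to 0
    assert!(rsqrt_scalar(0, 1.0, f64::EPSILON) > 0);    // eps keeps 1/sqrt(0) finite
}
```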
@@ -2102,7 +2172,6 @@ pub mod nonlinearities {
/// let expected = Tensor::<IntegerRep>::new(Some(&[4, 25, 8, 1, 1, 0]), &[2, 3]).unwrap();
/// assert_eq!(result, expected);
/// ```
pub fn tanh(a: &Tensor<IntegerRep>, scale_input: f64) -> Tensor<IntegerRep> {
a.par_enum_map(|_, a_i| {
let kix = (a_i as f64) / scale_input;
@@ -2270,14 +2339,23 @@ pub mod nonlinearities {
/// &[2, 3],
/// ).unwrap();
/// let k = 2_f64;
/// let result = recip(&x, 1.0, k);
/// let result = recip(&x, 1.0, k, f64::EPSILON);
/// let expected = Tensor::<IntegerRep>::new(Some(&[1, 2, 1, 0, 2, 2]), &[2, 3]).unwrap();
/// assert_eq!(result, expected);
/// ```
pub fn recip(a: &Tensor<IntegerRep>, input_scale: f64, out_scale: f64) -> Tensor<IntegerRep> {
pub fn recip(
a: &Tensor<IntegerRep>,
input_scale: f64,
out_scale: f64,
eps: f64,
) -> Tensor<IntegerRep> {
a.par_enum_map(|_, a_i| {
let rescaled = (a_i as f64) / input_scale;
let denom = (1_f64) / (rescaled + f64::EPSILON);
let denom = if rescaled == 0_f64 {
(1_f64) / (rescaled + eps)
} else {
(1_f64) / (rescaled)
};
let d_inv_x = out_scale * denom;
Ok::<_, TensorError>(d_inv_x.round() as IntegerRep)
})
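This is the fix from `recip denom epsilon can induce non opt res`: epsilon now perturbs only the exact-zero case, so nonzero inputs are computed without skew. A scalar sketch of the new rule (hypothetical free function; `i128` standing in for `IntegerRep`):

```rust
fn recip_scalar(a_i: i128, input_scale: f64, out_scale: f64, eps: f64) -> i128 {
    let rescaled = a_i as f64 / input_scale;
    let denom = if rescaled == 0.0 {
        1.0 / (rescaled + eps) // only the zero case is guarded
    } else {
        1.0 / rescaled // nonzero inputs stay exact
    };
    (out_scale * denom).round() as i128
}

fn main() {
    // Nonzero input at unit scales: 1/2 rounds to 1, untouched by eps.
    assert_eq!(recip_scalar(2, 1.0, 1.0, f64::EPSILON), 1);
    // Zero input: 1 / f64::EPSILON = 2^52, matching the zero_recip doc test below.
    assert_eq!(recip_scalar(0, 1.0, 1.0, f64::EPSILON), 4_503_599_627_370_496);
}
```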
@@ -2293,16 +2371,16 @@ pub mod nonlinearities {
/// use ezkl::fieldutils::IntegerRep;
/// use ezkl::tensor::ops::nonlinearities::zero_recip;
/// let k = 2_f64;
/// let result = zero_recip(1.0);
/// let result = zero_recip(1.0, f64::EPSILON);
/// let expected = Tensor::<IntegerRep>::new(Some(&[4503599627370496]), &[1]).unwrap();
/// assert_eq!(result, expected);
/// ```
pub fn zero_recip(out_scale: f64) -> Tensor<IntegerRep> {
pub fn zero_recip(out_scale: f64, eps: f64) -> Tensor<IntegerRep> {
let a = Tensor::<IntegerRep>::new(Some(&[0]), &[1]).unwrap();
a.par_enum_map(|_, a_i| {
let rescaled = a_i as f64;
let denom = (1_f64) / (rescaled + f64::EPSILON);
let denom = (1_f64) / (rescaled + eps);
let d_inv_x = out_scale * denom;
Ok::<_, TensorError>(d_inv_x.round() as IntegerRep)
})

View File

@@ -1342,9 +1342,11 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> ValTensor<F> {
/// Gets the total number of elements in the tensor
pub fn len(&self) -> usize {
match self {
ValTensor::Value { dims, .. } => {
ValTensor::Value { dims, inner, .. } => {
if !dims.is_empty() && (dims != &[0]) {
dims.iter().product::<usize>()
} else if dims.is_empty() {
inner.inner.len()
} else {
0
}

View File

@@ -2,7 +2,7 @@ use std::collections::HashSet;
use log::{debug, error, warn};
use crate::circuit::{region::ConstantsMap, CheckMode};
use crate::circuit::{CheckMode, region::ConstantsMap};
use super::*;
/// A wrapper around Halo2's Column types that represents a tensor of variables in the circuit.
@@ -403,7 +403,10 @@ impl VarTensor {
let mut assigned_coord = 0;
let mut res: ValTensor<F> = match values {
ValTensor::Instance { .. } => {
unimplemented!("cannot assign instance to advice columns with omissions")
error!(
"assignment with omissions is not supported on instance columns. increase K if you require more rows."
);
Err(halo2_proofs::plonk::Error::Synthesis)
}
ValTensor::Value { inner: v, .. } => Ok::<ValTensor<F>, halo2_proofs::plonk::Error>(
v.enum_map(|coord, k| {
@@ -569,8 +572,13 @@ impl VarTensor {
constants: &mut ConstantsMap<F>,
) -> Result<(ValTensor<F>, usize), halo2_proofs::plonk::Error> {
match values {
ValTensor::Instance { .. } => unimplemented!("duplication is not supported on instance columns. increase K if you require more rows."),
ValTensor::Value { inner: v, dims , ..} => {
ValTensor::Instance { .. } => {
error!(
"duplication is not supported on instance columns. increase K if you require more rows."
);
Err(halo2_proofs::plonk::Error::Synthesis)
}
ValTensor::Value { inner: v, dims, .. } => {
let duplication_freq = if single_inner_col {
self.col_size()
} else {
@@ -583,21 +591,20 @@ impl VarTensor {
self.num_inner_cols()
};
let duplication_offset = if single_inner_col {
row
} else {
offset
};
let duplication_offset = if single_inner_col { row } else { offset };
// duplicates every nth element to adjust for column overflow
let mut res: ValTensor<F> = v.duplicate_every_n(duplication_freq, num_repeats, duplication_offset).unwrap().into();
let mut res: ValTensor<F> = v
.duplicate_every_n(duplication_freq, num_repeats, duplication_offset)
.unwrap()
.into();
let constants_map = res.create_constants_map();
constants.extend(constants_map);
let total_used_len = res.len();
res.remove_every_n(duplication_freq, num_repeats, duplication_offset).unwrap();
res.remove_every_n(duplication_freq, num_repeats, duplication_offset)
.unwrap();
res.reshape(dims).unwrap();
res.set_scale(values.scale());
@@ -627,9 +634,13 @@ impl VarTensor {
constants: &mut ConstantsMap<F>,
) -> Result<(ValTensor<F>, usize), halo2_proofs::plonk::Error> {
match values {
ValTensor::Instance { .. } => unimplemented!("duplication is not supported on instance columns. increase K if you require more rows."),
ValTensor::Value { inner: v, dims , ..} => {
ValTensor::Instance { .. } => {
error!(
"duplication is not supported on instance columns. increase K if you require more rows."
);
Err(halo2_proofs::plonk::Error::Synthesis)
}
ValTensor::Value { inner: v, dims, .. } => {
let duplication_freq = self.block_size();
let num_repeats = self.num_inner_cols();
@@ -637,17 +648,31 @@ impl VarTensor {
let duplication_offset = offset;
// duplicates every nth element to adjust for column overflow
let v = v.duplicate_every_n(duplication_freq, num_repeats, duplication_offset).unwrap();
let v = v
.duplicate_every_n(duplication_freq, num_repeats, duplication_offset)
.map_err(|e| {
error!("Error duplicating values: {:?}", e);
halo2_proofs::plonk::Error::Synthesis
})?;
let mut res: ValTensor<F> = {
v.enum_map(|coord, k| {
let cell = self.assign_value(region, offset, k.clone(), coord, constants)?;
Ok::<_, halo2_proofs::plonk::Error>(cell)
})?.into()};
let cell =
self.assign_value(region, offset, k.clone(), coord, constants)?;
Ok::<_, halo2_proofs::plonk::Error>(cell)
})?
.into()
};
let total_used_len = res.len();
res.remove_every_n(duplication_freq, num_repeats, duplication_offset).unwrap();
res.remove_every_n(duplication_freq, num_repeats, duplication_offset)
.map_err(|e| {
error!("Error duplicating values: {:?}", e);
halo2_proofs::plonk::Error::Synthesis
})?;
res.reshape(dims).unwrap();
res.reshape(dims).map_err(|e| {
error!("Error duplicating values: {:?}", e);
halo2_proofs::plonk::Error::Synthesis
})?;
res.set_scale(values.scale());
Ok((res, total_used_len))
@@ -681,61 +706,71 @@ impl VarTensor {
let mut prev_cell = None;
match values {
ValTensor::Instance { .. } => unimplemented!("duplication is not supported on instance columns. increase K if you require more rows."),
ValTensor::Value { inner: v, dims , ..} => {
ValTensor::Instance { .. } => {
error!(
"duplication is not supported on instance columns. increase K if you require more rows."
);
Err(halo2_proofs::plonk::Error::Synthesis)
}
ValTensor::Value { inner: v, dims, .. } => {
let duplication_freq = self.col_size();
let num_repeats = 1;
let duplication_offset = row;
// duplicates every nth element to adjust for column overflow
let v = v.duplicate_every_n(duplication_freq, num_repeats, duplication_offset).unwrap();
let mut res: ValTensor<F> =
v.enum_map(|coord, k| {
let v = v
.duplicate_every_n(duplication_freq, num_repeats, duplication_offset)
.unwrap();
let mut res: ValTensor<F> = v
.enum_map(|coord, k| {
                    let step = self.num_inner_cols();
let (x, y, z) = self.cartesian_coord(offset + coord * step);
if matches!(check_mode, CheckMode::SAFE) && coord > 0 && z == 0 && y == 0 {
// assert that duplication occurred correctly
assert_eq!(
Into::<IntegerRep>::into(k.clone()),
Into::<IntegerRep>::into(v[coord - 1].clone())
);
};
let (x, y, z) = self.cartesian_coord(offset + coord * step);
if matches!(check_mode, CheckMode::SAFE) && coord > 0 && z == 0 && y == 0 {
// assert that duplication occurred correctly
assert_eq!(Into::<IntegerRep>::into(k.clone()), Into::<IntegerRep>::into(v[coord - 1].clone()));
};
let cell =
self.assign_value(region, offset, k.clone(), coord * step, constants)?;
let cell = self.assign_value(region, offset, k.clone(), coord * step, constants)?;
let at_end_of_column = z == duplication_freq - 1;
let at_beginning_of_column = z == 0;
if at_end_of_column {
// if we are at the end of the column, we need to copy the cell to the next column
prev_cell = Some(cell.clone());
} else if coord > 0 && at_beginning_of_column {
if let Some(prev_cell) = prev_cell.as_ref() {
let cell = if let Some(cell) = cell.cell() {
cell
if at_end_of_column {
// if we are at the end of the column, we need to copy the cell to the next column
prev_cell = Some(cell.clone());
} else if coord > 0 && at_beginning_of_column {
if let Some(prev_cell) = prev_cell.as_ref() {
let cell = if let Some(cell) = cell.cell() {
cell
} else {
error!("Error getting cell: {:?}", (x, y));
return Err(halo2_proofs::plonk::Error::Synthesis);
};
let prev_cell = if let Some(prev_cell) = prev_cell.cell() {
prev_cell
} else {
error!("Error getting prev cell: {:?}", (x, y));
return Err(halo2_proofs::plonk::Error::Synthesis);
};
region.constrain_equal(prev_cell, cell)?;
} else {
error!("Error getting cell: {:?}", (x,y));
error!("Previous cell was not set");
return Err(halo2_proofs::plonk::Error::Synthesis);
};
let prev_cell = if let Some(prev_cell) = prev_cell.cell() {
prev_cell
} else {
error!("Error getting prev cell: {:?}", (x,y));
return Err(halo2_proofs::plonk::Error::Synthesis);
};
region.constrain_equal(prev_cell,cell)?;
} else {
error!("Previous cell was not set");
return Err(halo2_proofs::plonk::Error::Synthesis);
}
}
}
Ok(cell)
})?.into();
Ok(cell)
})?
.into();
let total_used_len = res.len();
res.remove_every_n(duplication_freq, num_repeats, duplication_offset).unwrap();
res.remove_every_n(duplication_freq, num_repeats, duplication_offset)
.unwrap();
res.reshape(dims).unwrap();
res.set_scale(values.scale());
@@ -771,21 +806,30 @@ impl VarTensor {
VarTensor::Advice { inner: advices, .. } => {
ValType::PrevAssigned(region.assign_advice(|| "k", advices[x][y], z, || v)?)
}
_ => unimplemented!(),
_ => {
error!("VarTensor was not initialized");
return Err(halo2_proofs::plonk::Error::Synthesis);
}
},
// Handle copying previously assigned value
ValType::PrevAssigned(v) => match &self {
VarTensor::Advice { inner: advices, .. } => {
ValType::PrevAssigned(v.copy_advice(|| "k", region, advices[x][y], z)?)
}
_ => unimplemented!(),
_ => {
error!("VarTensor was not initialized");
return Err(halo2_proofs::plonk::Error::Synthesis);
}
},
// Handle copying previously assigned constant
ValType::AssignedConstant(v, val) => match &self {
VarTensor::Advice { inner: advices, .. } => {
ValType::AssignedConstant(v.copy_advice(|| "k", region, advices[x][y], z)?, val)
}
_ => unimplemented!(),
_ => {
error!("VarTensor was not initialized");
return Err(halo2_proofs::plonk::Error::Synthesis);
}
},
// Handle assigning evaluated value
ValType::AssignedValue(v) => match &self {
@@ -794,7 +838,10 @@ impl VarTensor {
.assign_advice(|| "k", advices[x][y], z, || v)?
.evaluate(),
),
_ => unimplemented!(),
_ => {
error!("VarTensor was not initialized");
return Err(halo2_proofs::plonk::Error::Synthesis);
}
},
// Handle constant value assignment with caching
ValType::Constant(v) => {

Binary file not shown.

tests/foundry/.gitignore vendored Normal file
View File

@@ -0,0 +1,14 @@
# Compiler files
cache/
out/
# Ignores development broadcast logs
!/broadcast
/broadcast/*/31337/
/broadcast/**/dry-run/
# Docs
docs/
# Dotenv file
.env

tests/foundry/README.md Normal file
View File

@@ -0,0 +1,66 @@
## Foundry
**Foundry is a blazing fast, portable and modular toolkit for Ethereum application development written in Rust.**
Foundry consists of:
- **Forge**: Ethereum testing framework (like Truffle, Hardhat and DappTools).
- **Cast**: Swiss army knife for interacting with EVM smart contracts, sending transactions and getting chain data.
- **Anvil**: Local Ethereum node, akin to Ganache, Hardhat Network.
- **Chisel**: Fast, utilitarian, and verbose solidity REPL.
## Documentation
https://book.getfoundry.sh/
## Usage
### Build
```shell
$ forge build
```
### Test
```shell
$ forge test
```
### Format
```shell
$ forge fmt
```
### Gas Snapshots
```shell
$ forge snapshot
```
### Anvil
```shell
$ anvil
```
### Deploy
```shell
$ forge script script/Counter.s.sol:CounterScript --rpc-url <your_rpc_url> --private-key <your_private_key>
```
### Cast
```shell
$ cast <subcommand>
```
### Help
```shell
$ forge --help
$ anvil --help
$ cast --help
```

View File

@@ -0,0 +1,6 @@
[profile.default]
src = "../../contracts"
out = "out"
libs = ["lib"]
# See more config options https://github.com/foundry-rs/foundry/blob/master/crates/config/README.md#all-options

View File

@@ -0,0 +1 @@
contracts/=../../contracts/

View File

@@ -0,0 +1,429 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.20;
import "forge-std/Test.sol";
import {console} from "forge-std/console.sol";
import "contracts/AttestData.sol" as AttestData;
contract MockVKA {
constructor() {}
}
contract MockVerifier {
bool public shouldVerify;
constructor(bool _shouldVerify) {
shouldVerify = _shouldVerify;
}
function verifyProof(
bytes calldata,
uint256[] calldata
) external view returns (bool) {
require(shouldVerify, "Verification failed");
return shouldVerify;
}
}
contract MockVerifierSeperate {
bool public shouldVerify;
constructor(bool _shouldVerify) {
shouldVerify = _shouldVerify;
}
function verifyProof(
address,
bytes calldata,
uint256[] calldata
) external view returns (bool) {
require(shouldVerify, "Verification failed");
return shouldVerify;
}
}
contract MockTargetContract {
int256[] public data;
constructor(int256[] memory _data) {
data = _data;
}
function setData(int256[] memory _data) external {
data = _data;
}
function getData() external view returns (int256[] memory) {
return data;
}
}
contract DataAttestationTest is Test {
AttestData.DataAttestation das;
MockVerifier verifier;
MockVerifierSeperate verifierSeperate;
MockVKA vka;
MockTargetContract target;
int256[] mockData = [int256(1e18), -int256(5e17)];
uint256[] decimals = [18, 18];
uint256[] bits = [13, 13];
uint8 instanceOffset = 0;
bytes callData;
function setUp() public {
target = new MockTargetContract(mockData);
verifier = new MockVerifier(true);
verifierSeperate = new MockVerifierSeperate(true);
vka = new MockVKA();
callData = abi.encodeWithSignature("getData()");
das = new AttestData.DataAttestation(
address(target),
callData,
decimals,
bits,
instanceOffset
);
}
    // Fork of mulDivRound that, instead of reverting on overflow, returns a boolean flag indicating overflow
function mulDivRound(
uint256 x,
uint256 y,
uint256 denominator
) public pure returns (uint256 result, bool overflow) {
unchecked {
uint256 prod0;
uint256 prod1;
assembly {
let mm := mulmod(x, y, not(0))
prod0 := mul(x, y)
prod1 := sub(sub(mm, prod0), lt(mm, prod0))
}
uint256 remainder = mulmod(x, y, denominator);
bool addOne;
if (remainder * 2 >= denominator) {
addOne = true;
}
if (prod1 == 0) {
if (addOne) {
return ((prod0 / denominator) + 1, false);
}
return (prod0 / denominator, false);
}
if (denominator > prod1) {
return (0, true);
}
assembly {
prod1 := sub(prod1, gt(remainder, prod0))
prod0 := sub(prod0, remainder)
}
uint256 twos = denominator & (~denominator + 1);
assembly {
denominator := div(denominator, twos)
prod0 := div(prod0, twos)
twos := add(div(sub(0, twos), twos), 1)
}
prod0 |= prod1 * twos;
uint256 inverse = (3 * denominator) ^ 2;
inverse *= 2 - denominator * inverse;
inverse *= 2 - denominator * inverse;
inverse *= 2 - denominator * inverse;
inverse *= 2 - denominator * inverse;
inverse *= 2 - denominator * inverse;
inverse *= 2 - denominator * inverse;
result = prod0 * inverse;
if (addOne) {
result += 1;
}
return (result, false);
}
}
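The 512-bit gymnastics above exist so `x * y / denominator` neither overflows nor truncates. The rounding rule itself is simple; a narrow 128-bit sketch of just that rule (it assumes the product fits in `u128`, which the Solidity version deliberately does not):

```rust
fn mul_div_round(x: u128, y: u128, d: u128) -> u128 {
    let prod = x.checked_mul(y).expect("sketch assumes no overflow");
    let (q, r) = (prod / d, prod % d);
    // Round half up, exactly like the `remainder * 2 >= denominator` check above.
    if r * 2 >= d { q + 1 } else { q }
}

fn main() {
    assert_eq!(mul_div_round(5, 1, 2), 3); // 2.5 rounds up
    assert_eq!(mul_div_round(1, 1, 3), 0); // 0.33.. rounds down
}
```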
struct SampleAttestation {
int256 mockData;
uint8 decimals;
uint8 bits;
}
function test_fuzzAttestedData(
SampleAttestation[] memory _attestations
) public {
vm.assume(_attestations.length == 1);
int256[] memory _mockData = new int256[](1);
uint256[] memory _decimals = new uint256[](1);
uint256[] memory _bits = new uint256[](1);
uint256[] memory _instances = new uint256[](1);
for (uint256 i = 0; i < 1; i++) {
SampleAttestation memory attestation = _attestations[i];
_mockData[i] = attestation.mockData;
vm.assume(attestation.mockData != type(int256).min); /// Will overflow int256 during negation op
vm.assume(attestation.decimals < 77); /// Else will exceed uint256 bounds
vm.assume(attestation.bits < 128); /// Else will exceed EZKL fixed point bounds for int128 type
bool neg = attestation.mockData < 0;
if (neg) {
attestation.mockData = -attestation.mockData;
}
(uint256 _result, bool overflow) = mulDivRound(
uint256(attestation.mockData),
uint256(1 << attestation.bits),
uint256(10 ** attestation.decimals)
);
vm.assume(!overflow);
vm.assume(_result < das.HALF_ORDER());
if (neg) {
// No possibility of overflow here since output is less than or equal to HALF_ORDER
// and therefore falls within the max range of int256 without overflow
vm.assume(-int256(_result) > type(int128).min);
_instances[i] =
uint256(int(das.ORDER()) - int256(_result)) %
das.ORDER();
} else {
vm.assume(_result < uint128(type(int128).max));
_instances[i] = _result;
}
_decimals[i] = attestation.decimals;
_bits[i] = attestation.bits;
}
// Update the attested data
target.setData(_mockData);
// Deploy the new data attestation contract
AttestData.DataAttestation dasNew = new AttestData.DataAttestation(
address(target),
callData,
_decimals,
_bits,
instanceOffset
);
bytes memory proof = hex"1234"; // Would normally contain commitments
bytes memory encoded = abi.encodeWithSignature(
"verifyProof(bytes,uint256[])",
proof,
_instances
);
AttestData.DataAttestation.Scalars memory _scalars = AttestData
.DataAttestation
.Scalars(10 ** _decimals[0], 1 << _bits[0]);
int256 output = dasNew.quantizeData(_mockData[0], _scalars);
console.log("output: ", output);
uint256 fieldElement = dasNew.toFieldElement(output);
// output should equal to _instances[0]
assertEq(fieldElement, _instances[0]);
bool verificationResult = dasNew.verifyWithDataAttestation(
address(verifier),
encoded
);
assertTrue(verificationResult);
}
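The negative branch above relies on the usual signed-to-field mapping: a nonnegative value encodes as itself, a negative value as `ORDER - |x|` (i.e. reduction mod `ORDER`). A toy-modulus sketch (17 standing in for the BN254 scalar field order):

```rust
fn to_field_element(x: i128, order: u128) -> u128 {
    let m = x.unsigned_abs() % order;
    if x >= 0 { m } else { (order - m) % order }
}

fn main() {
    let order = 17u128; // toy modulus in place of the real field order
    assert_eq!(to_field_element(5, order), 5);
    assert_eq!(to_field_element(-5, order), 12); // 17 - 5
}
```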
// Test deployment parameters
function testDeployment() public view {
assertEq(das.contractAddress(), address(target));
assertEq(das.callData(), abi.encodeWithSignature("getData()"));
assertEq(das.instanceOffset(), instanceOffset);
AttestData.DataAttestation.Scalars memory scalar = das.getScalars(0);
assertEq(scalar.decimals, 1e18);
assertEq(scalar.bits, 1 << 13);
}
// Test quantizeData function
function testQuantizeData() public view {
AttestData.DataAttestation.Scalars memory scalar = das.getScalars(0);
int256 positive = das.quantizeData(1e18, scalar);
assertEq(positive, int256(scalar.bits));
int256 negative = das.quantizeData(-1e18, scalar);
assertEq(negative, -int256(scalar.bits));
// Test rounding
int half = int(0.5e18 / scalar.bits);
int256 rounded = das.quantizeData(half, scalar);
assertEq(rounded, 1);
}
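`quantizeData` applies the fixed-point encoding the fuzz test reconstructs by hand: divide out the on-chain decimals, scale up by `2^bits`, round. The same arithmetic in Rust, under the assumed formula `q = round(x / 10^decimals * 2^bits)`:

```rust
fn main() {
    let (decimals, bits) = (18i32, 13u32);
    let quantize = |x: f64| (x / 10f64.powi(decimals) * (1u64 << bits) as f64).round() as i64;
    assert_eq!(quantize(1e18), 1 << 13);      // 1.0 maps to scalar.bits = 8192
    assert_eq!(quantize(-1e18), -(1 << 13));
    assert_eq!(quantize(0.5e18 / 8192.0), 1); // the rounding case checked above
}
```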
// Test staticCall functionality
function testStaticCall() public view {
bytes memory result = das.staticCall(
address(target),
abi.encodeWithSignature("getData()")
);
int256[] memory decoded = abi.decode(result, (int256[]));
assertEq(decoded[0], mockData[0]);
assertEq(decoded[1], mockData[1]);
}
// Test attestData validation
function testAttestDataSuccess() public view {
uint256[] memory instances = new uint256[](2);
AttestData.DataAttestation.Scalars memory scalar = das.getScalars(0);
instances[0] = das.toFieldElement(int(scalar.bits));
instances[1] = das.toFieldElement(-int(scalar.bits >> 1));
das.attestData(instances); // Should not revert
}
function testAttestDataFailure() public {
uint256[] memory instances = new uint256[](2);
instances[0] = das.toFieldElement(1e18); // Incorrect value
instances[1] = das.toFieldElement(5e17);
vm.expectRevert("Public input does not match");
das.attestData(instances);
}
// Test full verification flow
function testSuccessfulVerification() public view {
// Prepare valid instances
uint256[] memory instances = new uint256[](2);
AttestData.DataAttestation.Scalars memory scalar = das.getScalars(0);
instances[0] = das.toFieldElement(int(scalar.bits));
instances[1] = das.toFieldElement(-int(scalar.bits >> 1));
// Create valid calldata (mock)
bytes memory proof = hex"1234"; // Would normally contain commitments
bytes memory encoded = abi.encodeWithSignature(
"verifyProof(bytes,uint256[])",
proof,
instances
);
bytes memory encoded_vka = abi.encodeWithSignature(
"verifyProof(address,bytes,uint256[])",
address(vka),
proof,
instances
);
bool result = das.verifyWithDataAttestation(address(verifier), encoded);
assertTrue(result);
result = das.verifyWithDataAttestation(
address(verifierSeperate),
encoded_vka
);
assertTrue(result);
}
function testLoadInstances() public view {
uint256[] memory instances = new uint256[](2);
AttestData.DataAttestation.Scalars memory scalar = das.getScalars(0);
instances[0] = das.toFieldElement(int(scalar.bits));
instances[1] = das.toFieldElement(-int(scalar.bits >> 1));
// Create valid calldata (mock)
bytes memory proof = hex"1234"; // Would normally contain commitments
bytes memory encoded = abi.encodeWithSignature(
"verifyProof(bytes,uint256[])",
proof,
instances
);
bytes memory encoded_vka = abi.encodeWithSignature(
"verifyProof(address,bytes,uint256[])",
address(vka),
proof,
instances
);
// Load encoded instances from calldata
uint256[] memory extracted_instances_calldata = das
.getInstancesCalldata(encoded);
assertEq(extracted_instances_calldata[0], instances[0]);
assertEq(extracted_instances_calldata[1], instances[1]);
// Load encoded instances from memory
uint256[] memory extracted_instances_memory = das.getInstancesMemory(
encoded
);
assertEq(extracted_instances_memory[0], instances[0]);
assertEq(extracted_instances_memory[1], instances[1]);
// Load encoded with vk instances from calldata
uint256[] memory extracted_instances_calldata_vk = das
.getInstancesCalldata(encoded_vka);
assertEq(extracted_instances_calldata_vk[0], instances[0]);
assertEq(extracted_instances_calldata_vk[1], instances[1]);
// Load encoded with vk instances from memory
uint256[] memory extracted_instances_memory_vk = das.getInstancesMemory(
encoded_vka
);
assertEq(extracted_instances_memory_vk[0], instances[0]);
assertEq(extracted_instances_memory_vk[1], instances[1]);
}
function testInvalidCommitments() public {
// Create calldata with invalid commitments
bytes memory invalidProof = hex"5678";
uint256[] memory instances = new uint256[](2);
AttestData.DataAttestation.Scalars memory scalar = das.getScalars(0);
instances[0] = das.toFieldElement(int(scalar.bits));
instances[1] = das.toFieldElement(-int(scalar.bits >> 1));
bytes memory encoded = abi.encodeWithSignature(
"verifyProof(bytes,uint256[])",
invalidProof,
instances
);
vm.expectRevert("Invalid KZG commitments");
das.verifyWithDataAttestation(address(verifier), encoded);
}
function testInvalidVerifier() public {
MockVerifier invalidVerifier = new MockVerifier(false);
uint256[] memory instances = new uint256[](2);
AttestData.DataAttestation.Scalars memory scalar = das.getScalars(0);
instances[0] = das.toFieldElement(int(scalar.bits));
instances[1] = das.toFieldElement(-int(scalar.bits >> 1));
bytes memory encoded = abi.encodeWithSignature(
"verifyProof(bytes,uint256[])",
hex"1234",
instances
);
vm.expectRevert("low-level call to verifier failed");
das.verifyWithDataAttestation(address(invalidVerifier), encoded);
}
// Test edge cases
function testZeroValueQuantization() public view {
AttestData.DataAttestation.Scalars memory scalar = das.getScalars(0);
int256 zero = das.quantizeData(0, scalar);
assertEq(zero, 0);
}
function testOverflowProtection() public {
int256 order = int(
uint256(
0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000001
)
);
// int256 half_order = int(order >> 1);
AttestData.DataAttestation.Scalars memory scalar = AttestData
.DataAttestation
.Scalars(1, 1 << 2);
vm.expectRevert("Overflow field modulus");
das.quantizeData(order, scalar); // Value that would overflow
}
function testInvalidFunctionSignature() public {
uint256[] memory instances = new uint256[](2);
AttestData.DataAttestation.Scalars memory scalar = das.getScalars(0);
instances[0] = das.toFieldElement(int(scalar.bits));
instances[1] = das.toFieldElement(-int(scalar.bits >> 1));
bytes memory encoded_invalid_sig = abi.encodeWithSignature(
"verifyProofff(bytes,uint256[])",
hex"1234",
instances
);
vm.expectRevert("Invalid function signature");
das.verifyWithDataAttestation(address(verifier), encoded_invalid_sig);
}
}

View File

@@ -1,13 +1,12 @@
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
#[cfg(test)]
mod native_tests {
use ezkl::circuit::Tolerance;
use ezkl::fieldutils::{felt_to_integer_rep, integer_rep_to_felt, IntegerRep};
// use ezkl::circuit::table::RESERVED_BLINDING_ROWS_PAD;
use ezkl::Commitments;
use ezkl::graph::input::{FileSource, FileSourceInner, GraphData};
use ezkl::graph::{DataSource, GraphSettings, GraphWitness};
use ezkl::pfsys::Snark;
use ezkl::Commitments;
use halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme;
use halo2curves::bn256::Bn256;
use lazy_static::lazy_static;
@@ -187,7 +186,7 @@ mod native_tests {
const PF_FAILURE_AGGR: &str = "examples/test_failure_aggr_proof.json";
const LARGE_TESTS: [&str; 7] = [
const LARGE_TESTS: [&str; 8] = [
"self_attention",
"nanoGPT",
"multihead_attention",
@@ -195,6 +194,7 @@ mod native_tests {
"mnist_gan",
"smallworm",
"fr_age",
"1d_conv",
];
const ACCURACY_CAL_TESTS: [&str; 6] = [
@@ -522,7 +522,7 @@ mod native_tests {
use crate::native_tests::run_js_tests;
use crate::native_tests::render_circuit;
use crate::native_tests::model_serialization_different_binaries;
use rand::Rng;
use tempdir::TempDir;
use ezkl::Commitments;
@@ -543,7 +543,7 @@ mod native_tests {
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
mock(path, test.to_string(), "public", "fixed", "public", 1, "accuracy", None, 0.0, false, None, None);
mock(path, test.to_string(), "public", "fixed", "public", 1, "accuracy", None, false, None, None);
test_dir.close().unwrap();
}
});
@@ -608,7 +608,7 @@ mod native_tests {
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
mock(path, test.to_string(), "private", "private", "public", 1, "resources", None, 0.0, false, None, None);
mock(path, test.to_string(), "private", "private", "public", 1, "resources", None, false, None, None);
test_dir.close().unwrap();
}
@@ -618,22 +618,10 @@ mod native_tests {
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
mock(path, test.to_string(), "private", "private", "public", 1, "resources", None, 0.0, true, Some(8194), Some(4));
mock(path, test.to_string(), "private", "private", "public", 1, "resources", None, true, Some(8194), Some(4));
test_dir.close().unwrap();
}
#(#[test_case(TESTS[N])])*
fn mock_tolerance_public_outputs_(test: &str) {
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
// gen random number between 0.0 and 1.0
let tolerance = rand::thread_rng().gen_range(0.0..1.0) * 100.0;
mock(path, test.to_string(), "private", "private", "public", 1, "resources", None, tolerance, false, Some(32776), Some(5));
test_dir.close().unwrap();
}
#(#[test_case(TESTS[N])])*
fn mock_large_batch_public_outputs_(test: &str) {
@@ -644,7 +632,7 @@ mod native_tests {
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
let large_batch_dir = &format!("large_batches_{}", test);
crate::native_tests::mk_data_batches_(path, test, &large_batch_dir, 10);
mock(path, large_batch_dir.to_string(), "private", "private", "public", 10, "resources", None, 0.0, false, None, None);
mock(path, large_batch_dir.to_string(), "private", "private", "public", 10, "resources", None, false, None, None);
test_dir.close().unwrap();
}
}
@@ -654,7 +642,7 @@ mod native_tests {
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
mock(path, test.to_string(), "public", "private", "private", 1, "resources", None, 0.0, false, None, None);
mock(path, test.to_string(), "public", "private", "private", 1, "resources", None, false, None, None);
test_dir.close().unwrap();
}
@@ -663,7 +651,7 @@ mod native_tests {
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
mock(path, test.to_string(), "public", "hashed", "private", 1, "resources", None, 0.0, false, None, None);
mock(path, test.to_string(), "public", "hashed", "private", 1, "resources", None, false, None, None);
test_dir.close().unwrap();
}
@@ -672,7 +660,7 @@ mod native_tests {
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
mock(path, test.to_string(), "fixed", "private", "private", 1, "resources", None, 0.0, false, None, None);
mock(path, test.to_string(), "fixed", "private", "private", 1, "resources", None, false, None, None);
test_dir.close().unwrap();
}
@@ -681,7 +669,7 @@ mod native_tests {
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
mock(path, test.to_string(), "private", "private", "fixed", 1, "resources", None, 0.0, false, None, None);
mock(path, test.to_string(), "private", "private", "fixed", 1, "resources", None, false, None, None);
test_dir.close().unwrap();
}
@@ -690,7 +678,7 @@ mod native_tests {
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
mock(path, test.to_string(), "private", "fixed", "private", 1, "resources", None, 0.0, false, None, None);
mock(path, test.to_string(), "private", "fixed", "private", 1, "resources", None, false, None, None);
test_dir.close().unwrap();
}
@@ -699,7 +687,7 @@ mod native_tests {
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
mock(path, test.to_string(), "hashed", "private", "public", 1, "resources", None, 0.0, false, None, None);
mock(path, test.to_string(), "hashed", "private", "public", 1, "resources", None, false, None, None);
test_dir.close().unwrap();
}
@@ -708,7 +696,7 @@ mod native_tests {
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
mock(path, test.to_string(), "polycommit", "private", "public", 1, "resources", None, 0.0, false, None, None);
mock(path, test.to_string(), "polycommit", "private", "public", 1, "resources", None, false, None, None);
test_dir.close().unwrap();
}
@@ -718,7 +706,7 @@ mod native_tests {
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
mock(path, test.to_string(), "private", "hashed", "public", 1, "resources", None, 0.0, false, None, None);
mock(path, test.to_string(), "private", "hashed", "public", 1, "resources", None, false, None, None);
test_dir.close().unwrap();
}
@@ -728,7 +716,7 @@ mod native_tests {
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
mock(path, test.to_string(), "private", "polycommit", "public", 1, "resources", None, 0.0, false, None, None);
mock(path, test.to_string(), "private", "polycommit", "public", 1, "resources", None, false, None, None);
test_dir.close().unwrap();
}
@@ -737,7 +725,7 @@ mod native_tests {
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
mock(path, test.to_string(), "public", "private", "hashed", 1, "resources", None, 0.0, false, None, None);
mock(path, test.to_string(), "public", "private", "hashed", 1, "resources", None, false, None, None);
test_dir.close().unwrap();
}
@@ -747,7 +735,7 @@ mod native_tests {
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
mock(path, test.to_string(), "public", "private", "polycommit", 1, "resources", None, 0.0, false, None, None);
mock(path, test.to_string(), "public", "private", "polycommit", 1, "resources", None, false, None, None);
test_dir.close().unwrap();
}
@@ -756,7 +744,7 @@ mod native_tests {
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
mock(path, test.to_string(), "public", "fixed", "hashed", 1, "resources", None, 0.0, false, None, None);
mock(path, test.to_string(), "public", "fixed", "hashed", 1, "resources", None, false, None, None);
test_dir.close().unwrap();
}
@@ -766,7 +754,7 @@ mod native_tests {
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
mock(path, test.to_string(), "public", "polycommit", "hashed", 1, "resources", None, 0.0, false, None, None);
mock(path, test.to_string(), "public", "polycommit", "hashed", 1, "resources", None, false, None, None);
test_dir.close().unwrap();
}
@@ -776,7 +764,7 @@ mod native_tests {
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
mock(path, test.to_string(), "polycommit", "polycommit", "polycommit", 1, "resources", None, 0.0, false, None, None);
mock(path, test.to_string(), "polycommit", "polycommit", "polycommit", 1, "resources", None, false, None, None);
test_dir.close().unwrap();
}
@@ -786,7 +774,7 @@ mod native_tests {
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
mock(path, test.to_string(), "hashed", "private", "hashed", 1, "resources", None, 0.0, false, None, None);
mock(path, test.to_string(), "hashed", "private", "hashed", 1, "resources", None, false, None, None);
test_dir.close().unwrap();
}
@@ -796,7 +784,7 @@ mod native_tests {
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
// needs an extra row for the large model
mock(path, test.to_string(),"hashed", "hashed", "public", 1, "resources", None, 0.0, false, None, None);
mock(path, test.to_string(),"hashed", "hashed", "public", 1, "resources", None, false, None, None);
test_dir.close().unwrap();
}
@@ -806,7 +794,7 @@ mod native_tests {
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
// needs an extra row for the large model
mock(path, test.to_string(),"hashed", "hashed", "hashed", 1, "resources", None, 0.0, false, None, None);
mock(path, test.to_string(),"hashed", "hashed", "hashed", 1, "resources", None, false, None, None);
test_dir.close().unwrap();
}
@@ -965,7 +953,7 @@ mod native_tests {
});
seq!(N in 0..=6 {
seq!(N in 0..=7 {
#(#[test_case(LARGE_TESTS[N])])*
#[ignore]
@@ -983,7 +971,7 @@ mod native_tests {
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
mock(path, test.to_string(), "private", "fixed", "public", 1, "resources", None, 0.0, false, None, Some(5));
mock(path, test.to_string(), "private", "fixed", "public", 1, "resources", None, false, None, Some(5));
test_dir.close().unwrap();
}
});
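// The macro range widens from 0..=6 to 0..=7, presumably because a new entry
// was appended to LARGE_TESTS.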
@@ -1459,12 +1447,10 @@ mod native_tests {
batch_size: usize,
cal_target: &str,
scales_to_use: Option<Vec<u32>>,
tolerance: f32,
bounded_lookup_log: bool,
decomp_base: Option<usize>,
decomp_legs: Option<usize>,
) {
let mut tolerance = tolerance;
gen_circuit_settings_and_witness(
test_dir,
example_name.clone(),
@@ -1475,7 +1461,6 @@ mod native_tests {
cal_target,
scales_to_use,
2,
&mut tolerance,
Commitments::KZG,
2,
bounded_lookup_log,
@@ -1483,128 +1468,17 @@ mod native_tests {
decomp_legs,
);
if tolerance > 0.0 {
// load witness and shift the output by a small amount that is less than tolerance percent
let witness = GraphWitness::from_path(
format!("{}/{}/witness.json", test_dir, example_name).into(),
)
.unwrap();
let witness = witness.clone();
let outputs = witness.outputs.clone();
// get values as i64
let output_perturbed_safe: Vec<Vec<halo2curves::bn256::Fr>> = outputs
.iter()
.map(|sv| {
sv.iter()
.map(|v| {
// randomly perturb by a small amount less than tolerance
let perturbation = if v == &halo2curves::bn256::Fr::zero() {
halo2curves::bn256::Fr::zero()
} else {
integer_rep_to_felt(
(felt_to_integer_rep(*v) as f32
* (rand::thread_rng().gen_range(-0.01..0.01) * tolerance))
as IntegerRep,
)
};
*v + perturbation
})
.collect::<Vec<_>>()
})
.collect::<Vec<_>>();
// get values as i64
let output_perturbed_bad: Vec<Vec<halo2curves::bn256::Fr>> = outputs
.iter()
.map(|sv| {
sv.iter()
.map(|v| {
// randomly perturb by a small amount less than tolerance
let perturbation = if v == &halo2curves::bn256::Fr::zero() {
halo2curves::bn256::Fr::from(2)
} else {
integer_rep_to_felt(
(felt_to_integer_rep(*v) as f32
* (rand::thread_rng().gen_range(0.02..0.1) * tolerance))
as IntegerRep,
)
};
*v + perturbation
})
.collect::<Vec<_>>()
})
.collect::<Vec<_>>();
let good_witness = GraphWitness {
outputs: output_perturbed_safe,
..witness.clone()
};
// save
good_witness
.save(format!("{}/{}/witness_ok.json", test_dir, example_name).into())
.unwrap();
let bad_witness = GraphWitness {
outputs: output_perturbed_bad,
..witness.clone()
};
// save
bad_witness
.save(format!("{}/{}/witness_bad.json", test_dir, example_name).into())
.unwrap();
let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
.args([
"mock",
"-W",
format!("{}/{}/witness.json", test_dir, example_name).as_str(),
"-M",
format!("{}/{}/network.compiled", test_dir, example_name).as_str(),
])
.status()
.expect("failed to execute process");
assert!(status.success());
let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
.args([
"mock",
"-W",
format!("{}/{}/witness_ok.json", test_dir, example_name).as_str(),
"-M",
format!("{}/{}/network.compiled", test_dir, example_name).as_str(),
])
.status()
.expect("failed to execute process");
assert!(status.success());
let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
.args([
"mock",
"-W",
format!("{}/{}/witness_bad.json", test_dir, example_name).as_str(),
"-M",
format!("{}/{}/network.compiled", test_dir, example_name).as_str(),
])
.status()
.expect("failed to execute process");
assert!(!status.success());
} else {
let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
.args([
"mock",
"-W",
format!("{}/{}/witness.json", test_dir, example_name).as_str(),
"-M",
format!("{}/{}/network.compiled", test_dir, example_name).as_str(),
])
.status()
.expect("failed to execute process");
assert!(status.success());
}
let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
.args([
"mock",
"-W",
format!("{}/{}/witness.json", test_dir, example_name).as_str(),
"-M",
format!("{}/{}/network.compiled", test_dir, example_name).as_str(),
])
.status()
.expect("failed to execute process");
assert!(status.success());
}
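// With the tolerance parameter removed, the witness-perturbation checks above
// (the witness_ok / witness_bad files and their three mock runs) are deleted;
// every test now ends with a single unconditional `mock` over the original
// witness.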
#[allow(clippy::too_many_arguments)]
@@ -1618,7 +1492,6 @@ mod native_tests {
cal_target: &str,
scales_to_use: Option<Vec<u32>>,
num_inner_columns: usize,
tolerance: &mut f32,
commitment: Commitments,
lookup_safety_margin: usize,
bounded_lookup_log: bool,
@@ -1633,13 +1506,16 @@ mod native_tests {
"--settings-path={}/{}/settings.json",
test_dir, example_name
),
format!("--variables=batch_size->{}", batch_size),
format!(
"--variables=batch_size->{},sequence_length->100,<Sym1>->1",
batch_size
),
format!("--input-visibility={}", input_visibility),
format!("--param-visibility={}", param_visibility),
format!("--output-visibility={}", output_visibility),
format!("--num-inner-cols={}", num_inner_columns),
format!("--tolerance={}", tolerance),
format!("--commitment={}", commitment),
format!("--logrows={}", 22),
];
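// batch_size is still bound per test; sequence_length and <Sym1> appear to be
// symbolic model dimensions that must now be pinned when settings are
// generated (an assumption based on the --variables syntax above).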
// if output-visibility is fixed set --range-check-inputs-outputs to False
@@ -1695,24 +1571,6 @@ mod native_tests {
.expect("failed to execute process");
assert!(status.success());
let mut settings =
GraphSettings::load(&format!("{}/{}/settings.json", test_dir, example_name).into())
.unwrap();
let any_output_scales_smol = settings.model_output_scales.iter().any(|s| *s <= 0);
if any_output_scales_smol {
// set the tolerance to 0.0
settings.run_args.tolerance = Tolerance {
val: 0.0,
scale: 0.0.into(),
};
settings
.save(&format!("{}/{}/settings.json", test_dir, example_name).into())
.unwrap();
*tolerance = 0.0;
}
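// The post-calibration pass that force-zeroed tolerance whenever a model
// output scale was <= 0 is gone along with the tolerance field itself.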
let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
.args([
"compile-circuit",
@@ -1725,7 +1583,6 @@ mod native_tests {
test_dir, example_name
),
])
.stdout(std::process::Stdio::null())
.status()
.expect("failed to execute process");
assert!(status.success());
@@ -1768,7 +1625,6 @@ mod native_tests {
cal_target,
None,
2,
&mut 0.0,
Commitments::KZG,
2,
false,
@@ -2054,7 +1910,6 @@ mod native_tests {
target_str,
scales_to_use,
num_inner_columns,
&mut 0.0,
commitment,
lookup_safety_margin,
false,
@@ -2438,7 +2293,12 @@ mod native_tests {
.expect("failed to execute process");
if status.success() {
log::error!("Verification unexpectedly succeeded for modified proof {}. Flipped bit {} in byte {}", i, random_bit, random_byte);
log::error!(
"Verification unexpectedly succeeded for modified proof {}. Flipped bit {} in byte {}",
i,
random_bit,
random_byte
);
}
assert!(
@@ -2489,7 +2349,6 @@ mod native_tests {
// we need the accuracy
Some(vec![4]),
1,
&mut 0.0,
Commitments::KZG,
2,
false,
@@ -2581,23 +2440,43 @@ mod native_tests {
));
}
input.save(data_path.clone().into()).unwrap();
let args = vec![
"setup-test-evm-data",
"-D",
data_path.as_str(),
"-M",
&model_path,
"--test-data",
test_on_chain_data_path.as_str(),
rpc_arg.as_str(),
test_input_source.as_str(),
test_output_source.as_str(),
];
let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
.args([
"setup-test-evm-data",
"-D",
data_path.as_str(),
"-M",
&model_path,
"--test-data",
test_on_chain_data_path.as_str(),
rpc_arg.as_str(),
test_input_source.as_str(),
test_output_source.as_str(),
])
.args(args)
.status()
.expect("failed to execute process");
assert!(status.success());
// generate the witness, passing the vk path to generate the necessary kzg commits only
// if input visibility is NOT hashed
if input_visibility != "hashed" {
let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
.args([
"gen-witness",
"-D",
&test_on_chain_data_path,
"-M",
&model_path,
"-O",
&witness_path,
"--vk-path",
&format!("{}/{}/key.vk", test_dir, example_name),
])
.status()
.expect("failed to execute process");
assert!(status.success());
}
}
let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
@@ -2740,56 +2619,6 @@ mod native_tests {
.status()
.expect("failed to execute process");
assert!(status.success());
// Create a new set of test on chain data only for the on-chain input source
if input_source != "file" || output_source != "file" {
let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
.args([
"setup-test-evm-data",
"-D",
data_path.as_str(),
"-M",
&model_path,
"--test-data",
test_on_chain_data_path.as_str(),
rpc_arg.as_str(),
test_input_source.as_str(),
test_output_source.as_str(),
])
.status()
.expect("failed to execute process");
assert!(status.success());
let deployed_addr_arg = format!("--addr={}", addr_da);
let args: Vec<&str> = vec![
"test-update-account-calls",
deployed_addr_arg.as_str(),
"-D",
test_on_chain_data_path.as_str(),
rpc_arg.as_str(),
];
let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
.args(&args)
.status()
.expect("failed to execute process");
assert!(status.success());
}
// As sanity check, add example that should fail.
let args = vec![
"verify-evm",
"--proof-path",
PF_FAILURE,
deployed_addr_verifier_arg.as_str(),
deployed_addr_da_arg.as_str(),
rpc_arg.as_str(),
];
let status = Command::new(format!("{}/{}", *CARGO_TARGET_DIR, TEST_BINARY))
.args(args)
.status()
.expect("failed to execute process");
assert!(!status.success());
}
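// The trailing negative check (verify-evm against PF_FAILURE, expected to
// fail) and the test-update-account-calls round trip are dropped here.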
fn build_ezkl() {


@@ -46,7 +46,28 @@ mod py_tests {
assert!(status.success());
});
// set VOICE_DATA_DIR environment variable
std::env::set_var("VOICE_DATA_DIR", format!("{}", voice_data_dir));
unsafe {
std::env::set_var("VOICE_DATA_DIR", format!("{}", voice_data_dir));
}
}
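// `std::env::set_var` is unsafe as of the Rust 2024 edition (mutating the
// environment can race with concurrent reads), hence the new unsafe blocks.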
fn download_catdog_data() {
let cat_and_dog_data_dir = shellexpand::tilde("~/data/catdog_data");
DOWNLOAD_VOICE_DATA.call_once(|| {
let status = Command::new("bash")
.args([
"examples/notebooks/cat_and_dog_data.sh",
&cat_and_dog_data_dir,
])
.status()
.expect("failed to execute process");
assert!(status.success());
});
// set CATDOG_DATA_DIR environment variable
unsafe {
std::env::set_var("CATDOG_DATA_DIR", format!("{}", cat_and_dog_data_dir));
}
}
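// Note: this helper reuses DOWNLOAD_VOICE_DATA as its Once guard, so whichever
// download runs first suppresses the other within a process; possibly a
// copy-paste oversight rather than intentional serialization.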
fn setup_py_env() {
@@ -225,6 +246,20 @@ mod py_tests {
anvil_child.kill().unwrap();
}
#[test]
fn cat_and_dog_notebook_() {
crate::py_tests::init_binary();
let mut anvil_child = crate::py_tests::start_anvil(false);
crate::py_tests::download_catdog_data();
let test_dir: TempDir = TempDir::new("cat_and_dog").unwrap();
let path = test_dir.path().to_str().unwrap();
crate::py_tests::mv_test_(path, "cat_and_dog.ipynb");
run_notebook(path, "cat_and_dog.ipynb");
test_dir.close().unwrap();
anvil_child.kill().unwrap();
}
#[test]
fn reusable_verifier_notebook_() {
crate::py_tests::init_binary();
@@ -237,16 +272,6 @@ mod py_tests {
anvil_child.kill().unwrap();
}
#[test]
fn postgres_notebook_() {
crate::py_tests::init_binary();
let test_dir: TempDir = TempDir::new("mean_postgres").unwrap();
let path = test_dir.path().to_str().unwrap();
crate::py_tests::mv_test_(path, "mean_postgres.ipynb");
run_notebook(path, "mean_postgres.ipynb");
test_dir.close().unwrap();
}
#[test]
fn tictactoe_autoencoder_notebook_() {
crate::py_tests::init_binary();


@@ -48,7 +48,6 @@ def test_py_run_args():
run_args = ezkl.PyRunArgs()
run_args.input_visibility = "hashed"
run_args.output_visibility = "hashed"
run_args.tolerance = 1.5
def test_poseidon_hash():
@@ -480,15 +479,15 @@ async def test_deploy_evm_reusable_and_vka():
res = await ezkl.deploy_evm(
addr_path_verifier,
sol_code_path,
anvil_url,
sol_code_path,
"verifier/reusable",
)
res = await ezkl.deploy_evm(
addr_path_vk,
vk_code_path,
anvil_url,
vk_code_path,
"vka",
)
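# deploy_evm now takes the RPC URL before the Solidity source path (the URL is
# a required positional argument), so call sites swap the last two arguments or
# pass rpc_url by keyword, as in the hunks below.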
@@ -507,8 +506,8 @@ async def test_deploy_evm():
res = await ezkl.deploy_evm(
addr_path,
sol_code_path,
anvil_url,
sol_code_path,
)
assert res == True
@@ -529,8 +528,8 @@ async def test_deploy_evm_with_private_key():
res = await ezkl.deploy_evm(
addr_path,
anvil_url,
sol_code_path,
rpc_url=anvil_url,
private_key=anvil_default_private_key
)
@@ -541,8 +540,8 @@ async def test_deploy_evm_with_private_key():
with pytest.raises(RuntimeError, match="Failed to run deploy_evm"):
res = await ezkl.deploy_evm(
addr_path,
anvil_url,
sol_code_path,
rpc_url=anvil_url,
private_key=custom_zero_balance_private_key
)
@@ -565,8 +564,8 @@ async def test_verify_evm():
res = await ezkl.verify_evm(
addr,
anvil_url,
proof_path,
rpc_url=anvil_url,
# sol_code_path
# optimizer_runs
)
@@ -605,8 +604,8 @@ async def test_verify_evm_separate_vk():
res = await ezkl.verify_evm(
addr_verifier,
anvil_url,
proof_path,
rpc_url=anvil_url,
addr_vk=addr_vk,
# sol_code_path
# optimizer_runs
@@ -832,8 +831,8 @@ async def test_evm_aggregate_and_verify_aggr():
res = await ezkl.deploy_evm(
addr_path,
anvil_url,
sol_code_path,
rpc_url=anvil_url,
)
# as a sanity check
@@ -873,7 +872,8 @@ def get_examples():
'linear_regression',
"mnist_gan",
"smallworm",
"fr_age"
"fr_age",
"1d_conv",
]
examples = []
for subdir, _, _ in os.walk(os.path.join(examples_path, "onnx")):
@@ -900,7 +900,12 @@ async def test_all_examples(model_file, input_file):
proof_path = os.path.join(folder_path, 'proof.json')
print("Testing example: ", model_file)
res = ezkl.gen_settings(model_file, settings_path)
run_args = ezkl.PyRunArgs()
run_args.variables = [("batch_size", 1), ("sequence_length", 100), ("<Sym1>", 1)]
run_args.logrows = 22
res = ezkl.gen_settings(model_file, settings_path, py_run_args=run_args)
assert res
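# The symbolic dimensions (batch_size, sequence_length, <Sym1>) are pinned via
# PyRunArgs so settings generation succeeds on models with dynamic shapes, and
# logrows is raised to 22; an assumption: this mirrors the CLI --variables and
# --logrows flags used by the native tests above.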
res = await ezkl.calibrate_settings(


@@ -337,7 +337,7 @@ mod wasm32 {
// Run compiled circuit validation on onnx network (should fail)
let circuit = compiledCircuitValidation(wasm_bindgen::Clamped(NETWORK.to_vec()));
assert!(circuit.is_err());
// Run compiled circuit validation on comiled network (should pass)
// Run compiled circuit validation on compiled network (should pass)
let circuit = compiledCircuitValidation(wasm_bindgen::Clamped(NETWORK_COMPILED.to_vec()));
assert!(circuit.is_ok());
// Run input validation on witness (should fail)