Compare commits

1 commit

Author: dante
SHA1: 1a75963705
Message: refactor: DataSource enum -> struct
Date: 2025-04-29 12:18:24 -04:00
116 changed files with 7380 additions and 41897 deletions


@@ -87,7 +87,7 @@ jobs:
- name: Replace memory definition in nodejs
run: |
sed -i "3s|.*|imports['env'] = {memory: new WebAssembly.Memory({initial:21,maximum:65536,shared:true})}|" pkg/nodejs/ezkl.js
sed -i "3s|.*|imports['env'] = {memory: new WebAssembly.Memory({initial:20,maximum:65536,shared:true})}|" pkg/nodejs/ezkl.js
- name: Replace `import.meta.url` with `import.meta.resolve` definition in workerHelpers.js
run: |
@@ -188,3 +188,63 @@ jobs:
npm publish
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
in-browser-evm-ver-publish:
permissions:
contents: read
packages: write
name: publish-in-browser-evm-verifier-package
needs: [publish-wasm-bindings]
runs-on: ubuntu-latest
env:
RELEASE_TAG: ${{ github.ref_name }}
if: startsWith(github.ref, 'refs/tags/')
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Update version in package.json
shell: bash
run: |
sed -i "s|\"version\": \".*\"|\"version\": \"$RELEASE_TAG\"|" in-browser-evm-verifier/package.json
- name: Prepare tag and fetch package integrity
run: |
CLEANED_TAG=${RELEASE_TAG} # Get the tag from ref_name
CLEANED_TAG="${CLEANED_TAG#v}" # Remove leading 'v'
echo "CLEANED_TAG=${CLEANED_TAG}" >> $GITHUB_ENV # Set it as an environment variable for later steps
ENGINE_INTEGRITY=$(npm view @ezkljs/engine@$CLEANED_TAG dist.integrity)
echo "ENGINE_INTEGRITY=$ENGINE_INTEGRITY" >> $GITHUB_ENV
- name: Update @ezkljs/engine version in package.json
shell: bash
env:
RELEASE_TAG: ${{ github.ref_name }}
run: |
sed -i "s|\"@ezkljs/engine\": \".*\"|\"@ezkljs/engine\": \"$CLEANED_TAG\"|" in-browser-evm-verifier/package.json
- name: Update the engine import in in-browser-evm-verifier to use @ezkljs/engine package instead of the local one;
run: |
sed -i "s|import { encodeVerifierCalldata } from '../nodejs/ezkl';|import { encodeVerifierCalldata } from '@ezkljs/engine';|" in-browser-evm-verifier/src/index.ts
- name: Update pnpm-lock.yaml versions and integrity
run: |
awk -v integrity="$ENGINE_INTEGRITY" -v tag="$CLEANED_TAG" '
NR==30{$0=" specifier: \"" tag "\""}
NR==31{$0=" version: \"" tag "\""}
NR==400{$0=" /@ezkljs/engine@" tag ":"}
NR==401{$0=" resolution: {integrity: \"" integrity "\"}"} 1' in-browser-evm-verifier/pnpm-lock.yaml > temp.yaml && mv temp.yaml in-browser-evm-verifier/pnpm-lock.yaml
- name: Use pnpm 8
uses: pnpm/action-setup@eae0cfeb286e66ffb5155f1a79b90583a127a68b #v2.4.1
with:
version: 8
- name: Set up Node.js
uses: actions/setup-node@1a4442cacd436585916779262731d5b162bc6ec7 #v3.8.2
with:
node-version: "18.12.1"
registry-url: "https://registry.npmjs.org"
- name: Publish to npm
run: |
cd in-browser-evm-verifier
pnpm install --frozen-lockfile
pnpm run build
pnpm publish --no-git-checks
env:
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}


@@ -198,15 +198,15 @@ jobs:
- name: Build release binary (no asm or metal)
if: matrix.build != 'linux-gnu' && matrix.build != 'macos-aarch64'
run: ${{ env.CARGO }} build --release ${{ env.TARGET_FLAGS }} -Z sparse-registry --features mimalloc
run: ${{ env.CARGO }} build --release ${{ env.TARGET_FLAGS }} -Z sparse-registry
- name: Build release binary (asm)
if: matrix.build == 'linux-gnu'
run: ${{ env.CARGO }} build --release ${{ env.TARGET_FLAGS }} -Z sparse-registry --features asm,mimalloc
run: ${{ env.CARGO }} build --release ${{ env.TARGET_FLAGS }} -Z sparse-registry --features asm
- name: Build release binary (metal)
if: matrix.build == 'macos-aarch64'
run: ${{ env.CARGO }} build --release ${{ env.TARGET_FLAGS }} -Z sparse-registry --features macos-metal,mimalloc
run: ${{ env.CARGO }} build --release ${{ env.TARGET_FLAGS }} -Z sparse-registry --features macos-metal
- name: Strip release binary
if: matrix.build != 'windows-msvc' && matrix.build != 'linux-aarch64'


@@ -25,29 +25,29 @@ jobs:
contents: read
runs-on: large-self-hosted
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$EVM_VERIFIER_EZKL_TOKEN" ]; then
echo "❌ EVM_VERIFIER_EZKL_TOKEN is empty check repo/org secrets" >&2
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# For libgit2 (what Cargo uses internally)
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com" > ~/.git-credentials
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set URL replacement with oauth2 format
# Also set the URL replacement as before
git config --global \
url."https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com/".insteadOf \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
@@ -66,29 +66,29 @@ jobs:
contents: read
runs-on: ubuntu-latest
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$EVM_VERIFIER_EZKL_TOKEN" ]; then
echo "❌ EVM_VERIFIER_EZKL_TOKEN is empty check repo/org secrets" >&2
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# For libgit2 (what Cargo uses internally)
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com" > ~/.git-credentials
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set URL replacement with oauth2 format
# Also set the URL replacement as before
git config --global \
url."https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com/".insteadOf \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
@@ -103,29 +103,29 @@ jobs:
contents: read
runs-on: ubuntu-latest
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$EVM_VERIFIER_EZKL_TOKEN" ]; then
echo "❌ EVM_VERIFIER_EZKL_TOKEN is empty check repo/org secrets" >&2
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# For libgit2 (what Cargo uses internally)
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com" > ~/.git-credentials
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set URL replacement with oauth2 format
# Also set the URL replacement as before
git config --global \
url."https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com/".insteadOf \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
@@ -140,29 +140,29 @@ jobs:
contents: read
runs-on: ubuntu-latest-32-cores
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$EVM_VERIFIER_EZKL_TOKEN" ]; then
echo "❌ EVM_VERIFIER_EZKL_TOKEN is empty check repo/org secrets" >&2
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# For libgit2 (what Cargo uses internally)
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com" > ~/.git-credentials
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set URL replacement with oauth2 format
# Also set the URL replacement as before
git config --global \
url."https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com/".insteadOf \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
@@ -219,29 +219,29 @@ jobs:
contents: read
runs-on: non-gpu
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$EVM_VERIFIER_EZKL_TOKEN" ]; then
echo "❌ EVM_VERIFIER_EZKL_TOKEN is empty check repo/org secrets" >&2
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# For libgit2 (what Cargo uses internally)
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com" > ~/.git-credentials
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set URL replacement with oauth2 format
# Also set the URL replacement as before
git config --global \
url."https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com/".insteadOf \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
@@ -273,29 +273,29 @@ jobs:
contents: read
runs-on: non-gpu
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$EVM_VERIFIER_EZKL_TOKEN" ]; then
echo "❌ EVM_VERIFIER_EZKL_TOKEN is empty check repo/org secrets" >&2
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# For libgit2 (what Cargo uses internally)
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com" > ~/.git-credentials
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set URL replacement with oauth2 format
# Also set the URL replacement as before
git config --global \
url."https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com/".insteadOf \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
@@ -327,29 +327,29 @@ jobs:
contents: read
runs-on: ubuntu-latest-16-cores
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$EVM_VERIFIER_EZKL_TOKEN" ]; then
echo "❌ EVM_VERIFIER_EZKL_TOKEN is empty check repo/org secrets" >&2
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# For libgit2 (what Cargo uses internally)
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com" > ~/.git-credentials
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set URL replacement with oauth2 format
# Also set the URL replacement as before
git config --global \
url."https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com/".insteadOf \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
@@ -368,29 +368,29 @@ jobs:
contents: read
runs-on: non-gpu
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$EVM_VERIFIER_EZKL_TOKEN" ]; then
echo "❌ EVM_VERIFIER_EZKL_TOKEN is empty check repo/org secrets" >&2
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# For libgit2 (what Cargo uses internally)
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com" > ~/.git-credentials
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set URL replacement with oauth2 format
# Also set the URL replacement as before
git config --global \
url."https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com/".insteadOf \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
@@ -413,35 +413,54 @@ jobs:
# AR=/opt/homebrew/opt/llvm/bin/llvm-ar CC=/opt/homebrew/opt/llvm/bin/clang wasm-pack test --firefox --headless -- -Z build-std="panic_abort,std" --features web
run: wasm-pack test --chrome --headless -- -Z build-std="panic_abort,std" --features web
foudry-solidity-tests:
permissions:
contents: read
runs-on: non-gpu
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
persist-credentials: false
submodules: recursive
- name: Install Foundry
uses: foundry-rs/foundry-toolchain@3b74dacdda3c0b763089addb99ed86bc3800e68b
- name: Run tests
run: |
cd tests/foundry
forge install https://github.com/foundry-rs/forge-std --no-git --no-commit
forge test -vvvv --fuzz-runs 64
mock-proving-tests:
permissions:
contents: read
runs-on: non-gpu
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$EVM_VERIFIER_EZKL_TOKEN" ]; then
echo "❌ EVM_VERIFIER_EZKL_TOKEN is empty check repo/org secrets" >&2
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# For libgit2 (what Cargo uses internally)
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com" > ~/.git-credentials
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set URL replacement with oauth2 format
# Also set the URL replacement as before
git config --global \
url."https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com/".insteadOf \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
toolchain: nightly-2025-02-17
@@ -506,29 +525,29 @@ jobs:
runs-on: non-gpu
needs: [build, library-tests, docs, python-tests, python-integration-tests]
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$EVM_VERIFIER_EZKL_TOKEN" ]; then
echo "❌ EVM_VERIFIER_EZKL_TOKEN is empty check repo/org secrets" >&2
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# For libgit2 (what Cargo uses internally)
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com" > ~/.git-credentials
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set URL replacement with oauth2 format
# Also set the URL replacement as before
git config --global \
url."https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com/".insteadOf \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
@@ -553,16 +572,28 @@ jobs:
cache: "pnpm"
- name: "Add rust-src"
run: rustup component add rust-src --toolchain nightly-2025-02-17-x86_64-unknown-linux-gnu
- name: Install dependencies for js tests and package
- name: Install dependencies for js tests and in-browser-evm-verifier package
run: |
pnpm install --frozen-lockfile
pnpm install --dir ./in-browser-evm-verifier --frozen-lockfile
env:
CI: false
NODE_ENV: development
- name: Build wasm package for nodejs target.
run: |
wasm-pack build --target nodejs --out-dir ./in-browser-evm-verifier/nodejs . -- -Z build-std="panic_abort,std"
- name: Replace memory definition in nodejs
run: |
sed -i "3s|.*|imports['env'] = {memory: new WebAssembly.Memory({initial:20,maximum:65536,shared:true})}|" in-browser-evm-verifier/nodejs/ezkl.js
- name: Build @ezkljs/verify package
run: |
cd in-browser-evm-verifier
pnpm build:commonjs
cd ..
# - name: Install solc
# run: (hash svm 2>/dev/null || cargo install svm-rs) && svm install 0.8.20 && solc --version
- name: Install Anvil
run: cargo install --git https://github.com/foundry-rs/foundry --rev 62cdea8ff9e6efef011f77e295823b5f2dbeb3a1 --locked anvil --force
- name: Build wasm package for nodejs target.
run: |
wasm-pack build --target nodejs --out-dir ./tests/wasm/nodejs . -- -Z build-std="panic_abort,std"
- name: KZG prove and verify tests (EVM + reusable verifier + col-overflow)
run: cargo nextest run --verbose tests_evm::kzg_evm_prove_and_verify_reusable_verifier --test-threads 1
- name: KZG prove and verify tests (EVM + kzg all)
@@ -571,6 +602,20 @@ jobs:
run: cargo nextest run --verbose tests_evm::kzg_evm_kzg_input_prove_and_verify --test-threads 1
- name: KZG prove and verify tests (EVM + kzg params)
run: cargo nextest run --verbose tests_evm::kzg_evm_kzg_params_prove_and_verify --test-threads 1
- name: KZG prove and verify tests (EVM + on chain inputs)
run: cargo nextest run --verbose tests_evm::kzg_evm_on_chain_input_prove_and_verify --test-threads 1
- name: KZG prove and verify tests (EVM + on chain outputs)
run: cargo nextest run --verbose tests_evm::kzg_evm_on_chain_output_prove_and_verify --test-threads 1
- name: KZG prove and verify tests (EVM + on chain inputs & outputs)
run: cargo nextest run --verbose tests_evm::kzg_evm_on_chain_input_output_prove_and_verify --test-threads 1
- name: KZG prove and verify tests (EVM + on chain inputs & kzg outputs + params)
run: cargo nextest run --verbose tests_evm::kzg_evm_on_chain_input_kzg_output_kzg_params_prove_and_verify --test-threads 1
- name: KZG prove and verify tests (EVM + on chain outputs & kzg inputs + params)
run: cargo nextest run --verbose tests_evm::kzg_evm_on_chain_output_kzg_input_kzg_params_prove_and_verify --test-threads 1
- name: KZG prove and verify tests (EVM + on chain all kzg)
run: cargo nextest run --verbose tests_evm::kzg_evm_on_chain_all_kzg_params_prove_and_verify --test-threads 1
- name: KZG prove and verify tests (EVM + on chain inputs & outputs hashes)
run: cargo nextest run --verbose tests_evm::kzg_evm_on_chain_input_output_hashed_prove_and_verify --test-threads 1
- name: KZG prove and verify tests (EVM)
run: cargo nextest run --verbose tests_evm::kzg_evm_prove_and_verify --test-threads 1
- name: KZG prove and verify tests (EVM + hashed inputs)
@@ -620,29 +665,29 @@ jobs:
runs-on: non-gpu
needs: [build, library-tests, docs]
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$EVM_VERIFIER_EZKL_TOKEN" ]; then
echo "❌ EVM_VERIFIER_EZKL_TOKEN is empty check repo/org secrets" >&2
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# For libgit2 (what Cargo uses internally)
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com" > ~/.git-credentials
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set URL replacement with oauth2 format
# Also set the URL replacement as before
git config --global \
url."https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com/".insteadOf \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
@@ -683,6 +728,9 @@ jobs:
- name: Build wasm package for nodejs target.
run: |
wasm-pack build --target nodejs --out-dir ./tests/wasm/nodejs . -- -Z build-std="panic_abort,std"
- name: Replace memory definition in nodejs
run: |
sed -i "3s|.*|imports['env'] = {memory: new WebAssembly.Memory({initial:20,maximum:65536,shared:true})}|" tests/wasm/nodejs/ezkl.js
- name: KZG prove and verify tests (public outputs + column overflow)
run: cargo nextest run --verbose tests::kzg_prove_and_verify_with_overflow_::w
- name: KZG prove and verify tests (public outputs + fixed params + column overflow)
@@ -757,29 +805,29 @@ jobs:
runs-on: self-hosted
needs: [build, library-tests, docs, python-tests, python-integration-tests]
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$EVM_VERIFIER_EZKL_TOKEN" ]; then
echo "❌ EVM_VERIFIER_EZKL_TOKEN is empty check repo/org secrets" >&2
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# For libgit2 (what Cargo uses internally)
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com" > ~/.git-credentials
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set URL replacement with oauth2 format
# Also set the URL replacement as before
git config --global \
url."https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com/".insteadOf \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: dtolnay/rust-toolchain@4f94fbe7e03939b0e674bcc9ca609a16088f63ff #nightly branch, TODO: update when required
with:
@@ -819,29 +867,29 @@ jobs:
runs-on: large-self-hosted
needs: [build, library-tests, docs, python-tests, python-integration-tests]
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$EVM_VERIFIER_EZKL_TOKEN" ]; then
echo "❌ EVM_VERIFIER_EZKL_TOKEN is empty check repo/org secrets" >&2
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# For libgit2 (what Cargo uses internally)
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com" > ~/.git-credentials
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set URL replacement with oauth2 format
# Also set the URL replacement as before
git config --global \
url."https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com/".insteadOf \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
@@ -861,29 +909,29 @@ jobs:
runs-on: large-self-hosted
needs: [build, library-tests, docs, python-tests, python-integration-tests]
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$EVM_VERIFIER_EZKL_TOKEN" ]; then
echo "❌ EVM_VERIFIER_EZKL_TOKEN is empty check repo/org secrets" >&2
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# For libgit2 (what Cargo uses internally)
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com" > ~/.git-credentials
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set URL replacement with oauth2 format
# Also set the URL replacement as before
git config --global \
url."https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com/".insteadOf \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
@@ -907,29 +955,29 @@ jobs:
runs-on: ubuntu-latest-32-cores
needs: [build, library-tests, docs]
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$EVM_VERIFIER_EZKL_TOKEN" ]; then
echo "❌ EVM_VERIFIER_EZKL_TOKEN is empty check repo/org secrets" >&2
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# For libgit2 (what Cargo uses internally)
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com" > ~/.git-credentials
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set URL replacement with oauth2 format
# Also set the URL replacement as before
git config --global \
url."https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com/".insteadOf \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
@@ -949,29 +997,29 @@ jobs:
runs-on: non-gpu
needs: [build, library-tests, docs]
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$EVM_VERIFIER_EZKL_TOKEN" ]; then
echo "❌ EVM_VERIFIER_EZKL_TOKEN is empty check repo/org secrets" >&2
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# For libgit2 (what Cargo uses internally)
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com" > ~/.git-credentials
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set URL replacement with oauth2 format
# Also set the URL replacement as before
git config --global \
url."https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com/".insteadOf \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions/setup-python@b64ffcaf5b410884ad320a9cfac8866006a109aa #v4.8.0
with:
@@ -1000,29 +1048,29 @@ jobs:
runs-on: non-gpu
needs: [build, library-tests, docs, python-tests, python-integration-tests]
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$EVM_VERIFIER_EZKL_TOKEN" ]; then
echo "❌ EVM_VERIFIER_EZKL_TOKEN is empty check repo/org secrets" >&2
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# For libgit2 (what Cargo uses internally)
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com" > ~/.git-credentials
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set URL replacement with oauth2 format
# Also set the URL replacement as before
git config --global \
url."https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com/".insteadOf \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions/setup-python@b64ffcaf5b410884ad320a9cfac8866006a109aa #v4.8.0
with:
@@ -1053,30 +1101,48 @@ jobs:
permissions:
contents: read
runs-on: large-self-hosted
services:
# Label used to access the service container
postgres:
# Docker Hub image
image: postgres
env:
POSTGRES_USER: ubuntu
POSTGRES_HOST_AUTH_METHOD: trust
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
-v /var/run/postgresql:/var/run/postgresql
ports:
# Maps tcp port 5432 on service container to the host
- 5432:5432
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$EVM_VERIFIER_EZKL_TOKEN" ]; then
echo "❌ EVM_VERIFIER_EZKL_TOKEN is empty check repo/org secrets" >&2
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# For libgit2 (what Cargo uses internally)
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com" > ~/.git-credentials
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set URL replacement with oauth2 format
# Also set the URL replacement as before
git config --global \
url."https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com/".insteadOf \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions/setup-python@b64ffcaf5b410884ad320a9cfac8866006a109aa #v4.8.0
with:
@@ -1133,29 +1199,29 @@ jobs:
contents: read
runs-on: macos-latest
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$EVM_VERIFIER_EZKL_TOKEN" ]; then
echo "❌ EVM_VERIFIER_EZKL_TOKEN is empty check repo/org secrets" >&2
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# For libgit2 (what Cargo uses internally)
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com" > ~/.git-credentials
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set URL replacement with oauth2 format
# Also set the URL replacement as before
git config --global \
url."https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com/".insteadOf \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:
@@ -1176,29 +1242,29 @@ jobs:
needs: [ios-integration-tests]
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
with:
persist-credentials: false
- name: Configure Git credentials
run: |
if [ -z "$EVM_VERIFIER_EZKL_TOKEN" ]; then
echo "❌ EVM_VERIFIER_EZKL_TOKEN is empty check repo/org secrets" >&2
if [ -z "$VERIFICATION_EZKL_TOKEN" ]; then
echo "❌ VERIFICATION_EZKL_TOKEN is empty check repo/org secrets" >&2
exit 1
fi
# For libgit2 (what Cargo uses internally)
# Configure git to use the token for all GitHub requests
git config --global credential.helper store
echo "https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com" > ~/.git-credentials
echo "https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com" > ~/.git-credentials
chmod 600 ~/.git-credentials
# Also set URL replacement with oauth2 format
# Also set the URL replacement as before
git config --global \
url."https://oauth2:${EVM_VERIFIER_EZKL_TOKEN}@github.com/".insteadOf \
url."https://${VERIFICATION_EZKL_TOKEN}:x-oauth-basic@github.com/".insteadOf \
"https://github.com/"
env:
EVM_VERIFIER_EZKL_TOKEN: ${{ secrets.EVM_VERIFIER_EZKL_TOKEN }}
VERIFICATION_EZKL_TOKEN: ${{ secrets.VERIFICATION_EZKL_TOKEN }}
- uses: actions-rs/toolchain@b2417cde72dcf67f306c0ae8e0828a81bf0b189f #v1.0.6
with:

.gitignore (vendored): 2 changes

@@ -52,5 +52,3 @@ docs/python/build
!tests/assets/vk_aggr.key
cache
out
!tests/assets/wasm.code
!tests/assets/wasm.sol

Cargo.lock (generated): 185 changes

@@ -126,27 +126,6 @@ dependencies = [
"winnow 0.6.26",
]
[[package]]
name = "alloy-eip2930"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0069cf0642457f87a01a014f6dc29d5d893cd4fd8fddf0c3cdfad1bb3ebafc41"
dependencies = [
"alloy-primitives 0.8.25",
"alloy-rlp",
]
[[package]]
name = "alloy-eip7702"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ea59dc42102bc9a1905dc57901edc6dd48b9f38115df86c7d252acba70d71d04"
dependencies = [
"alloy-primitives 0.8.25",
"alloy-rlp",
"k256",
]
[[package]]
name = "alloy-eips"
version = "0.1.0"
@@ -238,7 +217,7 @@ dependencies = [
"bytes",
"cfg-if",
"const-hex",
"derive_more 0.99.20",
"derive_more",
"hex-literal",
"itoa",
"ruint",
@@ -255,7 +234,7 @@ dependencies = [
"bytes",
"cfg-if",
"const-hex",
"derive_more 0.99.20",
"derive_more",
"getrandom 0.2.16",
"hex-literal",
"itoa",
@@ -268,25 +247,6 @@ dependencies = [
"tiny-keccak",
]
[[package]]
name = "alloy-primitives"
version = "0.8.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8c77490fe91a0ce933a1f219029521f20fc28c2c0ca95d53fa4da9c00b8d9d4e"
dependencies = [
"alloy-rlp",
"bytes",
"cfg-if",
"const-hex",
"derive_more 2.0.1",
"hashbrown 0.15.2",
"itoa",
"k256",
"paste",
"ruint",
"tiny-keccak",
]
[[package]]
name = "alloy-provider"
version = "0.1.0"
@@ -892,16 +852,6 @@ dependencies = [
"winapi",
]
[[package]]
name = "aurora-engine-modexp"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "518bc5745a6264b5fd7b09dffb9667e400ee9e2bbe18555fac75e1fe9afa0df9"
dependencies = [
"hex",
"num",
]
[[package]]
name = "auto_impl"
version = "1.3.0"
@@ -979,7 +929,7 @@ dependencies = [
"bitflags 2.9.0",
"cexpr",
"clang-sys",
"itertools 0.12.1",
"itertools 0.11.0",
"lazy_static",
"lazycell",
"log",
@@ -1366,7 +1316,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c"
dependencies = [
"lazy_static",
"windows-sys 0.59.0",
"windows-sys 0.48.0",
]
[[package]]
@@ -1743,27 +1693,6 @@ dependencies = [
"syn 2.0.101",
]
[[package]]
name = "derive_more"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678"
dependencies = [
"derive_more-impl",
]
[[package]]
name = "derive_more-impl"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.101",
"unicode-xid",
]
[[package]]
name = "digest"
version = "0.9.0"
@@ -2010,7 +1939,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "976dd42dc7e85965fe702eb8164f21f450704bdde31faefd6471dba214cb594e"
dependencies = [
"libc",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -2085,7 +2014,6 @@ dependencies = [
"indicatif",
"instant",
"itertools 0.10.5",
"jemallocator",
"lazy_static",
"log",
"maybe-rayon",
@@ -2585,7 +2513,7 @@ dependencies = [
[[package]]
name = "halo2_solidity_verifier"
version = "0.1.0"
source = "git+https://github.com/zkonduit/ezkl-verifier?branch=main#ff30972bf729d046f0c903ad91703af1a9e33a8f"
source = "git+https://github.com/zkonduit/verification-ezkl?branch=vka-hash#409f977e461b435b9afc33ed38edba09fe2eaee4"
dependencies = [
"askama",
"blake2b_simd",
@@ -2593,7 +2521,6 @@ dependencies = [
"hex",
"itertools 0.11.0",
"regex",
"revm 14.0.3",
"ruint",
"sha3 0.10.8",
]
@@ -3275,7 +3202,7 @@ checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9"
dependencies = [
"hermit-abi 0.5.0",
"libc",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -3335,26 +3262,6 @@ version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
[[package]]
name = "jemalloc-sys"
version = "0.5.4+5.3.0-patched"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac6c1946e1cea1788cbfde01c993b52a10e2da07f4bac608228d1bed20bfebf2"
dependencies = [
"cc",
"libc",
]
[[package]]
name = "jemallocator"
version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a0de374a9f8e63150e6f5e8a60cc14c668226d7a347d8aee1a45766e3c4dd3bc"
dependencies = [
"jemalloc-sys",
"libc",
]
[[package]]
name = "jiff"
version = "0.2.10"
@@ -3489,7 +3396,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34"
dependencies = [
"cfg-if",
"windows-targets 0.52.6",
"windows-targets 0.48.5",
]
[[package]]
@@ -4696,7 +4603,7 @@ dependencies = [
"once_cell",
"socket2",
"tracing",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -4991,21 +4898,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68f4ca8ae0345104523b4af1a8a7ea97cfa1865cdb7a7c25d23c1a18d9b48598"
dependencies = [
"auto_impl",
"revm-interpreter 1.3.0",
"revm-precompile 2.2.0",
]
[[package]]
name = "revm"
version = "14.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "641702b12847f9ed418d552f4fcabe536d867a2c980e96b6e7e25d7b992f929f"
dependencies = [
"auto_impl",
"cfg-if",
"dyn-clone",
"revm-interpreter 10.0.3",
"revm-precompile 11.0.3",
"revm-interpreter",
"revm-precompile",
]
[[package]]
@@ -5014,16 +4908,7 @@ version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f959cafdf64a7f89b014fa73dc2325001cf654b3d9400260b212d19a2ebe3da0"
dependencies = [
"revm-primitives 1.3.0",
]
[[package]]
name = "revm-interpreter"
version = "10.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e5e14002afae20b5bf1566f22316122f42f57517000e559c55b25bf7a49cba2"
dependencies = [
"revm-primitives 10.0.0",
"revm-primitives",
]
[[package]]
@@ -5035,23 +4920,7 @@ dependencies = [
"k256",
"num",
"once_cell",
"revm-primitives 1.3.0",
"ripemd",
"sha2",
"substrate-bn",
]
[[package]]
name = "revm-precompile"
version = "11.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3198c06247e8d4ad0d1312591edf049b0de4ddffa9fecb625c318fd67db8639b"
dependencies = [
"aurora-engine-modexp",
"cfg-if",
"k256",
"once_cell",
"revm-primitives 10.0.0",
"revm-primitives",
"ripemd",
"sha2",
"substrate-bn",
@@ -5073,24 +4942,6 @@ dependencies = [
"hex",
]
[[package]]
name = "revm-primitives"
version = "10.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6f1525851a03aff9a9d6a1d018b414d76252d6802ab54695b27093ecd7e7a101"
dependencies = [
"alloy-eip2930",
"alloy-eip7702",
"alloy-primitives 0.8.25",
"auto_impl",
"bitflags 2.9.0",
"bitvec",
"cfg-if",
"dyn-clone",
"enumn",
"hex",
]
[[package]]
name = "rfc6979"
version = "0.4.0"
@@ -5234,7 +5085,7 @@ dependencies = [
"errno",
"libc",
"linux-raw-sys 0.4.15",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -5247,7 +5098,7 @@ dependencies = [
"errno",
"libc",
"linux-raw-sys 0.9.4",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -5684,7 +5535,7 @@ dependencies = [
"num-traits",
"poseidon",
"rand 0.8.5",
"revm 3.5.0",
"revm",
"serde",
"sha3 0.10.8",
]
@@ -5951,7 +5802,7 @@ dependencies = [
"getrandom 0.3.2",
"once_cell",
"rustix 1.0.5",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -7080,7 +6931,7 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
dependencies = [
"windows-sys 0.59.0",
"windows-sys 0.48.0",
]
[[package]]


@@ -35,9 +35,7 @@ halo2_wrong_ecc = { git = "https://github.com/zkonduit/halo2wrong", branch = "ac
snark-verifier = { git = "https://github.com/zkonduit/snark-verifier", branch = "ac/chunked-mv-lookup", features = [
"derive_serde",
] }
halo2_solidity_verifier = { git = "https://github.com/zkonduit/ezkl-verifier", branch = "main", optional = true, features = [
"evm",
] }
halo2_solidity_verifier = { git = "https://github.com/zkonduit/verification-ezkl", branch = "vka-hash", optional = true }
maybe-rayon = { version = "0.1.1", default-features = false }
bincode = { version = "1.3.3", default-features = false }
unzip-n = "0.1.2"
@@ -45,12 +43,10 @@ num = "0.4.1"
tosubcommand = { git = "https://github.com/zkonduit/enum_to_subcommand", package = "tosubcommand", optional = true }
semver = { version = "1.0.22", optional = true }
[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
# evm related deps
serde_json = { version = "1.0.97", features = ["float_roundtrip", "raw_value"] }
# evm related deps
alloy = { git = "https://github.com/alloy-rs/alloy", version = "0.1.0", rev = "5fbf57bac99edef9d8475190109a7ea9fb7e5e83", features = [
"provider-http",
"signers",
@@ -60,7 +56,6 @@ alloy = { git = "https://github.com/alloy-rs/alloy", version = "0.1.0", rev = "5
"node-bindings",
], optional = true }
foundry-compilers = { version = "0.4.1", features = [
"svm-solc",
], optional = true }
@@ -93,9 +88,9 @@ pyo3-log = { version = "0.12.0", default-features = false, optional = true }
tract-onnx = { git = "https://github.com/sonos/tract/", rev = "37132e0397d0a73e5bd3a8615d932dabe44f6736", default-features = false, optional = true }
tabled = { version = "0.12.0", optional = true }
objc = { version = "0.2.4", optional = true }
pyo3-stub-gen = { version = "0.6.0", optional = true }
jemallocator = { version = "0.5", optional = true }
mimalloc = { version = "0.1", optional = true }
pyo3-stub-gen = { version = "0.6.0", optional = true }
# universal bindings
uniffi = { version = "=0.28.0", optional = true }
getrandom = { version = "0.2.8", optional = true }
@@ -230,18 +225,12 @@ default = [
]
onnx = ["dep:tract-onnx"]
python-bindings = ["pyo3", "pyo3-log", "pyo3-async-runtimes", "pyo3-stub-gen"]
universal-bindings = [
"uniffi",
"mv-lookup",
"precompute-coset",
"parallel-poly-read",
"solidity-verifier-mv-lookup",
]
logging = ["dep:colored", "dep:env_logger", "dep:chrono"]
ios-bindings = ["universal-bindings"]
ios-bindings = ["eth-mv-lookup", "precompute-coset", "parallel-poly-read", "uniffi"]
ios-bindings-test = ["ios-bindings", "uniffi/bindgen-tests"]
ezkl = [
"onnx",
"dep:colored",
"dep:env_logger",
"tabled/color",
"serde_json/std",
"colored_json",
@@ -252,24 +241,42 @@ ezkl = [
"dep:lazy_static",
"dep:tokio",
"dep:openssl",
"dep:mimalloc",
"dep:chrono",
"dep:sha256",
"dep:clap_complete",
"dep:semver",
"dep:clap",
"dep:tosubcommand",
"logging",
]
eth = ["dep:alloy", "dep:foundry-compilers", "dep:ethabi"]
solidity-verifier = ["dep:halo2_solidity_verifier"]
solidity-verifier-mv-lookup = ["halo2_solidity_verifier/mv-lookup"]
eth-mv-lookup = ["solidity-verifier-mv-lookup", "mv-lookup", "eth"]
eth-original-lookup = ["eth", "solidity-verifier"]
eth = [
"dep:alloy",
"dep:foundry-compilers",
"dep:ethabi",
]
solidity-verifier = [
"dep:halo2_solidity_verifier",
]
solidity-verifier-mv-lookup = [
"halo2_solidity_verifier/mv-lookup",
]
eth-mv-lookup = [
"solidity-verifier-mv-lookup",
"mv-lookup",
"eth",
]
eth-original-lookup = [
"eth",
"solidity-verifier",
]
parallel-poly-read = [
"halo2_proofs/circuit-params",
"halo2_proofs/parallel-poly-read",
]
mv-lookup = ["halo2_proofs/mv-lookup", "snark-verifier/mv-lookup"]
mv-lookup = [
"halo2_proofs/mv-lookup",
"snark-verifier/mv-lookup",
]
asm = ["halo2curves/asm", "halo2_proofs/asm"]
precompute-coset = ["halo2_proofs/precompute-coset"]
det-prove = []
@@ -279,15 +286,12 @@ no-banner = []
no-update = []
macos-metal = ["halo2_proofs/macos"]
ios-metal = ["halo2_proofs/ios"]
jemalloc = ["dep:jemallocator"]
mimalloc = ["dep:mimalloc"]
[patch.crates-io]
uniffi_testing = { git = "https://github.com/ElusAegis/uniffi-rs", branch = "feat/testing-feature-build-fix" }
[profile.release]
# debug = true
rustflags = ["-C", "relocation-model=pic"]
lto = "fat"
codegen-units = 1


@@ -76,6 +76,11 @@ For more details visit the [docs](https://docs.ezkl.xyz). The CLI is faster than
Build the auto-generated rust documentation and open the docs in your browser locally. `cargo doc --open`
#### In-browser EVM Verifier
As an alternative to running the native Halo2 verifier as a WASM binding in the browser, you can use the in-browser EVM verifier. Its source code lives in the `in-browser-evm-verifier` directory, along with a README explaining how to use it.
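To give a flavor of what that looks like, here is a minimal, hypothetical Node sketch (not part of this commit). It assumes that `encodeVerifierCalldata` from the published `@ezkljs/engine` package accepts serialized proof bytes and returns raw calldata for the generated `Halo2Verifier`, and that a verifier contract is already deployed on a local node; check the `in-browser-evm-verifier` README for the exact signatures.

```typescript
import { readFileSync } from 'fs';
import { encodeVerifierCalldata } from '@ezkljs/engine';

// Proof produced beforehand with `ezkl prove`; the path and deployed address are placeholders.
const proof = readFileSync('proof.json');
const verifier = '0x0000000000000000000000000000000000000000';

// Assumed: returns the ABI-encoded `verifyProof(...)` calldata as raw bytes.
const calldata = encodeVerifierCalldata(new Uint8ClampedArray(proof));

// Submit the calldata as a read-only eth_call against a local node (e.g. Anvil).
const response = await fetch('http://127.0.0.1:8545', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    jsonrpc: '2.0',
    id: 1,
    method: 'eth_call',
    params: [{ to: verifier, data: '0x' + Buffer.from(calldata).toString('hex') }, 'latest'],
  }),
});
console.log(await response.json()); // a non-reverting call means the proof verified
```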
### Building the Project 🔨
#### Rust CLI


@@ -68,7 +68,7 @@ impl Circuit<Fr> for MyCircuit {
config
.layout(
&mut region,
&[&self.image, &self.kernel, &self.bias],
&[self.image.clone(), self.kernel.clone(), self.bias.clone()],
Box::new(PolyOp::Conv {
padding: vec![(0, 0)],
stride: vec![1; 2],


@@ -15,7 +15,6 @@ use halo2_proofs::{
plonk::{Circuit, ConstraintSystem, Error},
};
use halo2curves::bn256::{Bn256, Fr};
use itertools::Itertools;
use rand::rngs::OsRng;
use snark_verifier::system::halo2::transcript::evm::EvmTranscript;
use std::marker::PhantomData;
@@ -60,7 +59,7 @@ impl Circuit<Fr> for MyCircuit {
config
.layout(
&mut region,
&self.inputs.iter().collect_vec(),
&self.inputs,
Box::new(PolyOp::Einsum {
equation: "i,i->".to_string(),
}),


@@ -15,7 +15,6 @@ use halo2_proofs::{
plonk::{Circuit, ConstraintSystem, Error},
};
use halo2curves::bn256::{Bn256, Fr};
use itertools::Itertools;
use rand::rngs::OsRng;
use snark_verifier::system::halo2::transcript::evm::EvmTranscript;
use std::marker::PhantomData;
@@ -62,7 +61,7 @@ impl Circuit<Fr> for MyCircuit {
config
.layout(
&mut region,
&self.inputs.iter().collect_vec(),
&self.inputs,
Box::new(PolyOp::Einsum {
equation: "ab,bc->ac".to_string(),
}),


@@ -17,7 +17,6 @@ use halo2_proofs::{
plonk::{Circuit, ConstraintSystem, Error},
};
use halo2curves::bn256::{Bn256, Fr};
use itertools::Itertools;
use snark_verifier::system::halo2::transcript::evm::EvmTranscript;
use std::marker::PhantomData;
@@ -87,13 +86,13 @@ impl Circuit<Fr> for MyCircuit {
let mut region = region::RegionCtx::new(region, 0, 1, 1024, 2);
let output = config
.base_config
.layout(&mut region, &self.inputs.iter().collect_vec(), Box::new(op))
.layout(&mut region, &self.inputs, Box::new(op))
.unwrap();
let _output = config
.base_config
.layout(
&mut region,
&[&output.unwrap()],
&[output.unwrap()],
Box::new(LookupOp::Sigmoid { scale: 1.0.into() }),
)
.unwrap();


@@ -17,7 +17,6 @@ use halo2_proofs::{
plonk::{Circuit, ConstraintSystem, Error},
};
use halo2curves::bn256::{Bn256, Fr};
use itertools::Itertools;
use snark_verifier::system::halo2::transcript::evm::EvmTranscript;
use std::marker::PhantomData;
@@ -88,13 +87,13 @@ impl Circuit<Fr> for MyCircuit {
let mut region = region::RegionCtx::new(region, 0, 1, 1024, 2);
let output = config
.base_config
.layout(&mut region, &self.inputs.iter().collect_vec(), Box::new(op))
.layout(&mut region, &self.inputs, Box::new(op))
.unwrap();
let _output = config
.base_config
.layout(
&mut region,
&[&output.unwrap()],
&[output.unwrap()],
Box::new(LookupOp::Sigmoid { scale: 1.0.into() }),
)
.unwrap();


@@ -15,7 +15,6 @@ use halo2_proofs::{
plonk::{Circuit, ConstraintSystem, Error},
};
use halo2curves::bn256::{Bn256, Fr};
use itertools::Itertools;
use rand::rngs::OsRng;
use snark_verifier::system::halo2::transcript::evm::EvmTranscript;
use std::marker::PhantomData;
@@ -60,7 +59,7 @@ impl Circuit<Fr> for MyCircuit {
config
.layout(
&mut region,
&self.inputs.iter().collect_vec(),
&self.inputs,
Box::new(PolyOp::Sum { axes: vec![0] }),
)
.unwrap();


@@ -63,7 +63,7 @@ impl Circuit<Fr> for MyCircuit {
config
.layout(
&mut region,
&[&self.image],
&[self.image.clone()],
Box::new(HybridOp::SumPool {
padding: vec![(0, 0); 2],
stride: vec![1, 1],


@@ -15,7 +15,6 @@ use halo2_proofs::{
plonk::{Circuit, ConstraintSystem, Error},
};
use halo2curves::bn256::{Bn256, Fr};
use itertools::Itertools;
use rand::rngs::OsRng;
use snark_verifier::system::halo2::transcript::evm::EvmTranscript;
use std::marker::PhantomData;
@@ -58,11 +57,7 @@ impl Circuit<Fr> for MyCircuit {
|region| {
let mut region = region::RegionCtx::new(region, 0, 1, 1024, 2);
config
.layout(
&mut region,
&self.inputs.iter().collect_vec(),
Box::new(PolyOp::Add),
)
.layout(&mut region, &self.inputs, Box::new(PolyOp::Add))
.unwrap();
Ok(())
},


@@ -16,7 +16,6 @@ use halo2_proofs::{
plonk::{Circuit, ConstraintSystem, Error},
};
use halo2curves::bn256::{Bn256, Fr};
use itertools::Itertools;
use rand::rngs::OsRng;
use snark_verifier::system::halo2::transcript::evm::EvmTranscript;
use std::marker::PhantomData;
@@ -59,11 +58,7 @@ impl Circuit<Fr> for MyCircuit {
|region| {
let mut region = RegionCtx::new(region, 0, 1, 1024, 2);
config
.layout(
&mut region,
&self.inputs.iter().collect_vec(),
Box::new(PolyOp::Pow(4)),
)
.layout(&mut region, &self.inputs, Box::new(PolyOp::Pow(4)))
.unwrap();
Ok(())
},


@@ -70,7 +70,7 @@ impl Circuit<Fr> for NLCircuit {
config
.layout(
&mut region,
&[&self.input],
&[self.input.clone()],
Box::new(PolyOp::LeakyReLU {
slope: 0.0.into(),
scale: 1,


@@ -67,7 +67,7 @@ impl Circuit<Fr> for NLCircuit {
config
.layout(
&mut region,
&[&self.input],
&[self.input.clone()],
Box::new(LookupOp::Sigmoid { scale: 1.0.into() }),
)
.unwrap();

contracts/AttestData.sol (new file): 397 additions

@@ -0,0 +1,397 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.20;
contract LoadInstances {
/**
* @dev Parse the instances array from the Halo2Verifier encoded calldata.
* @notice must pass encoded bytes from memory
* @param encoded - verifier calldata
*/
function getInstancesMemory(
bytes memory encoded
) public pure returns (uint256[] memory instances) {
bytes4 funcSig;
uint256 instances_offset;
uint256 instances_length;
assembly {
// fetch function sig. Either `verifyProof(bytes,uint256[])` or `verifyProof(address,bytes,uint256[])`
funcSig := mload(add(encoded, 0x20))
}
if (funcSig == 0xaf83a18d) {
instances_offset = 0x64;
} else if (funcSig == 0x1e8e1e13) {
instances_offset = 0x44;
} else {
revert("Invalid function signature");
}
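// Offset arithmetic: `encoded` is an in-memory `bytes` value, so its first 32 bytes
// hold the array length and the 4-byte selector comes next. For
// `verifyProof(bytes,uint256[])` the word holding the instances offset therefore
// starts at 0x20 (length) + 0x04 (selector) + 0x20 (bytes head word) = 0x44; the
// `verifyProof(address,bytes,uint256[])` variant has one extra 32-byte head word
// for the address, giving 0x64.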
assembly {
// Fetch instances offset which is 4 + 32 + 32 bytes away from
// start of encoded for `verifyProof(bytes,uint256[])`,
// and 4 + 32 + 32 +32 away for `verifyProof(address,bytes,uint256[])`
instances_offset := mload(add(encoded, instances_offset))
instances_length := mload(add(add(encoded, 0x24), instances_offset))
}
instances = new uint256[](instances_length); // Allocate memory for the instances array.
assembly {
// Now instances points to the start of the array data
// (right after the length field).
for {
let i := 0x20
} lt(i, add(mul(instances_length, 0x20), 0x20)) {
i := add(i, 0x20)
} {
mstore(
add(instances, i),
mload(add(add(encoded, add(i, 0x24)), instances_offset))
)
}
}
require(
funcSig == 0xaf83a18d || funcSig == 0x1e8e1e13,
"Invalid function signature"
);
}
/**
* @dev Parse the instances array from the Halo2Verifier encoded calldata.
* @notice must pass encoded bytes from calldata
* @param encoded - verifier calldata
*/
function getInstancesCalldata(
bytes calldata encoded
) public pure returns (uint256[] memory instances) {
bytes4 funcSig;
uint256 instances_offset;
uint256 instances_length;
assembly {
// fetch function sig. Either `verifyProof(bytes,uint256[])` or `verifyProof(address,bytes,uint256[])`
funcSig := calldataload(encoded.offset)
}
if (funcSig == 0xaf83a18d) {
instances_offset = 0x44;
} else if (funcSig == 0x1e8e1e13) {
instances_offset = 0x24;
} else {
revert("Invalid function signature");
}
// We need to create a new assembly block in order for solidity
// to cast the funcSig to a bytes4 type. Otherwise it will load the entire first 32 bytes of the calldata
// within the block
assembly {
// Fetch instances offset which is 4 + 32 + 32 bytes away from
// start of encoded for `verifyProof(bytes,uint256[])`,
// and 4 + 32 + 32 +32 away for `verifyProof(address,bytes,uint256[])`
instances_offset := calldataload(
add(encoded.offset, instances_offset)
)
instances_length := calldataload(
add(add(encoded.offset, 0x04), instances_offset)
)
}
instances = new uint256[](instances_length); // Allocate memory for the instances array.
assembly {
// Now instances points to the start of the array data
// (right after the length field).
for {
let i := 0x20
} lt(i, add(mul(instances_length, 0x20), 0x20)) {
i := add(i, 0x20)
} {
mstore(
add(instances, i),
calldataload(
add(add(encoded.offset, add(i, 0x04)), instances_offset)
)
)
}
}
}
}
// The kzg commitments of a given model, all aggregated into a single bytes array.
// At Solidity generation time, the commitments are hardcoded into the contract via the COMMITMENT_KZG constant.
// It will be used to check that the proof commitments match the expected commitments.
bytes constant COMMITMENT_KZG = hex"1234";
contract SwapProofCommitments {
/**
* @dev Swap the proof commitments
* @notice must pass encoded bytes from memory
* @param encoded - verifier calldata
*/
function checkKzgCommits(
bytes calldata encoded
) internal pure returns (bool equal) {
bytes4 funcSig;
uint256 proof_offset;
uint256 proof_length;
assembly {
// fetch function sig. Either `verifyProof(bytes,uint256[])` or `verifyProof(address,bytes,uint256[])`
funcSig := calldataload(encoded.offset)
}
if (funcSig == 0xaf83a18d) {
proof_offset = 0x24;
} else if (funcSig == 0x1e8e1e13) {
proof_offset = 0x04;
} else {
revert("Invalid function signature");
}
assembly {
// Fetch proof offset which is 4 + 32 bytes away from
// start of encoded for `verifyProof(bytes,uint256[])`,
// and 4 + 32 + 32 away for `verifyProof(address,bytes,uint256[])`
proof_offset := calldataload(add(encoded.offset, proof_offset))
proof_length := calldataload(
add(add(encoded.offset, 0x04), proof_offset)
)
}
// Check the length of the commitment against the proof bytes
if (proof_length < COMMITMENT_KZG.length) {
return false;
}
// Load COMMITMENT_KZG into memory
bytes memory commitment = COMMITMENT_KZG;
// Compare the first N bytes of the proof with COMMITMENT_KZG
uint words = (commitment.length + 31) / 32; // Calculate the number of 32-byte words
assembly {
// Now we compare the commitment with the proof,
// ensuring that the commitments divided up into 32 byte words are all equal.
for {
let i := 0x20
} lt(i, add(mul(words, 0x20), 0x20)) {
i := add(i, 0x20)
} {
let wordProof := calldataload(
add(add(encoded.offset, add(i, 0x04)), proof_offset)
)
let wordCommitment := mload(add(commitment, i))
equal := eq(wordProof, wordCommitment)
if eq(equal, 0) {
break
}
}
}
return equal; // Return true if the commitment comparison passed
} /// end checkKzgCommits
}
contract DataAttestation is LoadInstances, SwapProofCommitments {
// the address of the account to make calls to
address public immutable contractAddress;
// the abi encoded function call to make to the `contractAddress` that returns the attested-to data
bytes public callData;
struct Scalars {
// The number of base 10 decimals to scale the data by.
// For most ERC20 tokens this is 1e18
uint256 decimals;
// The number of fractional bits of the fixed point EZKL data points.
uint256 bits;
}
Scalars[] private scalars;
function getScalars(uint256 index) public view returns (Scalars memory) {
return scalars[index];
}
/**
* @notice EZKL P value
* @dev In order to prevent the verifier from accepting two versions of the same pubInput, n and the quantity (n + P), where n + P <= 2^256, we require that all instances are strictly less than P.
* @dev The reason for this is that the assembly code of the verifier performs all arithmetic operations modulo P and as a consequence can't distinguish between n and n + P.
*/
uint256 public constant ORDER =
uint256(
0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000001
);
uint256 public constant HALF_ORDER = ORDER >> 1;
uint8 public instanceOffset;
/**
* @dev Initialize the contract with account calls the EZKL model will read from.
* @param _contractAddresses - The address of the contract that EZKL reads storage from.
* @param _callData - The abi encoded function calls to make to the `contractAddress` that EZKL reads storage from.
*/
constructor(
address _contractAddresses,
bytes memory _callData,
uint256[] memory _decimals,
uint[] memory _bits,
uint8 _instanceOffset
) {
require(
_bits.length == _decimals.length,
"Invalid scalar array lengths"
);
for (uint i; i < _bits.length; i++) {
scalars.push(Scalars(10 ** _decimals[i], 1 << _bits[i]));
}
contractAddress = _contractAddresses;
callData = _callData;
instanceOffset = _instanceOffset;
}
function mulDiv(
uint256 x,
uint256 y,
uint256 denominator
) public pure returns (uint256 result) {
unchecked {
uint256 prod0;
uint256 prod1;
assembly {
let mm := mulmod(x, y, not(0))
prod0 := mul(x, y)
prod1 := sub(sub(mm, prod0), lt(mm, prod0))
}
if (prod1 == 0) {
return prod0 / denominator;
}
require(denominator > prod1, "Math: mulDiv overflow");
uint256 remainder;
assembly {
remainder := mulmod(x, y, denominator)
prod1 := sub(prod1, gt(remainder, prod0))
prod0 := sub(prod0, remainder)
}
uint256 twos = denominator & (~denominator + 1);
assembly {
denominator := div(denominator, twos)
prod0 := div(prod0, twos)
twos := add(div(sub(0, twos), twos), 1)
}
prod0 |= prod1 * twos;
uint256 inverse = (3 * denominator) ^ 2;
inverse *= 2 - denominator * inverse;
inverse *= 2 - denominator * inverse;
inverse *= 2 - denominator * inverse;
inverse *= 2 - denominator * inverse;
inverse *= 2 - denominator * inverse;
inverse *= 2 - denominator * inverse;
result = prod0 * inverse;
return result;
}
}
/**
* @dev Quantize the data returned from the account calls to the scale used by the EZKL model.
* @param x - One of the elements of the data returned from the account calls
* @param _scalars - The scaling factors for the data returned from the account calls.
*
*/
function quantizeData(
int x,
Scalars memory _scalars
) public pure returns (int256 quantized_data) {
if (_scalars.bits == 1 && _scalars.decimals == 1) {
return x;
}
bool neg = x < 0;
if (neg) x = -x;
uint output = mulDiv(uint256(x), _scalars.bits, _scalars.decimals);
if (
mulmod(uint256(x), _scalars.bits, _scalars.decimals) * 2 >=
_scalars.decimals
) {
output += 1;
}
if (output > HALF_ORDER) {
revert("Overflow field modulus");
}
quantized_data = neg ? -int256(output) : int256(output);
}
/**
* @dev Make a static call to the account to fetch the data that EZKL reads from.
* @param target - The address of the account to make calls to.
* @param data - The abi encoded function calls to make to the `contractAddress` that EZKL reads storage from.
* @return The data returned from the account calls. (Must come from either a view or pure function. Will throw an error otherwise)
*/
function staticCall(
address target,
bytes memory data
) public view returns (bytes memory) {
(bool success, bytes memory returndata) = target.staticcall(data);
if (success) {
if (returndata.length == 0) {
require(
target.code.length > 0,
"Address: call to non-contract"
);
}
return returndata;
} else {
revert("Address: low-level call failed");
}
}
/**
* @dev Convert the fixed point quantized data into a field element.
* @param x - The quantized data.
* @return field_element - The field element.
*/
function toFieldElement(
int256 x
) public pure returns (uint256 field_element) {
// The casting down to uint256 is safe because the order is about 2^254, and the value
// of x ranges from -2^127 to 2^127, so x + int(ORDER) is always positive.
return uint256(x + int(ORDER)) % ORDER;
}
/**
* @dev Make the account calls to fetch the data that EZKL reads from and attest to the data.
* @param instances - The public instances to the proof (the data in the proof that is publicly accessible to the verifier).
*/
function attestData(uint256[] memory instances) public view {
bytes memory returnData = staticCall(contractAddress, callData);
int256[] memory x = abi.decode(returnData, (int256[]));
int output;
uint fieldElement;
for (uint i = 0; i < x.length; i++) {
output = quantizeData(x[i], scalars[i]);
fieldElement = toFieldElement(output);
if (fieldElement != instances[i]) {
revert("Public input does not match");
}
}
}
/**
* @dev Verify the proof with the data attestation.
* @param verifier - The address of the verifier contract.
* @param encoded - The verifier calldata.
*/
function verifyWithDataAttestation(
address verifier,
bytes calldata encoded
) public view returns (bool) {
require(verifier.code.length > 0, "Address: call to non-contract");
attestData(getInstancesCalldata(encoded));
require(checkKzgCommits(encoded), "Invalid KZG commitments");
// static call the verifier contract to verify the proof
(bool success, bytes memory returndata) = verifier.staticcall(encoded);
if (success) {
return abi.decode(returndata, (bool));
} else {
revert("low-level call to verifier failed");
}
}
}
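For reference, the arithmetic at the core of the new DataAttestation contract can be reproduced off-chain in a few lines. The sketch below mirrors quantizeData, toFieldElement and the comparison loop in attestData as they appear above; Python integers are arbitrary precision, so a plain multiply-then-floor-divide stands in for the contract's 512-bit mulDiv. The helper names and the example values are illustrative only, not part of this change.

# Off-chain sketch of the on-chain attestation arithmetic (illustrative only).
ORDER = 0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000001
HALF_ORDER = ORDER >> 1

def quantize_data(x: int, decimals: int, bits: int) -> int:
    """Rescale a signed on-chain value with `decimals` base-10 decimals into
    EZKL's fixed-point representation with `bits` fractional bits, rounding
    to nearest, following DataAttestation.quantizeData above."""
    denom = 10 ** decimals          # Scalars.decimals
    scale = 1 << bits               # Scalars.bits
    if denom == 1 and scale == 1:
        return x
    neg = x < 0
    if neg:
        x = -x
    output = (x * scale) // denom               # the contract uses mulDiv here
    if (x * scale % denom) * 2 >= denom:        # round half away from zero
        output += 1
    if output > HALF_ORDER:
        raise OverflowError("Overflow field modulus")
    return -output if neg else output

def to_field_element(x: int) -> int:
    """Map a signed quantized value into the scalar field [0, ORDER)."""
    return (x + ORDER) % ORDER

def attest(data, scalars, instances):
    """Raise if any quantized data point disagrees with the public instances."""
    for x, (decimals, bits), instance in zip(data, scalars, instances):
        if to_field_element(quantize_data(x, decimals, bits)) != instance:
            raise ValueError("Public input does not match")

# Example: an 18-decimal balance of 1.5 tokens quantized with 7 fractional bits
# becomes round(1.5 * 128) = 192, i.e. field element 192.
assert quantize_data(1_500_000_000_000_000_000, 18, 7) == 192
assert to_field_element(-192) == ORDER - 192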

View File

@@ -32,6 +32,7 @@ use mnist::*;
use rand::rngs::OsRng;
use std::marker::PhantomData;
mod params;
const K: usize = 20;
@@ -215,7 +216,11 @@ where
.layer_config
.layout(
&mut region,
&[&self.input, &self.l0_params[0], &self.l0_params[1]],
&[
self.input.clone(),
self.l0_params[0].clone(),
self.l0_params[1].clone(),
],
Box::new(op),
)
.unwrap();
@@ -224,7 +229,7 @@ where
.layer_config
.layout(
&mut region,
&[&x.unwrap()],
&[x.unwrap()],
Box::new(PolyOp::LeakyReLU {
slope: 0.0.into(),
scale: 1,
@@ -236,7 +241,7 @@ where
.layer_config
.layout(
&mut region,
&[&x.unwrap()],
&[x.unwrap()],
Box::new(LookupOp::Div { denom: 32.0.into() }),
)
.unwrap()
@@ -248,7 +253,7 @@ where
.layer_config
.layout(
&mut region,
&[&self.l2_params[0], &x],
&[self.l2_params[0].clone(), x],
Box::new(PolyOp::Einsum {
equation: "ij,j->ik".to_string(),
}),
@@ -260,7 +265,7 @@ where
.layer_config
.layout(
&mut region,
&[&x, &self.l2_params[1]],
&[x, self.l2_params[1].clone()],
Box::new(PolyOp::Add),
)
.unwrap()

View File

@@ -117,7 +117,10 @@ impl<const LEN: usize, const LOOKUP_MIN: IntegerRep, const LOOKUP_MAX: IntegerRe
.layer_config
.layout(
&mut region,
&[&self.l0_params[0].clone().try_into().unwrap(), &self.input],
&[
self.l0_params[0].clone().try_into().unwrap(),
self.input.clone(),
],
Box::new(PolyOp::Einsum {
equation: "ab,bc->ac".to_string(),
}),
@@ -132,7 +135,7 @@ impl<const LEN: usize, const LOOKUP_MIN: IntegerRep, const LOOKUP_MAX: IntegerRe
.layer_config
.layout(
&mut region,
&[&x, &self.l0_params[1].clone().try_into().unwrap()],
&[x, self.l0_params[1].clone().try_into().unwrap()],
Box::new(PolyOp::Add),
)
.unwrap()
@@ -144,7 +147,7 @@ impl<const LEN: usize, const LOOKUP_MIN: IntegerRep, const LOOKUP_MAX: IntegerRe
.layer_config
.layout(
&mut region,
&[&x],
&[x],
Box::new(PolyOp::LeakyReLU {
scale: 1,
slope: 0.0.into(),
@@ -160,7 +163,7 @@ impl<const LEN: usize, const LOOKUP_MIN: IntegerRep, const LOOKUP_MAX: IntegerRe
.layer_config
.layout(
&mut region,
&[&self.l2_params[0].clone().try_into().unwrap(), &x],
&[self.l2_params[0].clone().try_into().unwrap(), x],
Box::new(PolyOp::Einsum {
equation: "ab,bc->ac".to_string(),
}),
@@ -175,7 +178,7 @@ impl<const LEN: usize, const LOOKUP_MIN: IntegerRep, const LOOKUP_MAX: IntegerRe
.layer_config
.layout(
&mut region,
&[&x, &self.l2_params[1].clone().try_into().unwrap()],
&[x, self.l2_params[1].clone().try_into().unwrap()],
Box::new(PolyOp::Add),
)
.unwrap()
@@ -187,7 +190,7 @@ impl<const LEN: usize, const LOOKUP_MIN: IntegerRep, const LOOKUP_MAX: IntegerRe
.layer_config
.layout(
&mut region,
&[&x],
&[x],
Box::new(PolyOp::LeakyReLU {
scale: 1,
slope: 0.0.into(),
@@ -200,7 +203,7 @@ impl<const LEN: usize, const LOOKUP_MIN: IntegerRep, const LOOKUP_MAX: IntegerRe
.layer_config
.layout(
&mut region,
&[&x.unwrap()],
&[x.unwrap()],
Box::new(LookupOp::Div {
denom: ezkl::circuit::utils::F32::from(128.),
}),

View File

@@ -904,7 +904,7 @@
"outputs": [],
"source": [
"\n",
"res = ezkl.calibrate_settings(\"input.json\", target=\"resources\", scales = [4])\n",
"res = await ezkl.calibrate_settings(\"input.json\", target=\"resources\", scales = [4])\n",
"assert res == True\n",
"print(\"verified\")\n"
]
@@ -954,7 +954,7 @@
"source": [
"\n",
"\n",
"res = ezkl.gen_witness()\n"
"res = await ezkl.gen_witness()\n"
]
},
{
@@ -1142,4 +1142,4 @@
},
"nbformat": 4,
"nbformat_minor": 2
}
}
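The notebook hunks in this diff, and the similar ones in the diffs that follow, all make the same API change: ezkl.calibrate_settings and ezkl.gen_witness are now awaited, just as ezkl.get_srs already was. A minimal sketch of the updated calling convention, assuming the Python bindings return awaitables as these diffs indicate (the helper name and file paths are illustrative):

import asyncio
import ezkl

async def calibrate_and_witness(data_path, model_path, compiled_model_path,
                                settings_path, witness_path):
    # Both calls return awaitables in this release, mirroring the notebook diffs.
    res = await ezkl.calibrate_settings(data_path, model_path, settings_path, "resources")
    assert res == True
    res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)
    return res

# Inside Jupyter the calls can be awaited at the top level, as the notebooks do;
# from a plain script, drive them through an event loop instead:
# asyncio.run(calibrate_and_witness("input.json", "network.onnx",
#                                   "network.compiled", "settings.json", "witness.json"))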

View File

@@ -150,7 +150,7 @@
"res = ezkl.gen_settings(model_path, settings_path)\n",
"assert res == True\n",
"\n",
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
"assert res == True"
]
},
@@ -170,7 +170,7 @@
"with open(cal_path, \"w\") as f:\n",
" json.dump(cal_data, f)\n",
"\n",
"res = ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
"res = await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
]
},
{
@@ -204,7 +204,7 @@
"source": [
"# now generate the witness file \n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},

View File

@@ -437,7 +437,7 @@
"\n",
"# Optimize for resources, we cap logrows at 12 to reduce setup and proving time, at the expense of accuracy\n",
"# You may want to increase the max logrows if accuracy is a concern\n",
"res = ezkl.calibrate_settings(target = \"resources\", max_logrows = 12, scales = [2])"
"res = await ezkl.calibrate_settings(target = \"resources\", max_logrows = 12, scales = [2])"
]
},
{
@@ -526,7 +526,7 @@
"# now generate the witness file\n",
"witness_path = os.path.join('witness.json')\n",
"\n",
"res = ezkl.gen_witness()\n",
"res = await ezkl.gen_witness()\n",
"assert os.path.isfile(witness_path)"
]
},
@@ -736,4 +736,4 @@
},
"nbformat": 4,
"nbformat_minor": 0
}
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -467,7 +467,7 @@
"outputs": [],
"source": [
"\n",
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
"assert res == True"
]
},
@@ -508,7 +508,7 @@
"source": [
"# now generate the witness file\n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},

View File

@@ -196,7 +196,7 @@
"json.dump(data, open(cal_path, 'w'))\n",
"\n",
"\n",
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
"assert res == True\n"
]
},
@@ -237,7 +237,7 @@
"source": [
"# now generate the witness file\n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},
@@ -341,4 +341,4 @@
},
"nbformat": 4,
"nbformat_minor": 5
}
}

View File

@@ -179,7 +179,7 @@
"json.dump(data, open(cal_path, 'w'))\n",
"\n",
"\n",
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
"assert res == True\n"
]
},
@@ -214,7 +214,7 @@
"source": [
"# now generate the witness file \n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},

View File

@@ -241,7 +241,7 @@
"with open(cal_path, \"w\") as f:\n",
" json.dump(cal_data, f)\n",
"\n",
"res = ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
"res = await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
]
},
{
@@ -291,7 +291,7 @@
"\n",
"witness_path = \"witness.json\"\n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)"
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)"
]
},
{
@@ -510,4 +510,4 @@
},
"nbformat": 4,
"nbformat_minor": 2
}
}

View File

@@ -152,7 +152,7 @@
"json.dump(data, open(cal_path, 'w'))\n",
"\n",
"\n",
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
"assert res == True\n"
]
},
@@ -188,7 +188,7 @@
"# now generate the witness file \n",
"witness_path = \"witness.json\"\n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},

View File

@@ -155,7 +155,7 @@
"json.dump(data, open(cal_path, 'w'))\n",
"\n",
"\n",
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
"assert res == True\n"
]
},
@@ -190,7 +190,7 @@
"source": [
"# now generate the witness file \n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},

View File

@@ -233,7 +233,7 @@
"with open(cal_path, \"w\") as f:\n",
" json.dump(cal_data, f)\n",
"\n",
"res = ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
"res = await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
]
},
{
@@ -315,7 +315,7 @@
"\n",
"witness_path = \"witness.json\"\n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path, vk_path)\n"
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path, vk_path)\n"
]
},
{
@@ -512,4 +512,4 @@
},
"nbformat": 4,
"nbformat_minor": 2
}
}

View File

@@ -193,7 +193,7 @@
"json.dump(data, open(cal_path, 'w'))\n",
"\n",
"\n",
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
"assert res == True\n"
]
},
@@ -228,7 +228,7 @@
"source": [
"# now generate the witness file \n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},

View File

@@ -1,284 +1,284 @@
{
"cells": [
{
"cell_type": "markdown",
"id": "cf69bb3f-94e6-4dba-92cd-ce08df117d67",
"metadata": {},
"source": [
"## Linear Regression\n",
"\n",
"\n",
"Sklearn based models are slightly finicky to get into a suitable onnx format. \n",
"This notebook showcases how to do so using the `hummingbird-ml` python package ! "
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "95613ee9",
"metadata": {},
"outputs": [],
"source": [
"# check if notebook is in colab\n",
"try:\n",
" # install ezkl\n",
" import google.colab\n",
" import subprocess\n",
" import sys\n",
" subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"ezkl\"])\n",
" subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"onnx\"])\n",
" subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"hummingbird-ml\"])\n",
"\n",
"# rely on local installation of ezkl if the notebook is not in colab\n",
"except:\n",
" pass\n",
"\n",
"import os\n",
"import torch\n",
"import ezkl\n",
"import json\n",
"from hummingbird.ml import convert\n",
"\n",
"\n",
"# here we create and (potentially train a model)\n",
"\n",
"# make sure you have the dependencies required here already installed\n",
"import numpy as np\n",
"from sklearn.linear_model import LinearRegression\n",
"X = np.array([[1, 1], [1, 2], [2, 2], [2, 3]])\n",
"# y = 1 * x_0 + 2 * x_1 + 3\n",
"y = np.dot(X, np.array([1, 2])) + 3\n",
"reg = LinearRegression().fit(X, y)\n",
"reg.score(X, y)\n",
"\n",
"circuit = convert(reg, \"torch\", X[:1]).model\n",
"\n",
"\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b37637c4",
"metadata": {},
"outputs": [],
"source": [
"model_path = os.path.join('network.onnx')\n",
"compiled_model_path = os.path.join('network.compiled')\n",
"pk_path = os.path.join('test.pk')\n",
"vk_path = os.path.join('test.vk')\n",
"settings_path = os.path.join('settings.json')\n",
"\n",
"witness_path = os.path.join('witness.json')\n",
"data_path = os.path.join('input.json')"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "82db373a",
"metadata": {},
"outputs": [],
"source": [
"\n",
"\n",
"# export to onnx format\n",
"# !!!!!!!!!!!!!!!!! This will flash a warning but it is fine !!!!!!!!!!!!!!!!!!!!!\n",
"\n",
"# Input to the model\n",
"shape = X.shape[1:]\n",
"x = torch.rand(1, *shape, requires_grad=True)\n",
"torch_out = circuit(x)\n",
"# Export the model\n",
"torch.onnx.export(circuit, # model being run\n",
" # model input (or a tuple for multiple inputs)\n",
" x,\n",
" # where to save the model (can be a file or file-like object)\n",
" \"network.onnx\",\n",
" export_params=True, # store the trained parameter weights inside the model file\n",
" opset_version=10, # the ONNX version to export the model to\n",
" do_constant_folding=True, # whether to execute constant folding for optimization\n",
" input_names=['input'], # the model's input names\n",
" output_names=['output'], # the model's output names\n",
" dynamic_axes={'input': {0: 'batch_size'}, # variable length axes\n",
" 'output': {0: 'batch_size'}})\n",
"\n",
"d = ((x).detach().numpy()).reshape([-1]).tolist()\n",
"\n",
"data = dict(input_shapes=[shape],\n",
" input_data=[d],\n",
" output_data=[((o).detach().numpy()).reshape([-1]).tolist() for o in torch_out])\n",
"\n",
"# Serialize data into file:\n",
"json.dump(data, open(\"input.json\", 'w'))\n",
"\n",
"\n",
"# note that you can also call the following function to generate random data for the model\n",
"# it is functionally equivalent to the code above\n",
"ezkl.gen_random_data()\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d5e374a2",
"metadata": {},
"outputs": [],
"source": [
"!RUST_LOG=trace\n",
"# TODO: Dictionary outputs\n",
"res = ezkl.gen_settings(model_path, settings_path)\n",
"assert res == True\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"cal_path = os.path.join(\"calibration.json\")\n",
"\n",
"data_array = (torch.randn(20, *shape).detach().numpy()).reshape([-1]).tolist()\n",
"\n",
"data = dict(input_data = [data_array])\n",
"\n",
"# Serialize data into file:\n",
"json.dump(data, open(cal_path, 'w'))\n",
"\n",
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
"assert res == True\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "3aa4f090",
"metadata": {},
"outputs": [],
"source": [
"res = ezkl.compile_circuit(model_path, compiled_model_path, settings_path)\n",
"assert res == True"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "8b74dcee",
"metadata": {},
"outputs": [],
"source": [
"# srs path\n",
"res = await ezkl.get_srs( settings_path)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "18c8b7c7",
"metadata": {},
"outputs": [],
"source": [
"# now generate the witness file \n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b1c561a8",
"metadata": {},
"outputs": [],
"source": [
"\n",
"# HERE WE SETUP THE CIRCUIT PARAMS\n",
"# WE GOT KEYS\n",
"# WE GOT CIRCUIT PARAMETERS\n",
"# EVERYTHING ANYONE HAS EVER NEEDED FOR ZK\n",
"\n",
"\n",
"\n",
"res = ezkl.setup(\n",
" compiled_model_path,\n",
" vk_path,\n",
" pk_path,\n",
" \n",
" )\n",
"\n",
"assert res == True\n",
"assert os.path.isfile(vk_path)\n",
"assert os.path.isfile(pk_path)\n",
"assert os.path.isfile(settings_path)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "c384cbc8",
"metadata": {},
"outputs": [],
"source": [
"# GENERATE A PROOF\n",
"\n",
"\n",
"proof_path = os.path.join('test.pf')\n",
"\n",
"res = ezkl.prove(\n",
" witness_path,\n",
" compiled_model_path,\n",
" pk_path,\n",
" proof_path,\n",
" \n",
" \"single\",\n",
" )\n",
"\n",
"print(res)\n",
"assert os.path.isfile(proof_path)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "76f00d41",
"metadata": {},
"outputs": [],
"source": [
"# VERIFY IT\n",
"\n",
"res = ezkl.verify(\n",
" proof_path,\n",
" settings_path,\n",
" vk_path,\n",
" \n",
" )\n",
"\n",
"assert res == True\n",
"print(\"verified\")"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.15"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
"cells": [
{
"cell_type": "markdown",
"id": "cf69bb3f-94e6-4dba-92cd-ce08df117d67",
"metadata": {},
"source": [
"## Linear Regression\n",
"\n",
"\n",
"Sklearn based models are slightly finicky to get into a suitable onnx format. \n",
"This notebook showcases how to do so using the `hummingbird-ml` python package ! "
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "95613ee9",
"metadata": {},
"outputs": [],
"source": [
"# check if notebook is in colab\n",
"try:\n",
" # install ezkl\n",
" import google.colab\n",
" import subprocess\n",
" import sys\n",
" subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"ezkl\"])\n",
" subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"onnx\"])\n",
" subprocess.check_call([sys.executable, \"-m\", \"pip\", \"install\", \"hummingbird-ml\"])\n",
"\n",
"# rely on local installation of ezkl if the notebook is not in colab\n",
"except:\n",
" pass\n",
"\n",
"import os\n",
"import torch\n",
"import ezkl\n",
"import json\n",
"from hummingbird.ml import convert\n",
"\n",
"\n",
"# here we create and (potentially train a model)\n",
"\n",
"# make sure you have the dependencies required here already installed\n",
"import numpy as np\n",
"from sklearn.linear_model import LinearRegression\n",
"X = np.array([[1, 1], [1, 2], [2, 2], [2, 3]])\n",
"# y = 1 * x_0 + 2 * x_1 + 3\n",
"y = np.dot(X, np.array([1, 2])) + 3\n",
"reg = LinearRegression().fit(X, y)\n",
"reg.score(X, y)\n",
"\n",
"circuit = convert(reg, \"torch\", X[:1]).model\n",
"\n",
"\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b37637c4",
"metadata": {},
"outputs": [],
"source": [
"model_path = os.path.join('network.onnx')\n",
"compiled_model_path = os.path.join('network.compiled')\n",
"pk_path = os.path.join('test.pk')\n",
"vk_path = os.path.join('test.vk')\n",
"settings_path = os.path.join('settings.json')\n",
"\n",
"witness_path = os.path.join('witness.json')\n",
"data_path = os.path.join('input.json')"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "82db373a",
"metadata": {},
"outputs": [],
"source": [
"\n",
"\n",
"# export to onnx format\n",
"# !!!!!!!!!!!!!!!!! This will flash a warning but it is fine !!!!!!!!!!!!!!!!!!!!!\n",
"\n",
"# Input to the model\n",
"shape = X.shape[1:]\n",
"x = torch.rand(1, *shape, requires_grad=True)\n",
"torch_out = circuit(x)\n",
"# Export the model\n",
"torch.onnx.export(circuit, # model being run\n",
" # model input (or a tuple for multiple inputs)\n",
" x,\n",
" # where to save the model (can be a file or file-like object)\n",
" \"network.onnx\",\n",
" export_params=True, # store the trained parameter weights inside the model file\n",
" opset_version=10, # the ONNX version to export the model to\n",
" do_constant_folding=True, # whether to execute constant folding for optimization\n",
" input_names=['input'], # the model's input names\n",
" output_names=['output'], # the model's output names\n",
" dynamic_axes={'input': {0: 'batch_size'}, # variable length axes\n",
" 'output': {0: 'batch_size'}})\n",
"\n",
"d = ((x).detach().numpy()).reshape([-1]).tolist()\n",
"\n",
"data = dict(input_shapes=[shape],\n",
" input_data=[d],\n",
" output_data=[((o).detach().numpy()).reshape([-1]).tolist() for o in torch_out])\n",
"\n",
"# Serialize data into file:\n",
"json.dump(data, open(\"input.json\", 'w'))\n",
"\n",
"\n",
"# note that you can also call the following function to generate random data for the model\n",
"# it is functionally equivalent to the code above\n",
"ezkl.gen_random_data()\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "d5e374a2",
"metadata": {},
"outputs": [],
"source": [
"!RUST_LOG=trace\n",
"# TODO: Dictionary outputs\n",
"res = ezkl.gen_settings(model_path, settings_path)\n",
"assert res == True\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"cal_path = os.path.join(\"calibration.json\")\n",
"\n",
"data_array = (torch.randn(20, *shape).detach().numpy()).reshape([-1]).tolist()\n",
"\n",
"data = dict(input_data = [data_array])\n",
"\n",
"# Serialize data into file:\n",
"json.dump(data, open(cal_path, 'w'))\n",
"\n",
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
"assert res == True\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "3aa4f090",
"metadata": {},
"outputs": [],
"source": [
"res = ezkl.compile_circuit(model_path, compiled_model_path, settings_path)\n",
"assert res == True"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "8b74dcee",
"metadata": {},
"outputs": [],
"source": [
"# srs path\n",
"res = await ezkl.get_srs( settings_path)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "18c8b7c7",
"metadata": {},
"outputs": [],
"source": [
"# now generate the witness file \n",
"\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b1c561a8",
"metadata": {},
"outputs": [],
"source": [
"\n",
"# HERE WE SETUP THE CIRCUIT PARAMS\n",
"# WE GOT KEYS\n",
"# WE GOT CIRCUIT PARAMETERS\n",
"# EVERYTHING ANYONE HAS EVER NEEDED FOR ZK\n",
"\n",
"\n",
"\n",
"res = ezkl.setup(\n",
" compiled_model_path,\n",
" vk_path,\n",
" pk_path,\n",
" \n",
" )\n",
"\n",
"assert res == True\n",
"assert os.path.isfile(vk_path)\n",
"assert os.path.isfile(pk_path)\n",
"assert os.path.isfile(settings_path)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "c384cbc8",
"metadata": {},
"outputs": [],
"source": [
"# GENERATE A PROOF\n",
"\n",
"\n",
"proof_path = os.path.join('test.pf')\n",
"\n",
"res = ezkl.prove(\n",
" witness_path,\n",
" compiled_model_path,\n",
" pk_path,\n",
" proof_path,\n",
" \n",
" \"single\",\n",
" )\n",
"\n",
"print(res)\n",
"assert os.path.isfile(proof_path)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "76f00d41",
"metadata": {},
"outputs": [],
"source": [
"# VERIFY IT\n",
"\n",
"res = ezkl.verify(\n",
" proof_path,\n",
" settings_path,\n",
" vk_path,\n",
" \n",
" )\n",
"\n",
"assert res == True\n",
"print(\"verified\")"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.15"
}
},
"nbformat": 4,
"nbformat_minor": 5
}

View File

@@ -347,7 +347,7 @@
"# Serialize data into file:\n",
"json.dump(data, open(cal_path, 'w'))\n",
"\n",
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
"assert res == True\n"
]
},
@@ -383,7 +383,7 @@
"# now generate the witness file \n",
"witness_path = \"gan_witness.json\"\n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},

View File

@@ -142,7 +142,7 @@
"# Serialize data into file:\n",
"json.dump(data, open(cal_path, 'w'))\n",
"\n",
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
"assert res == True\n"
]
},
@@ -177,7 +177,7 @@
"source": [
"# now generate the witness file \n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},
@@ -276,4 +276,4 @@
},
"nbformat": 4,
"nbformat_minor": 5
}
}

View File

@@ -139,7 +139,7 @@
"res = ezkl.gen_settings(model_path, settings_path, py_run_args=run_args)\n",
"assert res == True\n",
"\n",
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
"assert res == True\n"
]
},
@@ -193,7 +193,7 @@
"# now generate the witness file \n",
"witness_path = \"lstmwitness.json\"\n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},

View File

@@ -323,7 +323,7 @@
"res = ezkl.gen_settings(model_path, settings_path, py_run_args=run_args)\n",
"assert res == True\n",
"\n",
"res = ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\", scales=[2,7])\n",
"res = await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\", scales=[2,7])\n",
"assert res == True"
]
},
@@ -362,7 +362,7 @@
"# now generate the witness file\n",
"witness_path = \"witness.json\"\n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},
@@ -558,4 +558,4 @@
},
"nbformat": 4,
"nbformat_minor": 0
}
}

View File

@@ -289,7 +289,7 @@
"json.dump(data, open(cal_path, 'w'))\n",
"\n",
"\n",
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\", scales=[0,6])"
"await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\", scales=[0,6])"
]
},
{
@@ -321,7 +321,7 @@
"# now generate the witness file \n",
"witness_path = \"gan_witness.json\"\n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},
@@ -425,4 +425,4 @@
},
"nbformat": 4,
"nbformat_minor": 2
}
}

View File

@@ -341,7 +341,7 @@
"\n",
" # generate settings for the current model\n",
" res = ezkl.gen_settings(model_path, settings_path, py_run_args=run_args)\n",
" res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\", scales=[run_args.input_scale], max_logrows=run_args.logrows)\n",
" res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\", scales=[run_args.input_scale], max_logrows=run_args.logrows)\n",
" assert res == True\n",
"\n",
" # load settings and print them to the console\n",
@@ -361,7 +361,7 @@
" assert res == True\n",
" assert os.path.isfile(vk_path)\n",
" assert os.path.isfile(pk_path)\n",
" res = ezkl.gen_witness(data_path, compiled_model_path, witness_path, vk_path)\n",
" res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path, vk_path)\n",
" run_args.input_scale = settings[\"model_output_scales\"][0]\n",
"\n",
"for i in range(3):\n",
@@ -484,4 +484,4 @@
},
"nbformat": 4,
"nbformat_minor": 2
}
}

View File

@@ -215,7 +215,7 @@
"json.dump(data, open(cal_path, 'w'))\n",
"\n",
"\n",
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
"await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
]
},
{
@@ -247,7 +247,7 @@
"# now generate the witness file\n",
"witness_path = \"ae_witness.json\"\n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},
@@ -451,7 +451,7 @@
"res = ezkl.gen_settings(model_path, settings_path)\n",
"assert res == True\n",
"\n",
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
"assert res == True\n",
"print(\"verified\")"
]
@@ -485,7 +485,7 @@
"# now generate the witness file \n",
"witness_path = \"vae_witness.json\"\n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},
@@ -590,4 +590,4 @@
},
"nbformat": 4,
"nbformat_minor": 2
}
}

View File

@@ -845,7 +845,7 @@
"res = ezkl.gen_settings(model_path, settings_path)\n",
"assert res == True\n",
"\n",
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\", max_logrows = 20, scales = [3])\n",
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\", max_logrows = 20, scales = [3])\n",
"assert res == True"
]
},
@@ -881,7 +881,7 @@
},
"outputs": [],
"source": [
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},

File diff suppressed because it is too large

View File

@@ -282,7 +282,7 @@
"\n",
" # generate settings for the current model\n",
" res = ezkl.gen_settings(model_path, settings_path, py_run_args=run_args)\n",
" res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\", scales=[run_args.input_scale], max_logrows=run_args.logrows)\n",
" res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\", scales=[run_args.input_scale], max_logrows=run_args.logrows)\n",
" assert res == True\n",
"\n",
" # load settings and print them to the console\n",
@@ -303,7 +303,7 @@
" assert os.path.isfile(vk_path)\n",
" assert os.path.isfile(pk_path)\n",
"\n",
" res = ezkl.gen_witness(data_path, compiled_model_path, witness_path, vk_path)\n",
" res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path, vk_path)\n",
" run_args.input_scale = settings[\"model_output_scales\"][0]\n",
"\n",
"for i in range(2):\n",
@@ -472,4 +472,4 @@
},
"nbformat": 4,
"nbformat_minor": 2
}
}

View File

@@ -176,7 +176,7 @@
"json.dump(data, open(cal_path, 'w'))\n",
"\n",
"\n",
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
"await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
]
},
{
@@ -210,7 +210,7 @@
"source": [
"# now generate the witness file \n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},
@@ -309,4 +309,4 @@
},
"nbformat": 4,
"nbformat_minor": 5
}
}

View File

@@ -1,336 +1,331 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Reusable Verifiers \n",
"\n",
"TODO: Update the reusable verifier solidity contract name.. Make it less generic to H2 and more bespoke to us.\n",
"\n",
"This notebook demonstrates how to create and reuse the same set of separated verifiers for different models. Specifically, we will use the same verifier for the following four models:\n",
"\n",
"- `1l_mlp sigmoid`\n",
"- `1l_mlp relu`\n",
"- `1l_conv sigmoid`\n",
"- `1l_conv relu`\n",
"\n",
"When deploying EZKL verifiers on the blockchain, each associated model typically requires its own unique verifier, leading to increased on-chain state usage. \n",
"However, with the reusable verifier, we can deploy a single verifier that can be used to verify proofs for any valid H2 circuit. This notebook shows how to do so. \n",
"\n",
"By reusing the same verifier across multiple models, we significantly reduce the amount of state bloat on the blockchain. Instead of deploying a unique verifier for each model, we register a unique and much smaller verifying key artifact (VKA) on the reusable verifier contract for each model while sharing a common separated verifier. The VKA contains the VK for the model as well circuit specific metadata that was otherwise hardcoded into the stack of the original non-reusable verifier. The VKA is passed as a parameter to the verifyProof method. This VKA calldata needs to be d with the reusable verifier before it can start verifying proofs by calling the registerVKA method. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import torch\n",
"import torch.nn as nn\n",
"import torch.onnx\n",
"\n",
"# Define the models\n",
"class MLP_Sigmoid(nn.Module):\n",
" def __init__(self):\n",
" super(MLP_Sigmoid, self).__init__()\n",
" self.fc = nn.Linear(3, 3)\n",
" self.sigmoid = nn.Sigmoid()\n",
"\n",
" def forward(self, x):\n",
" x = self.fc(x)\n",
" x = self.sigmoid(x)\n",
" return x\n",
"\n",
"class MLP_Relu(nn.Module):\n",
" def __init__(self):\n",
" super(MLP_Relu, self).__init__()\n",
" self.fc = nn.Linear(3, 3)\n",
" self.relu = nn.ReLU()\n",
"\n",
" def forward(self, x):\n",
" x = self.fc(x)\n",
" x = self.relu(x)\n",
" return x\n",
"\n",
"class Conv_Sigmoid(nn.Module):\n",
" def __init__(self):\n",
" super(Conv_Sigmoid, self).__init__()\n",
" self.conv = nn.Conv1d(1, 1, kernel_size=3, stride=1)\n",
" self.sigmoid = nn.Sigmoid()\n",
"\n",
" def forward(self, x):\n",
" x = self.conv(x)\n",
" x = self.sigmoid(x)\n",
" return x\n",
"\n",
"class Conv_Relu(nn.Module):\n",
" def __init__(self):\n",
" super(Conv_Relu, self).__init__()\n",
" self.conv = nn.Conv1d(1, 1, kernel_size=3, stride=1)\n",
" self.relu = nn.ReLU()\n",
"\n",
" def forward(self, x):\n",
" x = self.conv(x)\n",
" x = self.relu(x)\n",
" return x\n",
"\n",
"# Instantiate the models\n",
"mlp_sigmoid = MLP_Sigmoid()\n",
"mlp_relu = MLP_Relu()\n",
"conv_sigmoid = Conv_Sigmoid()\n",
"conv_relu = Conv_Relu()\n",
"\n",
"# Dummy input tensor for mlp\n",
"dummy_input_mlp = torch.tensor([[-1.5737053155899048, -1.708398461341858, 0.19544155895709991]])\n",
"input_mlp_path = 'mlp_input.json'\n",
"\n",
"# Dummy input tensor for conv\n",
"dummy_input_conv = torch.tensor([[[1.4124163389205933, 0.6938204169273376, 1.0664031505584717]]])\n",
"input_conv_path = 'conv_input.json'"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"names = ['mlp_sigmoid', 'mlp_relu', 'conv_sigmoid', 'conv_relu']\n",
"models = [mlp_sigmoid, mlp_relu, conv_sigmoid, conv_relu]\n",
"inputs = [dummy_input_mlp, dummy_input_mlp, dummy_input_conv, dummy_input_conv]\n",
"input_paths = [input_mlp_path, input_mlp_path, input_conv_path, input_conv_path]"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"import json\n",
"import torch\n",
"import ezkl\n",
"\n",
"for name, model, x, input_path in zip(names, models, inputs, input_paths):\n",
" # Create a new directory for the model if it doesn't exist\n",
" if not os.path.exists(name):\n",
" os.mkdir(name)\n",
" # Store the paths in each of their respective directories\n",
" model_path = os.path.join(name, \"network.onnx\")\n",
" compiled_model_path = os.path.join(name, \"network.compiled\")\n",
" pk_path = os.path.join(name, \"test.pk\")\n",
" vk_path = os.path.join(name, \"test.vk\")\n",
" settings_path = os.path.join(name, \"settings.json\")\n",
"\n",
" witness_path = os.path.join(name, \"witness.json\")\n",
" sol_code_path = os.path.join(name, 'test.sol')\n",
" vka_path = os.path.join(name, 'vka.bytes')\n",
" abi_path = os.path.join(name, 'test.abi')\n",
" proof_path = os.path.join(name, \"proof.json\")\n",
"\n",
" # Flips the neural net into inference mode\n",
" model.eval()\n",
"\n",
" # Export the model\n",
" torch.onnx.export(model, x, model_path, export_params=True, opset_version=10,\n",
" do_constant_folding=True, input_names=['input'],\n",
" output_names=['output'], dynamic_axes={'input': {0: 'batch_size'},\n",
" 'output': {0: 'batch_size'}})\n",
"\n",
" data_array = ((x).detach().numpy()).reshape([-1]).tolist()\n",
" data = dict(input_data=[data_array])\n",
" json.dump(data, open(input_path, 'w'))\n",
"\n",
" py_run_args = ezkl.PyRunArgs()\n",
" py_run_args.input_visibility = \"private\"\n",
" py_run_args.output_visibility = \"public\"\n",
" py_run_args.param_visibility = \"fixed\" # private by default\n",
"\n",
" res = ezkl.gen_settings(model_path, settings_path, py_run_args=py_run_args)\n",
" assert res == True\n",
"\n",
" ezkl.calibrate_settings(input_path, model_path, settings_path, \"resources\")\n",
"\n",
" res = ezkl.compile_circuit(model_path, compiled_model_path, settings_path)\n",
" assert res == True\n",
"\n",
" res = await ezkl.get_srs(settings_path)\n",
" assert res == True\n",
"\n",
" # now generate the witness file\n",
" res = ezkl.gen_witness(input_path, compiled_model_path, witness_path)\n",
" assert os.path.isfile(witness_path) == True\n",
"\n",
" # SETUP \n",
" # We recommend disabling selector compression for the setup as it decreases the size of the VK artifact\n",
" res = ezkl.setup(compiled_model_path, vk_path, pk_path, disable_selector_compression=True)\n",
" assert res == True\n",
" assert os.path.isfile(vk_path)\n",
" assert os.path.isfile(pk_path)\n",
" assert os.path.isfile(settings_path)\n",
"\n",
" # GENERATE A PROOF\n",
" res = ezkl.prove(witness_path, compiled_model_path, pk_path, proof_path, \"single\")\n",
" assert os.path.isfile(proof_path)\n",
"\n",
" res = await ezkl.create_evm_verifier(vk_path, settings_path, sol_code_path, abi_path, reusable=True)\n",
" # TODO: Add a flag force equals true to in the deprication process to preserve OG single purpose verifier?\n",
" assert res == True\n",
"\n",
" # TODO: \n",
" res = await ezkl.create_evm_vka(vk_path, settings_path, vka_path, decimals=18)\n",
" assert res == True\n"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [],
"source": [
"import subprocess\n",
"import time\n",
"\n",
"# make sure anvil is running locally\n",
"# $ anvil -p 3030\n",
"\n",
"RPC_URL = \"http://localhost:3030\"\n",
"\n",
"# Save process globally\n",
"anvil_process = None\n",
"\n",
"def start_anvil():\n",
" global anvil_process\n",
" if anvil_process is None:\n",
" anvil_process = subprocess.Popen([\"anvil\", \"-p\", \"3030\", \"--code-size-limit=41943040\"])\n",
" if anvil_process.returncode is not None:\n",
" raise Exception(\"failed to start anvil process\")\n",
" time.sleep(3)\n",
"\n",
"def stop_anvil():\n",
" global anvil_process\n",
" if anvil_process is not None:\n",
" anvil_process.terminate()\n",
" anvil_process = None\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Check that the generated verifiers are identical for all models."
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [],
"source": [
"import filecmp\n",
"\n",
"def compare_files(file1, file2):\n",
" return filecmp.cmp(file1, file2, shallow=False)\n",
"\n",
"sol_code_path_0 = os.path.join(\"mlp_sigmoid\", 'test.sol')\n",
"sol_code_path_1 = os.path.join(\"mlp_relu\", 'test.sol')\n",
"\n",
"sol_code_path_2 = os.path.join(\"conv_sigmoid\", 'test.sol')\n",
"sol_code_path_3 = os.path.join(\"conv_relu\", 'test.sol')\n",
"\n",
"\n",
"assert compare_files(sol_code_path_0, sol_code_path_1) == True\n",
"assert compare_files(sol_code_path_2, sol_code_path_3) == True"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Here we deploy reusable verifier that will be shared by the four models. We picked the `1l_mlp sigmoid` model as an example but you could have used any of the generated verifiers since they are all identical. "
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [],
"source": [
"import os \n",
"addr_path_verifier = \"addr_verifier.txt\"\n",
"sol_code_path = os.path.join(\"mlp_sigmoid\", 'test.sol')\n",
"\n",
"res = await ezkl.deploy_evm(\n",
" addr_path_verifier,\n",
" 'http://127.0.0.1:3030',\n",
" sol_code_path,\n",
" \"verifier/reusable\" # TODO deprecate this option for selecting the type of verifier you want to deploy. \n",
" # verifier, verifier/reusable, vka\n",
")\n",
"\n",
"assert res == True\n",
"\n",
"with open(addr_path_verifier, 'r') as file:\n",
" addr = file.read().rstrip()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Finally we deploy each of the unique VK-artifacts and verify them using the shared verifier deployed in the previous step."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"for name in names:\n",
" addr_path_vk = \"addr_vk.txt\"\n",
" vka_path = os.path.join(name, 'vka.bytes')\n",
" res = await ezkl.register_vka(\n",
" addr, # address of the reusable verifier. TODO: If we deploy the RV across all chains to a single canoncial address, we can hardcode that address and remove this param.\n",
" 'http://127.0.0.1:3030',\n",
" vka_path=vka_path, # TODO: Pass in private key and potentially create new command that both creates and registers the vka. Simplify testing pipeline for us and other folks. \n",
" )\n",
" assert res == True\n",
" \n",
" proof_path = os.path.join(name, \"proof.json\")\n",
" res = await ezkl.verify_evm(\n",
" addr,\n",
" \"http://127.0.0.1:3030\",\n",
" proof_path,\n",
" vka_path = vka_path # TODO: Turn this from optional to required if we deprecate the orignal verifier. \n",
" # TODO: Make it where the use only needs to deply a vka. \n",
" )\n",
" assert res == True"
]
}
],
"metadata": {
"kernelspec": {
"display_name": ".env",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.5"
}
},
"nbformat": 4,
"nbformat_minor": 2
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Reusable Verifiers \n",
"\n",
"This notebook demonstrates how to create and reuse the same set of separated verifiers for different models. Specifically, we will use the same verifier for the following four models:\n",
"\n",
"- `1l_mlp sigmoid`\n",
"- `1l_mlp relu`\n",
"- `1l_conv sigmoid`\n",
"- `1l_conv relu`\n",
"\n",
"When deploying EZKL verifiers on the blockchain, each associated model typically requires its own unique verifier, leading to increased on-chain state usage. \n",
"However, with the reusable verifier, we can deploy a single verifier that can be used to verify proofs for any valid H2 circuit. This notebook shows how to do so. \n",
"\n",
"By reusing the same verifier across multiple models, we significantly reduce the amount of state bloat on the blockchain. Instead of deploying a unique verifier for each model, we deploy a unique and much smaller verifying key artifact (VKA) contract for each model while sharing a common separated verifier. The VKA contains the VK for the model as well circuit specific metadata that was otherwise hardcoded into the stack of the original non-reusable verifier."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import torch\n",
"import torch.nn as nn\n",
"import torch.onnx\n",
"\n",
"# Define the models\n",
"class MLP_Sigmoid(nn.Module):\n",
" def __init__(self):\n",
" super(MLP_Sigmoid, self).__init__()\n",
" self.fc = nn.Linear(3, 3)\n",
" self.sigmoid = nn.Sigmoid()\n",
"\n",
" def forward(self, x):\n",
" x = self.fc(x)\n",
" x = self.sigmoid(x)\n",
" return x\n",
"\n",
"class MLP_Relu(nn.Module):\n",
" def __init__(self):\n",
" super(MLP_Relu, self).__init__()\n",
" self.fc = nn.Linear(3, 3)\n",
" self.relu = nn.ReLU()\n",
"\n",
" def forward(self, x):\n",
" x = self.fc(x)\n",
" x = self.relu(x)\n",
" return x\n",
"\n",
"class Conv_Sigmoid(nn.Module):\n",
" def __init__(self):\n",
" super(Conv_Sigmoid, self).__init__()\n",
" self.conv = nn.Conv1d(1, 1, kernel_size=3, stride=1)\n",
" self.sigmoid = nn.Sigmoid()\n",
"\n",
" def forward(self, x):\n",
" x = self.conv(x)\n",
" x = self.sigmoid(x)\n",
" return x\n",
"\n",
"class Conv_Relu(nn.Module):\n",
" def __init__(self):\n",
" super(Conv_Relu, self).__init__()\n",
" self.conv = nn.Conv1d(1, 1, kernel_size=3, stride=1)\n",
" self.relu = nn.ReLU()\n",
"\n",
" def forward(self, x):\n",
" x = self.conv(x)\n",
" x = self.relu(x)\n",
" return x\n",
"\n",
"# Instantiate the models\n",
"mlp_sigmoid = MLP_Sigmoid()\n",
"mlp_relu = MLP_Relu()\n",
"conv_sigmoid = Conv_Sigmoid()\n",
"conv_relu = Conv_Relu()\n",
"\n",
"# Dummy input tensor for mlp\n",
"dummy_input_mlp = torch.tensor([[-1.5737053155899048, -1.708398461341858, 0.19544155895709991]])\n",
"input_mlp_path = 'mlp_input.json'\n",
"\n",
"# Dummy input tensor for conv\n",
"dummy_input_conv = torch.tensor([[[1.4124163389205933, 0.6938204169273376, 1.0664031505584717]]])\n",
"input_conv_path = 'conv_input.json'"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"names = ['mlp_sigmoid', 'mlp_relu', 'conv_sigmoid', 'conv_relu']\n",
"models = [mlp_sigmoid, mlp_relu, conv_sigmoid, conv_relu]\n",
"inputs = [dummy_input_mlp, dummy_input_mlp, dummy_input_conv, dummy_input_conv]\n",
"input_paths = [input_mlp_path, input_mlp_path, input_conv_path, input_conv_path]"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"import json\n",
"import torch\n",
"import ezkl\n",
"\n",
"for name, model, x, input_path in zip(names, models, inputs, input_paths):\n",
" # Create a new directory for the model if it doesn't exist\n",
" if not os.path.exists(name):\n",
" os.mkdir(name)\n",
" # Store the paths in each of their respective directories\n",
" model_path = os.path.join(name, \"network.onnx\")\n",
" compiled_model_path = os.path.join(name, \"network.compiled\")\n",
" pk_path = os.path.join(name, \"test.pk\")\n",
" vk_path = os.path.join(name, \"test.vk\")\n",
" settings_path = os.path.join(name, \"settings.json\")\n",
"\n",
" witness_path = os.path.join(name, \"witness.json\")\n",
" sol_code_path = os.path.join(name, 'test.sol')\n",
" vka_path = os.path.join(name, 'vka.bytes')\n",
" abi_path = os.path.join(name, 'test.abi')\n",
" proof_path = os.path.join(name, \"proof.json\")\n",
"\n",
" # Flips the neural net into inference mode\n",
" model.eval()\n",
"\n",
" # Export the model\n",
" torch.onnx.export(model, x, model_path, export_params=True, opset_version=10,\n",
" do_constant_folding=True, input_names=['input'],\n",
" output_names=['output'], dynamic_axes={'input': {0: 'batch_size'},\n",
" 'output': {0: 'batch_size'}})\n",
"\n",
" data_array = ((x).detach().numpy()).reshape([-1]).tolist()\n",
" data = dict(input_data=[data_array])\n",
" json.dump(data, open(input_path, 'w'))\n",
"\n",
" py_run_args = ezkl.PyRunArgs()\n",
" py_run_args.input_visibility = \"private\"\n",
" py_run_args.output_visibility = \"public\"\n",
" py_run_args.param_visibility = \"fixed\" # private by default\n",
"\n",
" res = ezkl.gen_settings(model_path, settings_path, py_run_args=py_run_args)\n",
" assert res == True\n",
"\n",
" await ezkl.calibrate_settings(input_path, model_path, settings_path, \"resources\")\n",
"\n",
" res = ezkl.compile_circuit(model_path, compiled_model_path, settings_path)\n",
" assert res == True\n",
"\n",
" res = await ezkl.get_srs(settings_path)\n",
" assert res == True\n",
"\n",
" # now generate the witness file\n",
" res = await ezkl.gen_witness(input_path, compiled_model_path, witness_path)\n",
" assert os.path.isfile(witness_path) == True\n",
"\n",
" # SETUP \n",
" # We recommend disabling selector compression for the setup as it decreases the size of the VK artifact\n",
" res = ezkl.setup(compiled_model_path, vk_path, pk_path, disable_selector_compression=True)\n",
" assert res == True\n",
" assert os.path.isfile(vk_path)\n",
" assert os.path.isfile(pk_path)\n",
" assert os.path.isfile(settings_path)\n",
"\n",
" # GENERATE A PROOF\n",
" res = ezkl.prove(witness_path, compiled_model_path, pk_path, proof_path, \"single\")\n",
" assert os.path.isfile(proof_path)\n",
"\n",
" res = await ezkl.create_evm_verifier(vk_path, settings_path, sol_code_path, abi_path, reusable=True)\n",
" assert res == True\n",
"\n",
" res = await ezkl.create_evm_vka(vk_path, settings_path, vka_path)\n",
" assert res == True\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import subprocess\n",
"import time\n",
"\n",
"# make sure anvil is running locally\n",
"# $ anvil -p 3030\n",
"\n",
"RPC_URL = \"http://localhost:3030\"\n",
"\n",
"# Save process globally\n",
"anvil_process = None\n",
"\n",
"def start_anvil():\n",
" global anvil_process\n",
" if anvil_process is None:\n",
" anvil_process = subprocess.Popen([\"anvil\", \"-p\", \"3030\", \"--code-size-limit=41943040\"])\n",
" if anvil_process.returncode is not None:\n",
" raise Exception(\"failed to start anvil process\")\n",
" time.sleep(3)\n",
"\n",
"def stop_anvil():\n",
" global anvil_process\n",
" if anvil_process is not None:\n",
" anvil_process.terminate()\n",
" anvil_process = None\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Check that the generated verifiers are identical for all models."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import filecmp\n",
"\n",
"def compare_files(file1, file2):\n",
" return filecmp.cmp(file1, file2, shallow=False)\n",
"\n",
"sol_code_path_0 = os.path.join(\"mlp_sigmoid\", 'test.sol')\n",
"sol_code_path_1 = os.path.join(\"mlp_relu\", 'test.sol')\n",
"\n",
"sol_code_path_2 = os.path.join(\"conv_sigmoid\", 'test.sol')\n",
"sol_code_path_3 = os.path.join(\"conv_relu\", 'test.sol')\n",
"\n",
"\n",
"assert compare_files(sol_code_path_0, sol_code_path_1) == True\n",
"assert compare_files(sol_code_path_2, sol_code_path_3) == True"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Here we deploy separate verifier that will be shared by the four models. We picked the `1l_mlp sigmoid` model as an example but you could have used any of the generated verifiers since they are all identical. "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import os \n",
"addr_path_verifier = \"addr_verifier.txt\"\n",
"sol_code_path = os.path.join(\"mlp_sigmoid\", 'test.sol')\n",
"\n",
"res = await ezkl.deploy_evm(\n",
" addr_path_verifier,\n",
" 'http://127.0.0.1:3030',\n",
" sol_code_path,\n",
" \"verifier/reusable\"\n",
")\n",
"\n",
"assert res == True\n",
"\n",
"with open(addr_path_verifier, 'r') as file:\n",
" addr = file.read().rstrip()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Finally we deploy each of the unique VK-artifacts and verify them using the shared verifier deployed in the previous step."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"for name in names:\n",
" addr_path_vk = \"addr_vk.txt\"\n",
" vka_path = os.path.join(name, 'vka.bytes')\n",
" res = await ezkl.register_vka(\n",
" addr,\n",
" 'http://127.0.0.1:3030',\n",
" vka_path=vka_path,\n",
" )\n",
" assert res == True\n",
" \n",
" proof_path = os.path.join(name, \"proof.json\")\n",
" sol_code_path = os.path.join(name, 'vk.sol')\n",
" res = await ezkl.verify_evm(\n",
" addr,\n",
" \"http://127.0.0.1:3030\",\n",
" proof_path,\n",
" vka_path = vka_path\n",
" )\n",
" assert res == True"
]
}
],
"metadata": {
"kernelspec": {
"display_name": ".env",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.5"
}
},
"nbformat": 4,
"nbformat_minor": 2
}

View File

@@ -231,7 +231,7 @@
"source": [
"# now generate the witness file\n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},
@@ -267,7 +267,7 @@
" # Serialize data into file:\n",
"json.dump( data, open(data_path_faulty, 'w' ))\n",
"\n",
"res = ezkl.gen_witness(data_path_faulty, compiled_model_path, witness_path_faulty)\n",
"res = await ezkl.gen_witness(data_path_faulty, compiled_model_path, witness_path_faulty)\n",
"assert os.path.isfile(witness_path_faulty)"
]
},
@@ -312,7 +312,7 @@
"# Serialize data into file:\n",
"json.dump( data, open(data_path_truthy, 'w' ))\n",
"\n",
"res = ezkl.gen_witness(data_path_truthy, compiled_model_path, witness_path_truthy)\n",
"res = await ezkl.gen_witness(data_path_truthy, compiled_model_path, witness_path_truthy)\n",
"assert os.path.isfile(witness_path_truthy)"
]
},

View File

@@ -171,7 +171,7 @@
"json.dump(data, open(cal_path, 'w'))\n",
"\n",
"\n",
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
"await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
]
},
{
@@ -205,7 +205,7 @@
"source": [
"# now generate the witness file \n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},
@@ -404,4 +404,4 @@
},
"nbformat": 4,
"nbformat_minor": 5
}
}

View File

@@ -171,7 +171,7 @@
"json.dump(data, open(cal_path, 'w'))\n",
"\n",
"\n",
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
"await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
]
},
{
@@ -205,7 +205,7 @@
"source": [
"# now generate the witness file \n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},
@@ -304,4 +304,4 @@
},
"nbformat": 4,
"nbformat_minor": 5
}
}

View File

@@ -169,7 +169,7 @@
"json.dump(data, open(cal_path, 'w'))\n",
"\n",
"\n",
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
"await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
]
},
{
@@ -203,7 +203,7 @@
"source": [
"# now generate the witness file \n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},
@@ -302,4 +302,4 @@
},
"nbformat": 4,
"nbformat_minor": 5
}
}

View File

@@ -170,7 +170,7 @@
"json.dump(data, open(cal_path, 'w'))\n",
"\n",
"\n",
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
"await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
]
},
{
@@ -204,7 +204,7 @@
"source": [
"# now generate the witness file \n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},
@@ -303,4 +303,4 @@
},
"nbformat": 4,
"nbformat_minor": 5
}
}

View File

@@ -149,7 +149,7 @@
"json.dump(data, open(cal_path, 'w'))\n",
"\n",
"\n",
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
"await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
]
},
{
@@ -183,7 +183,7 @@
"source": [
"# now generate the witness file \n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},

View File

@@ -298,7 +298,7 @@
"\n",
"witness_path = \"witness.json\"\n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path, vk_path)\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path, vk_path)\n",
"assert os.path.isfile(witness_path)\n",
"\n",
"# we force the output to be 1 this corresponds to the solvency test being true -- and we set this to a fixed vis output\n",
@@ -412,7 +412,7 @@
"source": [
"# now generate the witness file\n",
"\n",
"res = ezkl.gen_witness(data_path_faulty, compiled_model_path, witness_path, vk_path)\n",
"res = await ezkl.gen_witness(data_path_faulty, compiled_model_path, witness_path, vk_path)\n",
"assert os.path.isfile(witness_path)\n",
"\n",
"# we force the output to be 1 this corresponds to the solvency test being true -- and we set this to a fixed vis output\n",

View File

@@ -167,7 +167,7 @@
"res = ezkl.gen_settings(model_path, settings_path)\n",
"assert res == True\n",
"\n",
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
"assert res == True"
]
},
@@ -187,7 +187,7 @@
"json.dump(data, open(cal_path, 'w'))\n",
"\n",
"\n",
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
"await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
]
},
{
@@ -221,7 +221,7 @@
"source": [
"# now generate the witness file \n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},

View File

@@ -152,7 +152,7 @@
"json.dump(data, open(cal_path, 'w'))\n",
"\n",
"\n",
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
"await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
]
},
{
@@ -186,7 +186,7 @@
"source": [
"# now generate the witness file \n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},
@@ -392,7 +392,7 @@
"res = ezkl.gen_settings(model_path, settings_path)\n",
"assert res == True\n",
"\n",
"res = ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
"res = await ezkl.calibrate_settings(data_path, model_path, settings_path, \"resources\")\n",
"assert res == True"
]
}
@@ -418,4 +418,4 @@
},
"nbformat": 4,
"nbformat_minor": 5
}
}

View File

@@ -637,7 +637,7 @@
"json.dump(data, open(cal_path, 'w'))\n",
"\n",
"\n",
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\", scales = [11])"
"await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\", scales = [11])"
]
},
{
@@ -683,7 +683,7 @@
" data = json.load(f)\n",
" print(len(data['input_data'][0]))\n",
"\n",
"ezkl.gen_witness(data_path, compiled_model_path, witness_path)"
"await ezkl.gen_witness(data_path, compiled_model_path, witness_path)"
]
},
{
@@ -758,4 +758,4 @@
},
"nbformat": 4,
"nbformat_minor": 4
}
}

View File

@@ -525,7 +525,7 @@
"json.dump(data, open(cal_path, 'w'))\n",
"\n",
"\n",
"ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\", scales = [4])"
"await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\", scales = [4])"
]
},
{
@@ -572,7 +572,7 @@
" data = json.load(f)\n",
" print(len(data['input_data'][0]))\n",
"\n",
"ezkl.gen_witness(data_path, compiled_model_path, witness_path)"
"await ezkl.gen_witness(data_path, compiled_model_path, witness_path)"
]
},
{
@@ -647,4 +647,4 @@
},
"nbformat": 4,
"nbformat_minor": 4
}
}

View File

@@ -458,7 +458,7 @@
"\n",
"\n",
"ezkl.gen_settings(onnx_filename, settings_filename)\n",
"ezkl.calibrate_settings(\n",
"await ezkl.calibrate_settings(\n",
" input_filename, onnx_filename, settings_filename, \"resources\", scales = [4])\n",
"res = await ezkl.get_srs(settings_filename)\n",
"ezkl.compile_circuit(onnx_filename, compiled_filename, settings_filename)\n",
@@ -527,7 +527,7 @@
"\n",
"witness_path = \"witness.json\"\n",
"\n",
"res = ezkl.gen_witness(input_filename, compiled_filename, witness_path)\n",
"res = await ezkl.gen_witness(input_filename, compiled_filename, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},
@@ -762,4 +762,4 @@
},
"nbformat": 4,
"nbformat_minor": 0
}
}

View File

@@ -629,7 +629,7 @@
"source": [
"\n",
"\n",
"res = ezkl.calibrate_settings(val_data, model_path, settings_path, \"resources\", scales = [4])\n",
"res = await ezkl.calibrate_settings(val_data, model_path, settings_path, \"resources\", scales = [4])\n",
"assert res == True\n",
"print(\"verified\")\n"
]
@@ -680,7 +680,7 @@
"\n",
"witness_path = \"witness.json\"\n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},
@@ -905,4 +905,4 @@
},
"nbformat": 4,
"nbformat_minor": 2
}
}

View File

@@ -193,7 +193,7 @@
"with open(cal_path, \"w\") as f:\n",
" json.dump(cal_data, f)\n",
"\n",
"res = ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
"res = await ezkl.calibrate_settings(cal_path, model_path, settings_path, \"resources\")"
]
},
{
@@ -227,7 +227,7 @@
"source": [
"# now generate the witness file \n",
"\n",
"res = ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"res = await ezkl.gen_witness(data_path, compiled_model_path, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},

File diff suppressed because it is too large

View File

@@ -104,5 +104,5 @@ json.dump(data, open("input.json", 'w'))
# ezkl.gen_settings("network.onnx", "settings.json")
# !RUST_LOG = full
# res = ezkl.calibrate_settings(
# res = await ezkl.calibrate_settings(
# "input.json", "network.onnx", "settings.json", "resources")

View File

@@ -0,0 +1,60 @@
# inbrowser-evm-verify
We would like the Solidity verifier to be canonical and, in most cases, the only verifier you will ever need. For that to work, we need to be able to run the same verifier in the browser.
## How to use (Node.js)
```ts
import localEVMVerify from '@ezkljs/verify';
// Load in the proof file as a buffer
const proofFileBuffer = fs.readFileSync(`${path}/${example}/proof.pf`)
// Stringified EZKL evm verifier bytecode (this is just an example don't use in production)
const bytecode = '0x608060405234801561001057600080fd5b5060d38061001f6000396000f3fe608060405234801561001057600080fd5b50600436106100415760003560e01c8063cfae321714610046575b600080fd5b6100496100f1565b60405161005691906100f1565b60405180910390f35b'
const result = await localEVMVerify(proofFileBuffer, bytecode)
console.log('result', result)
```
**Note**: Run `ezkl create-evm-verifier` to generate the Solidity verifier; once it is compiled you can retrieve its bytecode. We recommend compiling for the Shanghai hardfork target, otherwise you will have to pass an additional parameter specifying the EVM version to `localEVMVerify`, like so (for the Paris hardfork):
```ts
import localEVMVerify, { hardfork } from '@ezkljs/verify';
const result = await localEVMVerify(proofFileBuffer, bytecode, hardfork['Paris'])
```
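Below is a minimal sketch of one way to load the compiled verifier bytecode before calling `localEVMVerify`. The artifact path and JSON shape are illustrative assumptions about your own Solidity build tooling (solc, Hardhat, Foundry, ...), not something this package provides:
```ts
import fs from 'fs'
import localEVMVerify from '@ezkljs/verify'

// Hypothetical build artifact produced by your Solidity toolchain after
// compiling the ezkl-generated verifier contract.
const artifact = JSON.parse(fs.readFileSync('out/Halo2Verifier.json', 'utf8'))
const bytecode: string = artifact.bytecode // deployment bytecode as a '0x...' hex string

const proofFileBuffer = fs.readFileSync('proof.pf')
const result = await localEVMVerify(proofFileBuffer, bytecode)
```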
**Note**: You can also verify separated vk verifiers using the `localEVMVerify` function. Just pass the vk verifier bytecode as the third parameter like so:
```ts
import localEVMVerify from '@ezkljs/verify';
const result = await localEVMVerify(proofFileBuffer, verifierBytecode, VKBytecode)
```
## How to use (Browser)
```ts
import localEVMVerify from '@ezkljs/verify';
// Load in the proof file as a buffer using the web apis (fetch, FileReader, etc)
// We use fetch in this example to load the proof file as a buffer
const proofFileBuffer = await fetch(`${path}/${example}/proof.pf`).then(res => res.arrayBuffer())
// Stringified EZKL evm verifier bytecode (this is just an example don't use in production)
const bytecode = '0x608060405234801561001057600080fd5b5060d38061001f6000396000f3fe608060405234801561001057600080fd5b50600436106100415760003560e01c8063cfae321714610046575b600080fd5b6100496100f1565b60405161005691906100f1565b60405180910390f35b'
const result = await localEVMVerify(proofFileBuffer, bytecode)
console.log('result', result)
```
Output:
```ts
result: true
```

View File

@@ -0,0 +1,42 @@
{
"name": "@ezkljs/verify",
"version": "v10.4.2",
"publishConfig": {
"access": "public"
},
"description": "Evm verify EZKL proofs in the browser.",
"main": "dist/commonjs/index.js",
"module": "dist/esm/index.js",
"types": "dist/commonjs/index.d.ts",
"files": [
"dist",
"LICENSE",
"README.md"
],
"scripts": {
"clean": "rm -r dist || true",
"build:commonjs": "tsc --project tsconfig.commonjs.json && resolve-tspaths -p tsconfig.commonjs.json",
"build:esm": "tsc --project tsconfig.esm.json && resolve-tspaths -p tsconfig.esm.json",
"build": "npm run clean && npm run build:commonjs && npm run build:esm"
},
"dependencies": {
"@ethereumjs/common": "4.0.0",
"@ethereumjs/evm": "2.0.0",
"@ethereumjs/statemanager": "2.0.0",
"@ethereumjs/tx": "5.0.0",
"@ethereumjs/util": "9.0.0",
"@ethereumjs/vm": "7.0.0",
"@ethersproject/abi": "5.7.0",
"@ezkljs/engine": "10.4.2",
"ethers": "6.7.1",
"json-bigint": "1.0.0"
},
"devDependencies": {
"@types/node": "^20.8.3",
"ts-loader": "^9.5.0",
"ts-node": "^10.9.1",
"resolve-tspaths": "^0.8.16",
"tsconfig-paths": "^4.2.0",
"typescript": "^5.2.2"
}
}

in-browser-evm-verifier/pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large

View File

@@ -0,0 +1,144 @@
import { defaultAbiCoder as AbiCoder } from '@ethersproject/abi'
import { Address, hexToBytes } from '@ethereumjs/util'
import { Chain, Common, Hardfork } from '@ethereumjs/common'
import { LegacyTransaction, LegacyTxData } from '@ethereumjs/tx'
// import { DefaultStateManager } from '@ethereumjs/statemanager'
// import { Blockchain } from '@ethereumjs/blockchain'
import { VM } from '@ethereumjs/vm'
import { EVM } from '@ethereumjs/evm'
import { buildTransaction, encodeDeployment } from './utils/tx-builder'
import { getAccountNonce, insertAccount } from './utils/account-utils'
import { encodeVerifierCalldata } from '../nodejs/ezkl';
async function deployContract(
vm: VM,
common: Common,
senderPrivateKey: Uint8Array,
deploymentBytecode: string
): Promise<Address> {
// Contracts are deployed by sending their deployment bytecode to the address 0
// The contract params should be abi-encoded and appended to the deployment bytecode.
// const data =
const data = encodeDeployment(deploymentBytecode)
const txData = {
data,
nonce: await getAccountNonce(vm, senderPrivateKey),
}
const tx = LegacyTransaction.fromTxData(
buildTransaction(txData) as LegacyTxData,
{ common, allowUnlimitedInitCodeSize: true },
).sign(senderPrivateKey)
const deploymentResult = await vm.runTx({
tx,
skipBlockGasLimitValidation: true,
skipNonce: true
})
if (deploymentResult.execResult.exceptionError) {
throw deploymentResult.execResult.exceptionError
}
return deploymentResult.createdAddress!
}
async function verify(
vm: VM,
contractAddress: Address,
caller: Address,
proof: Uint8Array | Uint8ClampedArray,
vkAddress?: Address | Uint8Array,
): Promise<boolean> {
if (proof instanceof Uint8Array) {
proof = new Uint8ClampedArray(proof.buffer)
}
if (vkAddress) {
const vkAddressBytes = hexToBytes(vkAddress.toString())
const vkAddressArray = Array.from(vkAddressBytes)
let string = JSON.stringify(vkAddressArray)
const uint8Array = new TextEncoder().encode(string);
// Convert the JSON-encoded address bytes back into a Uint8Array
vkAddress = new Uint8Array(uint8Array.buffer);
console.error('vkAddress', vkAddress)
}
const data = encodeVerifierCalldata(proof, vkAddress)
const verifyResult = await vm.evm.runCall({
to: contractAddress,
caller: caller,
origin: caller, // The tx.origin is also the caller here
data: data,
})
if (verifyResult.execResult.exceptionError) {
throw verifyResult.execResult.exceptionError
}
const results = AbiCoder.decode(['bool'], verifyResult.execResult.returnValue)
return results[0]
}
/**
* Spins up an ephemeral EVM instance for executing the bytecode of a solidity verifier
* @param proof Json serialized proof file
* @param bytecode_verifier The bytecode of a compiled solidity verifier.
* @param bytecode_vk The bytecode of a contract that stores the vk. (Optional, only required if the vk is stored in a separate contract)
* @param evmVersion The evm version to use for the verification. (Default: Shanghai)
* @returns The result of the evm verification.
* @throws If the verify transaction reverts
*/
export default async function localEVMVerify(
proof: Uint8Array | Uint8ClampedArray,
bytecode_verifier: string,
bytecode_vk?: string,
evmVersion?: Hardfork,
): Promise<boolean> {
try {
const hardfork = evmVersion ? evmVersion : Hardfork['Shanghai']
const common = new Common({ chain: Chain.Mainnet, hardfork })
const accountPk = hexToBytes(
'0xe331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109', // anvil deterministic Pk
)
const evm = new EVM({
allowUnlimitedContractSize: true,
allowUnlimitedInitCodeSize: true,
})
const vm = await VM.create({ common, evm })
const accountAddress = Address.fromPrivateKey(accountPk)
await insertAccount(vm, accountAddress)
const verifierAddress = await deployContract(
vm,
common,
accountPk,
bytecode_verifier
)
if (bytecode_vk) {
const accountPk = hexToBytes("0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80"); // anvil deterministic Pk
const accountAddress = Address.fromPrivateKey(accountPk)
await insertAccount(vm, accountAddress)
const output = await deployContract(vm, common, accountPk, bytecode_vk)
const result = await verify(vm, verifierAddress, accountAddress, proof, output)
return true
}
const result = await verify(vm, verifierAddress, accountAddress, proof)
return result
} catch (error) {
// log or re-throw the error, depending on your needs
console.error('An error occurred:', error)
throw error
}
}

View File

@@ -0,0 +1,32 @@
import { VM } from '@ethereumjs/vm'
import { Account, Address } from '@ethereumjs/util'
export const keyPair = {
secretKey:
'0x3cd7232cd6f3fc66a57a6bedc1a8ed6c228fff0a327e169c2bcc5e869ed49511',
publicKey:
'0x0406cc661590d48ee972944b35ad13ff03c7876eae3fd191e8a2f77311b0a3c6613407b5005e63d7d8d76b89d5f900cde691497688bb281e07a5052ff61edebdc0',
}
export const insertAccount = async (vm: VM, address: Address) => {
const acctData = {
nonce: 0,
balance: BigInt('1000000000000000000'), // 1 eth
}
const account = Account.fromAccountData(acctData)
await vm.stateManager.putAccount(address, account)
}
export const getAccountNonce = async (
vm: VM,
accountPrivateKey: Uint8Array,
) => {
const address = Address.fromPrivateKey(accountPrivateKey)
const account = await vm.stateManager.getAccount(address)
if (account) {
return account.nonce
} else {
return BigInt(0)
}
}

View File

@@ -0,0 +1,59 @@
import { Interface, defaultAbiCoder as AbiCoder } from '@ethersproject/abi'
import {
AccessListEIP2930TxData,
FeeMarketEIP1559TxData,
TxData,
} from '@ethereumjs/tx'
type TransactionsData =
| TxData
| AccessListEIP2930TxData
| FeeMarketEIP1559TxData
export const encodeFunction = (
method: string,
params?: {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
types: any[]
values: unknown[]
},
): string => {
const parameters = params?.types ?? []
const methodWithParameters = `function ${method}(${parameters.join(',')})`
const signatureHash = new Interface([methodWithParameters]).getSighash(method)
const encodedArgs = AbiCoder.encode(parameters, params?.values ?? [])
return signatureHash + encodedArgs.slice(2)
}
export const encodeDeployment = (
bytecode: string,
params?: {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
types: any[]
values: unknown[]
},
) => {
const deploymentData = '0x' + bytecode
if (params) {
const argumentsEncoded = AbiCoder.encode(params.types, params.values)
return deploymentData + argumentsEncoded.slice(2)
}
return deploymentData
}
export const buildTransaction = (
data: Partial<TransactionsData>,
): TransactionsData => {
const defaultData: Partial<TransactionsData> = {
gasLimit: 3_000_000_000_000_000,
gasPrice: 7,
value: 0,
data: '0x',
}
return {
...defaultData,
...data,
}
}

View File

@@ -0,0 +1,7 @@
{
"extends": "./tsconfig.json",
"compilerOptions": {
"module": "CommonJS",
"outDir": "./dist/commonjs"
}
}

View File

@@ -0,0 +1,7 @@
{
"extends": "./tsconfig.json",
"compilerOptions": {
"module": "ES2020",
"outDir": "./dist/esm"
}
}

View File

@@ -0,0 +1,62 @@
{
"compilerOptions": {
"rootDir": "src",
"target": "es2017",
"outDir": "dist",
"declaration": true,
"lib": [
"dom",
"dom.iterable",
"esnext"
],
"allowJs": true,
"checkJs": true,
"skipLibCheck": true,
"strict": true,
"forceConsistentCasingInFileNames": true,
"noEmit": false,
"esModuleInterop": true,
"module": "CommonJS",
"moduleResolution": "node",
"resolveJsonModule": true,
"isolatedModules": true,
"jsx": "preserve",
// "incremental": true,
"noUncheckedIndexedAccess": true,
"baseUrl": ".",
"paths": {
"@/*": [
"./src/*"
]
}
},
"include": [
"src/**/*.ts",
"src/**/*.tsx",
"src/**/*.cjs",
"src/**/*.mjs"
],
"exclude": [
"node_modules"
],
// NEW: Options for file/directory watching
"watchOptions": {
// Use native file system events for files and directories
"watchFile": "useFsEvents",
"watchDirectory": "useFsEvents",
// Poll files for updates more frequently
// when they're updated a lot.
"fallbackPolling": "dynamicPriority",
// Don't coalesce watch notification
"synchronousWatchDirectory": true,
// Finally, two additional settings for reducing the amount of possible
// files to track work from these directories
"excludeDirectories": [
"**/node_modules",
"_build"
],
"excludeFiles": [
"build/fileWhichChangesOften.ts"
]
}
}

View File

@@ -1,5 +1,12 @@
// ignore file if compiling for wasm
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use mimalloc::MiMalloc;
#[global_allocator]
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
static GLOBAL: MiMalloc = MiMalloc;
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use clap::{CommandFactory, Parser};
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]

View File

@@ -3,7 +3,7 @@
pub mod python;
/// Universal bindings for all platforms
#[cfg(any(
feature = "universal-bindings",
feature = "ios-bindings",
all(target_arch = "wasm32", target_os = "unknown")
))]
pub mod universal;

View File

@@ -1030,6 +1030,7 @@ fn gen_random_data(
))]
#[gen_stub_pyfunction]
fn calibrate_settings(
py: Python,
data: String,
model: PathBuf,
settings: PathBuf,
@@ -1038,7 +1039,7 @@ fn calibrate_settings(
scales: Option<Vec<crate::Scale>>,
scale_rebase_multiplier: Vec<u32>,
max_logrows: Option<u32>,
) -> PyResult<bool> {
) -> PyResult<Bound<'_, PyAny>> {
crate::execute::calibrate(
model,
data,
@@ -1090,12 +1091,13 @@ fn calibrate_settings(
))]
#[gen_stub_pyfunction]
fn gen_witness(
py: Python,
data: String,
model: PathBuf,
output: Option<PathBuf>,
vk_path: Option<PathBuf>,
srs_path: Option<PathBuf>,
) -> PyResult<PyObject> {
) -> PyResult<Bound<'_, PyAny>> {
let output =
crate::execute::gen_witness(model, data, output, vk_path, srs_path).map_err(|e| {
let err_str = format!("Failed to generate witness: {}", e);
@@ -1839,9 +1841,6 @@ fn register_vka<'a>(
///
/// vka_path: str
/// The path to the VKA calldata bytes file (generated using the create_evm_vka command)
///
/// encoded_calldata: str
/// The path to the encoded calldata bytes file (generated using the encode calldata command)
/// Returns
/// -------
/// bool
@@ -1851,7 +1850,6 @@ fn register_vka<'a>(
rpc_url,
proof_path=PathBuf::from(DEFAULT_PROOF),
vka_path = None,
encoded_calldata = None,
))]
#[gen_stub_pyfunction]
fn verify_evm<'a>(
@@ -1860,23 +1858,16 @@ fn verify_evm<'a>(
rpc_url: String,
proof_path: PathBuf,
vka_path: Option<PathBuf>,
encoded_calldata: Option<PathBuf>,
) -> PyResult<Bound<'a, PyAny>> {
let addr_verifier = H160Flag::from(addr_verifier);
pyo3_async_runtimes::tokio::future_into_py(py, async move {
crate::execute::verify_evm(
proof_path,
addr_verifier,
rpc_url,
vka_path,
encoded_calldata,
)
.await
.map_err(|e| {
let err_str = format!("Failed to run verify_evm: {}", e);
PyRuntimeError::new_err(err_str)
})?;
crate::execute::verify_evm(proof_path, addr_verifier, rpc_url, vka_path)
.await
.map_err(|e| {
let err_str = format!("Failed to run verify_evm: {}", e);
PyRuntimeError::new_err(err_str)
})?;
Ok(true)
})

View File

@@ -1,6 +1,7 @@
use halo2_proofs::{
plonk::*,
poly::{
VerificationStrategy,
commitment::{CommitmentScheme, ParamsProver},
ipa::{
commitment::{IPACommitmentScheme, ParamsIPA},
@@ -12,7 +13,6 @@ use halo2_proofs::{
multiopen::{ProverSHPLONK, VerifierSHPLONK},
strategy::SingleStrategy as KZGSingleStrategy,
},
VerificationStrategy,
},
};
use std::fmt::Display;
@@ -20,23 +20,19 @@ use std::io::BufReader;
use std::str::FromStr;
use crate::{
CheckMode, Commitments, EZKLError as InnerEZKLError,
circuit::region::RegionSettings,
graph::GraphSettings,
pfsys::{
create_proof_circuit, encode_calldata,
TranscriptType, create_proof_circuit,
evm::aggregation_kzg::{AggregationCircuit, PoseidonTranscript},
verify_proof_circuit, TranscriptType,
verify_proof_circuit,
},
tensor::TensorType,
CheckMode, Commitments, EZKLError as InnerEZKLError,
};
use crate::circuit::modules::poseidon::{
spec::{PoseidonSpec, POSEIDON_RATE, POSEIDON_WIDTH},
PoseidonChip,
};
use crate::circuit::modules::Module;
use crate::graph::{GraphCircuit, GraphWitness};
use halo2_solidity_verifier::encode_calldata;
use halo2curves::{
bn256::{Bn256, Fr, G1Affine},
ff::{FromUniformBytes, PrimeField},
@@ -65,32 +61,10 @@ impl From<InnerEZKLError> for EZKLError {
}
}
/// Hash the input message with poseidon
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn poseidon_hash(message: Vec<u8>) -> Result<Vec<u8>, EZKLError> {
let message: Vec<Fr> = serde_json::from_slice(&message[..]).map_err(InnerEZKLError::from)?;
let output = PoseidonChip::<PoseidonSpec, POSEIDON_WIDTH, POSEIDON_RATE>::run(message.clone())
.map_err(InnerEZKLError::from)?;
Ok(serde_json::to_vec(&output).map_err(InnerEZKLError::from)?)
}
/// Hash the input message with poseidon without converting to Fr
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn poseidon_hash_no_felt(message: Vec<u8>) -> Result<Vec<u8>, EZKLError> {
let message: Vec<Fr> = message.iter().map(|x| Fr::from(*x as u64)).collect();
let output = PoseidonChip::<PoseidonSpec, POSEIDON_WIDTH, POSEIDON_RATE>::run(message.clone())
.map_err(InnerEZKLError::from)?;
Ok(serde_json::to_vec(&output).map_err(InnerEZKLError::from)?)
}
/// Encode verifier calldata from proof and ethereum vk_address
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn encode_verifier_calldata(
// TODO - should it be pub or pub or pub(super)?
pub(crate) fn encode_verifier_calldata(
// TODO - should it be pub(crate) or pub or pub(super)?
proof: Vec<u8>,
vka: Option<Vec<u8>>,
) -> Result<Vec<u8>, EZKLError> {
@@ -116,23 +90,18 @@ pub fn encode_verifier_calldata(
/// Generate witness from compiled circuit and input json
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn gen_witness(compiled_circuit: Vec<u8>, input: Vec<u8>) -> Result<Vec<u8>, EZKLError> {
println!("[circuit]");
pub(crate) fn gen_witness(compiled_circuit: Vec<u8>, input: Vec<u8>) -> Result<Vec<u8>, EZKLError> {
let mut circuit: crate::graph::GraphCircuit = bincode::deserialize(&compiled_circuit[..])
.map_err(|e| {
EZKLError::InternalError(format!("Failed to deserialize compiled model: {}", e))
})?;
println!("[input]");
let input: crate::graph::input::GraphData = serde_json::from_slice(&input[..])
.map_err(|e| EZKLError::InternalError(format!("Failed to deserialize input: {}", e)))?;
println!("[load graph input]");
let mut input = circuit
.load_graph_input(&input)
.map_err(|e| EZKLError::InternalError(format!("{}", e)))?;
println!("[load graph witness]");
let witness = circuit
.forward::<KZGCommitmentScheme<Bn256>>(
&mut input,
@@ -145,14 +114,13 @@ pub fn gen_witness(compiled_circuit: Vec<u8>, input: Vec<u8>) -> Result<Vec<u8>,
)
.map_err(|e| EZKLError::InternalError(format!("{}", e)))?;
println!("[serialize witness]");
serde_json::to_vec(&witness)
.map_err(|e| EZKLError::InternalError(format!("Failed to serialize witness: {}", e)))
}
/// Generate verifying key from compiled circuit, and parameters srs
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn gen_vk(
pub(crate) fn gen_vk(
compiled_circuit: Vec<u8>,
srs: Vec<u8>,
compress_selectors: bool,
@@ -182,7 +150,11 @@ pub fn gen_vk(
/// Generate proving key from vk, compiled circuit and parameters srs
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn gen_pk(vk: Vec<u8>, compiled_circuit: Vec<u8>, srs: Vec<u8>) -> Result<Vec<u8>, EZKLError> {
pub(crate) fn gen_pk(
vk: Vec<u8>,
compiled_circuit: Vec<u8>,
srs: Vec<u8>,
) -> Result<Vec<u8>, EZKLError> {
let mut reader = BufReader::new(&srs[..]);
let params: ParamsKZG<Bn256> = get_params(&mut reader)?;
@@ -209,7 +181,7 @@ pub fn gen_pk(vk: Vec<u8>, compiled_circuit: Vec<u8>, srs: Vec<u8>) -> Result<Ve
/// Verify proof with vk, proof json, circuit settings json and srs
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn verify(
pub(crate) fn verify(
proof: Vec<u8>,
vk: Vec<u8>,
settings: Vec<u8>,
@@ -291,7 +263,7 @@ pub fn verify(
/// Verify aggregate proof with vk, proof, circuit settings and srs
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn verify_aggr(
pub(crate) fn verify_aggr(
proof: Vec<u8>,
vk: Vec<u8>,
logrows: u64,
@@ -373,7 +345,7 @@ pub fn verify_aggr(
/// Prove in browser with compiled circuit, witness json, proving key, and srs
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn prove(
pub(crate) fn prove(
witness: Vec<u8>,
pk: Vec<u8>,
compiled_circuit: Vec<u8>,
@@ -471,7 +443,7 @@ pub fn prove(
/// Validate the witness json
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn witness_validation(witness: Vec<u8>) -> Result<bool, EZKLError> {
pub(crate) fn witness_validation(witness: Vec<u8>) -> Result<bool, EZKLError> {
let _: GraphWitness = serde_json::from_slice(&witness[..]).map_err(InnerEZKLError::from)?;
Ok(true)
@@ -479,7 +451,7 @@ pub fn witness_validation(witness: Vec<u8>) -> Result<bool, EZKLError> {
/// Validate the compiled circuit
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn compiled_circuit_validation(compiled_circuit: Vec<u8>) -> Result<bool, EZKLError> {
pub(crate) fn compiled_circuit_validation(compiled_circuit: Vec<u8>) -> Result<bool, EZKLError> {
let _: GraphCircuit = bincode::deserialize(&compiled_circuit[..]).map_err(|e| {
EZKLError::InternalError(format!("Failed to deserialize compiled circuit: {}", e))
})?;
@@ -489,7 +461,7 @@ pub fn compiled_circuit_validation(compiled_circuit: Vec<u8>) -> Result<bool, EZ
/// Validate the input json
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn input_validation(input: Vec<u8>) -> Result<bool, EZKLError> {
pub(crate) fn input_validation(input: Vec<u8>) -> Result<bool, EZKLError> {
let _: crate::graph::input::GraphData =
serde_json::from_slice(&input[..]).map_err(InnerEZKLError::from)?;
@@ -498,7 +470,7 @@ pub fn input_validation(input: Vec<u8>) -> Result<bool, EZKLError> {
/// Validate the proof json
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn proof_validation(proof: Vec<u8>) -> Result<bool, EZKLError> {
pub(crate) fn proof_validation(proof: Vec<u8>) -> Result<bool, EZKLError> {
let _: crate::pfsys::Snark<Fr, G1Affine> =
serde_json::from_slice(&proof[..]).map_err(InnerEZKLError::from)?;
@@ -507,7 +479,7 @@ pub fn proof_validation(proof: Vec<u8>) -> Result<bool, EZKLError> {
/// Validate the verifying key given the settings json
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn vk_validation(vk: Vec<u8>, settings: Vec<u8>) -> Result<bool, EZKLError> {
pub(crate) fn vk_validation(vk: Vec<u8>, settings: Vec<u8>) -> Result<bool, EZKLError> {
let circuit_settings: GraphSettings =
serde_json::from_slice(&settings[..]).map_err(InnerEZKLError::from)?;
@@ -524,7 +496,7 @@ pub fn vk_validation(vk: Vec<u8>, settings: Vec<u8>) -> Result<bool, EZKLError>
/// Validate the proving key given the settings json
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn pk_validation(pk: Vec<u8>, settings: Vec<u8>) -> Result<bool, EZKLError> {
pub(crate) fn pk_validation(pk: Vec<u8>, settings: Vec<u8>) -> Result<bool, EZKLError> {
let circuit_settings: GraphSettings =
serde_json::from_slice(&settings[..]).map_err(InnerEZKLError::from)?;
@@ -541,7 +513,7 @@ pub fn pk_validation(pk: Vec<u8>, settings: Vec<u8>) -> Result<bool, EZKLError>
/// Validate the settings json
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn settings_validation(settings: Vec<u8>) -> Result<bool, EZKLError> {
pub(crate) fn settings_validation(settings: Vec<u8>) -> Result<bool, EZKLError> {
let _: GraphSettings = serde_json::from_slice(&settings[..]).map_err(InnerEZKLError::from)?;
Ok(true)
@@ -549,7 +521,7 @@ pub fn settings_validation(settings: Vec<u8>) -> Result<bool, EZKLError> {
/// Validate the srs
#[cfg_attr(feature = "ios-bindings", uniffi::export)]
pub fn srs_validation(srs: Vec<u8>) -> Result<bool, EZKLError> {
pub(crate) fn srs_validation(srs: Vec<u8>) -> Result<bool, EZKLError> {
let mut reader = BufReader::new(&srs[..]);
let _: ParamsKZG<Bn256> =
halo2_proofs::poly::commitment::Params::<'_, G1Affine>::read(&mut reader).map_err(|e| {

View File

@@ -1,5 +1,12 @@
use crate::{
circuit::modules::polycommit::PolyCommitChip,
circuit::modules::{
polycommit::PolyCommitChip,
poseidon::{
spec::{PoseidonSpec, POSEIDON_RATE, POSEIDON_WIDTH},
PoseidonChip,
},
Module,
},
fieldutils::{felt_to_integer_rep, integer_rep_to_felt},
graph::{quantize_float, scale_to_multiplier, GraphCircuit, GraphSettings},
};
@@ -8,7 +15,6 @@ use halo2_proofs::{
plonk::*,
poly::kzg::commitment::{KZGCommitmentScheme, ParamsKZG},
};
use halo2_solidity_verifier::Evm;
use halo2curves::{
bn256::{Bn256, Fr, G1Affine},
ff::PrimeField,
@@ -219,9 +225,15 @@ pub fn bufferToVecOfFelt(
pub fn poseidonHash(
message: wasm_bindgen::Clamped<Vec<u8>>,
) -> Result<wasm_bindgen::Clamped<Vec<u8>>, JsError> {
super::universal::poseidon_hash(message.0)
.map_err(JsError::from)
.map(|x| wasm_bindgen::Clamped(x.clone()))
let message: Vec<Fr> = serde_json::from_slice(&message[..])
.map_err(|e| JsError::new(&format!("Failed to deserialize message: {}", e)))?;
let output = PoseidonChip::<PoseidonSpec, POSEIDON_WIDTH, POSEIDON_RATE>::run(message.clone())
.map_err(|e| JsError::new(&format!("{}", e)))?;
Ok(wasm_bindgen::Clamped(serde_json::to_vec(&output).map_err(
|e| JsError::new(&format!("Failed to serialize poseidon hash output: {}", e)),
)?))
}
/// Generate a witness file from input.json, compiled model and a settings.json file.
@@ -267,33 +279,6 @@ pub fn verify(
super::universal::verify(proof_js.0, vk.0, settings.0, srs.0).map_err(JsError::from)
}
/// Verify proof in browser evm using wasm
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn verifyEVM(
proof_js: wasm_bindgen::Clamped<Vec<u8>>,
bytecode_verifier: Vec<u8>,
bytecode_vka: Option<Vec<u8>>,
) -> Result<bool, JsError> {
let mut evm = Evm::unlimited();
let decoded_verifier = utf8_bytes_to_hex_decoded(&bytecode_verifier)?;
let (verifier_address, _) = evm.create(decoded_verifier);
// if bytecode_vk is Some, then create the vk contract
let vk_address = if let Some(bytecode_vka) = bytecode_vka {
let decoded_vka = utf8_bytes_to_hex_decoded(&bytecode_vka)?;
let (address, _) = evm.create(decoded_vka);
Some(address.as_slice().to_vec())
// check if bytecode_verifier is none and if so then generate the
// reusable verifier
} else {
None
};
let calldata = encode_verifier_calldata(proof_js.0, vk_address).map_err(JsError::from);
let output = evm.call(verifier_address, calldata?).1;
let true_word = [vec![0; 31], vec![1]].concat();
Ok(output == true_word)
}
/// Verify aggregate proof in browser using wasm
#[wasm_bindgen]
#[allow(non_snake_case)]
@@ -386,13 +371,3 @@ pub fn u8_array_to_u128_le(arr: [u8; 16]) -> u128 {
}
n
}
///
pub fn utf8_bytes_to_hex_decoded(input: &[u8]) -> Result<Vec<u8>, JsError> {
let string = std::str::from_utf8(input)?.trim();
let hex_string = if string.starts_with("0x") {
&string[2..]
} else {
string
};
hex::decode(hex_string).map_err(JsError::from)
}

View File

@@ -962,7 +962,7 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> BaseConfig<F> {
pub fn layout(
&mut self,
region: &mut RegionCtx<F>,
values: &[&ValTensor<F>],
values: &[ValTensor<F>],
op: Box<dyn Op<F>>,
) -> Result<Option<ValTensor<F>>, CircuitError> {
op.layout(self, region, values)

View File

@@ -1,7 +1,7 @@
use super::*;
use crate::{
circuit::{layouts, utils},
fieldutils::{integer_rep_to_felt, IntegerRep},
fieldutils::{IntegerRep, integer_rep_to_felt},
graph::multiplier_to_scale,
tensor::{self, DataFormat, Tensor, TensorType, ValTensor},
};
@@ -109,13 +109,13 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Op<F> for Hybrid
///
fn requires_homogenous_input_scales(&self) -> Vec<usize> {
match self {
HybridOp::Greater
| HybridOp::Less
| HybridOp::Equals
| HybridOp::GreaterEqual
HybridOp::Greater { .. }
| HybridOp::Less { .. }
| HybridOp::Equals { .. }
| HybridOp::GreaterEqual { .. }
| HybridOp::Max
| HybridOp::Min
| HybridOp::LessEqual => {
| HybridOp::LessEqual { .. } => {
vec![0, 1]
}
_ => vec![],
@@ -213,7 +213,7 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Op<F> for Hybrid
&self,
config: &mut crate::circuit::BaseConfig<F>,
region: &mut RegionCtx<F>,
values: &[&ValTensor<F>],
values: &[ValTensor<F>],
) -> Result<Option<ValTensor<F>>, CircuitError> {
Ok(Some(match self {
HybridOp::Rsqrt {
@@ -362,10 +362,10 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Op<F> for Hybrid
fn out_scale(&self, in_scales: Vec<crate::Scale>) -> Result<crate::Scale, CircuitError> {
let scale = match self {
HybridOp::Greater
| HybridOp::GreaterEqual
| HybridOp::Less
| HybridOp::LessEqual
HybridOp::Greater { .. }
| HybridOp::GreaterEqual { .. }
| HybridOp::Less { .. }
| HybridOp::LessEqual { .. }
| HybridOp::ReduceArgMax { .. }
| HybridOp::OneHot { .. }
| HybridOp::ReduceArgMin { .. } => 0,

File diff suppressed because it is too large

View File

@@ -186,7 +186,7 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Op<F> for Lookup
&self,
config: &mut crate::circuit::BaseConfig<F>,
region: &mut RegionCtx<F>,
values: &[&ValTensor<F>],
values: &[ValTensor<F>],
) -> Result<Option<ValTensor<F>>, CircuitError> {
Ok(Some(layouts::nonlinearity(
config,

View File

@@ -49,7 +49,7 @@ pub trait Op<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>:
&self,
config: &mut crate::circuit::BaseConfig<F>,
region: &mut RegionCtx<F>,
values: &[&ValTensor<F>],
values: &[ValTensor<F>],
) -> Result<Option<ValTensor<F>>, CircuitError>;
/// Returns the scale of the output of the operation.
@@ -209,7 +209,7 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Op<F> for Input
&self,
config: &mut crate::circuit::BaseConfig<F>,
region: &mut RegionCtx<F>,
values: &[&ValTensor<F>],
values: &[ValTensor<F>],
) -> Result<Option<ValTensor<F>>, CircuitError> {
let value = values[0].clone();
if !value.all_prev_assigned() {
@@ -223,29 +223,12 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Op<F> for Input
true,
)?))
}
_ => {
if self.decomp {
log::debug!("constraining input to be decomp");
Ok(Some(
super::layouts::decompose(
config,
region,
values[..].try_into()?,
&region.base(),
&region.legs(),
false,
)?
.1,
))
} else {
log::debug!("constraining input to be identity");
Ok(Some(super::layouts::identity(
config,
region,
values[..].try_into()?,
)?))
}
}
_ => Ok(Some(super::layouts::identity(
config,
region,
values[..].try_into()?,
self.decomp,
)?)),
}
} else {
Ok(Some(value))
@@ -280,7 +263,7 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Op<F> for Unknow
&self,
_: &mut crate::circuit::BaseConfig<F>,
_: &mut RegionCtx<F>,
_: &[&ValTensor<F>],
_: &[ValTensor<F>],
) -> Result<Option<ValTensor<F>>, CircuitError> {
Err(super::CircuitError::UnsupportedOp)
}
@@ -336,13 +319,8 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Constant<F> {
}
impl<
F: PrimeField
+ TensorType
+ PartialOrd
+ std::hash::Hash
+ Serialize
+ for<'de> Deserialize<'de>,
> Op<F> for Constant<F>
F: PrimeField + TensorType + PartialOrd + std::hash::Hash + Serialize + for<'de> Deserialize<'de>,
> Op<F> for Constant<F>
{
fn as_any(&self) -> &dyn Any {
self
@@ -355,20 +333,20 @@ impl<
&self,
config: &mut crate::circuit::BaseConfig<F>,
region: &mut RegionCtx<F>,
_: &[&ValTensor<F>],
_: &[ValTensor<F>],
) -> Result<Option<ValTensor<F>>, CircuitError> {
let value = if let Some(value) = &self.pre_assigned_val {
value.clone()
} else {
self.quantized_values.clone().try_into()?
};
Ok(Some(if self.decomp {
log::debug!("constraining constant to be decomp");
super::layouts::decompose(config, region, &[&value], &region.base(), &region.legs(), false)?.1
} else {
log::debug!("constraining constant to be identity");
super::layouts::identity(config, region, &[&value])?
}))
// we gotta constrain it once if its used multiple times
Ok(Some(layouts::identity(
config,
region,
&[value],
self.decomp,
)?))
}
fn clone_dyn(&self) -> Box<dyn Op<F>> {

View File

@@ -108,13 +108,8 @@ pub enum PolyOp {
}
impl<
F: PrimeField
+ TensorType
+ PartialOrd
+ std::hash::Hash
+ Serialize
+ for<'de> Deserialize<'de>,
> Op<F> for PolyOp
F: PrimeField + TensorType + PartialOrd + std::hash::Hash + Serialize + for<'de> Deserialize<'de>,
> Op<F> for PolyOp
{
/// Returns a reference to the Any trait.
fn as_any(&self) -> &dyn Any {
@@ -208,11 +203,11 @@ impl<
&self,
config: &mut crate::circuit::BaseConfig<F>,
region: &mut RegionCtx<F>,
values: &[&ValTensor<F>],
values: &[ValTensor<F>],
) -> Result<Option<ValTensor<F>>, CircuitError> {
Ok(Some(match self {
PolyOp::Abs => layouts::abs(config, region, values[..].try_into()?)?,
PolyOp::Sign => layouts::sign(config, region, values[..].try_into()?, true)?,
PolyOp::Sign => layouts::sign(config, region, values[..].try_into()?)?,
PolyOp::LeakyReLU { slope, scale } => {
layouts::leaky_relu(config, region, values[..].try_into()?, slope, scale)?
}
@@ -340,7 +335,9 @@ impl<
PolyOp::Mult => {
layouts::pairwise(config, region, values[..].try_into()?, BaseOp::Mult)?
}
PolyOp::Identity { .. } => layouts::identity(config, region, values[..].try_into()?)?,
PolyOp::Identity { .. } => {
layouts::identity(config, region, values[..].try_into()?, false)?
}
PolyOp::Reshape(d) | PolyOp::Flatten(d) => layouts::reshape(values[..].try_into()?, d)?,
PolyOp::Pad(p) => {
if values.len() != 1 {
@@ -419,14 +416,14 @@ impl<
PolyOp::Reshape(_) | PolyOp::Flatten(_) => in_scales[0],
PolyOp::Pow(pow) => in_scales[0] * (*pow as crate::Scale),
PolyOp::Identity { out_scale } => out_scale.unwrap_or(in_scales[0]),
PolyOp::Sign => 0,
PolyOp::Sign { .. } => 0,
_ => in_scales[0],
};
Ok(scale)
}
fn requires_homogenous_input_scales(&self) -> Vec<usize> {
if matches!(self, PolyOp::Add | PolyOp::Sub) {
if matches!(self, PolyOp::Add { .. } | PolyOp::Sub) {
vec![0, 1]
} else if matches!(self, PolyOp::Iff) {
vec![1, 2]

View File

@@ -10,6 +10,7 @@ use halo2_proofs::{
plonk::{Error, Selector},
};
use halo2curves::ff::PrimeField;
use itertools::Itertools;
use maybe_rayon::iter::ParallelExtend;
use std::{
cell::RefCell,
@@ -461,14 +462,15 @@ impl<'a, F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RegionCtx<'a
/// Update the max and min from inputs
pub fn update_max_min_lookup_inputs(
&mut self,
inputs: &ValTensor<F>,
inputs: &[ValTensor<F>],
) -> Result<(), CircuitError> {
let int_eval = inputs.int_evals()?;
let max = int_eval.iter().max().unwrap_or(&0);
let min = int_eval.iter().min().unwrap_or(&0);
self.statistics.max_lookup_inputs = self.statistics.max_lookup_inputs.max(*max);
self.statistics.min_lookup_inputs = self.statistics.min_lookup_inputs.min(*min);
let (mut min, mut max) = (0, 0);
for i in inputs {
max = max.max(i.int_evals()?.into_iter().max().unwrap_or_default());
min = min.min(i.int_evals()?.into_iter().min().unwrap_or_default());
}
self.statistics.max_lookup_inputs = self.statistics.max_lookup_inputs.max(max);
self.statistics.min_lookup_inputs = self.statistics.min_lookup_inputs.min(min);
Ok(())
}
@@ -503,10 +505,10 @@ impl<'a, F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RegionCtx<'a
/// add used lookup
pub fn add_used_lookup(
&mut self,
lookup: &LookupOp,
inputs: &ValTensor<F>,
lookup: LookupOp,
inputs: &[ValTensor<F>],
) -> Result<(), CircuitError> {
self.statistics.used_lookups.insert(lookup.clone());
self.statistics.used_lookups.insert(lookup);
self.update_max_min_lookup_inputs(inputs)
}
@@ -640,6 +642,34 @@ impl<'a, F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RegionCtx<'a
self.assign_dynamic_lookup(var, values)
}
/// Assign a valtensor to a vartensor
pub fn assign_with_omissions(
&mut self,
var: &VarTensor,
values: &ValTensor<F>,
ommissions: &HashSet<usize>,
) -> Result<ValTensor<F>, CircuitError> {
if let Some(region) = &self.region {
Ok(var.assign_with_omissions(
&mut region.borrow_mut(),
self.linear_coord,
values,
ommissions,
&mut self.assigned_constants,
)?)
} else {
let mut values_clone = values.clone();
let mut indices = ommissions.clone().into_iter().collect_vec();
values_clone.remove_indices(&mut indices, false)?;
let values_map = values.create_constants_map();
self.assigned_constants.par_extend(values_map);
Ok(values.clone())
}
}
/// Assign a valtensor to a vartensor with duplication
pub fn assign_with_duplication_unconstrained(
&mut self,

View File

@@ -9,7 +9,6 @@ use halo2_proofs::{
};
use halo2curves::bn256::Fr as F;
use halo2curves::ff::{Field, PrimeField};
use itertools::Itertools;
#[cfg(not(any(
all(target_arch = "wasm32", target_os = "unknown"),
not(feature = "ezkl")
@@ -65,7 +64,7 @@ mod matmul {
config
.layout(
&mut region,
&self.inputs.iter().collect_vec(),
&self.inputs.clone(),
Box::new(PolyOp::Einsum {
equation: "ij,jk->ik".to_string(),
}),
@@ -142,7 +141,7 @@ mod matmul_col_overflow_double_col {
config
.layout(
&mut region,
&self.inputs.iter().collect_vec(),
&self.inputs.clone(),
Box::new(PolyOp::Einsum {
equation: "ij,jk->ik".to_string(),
}),
@@ -216,7 +215,7 @@ mod matmul_col_overflow {
config
.layout(
&mut region,
&self.inputs.iter().collect_vec(),
&self.inputs.clone(),
Box::new(PolyOp::Einsum {
equation: "ij,jk->ik".to_string(),
}),
@@ -303,7 +302,7 @@ mod matmul_col_ultra_overflow_double_col {
config
.layout(
&mut region,
&self.inputs.iter().collect_vec(),
&self.inputs.clone(),
Box::new(PolyOp::Einsum {
equation: "ij,jk->ik".to_string(),
}),
@@ -381,7 +380,6 @@ mod matmul_col_ultra_overflow {
multiopen::{ProverSHPLONK, VerifierSHPLONK},
strategy::SingleStrategy,
};
use itertools::Itertools;
use snark_verifier::system::halo2::transcript::evm::EvmTranscript;
use super::*;
@@ -424,7 +422,7 @@ mod matmul_col_ultra_overflow {
config
.layout(
&mut region,
&self.inputs.iter().collect_vec(),
&self.inputs.clone(),
Box::new(PolyOp::Einsum {
equation: "ij,jk->ik".to_string(),
}),
@@ -535,7 +533,7 @@ mod dot {
config
.layout(
&mut region,
&self.inputs.iter().collect_vec(),
&self.inputs.clone(),
Box::new(PolyOp::Einsum {
equation: "i,i->".to_string(),
}),
@@ -612,7 +610,7 @@ mod dot_col_overflow_triple_col {
config
.layout(
&mut region,
&self.inputs.iter().collect_vec(),
&self.inputs.clone(),
Box::new(PolyOp::Einsum {
equation: "i,i->".to_string(),
}),
@@ -685,7 +683,7 @@ mod dot_col_overflow {
config
.layout(
&mut region,
&self.inputs.iter().collect_vec(),
&self.inputs.clone(),
Box::new(PolyOp::Einsum {
equation: "i,i->".to_string(),
}),
@@ -758,7 +756,7 @@ mod sum {
config
.layout(
&mut region,
&self.inputs.iter().collect_vec(),
&self.inputs.clone(),
Box::new(PolyOp::Sum { axes: vec![0] }),
)
.map_err(|_| Error::Synthesis)
@@ -828,7 +826,7 @@ mod sum_col_overflow_double_col {
config
.layout(
&mut region,
&self.inputs.iter().collect_vec(),
&self.inputs.clone(),
Box::new(PolyOp::Sum { axes: vec![0] }),
)
.map_err(|_| Error::Synthesis)
@@ -897,7 +895,7 @@ mod sum_col_overflow {
config
.layout(
&mut region,
&self.inputs.iter().collect_vec(),
&self.inputs.clone(),
Box::new(PolyOp::Sum { axes: vec![0] }),
)
.map_err(|_| Error::Synthesis)
@@ -968,7 +966,7 @@ mod composition {
let _ = config
.layout(
&mut region,
&self.inputs.iter().collect_vec(),
&self.inputs.clone(),
Box::new(PolyOp::Einsum {
equation: "i,i->".to_string(),
}),
@@ -977,7 +975,7 @@ mod composition {
let _ = config
.layout(
&mut region,
&self.inputs.iter().collect_vec(),
&self.inputs.clone(),
Box::new(PolyOp::Einsum {
equation: "i,i->".to_string(),
}),
@@ -986,7 +984,7 @@ mod composition {
config
.layout(
&mut region,
&self.inputs.iter().collect_vec(),
&self.inputs.clone(),
Box::new(PolyOp::Einsum {
equation: "i,i->".to_string(),
}),
@@ -1063,7 +1061,7 @@ mod conv {
config
.layout(
&mut region,
&self.inputs.iter().collect_vec(),
&self.inputs,
Box::new(PolyOp::Conv {
padding: vec![(1, 1); 2],
stride: vec![2; 2],
@@ -1220,7 +1218,7 @@ mod conv_col_ultra_overflow {
config
.layout(
&mut region,
&[&self.image, &self.kernel],
&[self.image.clone(), self.kernel.clone()],
Box::new(PolyOp::Conv {
padding: vec![(1, 1); 2],
stride: vec![2; 2],
@@ -1379,7 +1377,7 @@ mod conv_relu_col_ultra_overflow {
let output = config
.layout(
&mut region,
&[&self.image, &self.kernel],
&[self.image.clone(), self.kernel.clone()],
Box::new(PolyOp::Conv {
padding: vec![(1, 1); 2],
stride: vec![2; 2],
@@ -1392,7 +1390,7 @@ mod conv_relu_col_ultra_overflow {
let _output = config
.layout(
&mut region,
&[&output.unwrap().unwrap()],
&[output.unwrap().unwrap()],
Box::new(PolyOp::LeakyReLU {
slope: 0.0.into(),
scale: 1,
@@ -1519,11 +1517,7 @@ mod add_w_shape_casting {
|region| {
let mut region = RegionCtx::new(region, 0, 1, 128, 2);
config
.layout(
&mut region,
&self.inputs.iter().collect_vec(),
Box::new(PolyOp::Add),
)
.layout(&mut region, &self.inputs.clone(), Box::new(PolyOp::Add))
.map_err(|_| Error::Synthesis)
},
)
@@ -1590,11 +1584,7 @@ mod add {
|region| {
let mut region = RegionCtx::new(region, 0, 1, 128, 2);
config
.layout(
&mut region,
&self.inputs.iter().collect_vec(),
Box::new(PolyOp::Add),
)
.layout(&mut region, &self.inputs.clone(), Box::new(PolyOp::Add))
.map_err(|_| Error::Synthesis)
},
)
@@ -1681,8 +1671,8 @@ mod dynamic_lookup {
layouts::dynamic_lookup(
&config,
&mut region,
&self.lookups[i].iter().collect_vec().try_into().unwrap(),
&self.tables[i].iter().collect_vec().try_into().unwrap(),
&self.lookups[i],
&self.tables[i],
)
.map_err(|_| Error::Synthesis)?;
}
@@ -1777,8 +1767,8 @@ mod shuffle {
#[derive(Clone)]
struct MyCircuit<F: PrimeField + TensorType + PartialOrd> {
inputs: [ValTensor<F>; NUM_LOOP],
references: [ValTensor<F>; NUM_LOOP],
inputs: [[ValTensor<F>; 1]; NUM_LOOP],
references: [[ValTensor<F>; 1]; NUM_LOOP],
_marker: PhantomData<F>,
}
@@ -1828,15 +1818,15 @@ mod shuffle {
layouts::shuffles(
&config,
&mut region,
&[&self.inputs[i]],
&[&self.references[i]],
&self.inputs[i],
&self.references[i],
layouts::SortCollisionMode::Unsorted,
)
.map_err(|_| Error::Synthesis)?;
}
assert_eq!(
region.shuffle_col_coord(),
NUM_LOOP * self.references[0].len()
NUM_LOOP * self.references[0][0].len()
);
assert_eq!(region.shuffle_index(), NUM_LOOP);
@@ -1853,19 +1843,17 @@ mod shuffle {
// parameters
let references = (0..NUM_LOOP)
.map(|loop_idx| {
ValTensor::from(Tensor::from(
(0..LEN).map(|i| Value::known(F::from((i * loop_idx) as u64 + 1))),
))
[ValTensor::from(Tensor::from((0..LEN).map(|i| {
Value::known(F::from((i * loop_idx) as u64 + 1))
})))]
})
.collect::<Vec<_>>();
let inputs = (0..NUM_LOOP)
.map(|loop_idx| {
ValTensor::from(Tensor::from(
(0..LEN)
.rev()
.map(|i| Value::known(F::from((i * loop_idx) as u64 + 1))),
))
[ValTensor::from(Tensor::from((0..LEN).rev().map(|i| {
Value::known(F::from((i * loop_idx) as u64 + 1))
})))]
})
.collect::<Vec<_>>();
@@ -1885,11 +1873,9 @@ mod shuffle {
} else {
loop_idx - 1
};
ValTensor::from(Tensor::from(
(0..LEN)
.rev()
.map(|i| Value::known(F::from((i * prev_idx) as u64 + 1))),
))
[ValTensor::from(Tensor::from((0..LEN).rev().map(|i| {
Value::known(F::from((i * prev_idx) as u64 + 1))
})))]
})
.collect::<Vec<_>>();
@@ -1945,11 +1931,7 @@ mod add_with_overflow {
|region| {
let mut region = RegionCtx::new(region, 0, 1, 128, 2);
config
.layout(
&mut region,
&self.inputs.iter().collect_vec(),
Box::new(PolyOp::Add),
)
.layout(&mut region, &self.inputs.clone(), Box::new(PolyOp::Add))
.map_err(|_| Error::Synthesis)
},
)
@@ -2044,7 +2026,7 @@ mod add_with_overflow_and_poseidon {
layouter.assign_region(|| "_new_module", |_| Ok(()))?;
let inputs = vec![&assigned_inputs_a, &assigned_inputs_b];
let inputs = vec![assigned_inputs_a, assigned_inputs_b];
layouter.assign_region(
|| "model",
@@ -2153,11 +2135,7 @@ mod sub {
|region| {
let mut region = RegionCtx::new(region, 0, 1, 128, 2);
config
.layout(
&mut region,
&self.inputs.iter().collect_vec(),
Box::new(PolyOp::Sub),
)
.layout(&mut region, &self.inputs.clone(), Box::new(PolyOp::Sub))
.map_err(|_| Error::Synthesis)
},
)
@@ -2224,11 +2202,7 @@ mod mult {
|region| {
let mut region = RegionCtx::new(region, 0, 1, 128, 2);
config
.layout(
&mut region,
&self.inputs.iter().collect_vec(),
Box::new(PolyOp::Mult),
)
.layout(&mut region, &self.inputs.clone(), Box::new(PolyOp::Mult))
.map_err(|_| Error::Synthesis)
},
)
@@ -2295,11 +2269,7 @@ mod pow {
|region| {
let mut region = RegionCtx::new(region, 0, 1, 128, 2);
config
.layout(
&mut region,
&self.inputs.iter().collect_vec(),
Box::new(PolyOp::Pow(5)),
)
.layout(&mut region, &self.inputs.clone(), Box::new(PolyOp::Pow(5)))
.map_err(|_| Error::Synthesis)
},
)
@@ -2390,13 +2360,13 @@ mod matmul_relu {
};
let output = config
.base_config
.layout(&mut region, &self.inputs.iter().collect_vec(), Box::new(op))
.layout(&mut region, &self.inputs, Box::new(op))
.unwrap();
let _output = config
.base_config
.layout(
&mut region,
&[&output.unwrap()],
&[output.unwrap()],
Box::new(PolyOp::LeakyReLU {
slope: 0.0.into(),
scale: 1,
@@ -2495,7 +2465,7 @@ mod relu {
Ok(config
.layout(
&mut region,
&[&self.input],
&[self.input.clone()],
Box::new(PolyOp::LeakyReLU {
slope: 0.0.into(),
scale: 1,
@@ -2593,7 +2563,7 @@ mod lookup_ultra_overflow {
config
.layout(
&mut region,
&[&self.input],
&[self.input.clone()],
Box::new(LookupOp::Sigmoid { scale: 1.0.into() }),
)
.map_err(|_| Error::Synthesis)

View File

@@ -384,8 +384,10 @@ impl FromStr for DataField {
fn from_str(s: &str) -> Result<Self, Self::Err> {
// Check if the input starts with '@'
if let Some(file_path) = s.strip_prefix('@') {
if s.starts_with('@') {
// Extract the file path (remove the '@' prefix)
let file_path = &s[1..];
// Read the file content
let content = std::fs::read_to_string(file_path)
.map_err(|e| format!("Failed to read data file '{}': {}", file_path, e))?;
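// Illustrative sketch, not part of this diff: with the '@' prefix handling above, a
// hypothetical value like "@./input.json" makes DataField::from_str read the contents
// of ./input.json, which are then used in place of the literal string.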
@@ -862,11 +864,8 @@ pub enum Commands {
#[arg(short = 'U', long, value_hint = clap::ValueHint::Url)]
rpc_url: String,
/// The path to the serialized vka file
#[arg(long, value_hint = clap::ValueHint::FilePath)]
#[arg(long, default_value = DEFAULT_VKA, value_hint = clap::ValueHint::FilePath)]
vka_path: Option<PathBuf>,
/// The path to the serialized encoded calldata file generated via the encode_calldata command
#[arg(long, value_hint = clap::ValueHint::FilePath)]
encoded_calldata: Option<PathBuf>,
},
/// Registers a VKA, returning the digest used to identify it on-chain.
#[command(name = "register-vka")]

File diff suppressed because one or more lines are too long

View File

@@ -15,7 +15,7 @@ use crate::pfsys::{
use crate::pfsys::{
create_proof_circuit, swap_proof_commitments_polycommit, verify_proof_circuit, ProofSplitCommit,
};
use crate::pfsys::{encode_calldata, save_vk, srs::*};
use crate::pfsys::{save_vk, srs::*};
use crate::tensor::TensorError;
use crate::EZKL_BUF_CAPACITY;
use crate::{commands::*, EZKLError};
@@ -416,14 +416,12 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
addr_verifier,
rpc_url,
vka_path,
encoded_calldata,
} => {
verify_evm(
proof_path.unwrap_or(DEFAULT_PROOF.into()),
addr_verifier,
rpc_url,
vka_path,
encoded_calldata,
)
.await
}
@@ -1135,9 +1133,15 @@ pub(crate) fn calibrate(
// if unix get a gag
#[cfg(all(not(not(feature = "ezkl")), unix))]
let _r = Gag::stdout().ok();
let _r = match Gag::stdout() {
Ok(g) => Some(g),
_ => None,
};
#[cfg(all(not(not(feature = "ezkl")), unix))]
let _g = Gag::stderr().ok();
let _g = match Gag::stderr() {
Ok(g) => Some(g),
_ => None,
};
let mut circuit = match GraphCircuit::from_run_args(&local_run_args, &model_path) {
Ok(c) => c,
@@ -1295,7 +1299,7 @@ pub(crate) fn calibrate(
.clone()
}
CalibrationTarget::Accuracy => {
let mut param_iterator = found_params.iter().sorted_by_key(|p| {
let param_iterator = found_params.iter().sorted_by_key(|p| {
(
p.run_args.input_scale,
p.run_args.param_scale,
@@ -1304,7 +1308,7 @@ pub(crate) fn calibrate(
)
});
let last = param_iterator.next_back().ok_or("no params found")?;
let last = param_iterator.last().ok_or("no params found")?;
let max_scale = (
last.run_args.input_scale,
last.run_args.param_scale,
@@ -1603,7 +1607,11 @@ pub(crate) fn encode_evm_calldata(
};
let vka: Option<&[[u8; 32]]> = vka_buf.as_deref();
let encoded = encode_calldata(vka, &snark.proof, &flattened_instances.collect::<Vec<_>>());
let encoded = halo2_solidity_verifier::encode_calldata(
vka,
&snark.proof,
&flattened_instances.collect::<Vec<_>>(),
);
log::debug!("Encoded calldata: {:?}", encoded);
@@ -1612,7 +1620,7 @@ pub(crate) fn encode_evm_calldata(
Ok(encoded)
}
/// TODO: Add an optional vka_digest param that will allow us to fetch the associated VKA
/// TODO: Add an optional vka_digest param that will allow use to fetch the assocaited VKA
/// from the RegisteredVKA events on the RV.
#[cfg(all(feature = "eth", not(target_arch = "wasm32")))]
pub(crate) async fn verify_evm(
@@ -1620,7 +1628,6 @@ pub(crate) async fn verify_evm(
addr_verifier: H160Flag,
rpc_url: String,
vka_path: Option<PathBuf>,
encoded_calldata: Option<PathBuf>,
) -> Result<String, EZKLError> {
let proof = Snark::load::<KZGCommitmentScheme<Bn256>>(&proof_path)?;
@@ -1629,7 +1636,6 @@ pub(crate) async fn verify_evm(
addr_verifier.into(),
vka_path.map(|s| s.into()),
rpc_url.as_ref(),
encoded_calldata.map(|s| s.into()),
)
.await?;

View File

@@ -67,11 +67,8 @@ pub enum GraphError {
#[error("invalid input types")]
InvalidInputTypes,
/// Missing results
#[error("missing result for node {0}")]
MissingResults(usize),
/// Missing input
#[error("missing input {0}")]
MissingInputForNode(usize),
#[error("missing results")]
MissingResults,
/// Tensor error
#[error("[tensor] {0}")]
TensorError(#[from] crate::tensor::TensorError),

View File

@@ -365,7 +365,9 @@ impl GraphData {
pub fn from_str(data: &str) -> Result<Self, GraphError> {
let graph_input = serde_json::from_str(data);
match graph_input {
Ok(graph_input) => Ok(graph_input),
Ok(graph_input) => {
return Ok(graph_input);
}
Err(_) => {
let path = std::path::PathBuf::from(data);
GraphData::from_path(path)

View File

@@ -595,7 +595,7 @@ impl GraphSettings {
std::io::BufWriter::with_capacity(*EZKL_BUF_CAPACITY, std::fs::File::create(path)?);
serde_json::to_writer(writer, &self).map_err(|e| {
error!("failed to save settings file at {}", e);
std::io::Error::other(e)
std::io::Error::new(std::io::ErrorKind::Other, e)
})
}
/// load params from file
@@ -605,7 +605,7 @@ impl GraphSettings {
std::io::BufReader::with_capacity(*EZKL_BUF_CAPACITY, std::fs::File::open(path)?);
let settings: GraphSettings = serde_json::from_reader(reader).map_err(|e| {
error!("failed to load settings file at {}", e);
std::io::Error::other(e)
std::io::Error::new(std::io::ErrorKind::Other, e)
})?;
crate::check_version_string_matches(&settings.version);
@@ -1156,9 +1156,15 @@ impl GraphCircuit {
let mut cs = ConstraintSystem::default();
// if unix get a gag
#[cfg(all(not(not(feature = "ezkl")), unix))]
let _r = Gag::stdout().ok();
let _r = match Gag::stdout() {
Ok(g) => Some(g),
_ => None,
};
#[cfg(all(not(not(feature = "ezkl")), unix))]
let _g = Gag::stderr().ok();
let _g = match Gag::stderr() {
Ok(g) => Some(g),
_ => None,
};
Self::configure_with_params(&mut cs, settings);
@@ -1415,13 +1421,13 @@ impl Circuit<Fp> for GraphCircuit {
let mut module_configs = ModuleConfigs::from_visibility(
cs,
&params.module_sizes,
params.module_sizes.clone(),
params.run_args.logrows as usize,
);
let mut vars = ModelVars::new(cs, &params);
module_configs.configure_complex_modules(cs, &visibility, &params.module_sizes);
module_configs.configure_complex_modules(cs, visibility, params.module_sizes.clone());
vars.instantiate_instance(
cs,

View File

@@ -200,7 +200,7 @@ fn number_of_iterations(mappings: &[InputMapping], dims: Vec<&[usize]>) -> usize
InputMapping::Stacked { axis, chunk } => Some(
// number of iterations given the dim size along the axis
// and the chunk size
dims[*axis].div_ceil(*chunk), // (dims[*axis] + chunk - 1) / chunk,
(dims[*axis] + chunk - 1) / chunk,
),
_ => None,
});
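// Worked example (illustrative, not part of this diff): for a stacked axis of length 10
// and chunk size 4, both forms above compute ceil(10 / 4) = 3 iterations;
// dims[axis].div_ceil(chunk) and (dims[axis] + chunk - 1) / chunk are equivalent for
// non-zero chunk sizes.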
@@ -589,7 +589,10 @@ impl Model {
required_range_checks: res.range_checks.into_iter().collect(),
model_output_scales: self.graph.get_output_scales()?,
model_input_scales: self.graph.get_input_scales(),
input_types: self.get_input_types().ok(),
input_types: match self.get_input_types() {
Ok(x) => Some(x),
Err(_) => None,
},
output_types: Some(self.get_output_types()),
num_dynamic_lookups: res.num_dynamic_lookups,
total_dynamic_col_size: res.dynamic_lookup_col_coord,
@@ -647,13 +650,10 @@ impl Model {
let variables: std::collections::HashMap<String, usize> =
std::collections::HashMap::from_iter(variables.iter().map(|(k, v)| (k.clone(), *v)));
let inputs = model.inputs.clone();
let outputs = model.outputs.clone();
for (i, id) in inputs.iter().enumerate() {
for (i, id) in model.clone().inputs.iter().enumerate() {
let input = model.node_mut(id.node);
if input.outputs.is_empty() {
if input.outputs.len() == 0 {
return Err(GraphError::MissingOutput(id.node));
}
let mut fact: InferenceFact = input.outputs[0].fact.clone();
@@ -672,7 +672,7 @@ impl Model {
model.set_input_fact(i, fact)?;
}
for (i, _) in outputs.iter().enumerate() {
for (i, _) in model.clone().outputs.iter().enumerate() {
model.set_output_fact(i, InferenceFact::default())?;
}
@@ -1196,7 +1196,7 @@ impl Model {
.base
.layout(
&mut thread_safe_region,
&[output, &comparators],
&[output.clone(), comparators],
Box::new(HybridOp::Output {
decomp: !run_args.ignore_range_check_inputs_outputs,
}),
@@ -1257,27 +1257,12 @@ impl Model {
node.inputs()
.iter()
.map(|(idx, outlet)| {
// check node is not an output
let is_output = self.graph.outputs.iter().any(|(o_idx, _)| *idx == *o_idx);
let res = if self.graph.nodes[idx].num_uses() == 1 && !is_output {
let res = results.remove(idx);
res.ok_or(GraphError::MissingResults(*idx))?[*outlet].clone()
} else {
results.get(idx).ok_or(GraphError::MissingResults(*idx))?[*outlet]
.clone()
};
Ok(res)
Ok(results.get(idx).ok_or(GraphError::MissingResults)?[*outlet].clone())
})
.collect::<Result<Vec<_>, GraphError>>()?
} else {
// we re-assign inputs, always from the 0 outlet
if self.graph.nodes[idx].num_uses() == 1 {
let res = results.remove(idx);
vec![res.ok_or(GraphError::MissingInput(*idx))?[0].clone()]
} else {
vec![results.get(idx).ok_or(GraphError::MissingInput(*idx))?[0].clone()]
}
vec![results.get(idx).ok_or(GraphError::MissingResults)?[0].clone()]
};
trace!("output dims: {:?}", node.out_dims());
trace!(
@@ -1288,7 +1273,7 @@ impl Model {
let start = instant::Instant::now();
match &node {
NodeType::Node(n) => {
let mut res = if node.is_constant() && node.num_uses() == 1 {
let res = if node.is_constant() && node.num_uses() == 1 {
log::debug!("node {} is a constant with 1 use", n.idx);
let mut node = n.clone();
let c = node
@@ -1299,19 +1284,19 @@ impl Model {
} else {
config
.base
.layout(region, &values.iter().collect_vec(), n.opkind.clone_dyn())
.layout(region, &values, n.opkind.clone_dyn())
.map_err(|e| {
error!("{}", e);
halo2_proofs::plonk::Error::Synthesis
})?
};
if let Some(vt) = &mut res {
if let Some(mut vt) = res {
vt.reshape(&node.out_dims()[0])?;
//only use with mock prover
debug!("------------ output node {:?}: {:?}", idx, vt.show());
// we get the max as for fused nodes this corresponds to the node output
results.insert(*idx, vec![vt.clone()]);
//only use with mock prover
debug!("------------ output node {:?}: {:?}", idx, vt.show());
}
}
NodeType::SubGraph {
@@ -1355,7 +1340,7 @@ impl Model {
.inputs
.clone()
.into_iter()
.zip(values.iter().map(|v| vec![v.clone()])),
.zip(values.clone().into_iter().map(|v| vec![v])),
);
let res = model.layout_nodes(config, region, &mut subgraph_results)?;
@@ -1436,7 +1421,7 @@ impl Model {
);
let outputs = output_nodes
.map(|(idx, outlet)| {
Ok(results.get(idx).ok_or(GraphError::MissingResults(*idx))?[*outlet].clone())
Ok(results.get(idx).ok_or(GraphError::MissingResults)?[*outlet].clone())
})
.collect::<Result<Vec<_>, GraphError>>()?;
@@ -1491,7 +1476,7 @@ impl Model {
dummy_config.layout(
&mut region,
&[output, &comparator],
&[output.clone(), comparator],
Box::new(HybridOp::Output {
decomp: !run_args.ignore_range_check_inputs_outputs,
}),

View File

@@ -37,15 +37,15 @@ impl ModuleConfigs {
/// Create new module configs from visibility of each variable
pub fn from_visibility(
cs: &mut ConstraintSystem<Fp>,
module_size: &ModuleSizes,
module_size: ModuleSizes,
logrows: usize,
) -> Self {
let mut config = Self::default();
for size in &module_size.polycommit {
for size in module_size.polycommit {
config
.polycommit
.push(PolyCommitChip::configure(cs, (logrows, *size)));
.push(PolyCommitChip::configure(cs, (logrows, size)));
}
config
@@ -55,8 +55,8 @@ impl ModuleConfigs {
pub fn configure_complex_modules(
&mut self,
cs: &mut ConstraintSystem<Fp>,
visibility: &VarVisibility,
module_size: &ModuleSizes,
visibility: VarVisibility,
module_size: ModuleSizes,
) {
if (visibility.input.is_hashed()
|| visibility.output.is_hashed()

View File

@@ -37,7 +37,6 @@ use crate::tensor::TensorError;
// Import curve-specific field type
use halo2curves::bn256::Fr as Fp;
use itertools::Itertools;
// Import logging for EZKL
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use log::trace;
@@ -119,15 +118,16 @@ impl Op<Fp> for Rescaled {
&self,
config: &mut crate::circuit::BaseConfig<Fp>,
region: &mut crate::circuit::region::RegionCtx<Fp>,
values: &[&crate::tensor::ValTensor<Fp>],
values: &[crate::tensor::ValTensor<Fp>],
) -> Result<Option<crate::tensor::ValTensor<Fp>>, CircuitError> {
if self.scale.len() != values.len() {
return Err(TensorError::DimMismatch("rescaled inputs".to_string()).into());
}
let res =
crate::circuit::layouts::rescale(config, region, values[..].try_into()?, &self.scale)?;
self.inner.layout(config, region, &res.iter().collect_vec())
&crate::circuit::layouts::rescale(config, region, values[..].try_into()?, &self.scale)?
[..];
self.inner.layout(config, region, res)
}
/// Create a cloned boxed copy of this operation
@@ -274,13 +274,13 @@ impl Op<Fp> for RebaseScale {
&self,
config: &mut crate::circuit::BaseConfig<Fp>,
region: &mut crate::circuit::region::RegionCtx<Fp>,
values: &[&crate::tensor::ValTensor<Fp>],
values: &[crate::tensor::ValTensor<Fp>],
) -> Result<Option<crate::tensor::ValTensor<Fp>>, CircuitError> {
let original_res = self
.inner
.layout(config, region, values)?
.ok_or(CircuitError::MissingLayout(self.as_string()))?;
self.rebase_op.layout(config, region, &[&original_res])
self.rebase_op.layout(config, region, &[original_res])
}
/// Create a cloned boxed copy of this operation
@@ -472,7 +472,7 @@ impl Op<Fp> for SupportedOp {
&self,
config: &mut crate::circuit::BaseConfig<Fp>,
region: &mut crate::circuit::region::RegionCtx<Fp>,
values: &[&crate::tensor::ValTensor<Fp>],
values: &[crate::tensor::ValTensor<Fp>],
) -> Result<Option<crate::tensor::ValTensor<Fp>>, CircuitError> {
self.as_op().layout(config, region, values)
}

View File

@@ -1,14 +1,14 @@
use super::errors::GraphError;
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use super::VarScales;
use super::errors::GraphError;
use super::{Rescaled, SupportedOp, Visibility};
use crate::circuit::Op;
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use crate::circuit::hybrid::HybridOp;
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use crate::circuit::lookup::LookupOp;
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use crate::circuit::poly::PolyOp;
use crate::circuit::Op;
use crate::fieldutils::IntegerRep;
use crate::tensor::{Tensor, TensorError, TensorType};
use halo2curves::bn256::Fr as Fp;
@@ -22,6 +22,7 @@ use std::sync::Arc;
use tract_onnx::prelude::{DatumType, Node as OnnxNode, TypedFact, TypedOp};
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use tract_onnx::tract_core::ops::{
Downsample,
array::{
Gather, GatherElements, GatherNd, MultiBroadcastTo, OneHot, ScatterElements, ScatterNd,
Slice, Topk,
@@ -31,7 +32,6 @@ use tract_onnx::tract_core::ops::{
einsum::EinSum,
element_wise::ElementWiseOp,
nn::{LeakyRelu, Reduce, Softmax},
Downsample,
};
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use tract_onnx::tract_hir::{

View File

@@ -23,19 +23,14 @@
)]
// we allow this for our dynamic range based indexing scheme
#![allow(clippy::single_range_in_vec_init)]
#![feature(buf_read_has_data_left)]
#![feature(stmt_expr_attributes)]
//! A library for turning computational graphs, such as neural networks, into ZK-circuits.
//!
use log::warn;
#[global_allocator]
#[cfg(all(feature = "jemalloc", not(target_arch = "wasm32")))]
static GLOBAL: jemallocator::Jemalloc = jemallocator::Jemalloc;
#[global_allocator]
#[cfg(all(feature = "mimalloc", not(target_arch = "wasm32")))]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
use mimalloc as _;
/// Error type
// #[cfg_attr(not(feature = "ezkl"), derive(uniffi::Error))]
@@ -130,7 +125,7 @@ pub fn version() -> &'static str {
/// Bindings management
#[cfg(any(
feature = "universal-bindings",
feature = "ios-bindings",
all(target_arch = "wasm32", target_os = "unknown"),
feature = "python-bindings"
))]
@@ -157,7 +152,7 @@ pub mod fieldutils;
pub mod graph;
/// beautiful logging
#[cfg(all(
feature = "logging",
feature = "ezkl",
not(all(target_arch = "wasm32", target_os = "unknown"))
))]
pub mod logger;

View File

@@ -8,8 +8,6 @@ pub mod srs;
pub mod errors;
pub use errors::PfsysError;
use itertools::chain;
use std::borrow::Borrow;
use crate::circuit::CheckMode;
use crate::graph::GraphWitness;
@@ -19,16 +17,16 @@ use crate::{Commitments, EZKL_BUF_CAPACITY, EZKL_KEY_FORMAT};
use clap::ValueEnum;
use halo2_proofs::circuit::Value;
use halo2_proofs::plonk::{
create_proof, keygen_pk, keygen_vk_custom, verify_proof, Circuit, ProvingKey, VerifyingKey,
Circuit, ProvingKey, VerifyingKey, create_proof, keygen_pk, keygen_vk_custom, verify_proof,
};
use halo2_proofs::poly::VerificationStrategy;
use halo2_proofs::poly::commitment::{CommitmentScheme, Params, ParamsProver, Prover, Verifier};
use halo2_proofs::poly::ipa::commitment::IPACommitmentScheme;
use halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme;
use halo2_proofs::poly::VerificationStrategy;
use halo2_proofs::transcript::{EncodedChallenge, TranscriptReadBuffer, TranscriptWriterBuffer};
use halo2curves::CurveAffine;
use halo2curves::ff::{FromUniformBytes, PrimeField, WithSmallOrderMulGroup};
use halo2curves::serde::SerdeObject;
use halo2curves::{bn256, CurveAffine};
use instant::Instant;
use log::{debug, info, trace};
#[cfg(not(feature = "det-prove"))]
@@ -65,81 +63,6 @@ fn serde_format_from_str(s: &str) -> halo2_proofs::SerdeFormat {
}
}
/// Function signature of `verifyProof(bytes,uint256[])`.
pub const FN_SIG_VERIFY_PROOF: [u8; 4] = [0x1e, 0x8e, 0x1e, 0x13];
/// Function signature of `verifyProof(bytes,uint256[],bytes32[])`.
pub const FN_SIG_VERIFY_PROOF_WITH_VKA: [u8; 4] = [0x34, 0x09, 0xfc, 0x9f];
/// Function signature of verifyWithDataAttestation(address,bytes)
pub const FN_SIG_VERIFY_WITH_DATA_ATTESTATION: [u8; 4] = [0x4c, 0x79, 0x85, 0xd0];
/// Function signature of registeredVkas(bytes32[]) 0xdc8b4094
pub const FN_SIG_REGISTER_VKA: [u8; 4] = [0xdc, 0x8b, 0x40, 0x94];
/// Encode proof into calldata to invoke `Halo2Verifier.verifyProof`.
///
/// For `vk_address`:
/// - Pass `None` if verifying key is embedded in `Halo2Verifier`
/// - Pass `Some(vka)` if verifying key is separated and already registered
pub fn encode_calldata(vka: Option<&[[u8; 32]]>, proof: &[u8], instances: &[bn256::Fr]) -> Vec<u8> {
let (fn_sig, offset) = if vka.is_some() {
(FN_SIG_VERIFY_PROOF_WITH_VKA, 0x60)
} else {
(FN_SIG_VERIFY_PROOF, 0x40)
};
let num_instances = instances.len();
let (vka_offset, vka_data) = if let Some(vka) = vka {
(
to_be_bytes_32(offset + 0x40 + proof.len() + (num_instances * 0x20)).to_vec(),
vka.to_vec(),
)
} else {
(Vec::new(), Vec::new())
};
let num_vka_words = vka_data.len();
chain![
fn_sig, // function signature
to_be_bytes_32(offset), // offset of proof
to_be_bytes_32(offset + 0x20 + proof.len()), // offset of instances
vka_offset, // offset of vka
to_be_bytes_32(proof.len()), // length of proof
proof.iter().cloned(), // proof
to_be_bytes_32(num_instances), // length of instances
instances.iter().map(fr_to_bytes32).flatten(), // instances
to_be_bytes_32(num_vka_words), // vka length
vka_data.iter().flat_map(|arr| arr.iter().cloned()) // vka words
]
.collect()
}
fn to_be_bytes_32(value: usize) -> [u8; 32] {
let mut bytes = [0u8; 32];
// Convert the usize to big-endian bytes in the last 8 bytes (or however many needed)
let value_bytes = value.to_be_bytes();
let start_idx = 32 - value_bytes.len();
bytes[start_idx..].copy_from_slice(&value_bytes);
bytes
}
fn fr_to_bytes32(fe: impl Borrow<bn256::Fr>) -> [u8; 32] {
fe_to_bytes32(fe)
}
fn fe_to_bytes32<F>(fe: impl Borrow<F>) -> [u8; 32]
where
F: PrimeField<Repr = halo2_proofs::halo2curves::serde::Repr<32>>,
{
let repr = fe.borrow().to_repr();
// Note: we're converting from little-endian representation to big-endian bytes
let mut bytes = [0u8; 32];
let inner = repr.inner();
for i in 0..32 {
bytes[31 - i] = inner[i];
}
bytes
}
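// Illustrative sketch, not part of this diff: the calldata layout produced by
// encode_calldata when vka is None, assuming a hypothetical 64-byte proof and two
// instances. The two offset words are relative to the byte after the 4-byte selector.
//
//   0x000..0x004  FN_SIG_VERIFY_PROOF (0x1e8e1e13)
//   0x004..0x024  offset of proof data      = 0x40
//   0x024..0x044  offset of instances data  = 0x40 + 0x20 + proof.len() = 0xa0
//   0x044..0x064  proof length              = 0x40
//   0x064..0x0a4  proof bytes
//   0x0a4..0x0c4  number of instances       = 2
//   0x0c4..0x104  instances, one 32-byte big-endian word each (fr_to_bytes32)
//   0x104..0x124  vka word count            = 0 (no trailing vka words)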
#[allow(missing_docs)]
#[derive(Copy, Clone, Default, Debug, PartialEq, Eq, Deserialize, Serialize, PartialOrd)]
#[cfg_attr(all(feature = "ezkl", not(target_arch = "wasm32")), derive(ValueEnum))]
@@ -401,7 +324,7 @@ where
}
#[cfg(feature = "python-bindings")]
use pyo3::{types::PyDict, PyObject, Python, ToPyObject};
use pyo3::{PyObject, Python, ToPyObject, types::PyDict};
#[cfg(feature = "python-bindings")]
impl<F: PrimeField + SerdeObject + Serialize, C: CurveAffine + Serialize> ToPyObject for Snark<F, C>
where
@@ -425,9 +348,9 @@ where
}
impl<
F: PrimeField + SerdeObject + Serialize + FromUniformBytes<64> + DeserializeOwned,
C: CurveAffine + Serialize + DeserializeOwned,
> Snark<F, C>
F: PrimeField + SerdeObject + Serialize + FromUniformBytes<64> + DeserializeOwned,
C: CurveAffine + Serialize + DeserializeOwned,
> Snark<F, C>
where
C::Scalar: Serialize + DeserializeOwned,
C::ScalarExt: Serialize + DeserializeOwned,

View File

@@ -19,14 +19,15 @@ use maybe_rayon::{
slice::ParallelSliceMut,
};
use serde::{Deserialize, Serialize};
use std::io::{BufRead, Write};
use std::io::BufRead;
use std::io::Write;
use std::path::PathBuf;
pub use val::*;
pub use var::*;
use crate::{
circuit::utils,
fieldutils::{integer_rep_to_felt, IntegerRep},
fieldutils::{IntegerRep, integer_rep_to_felt},
graph::Visibility,
};
@@ -41,11 +42,11 @@ use std::error::Error;
use std::fmt::Debug;
use std::io::Read;
use std::iter::Iterator;
use std::ops::Rem;
use std::ops::{Add, Deref, DerefMut, Div, Mul, Neg, Range, Sub};
use std::{cmp::max, ops::Rem};
/// The (inner) type of tensor elements.
pub trait TensorType: Clone + Debug {
pub trait TensorType: Clone + Debug + 'static {
/// Returns the zero value.
fn zero() -> Option<Self> {
None
@@ -54,10 +55,14 @@ pub trait TensorType: Clone + Debug {
fn one() -> Option<Self> {
None
}
/// Max operator for ordering values.
fn tmax(&self, _: &Self) -> Option<Self> {
None
}
}
macro_rules! tensor_type {
($rust_type:ty, $tensor_type:ident, $zero:expr, $one:expr) => {
($rust_type:ty, $tensor_type:ident, $zero:expr_2021, $one:expr_2021) => {
impl TensorType for $rust_type {
fn zero() -> Option<Self> {
Some($zero)
@@ -65,6 +70,10 @@ macro_rules! tensor_type {
fn one() -> Option<Self> {
Some($one)
}
fn tmax(&self, other: &Self) -> Option<Self> {
Some(max(*self, *other))
}
}
};
}
@@ -73,12 +82,46 @@ impl TensorType for f32 {
fn zero() -> Option<Self> {
Some(0.0)
}
// f32 doesn't impl Ord, so we can't just use max like we can for IntegerRep or usize.
// A comparison between f32s needs to handle NaN values.
fn tmax(&self, other: &Self) -> Option<Self> {
match (self.is_nan(), other.is_nan()) {
(true, true) => Some(f32::NAN),
(true, false) => Some(*other),
(false, true) => Some(*self),
(false, false) => {
if self >= other {
Some(*self)
} else {
Some(*other)
}
}
}
}
}
impl TensorType for f64 {
fn zero() -> Option<Self> {
Some(0.0)
}
// f64 doesn't impl Ord, so we can't just use max like we can for IntegerRep or usize.
// A comparison between f64s needs to handle NaN values.
fn tmax(&self, other: &Self) -> Option<Self> {
match (self.is_nan(), other.is_nan()) {
(true, true) => Some(f64::NAN),
(true, false) => Some(*other),
(false, true) => Some(*self),
(false, false) => {
if self >= other {
Some(*self)
} else {
Some(*other)
}
}
}
}
}
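// Illustrative only, not part of this diff: with the NaN handling above, a NaN operand
// is treated as "missing", so the other value wins (trait must be in scope):
//   assert_eq!(1.0f32.tmax(&f32::NAN), Some(1.0));
//   assert_eq!(f32::NAN.tmax(&2.0), Some(2.0));
//   assert!(f32::NAN.tmax(&f32::NAN).unwrap().is_nan());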
tensor_type!(bool, Bool, false, true);
@@ -104,6 +147,14 @@ impl<T: TensorType> TensorType for Value<T> {
fn one() -> Option<Self> {
Some(Value::known(T::one().unwrap()))
}
fn tmax(&self, other: &Self) -> Option<Self> {
Some(
(self.clone())
.zip(other.clone())
.map(|(a, b)| a.tmax(&b).unwrap()),
)
}
}
impl<F: PrimeField + PartialOrd> TensorType for Assigned<F>
@@ -117,6 +168,14 @@ where
fn one() -> Option<Self> {
Some(F::ONE.into())
}
fn tmax(&self, other: &Self) -> Option<Self> {
if self.evaluate() >= other.evaluate() {
Some(*self)
} else {
Some(*other)
}
}
}
impl<F: PrimeField> TensorType for Expression<F>
@@ -130,14 +189,42 @@ where
fn one() -> Option<Self> {
Some(Expression::Constant(F::ONE))
}
fn tmax(&self, _: &Self) -> Option<Self> {
todo!()
}
}
impl TensorType for Column<Advice> {}
impl TensorType for Column<Fixed> {}
impl<F: PrimeField + PartialOrd> TensorType for AssignedCell<Assigned<F>, F> {}
impl<F: PrimeField + PartialOrd> TensorType for AssignedCell<Assigned<F>, F> {
fn tmax(&self, other: &Self) -> Option<Self> {
let mut output: Option<Self> = None;
self.value_field().zip(other.value_field()).map(|(a, b)| {
if a.evaluate() >= b.evaluate() {
output = Some(self.clone());
} else {
output = Some(other.clone());
}
});
output
}
}
impl<F: PrimeField + PartialOrd> TensorType for AssignedCell<F, F> {}
impl<F: PrimeField + PartialOrd> TensorType for AssignedCell<F, F> {
fn tmax(&self, other: &Self) -> Option<Self> {
let mut output: Option<Self> = None;
self.value().zip(other.value()).map(|(a, b)| {
if a >= b {
output = Some(self.clone());
} else {
output = Some(other.clone());
}
});
output
}
}
// specific types
impl TensorType for halo2curves::pasta::Fp {
@@ -148,6 +235,10 @@ impl TensorType for halo2curves::pasta::Fp {
fn one() -> Option<Self> {
Some(halo2curves::pasta::Fp::one())
}
fn tmax(&self, other: &Self) -> Option<Self> {
Some((*self).max(*other))
}
}
impl TensorType for halo2curves::bn256::Fr {
@@ -158,15 +249,9 @@ impl TensorType for halo2curves::bn256::Fr {
fn one() -> Option<Self> {
Some(halo2curves::bn256::Fr::one())
}
}
impl<F: TensorType> TensorType for &F {
fn zero() -> Option<Self> {
None
}
fn one() -> Option<Self> {
None
fn tmax(&self, other: &Self) -> Option<Self> {
Some((*self).max(*other))
}
}
@@ -289,7 +374,7 @@ impl<T: Clone + TensorType + std::marker::Send + std::marker::Sync>
}
}
impl<'data, T: Clone + TensorType + std::marker::Send + std::marker::Sync + 'data>
impl<'data, T: Clone + TensorType + std::marker::Send + std::marker::Sync>
maybe_rayon::iter::IntoParallelRefMutIterator<'data> for Tensor<T>
{
type Iter = maybe_rayon::slice::IterMut<'data, T>;
@@ -321,63 +406,23 @@ impl<T: Clone + TensorType + PrimeField> Tensor<T> {
let mut buf_reader = std::io::BufReader::new(reader);
let mut inner = Vec::new();
loop {
// Check if there's more data available
let has_data = match buf_reader.fill_buf() {
Ok(buffer) => !buffer.is_empty(),
Err(e) => {
return Err(TensorError::FileLoadError(format!(
"IO error while checking for data: {}",
e
)));
}
};
// If no data left, we're done
if !has_data {
break;
}
// Try to read a complete T::Repr
while let Ok(true) = buf_reader.has_data_left() {
let mut repr = T::Repr::default();
match buf_reader.read_exact(repr.as_mut()) {
Ok(_) => {
// Successfully read a complete representation
let tensor = T::from_repr(repr);
// Check if the conversion was successful
if tensor.is_some().into() {
// Unwrap the value safely (we already checked it's Some)
inner.push(tensor.unwrap());
} else {
return Err(TensorError::FileLoadError(
"Failed to convert representation to tensor".to_string(),
));
}
inner.push(T::from_repr(repr).unwrap());
}
Err(_) => {
// Any error during read_exact is treated as a failure
// This matches the original implementation
return Err(TensorError::FileLoadError(
"Failed to read tensor".to_string(),
));
}
}
}
Ok(Tensor::new(Some(&inner), &[inner.len()]).unwrap())
}
}
impl<T: Clone + TensorType> Tensor<&T> {
/// Clones the tensor values into a new tensor.
pub fn cloned(&self) -> Tensor<T> {
let inner = self.inner.clone().into_iter().cloned().collect::<Vec<T>>();
Tensor::new(Some(&inner), &self.dims).unwrap()
}
}
impl<T: Clone + TensorType> Tensor<T> {
/// Sets (copies) the tensor values to the provided ones.
pub fn new(values: Option<&[T]>, dims: &[usize]) -> Result<Self, TensorError> {
@@ -509,6 +554,7 @@ impl<T: Clone + TensorType> Tensor<T> {
/// let mut a = Tensor::<IntegerRep>::new(Some(&[1,2,3,4,5,6]), &[2, 3]).unwrap();
/// let expected = Tensor::<IntegerRep>::new(Some(&[1, 2, 3, 4, 5, 6, 0, 0]), &[8]).unwrap();
/// assert_eq!(a.pad_to_zero_rem(4, 0).unwrap(), expected);
///
/// let expected = Tensor::<IntegerRep>::new(Some(&[1, 2, 3, 4, 5, 6, 0, 0, 0]), &[9]).unwrap();
/// assert_eq!(a.pad_to_zero_rem(9, 0).unwrap(), expected);
/// ```
@@ -585,23 +631,23 @@ impl<T: Clone + TensorType> Tensor<T> {
// Fill remaining dimensions
full_indices.extend((indices.len()..self.dims.len()).map(|i| 0..self.dims[i]));
// Pre-calculate total size and allocate result vector
let total_size: usize = full_indices
.iter()
.map(|range| range.end - range.start)
.product();
let mut res = Vec::with_capacity(total_size);
// Calculate new dimensions once
let dims: Vec<usize> = full_indices.iter().map(|e| e.end - e.start).collect();
let mut output = Tensor::new(None, &dims)?;
// Use iterator directly without collecting into intermediate Vec
for coord in full_indices.iter().cloned().multi_cartesian_product() {
let index = self.get_index(&coord);
res.push(self[index].clone());
}
let cartesian_coord: Vec<Vec<usize>> = full_indices
.iter()
.cloned()
.multi_cartesian_product()
.collect();
output.par_iter_mut().enumerate().for_each(|(i, e)| {
let coord = &cartesian_coord[i];
*e = self.get(coord);
});
Ok(output)
Tensor::new(Some(&res), &dims)
}
/// Set a slice of the Tensor.
@@ -707,7 +753,7 @@ impl<T: Clone + TensorType> Tensor<T> {
/// ```
pub fn get_every_n(&self, n: usize) -> Result<Tensor<T>, TensorError> {
let mut inner: Vec<T> = vec![];
for (i, elem) in self.inner.iter().enumerate() {
for (i, elem) in self.inner.clone().into_iter().enumerate() {
if i % n == 0 {
inner.push(elem.clone());
}
@@ -730,7 +776,7 @@ impl<T: Clone + TensorType> Tensor<T> {
/// ```
pub fn exclude_every_n(&self, n: usize) -> Result<Tensor<T>, TensorError> {
let mut inner: Vec<T> = vec![];
for (i, elem) in self.inner.iter().enumerate() {
for (i, elem) in self.inner.clone().into_iter().enumerate() {
if i % n != 0 {
inner.push(elem.clone());
}
@@ -766,9 +812,9 @@ impl<T: Clone + TensorType> Tensor<T> {
let mut inner: Vec<T> = Vec::with_capacity(self.inner.len());
let mut offset = initial_offset;
for (i, elem) in self.inner.iter().enumerate() {
for (i, elem) in self.inner.clone().into_iter().enumerate() {
if (i + offset + 1) % n == 0 {
inner.extend(vec![elem.clone(); 1 + num_repeats]);
inner.extend(vec![elem; 1 + num_repeats]);
offset += num_repeats;
} else {
inner.push(elem.clone());
@@ -825,16 +871,16 @@ impl<T: Clone + TensorType> Tensor<T> {
/// ```
/// use ezkl::tensor::Tensor;
/// use ezkl::fieldutils::IntegerRep;
/// let mut a = Tensor::<IntegerRep>::new(Some(&[1, 2, 3, 4, 5, 6]), &[6]).unwrap();
/// let a = Tensor::<IntegerRep>::new(Some(&[1, 2, 3, 4, 5, 6]), &[6]).unwrap();
/// let expected = Tensor::<IntegerRep>::new(Some(&[1, 2, 3, 6]), &[4]).unwrap();
/// let mut indices = vec![3, 4];
/// assert_eq!(a.remove_indices(&mut indices, false).unwrap(), expected);
/// assert_eq!(a.remove_indices(&mut indices, true).unwrap(), expected);
///
///
/// let a = Tensor::<IntegerRep>::new(Some(&[52, -245, 153, 13, -4, -56, -163, 249, -128, -172, 396, 143, 2, -96, 504, -44, -158, -393, 61, 95, 191, 74, 64, -219, 553, 104, 235, 222, 44, -216, 63, -251, 40, -140, 112, -355, 60, 123, 26, -116, -89, -200, -109, 168, 135, -34, -99, -54, 5, -81, 322, 87, 4, -139, 420, 92, -295, -12, 262, -1, 26, -48, 231, 1, -335, 244, 188, -4, 5, -362, 57, -198, -184, -117, 40, 305, 49, 30, -59, -26, -37, 96]), &[82]).unwrap();
/// let b = Tensor::<IntegerRep>::new(Some(&[52, -245, 153, 13, -4, -56, -163, 249, -128, -172, 396, 143, 2, -96, 504, -44, -158, -393, 61, 95, 191, 74, 64, -219, 553, 104, 235, 222, 44, -216, 63, -251, 40, -140, 112, -355, 60, 123, 26, -116, -89, -200, -109, 168, 135, -34, -99, -54, 5, -81, 322, 87, 4, -139, 420, 92, -295, -12, 262, -1, 26, -48, 231, -335, 244, 188, 5, -362, 57, -198, -184, -117, 40, 305, 49, 30, -59, -26, -37, 96]), &[80]).unwrap();
/// let mut indices = vec![63, 67];
/// assert_eq!(a.remove_indices(&mut indices, false).unwrap(), b);
/// assert_eq!(a.remove_indices(&mut indices, true).unwrap(), b);
/// ```
pub fn remove_indices(
&self,
@@ -881,7 +927,7 @@ impl<T: Clone + TensorType> Tensor<T> {
}
self.dims = vec![];
}
if self.dims() == [0] && new_dims.iter().product::<usize>() == 1 {
if self.dims() == &[0] && new_dims.iter().product::<usize>() == 1 {
self.dims = Vec::from(new_dims);
} else {
let product = if new_dims != [0] {
@@ -1246,6 +1292,33 @@ impl<T: Clone + TensorType> Tensor<T> {
Tensor::new(Some(&[res]), &[1])
}
/// Applies a fallible function, in parallel, to the elements whose indices are in `filter_indices`, mutating them in place.
/// ```
/// use ezkl::tensor::{Tensor, TensorError};
/// use ezkl::fieldutils::IntegerRep;
/// use std::collections::HashSet;
/// let mut a = Tensor::<IntegerRep>::new(Some(&[1, 4]), &[2]).unwrap();
/// let filter: HashSet<usize> = [1].into_iter().collect();
/// a.par_enum_map_mut_filtered(&filter, |i| Ok::<_, TensorError>(i as IntegerRep)).unwrap();
/// assert_eq!(a, Tensor::from([1, 1].into_iter()));
/// ```
pub fn par_enum_map_mut_filtered<
F: Fn(usize) -> Result<T, E> + std::marker::Send + std::marker::Sync,
E: Error + std::marker::Send + std::marker::Sync,
>(
&mut self,
filter_indices: &std::collections::HashSet<usize>,
f: F,
) -> Result<(), E>
where
T: std::marker::Send + std::marker::Sync,
{
self.inner
.par_iter_mut()
.enumerate()
.filter(|(i, _)| filter_indices.contains(i))
.for_each(move |(i, e)| *e = f(i).unwrap());
Ok(())
}
}
impl<T: Clone + TensorType> Tensor<Tensor<T>> {
@@ -1262,9 +1335,9 @@ impl<T: Clone + TensorType> Tensor<Tensor<T>> {
pub fn combine(&self) -> Result<Tensor<T>, TensorError> {
let mut dims = 0;
let mut inner = Vec::new();
for t in self.inner.iter() {
for t in self.inner.clone().into_iter() {
dims += t.len();
inner.extend(t.inner.clone());
inner.extend(t.inner);
}
Tensor::new(Some(&inner), &[dims])
}
@@ -1735,7 +1808,7 @@ impl DataFormat {
match self {
DataFormat::NHWC => DataFormat::NCHW,
DataFormat::HWC => DataFormat::CHW,
_ => *self,
_ => self.clone(),
}
}
@@ -1835,7 +1908,7 @@ impl KernelFormat {
match self {
KernelFormat::HWIO => KernelFormat::OIHW,
KernelFormat::OHWI => KernelFormat::OIHW,
_ => *self,
_ => self.clone(),
}
}
@@ -1957,9 +2030,6 @@ mod tests {
fn tensor_slice() {
let a = Tensor::<IntegerRep>::new(Some(&[1, 2, 3, 4, 5, 6]), &[2, 3]).unwrap();
let b = Tensor::<IntegerRep>::new(Some(&[1, 4]), &[2, 1]).unwrap();
assert_eq!(
a.get_slice(&[0..2, 0..1]).unwrap(),
b.get_slice(&[0..2, 0..1]).unwrap()
);
assert_eq!(a.get_slice(&[0..2, 0..1]).unwrap(), b);
}
}

View File

@@ -916,14 +916,11 @@ pub fn gather_elements<T: TensorType + Send + Sync>(
/// let expected = Tensor::<IntegerRep>::new(Some(&[2, 7]), &[2]).unwrap();
/// assert_eq!(result, expected);
///
pub fn gather_nd<'a, T: TensorType + Send + Sync + 'a>(
input: &'a Tensor<T>,
pub fn gather_nd<T: TensorType + Send + Sync>(
input: &Tensor<T>,
index: &Tensor<usize>,
batch_dims: usize,
) -> Result<Tensor<T>, TensorError>
where
&'a T: TensorType,
{
) -> Result<Tensor<T>, TensorError> {
// Calculate the output tensor size
let index_dims = index.dims().to_vec();
let input_dims = input.dims().to_vec();
@@ -1111,14 +1108,11 @@ where
/// assert_eq!(result, expected);
/// ````
///
pub fn scatter_nd<'a, T: TensorType + Send + Sync + 'a>(
pub fn scatter_nd<T: TensorType + Send + Sync>(
input: &Tensor<T>,
index: &Tensor<usize>,
src: &'a Tensor<T>,
) -> Result<Tensor<T>, TensorError>
where
&'a T: TensorType,
{
src: &Tensor<T>,
) -> Result<Tensor<T>, TensorError> {
// Calculate the output tensor size
let index_dims = index.dims().to_vec();
let input_dims = input.dims().to_vec();
@@ -1189,12 +1183,12 @@ pub fn abs<T: TensorType + Add<Output = T> + std::cmp::Ord + Neg<Output = T>>(
/// use ezkl::tensor::ops::intercalate_values;
///
/// let tensor = Tensor::<IntegerRep>::new(Some(&[1, 2, 3, 4]), &[2, 2]).unwrap();
/// let result = intercalate_values(&tensor, &0, 2, 1).unwrap();
/// let result = intercalate_values(&tensor, 0, 2, 1).unwrap();
///
/// let expected = Tensor::<IntegerRep>::new(Some(&[1, 0, 2, 3, 0, 4]), &[2, 3]).unwrap();
/// assert_eq!(result, expected);
///
/// let result = intercalate_values(&expected, &0, 2, 0).unwrap();
/// let result = intercalate_values(&expected, 0, 2, 0).unwrap();
/// let expected = Tensor::<IntegerRep>::new(Some(&[1, 0, 2, 0, 0, 0, 3, 0, 4]), &[3, 3]).unwrap();
///
/// assert_eq!(result, expected);
@@ -1202,7 +1196,7 @@ pub fn abs<T: TensorType + Add<Output = T> + std::cmp::Ord + Neg<Output = T>>(
/// ```
pub fn intercalate_values<T: TensorType>(
tensor: &Tensor<T>,
value: &T,
value: T,
stride: usize,
axis: usize,
) -> Result<Tensor<T>, TensorError> {
@@ -1500,7 +1494,7 @@ pub fn slice<T: TensorType + Send + Sync>(
}
}
Ok(t.get_slice(&slice)?)
t.get_slice(&slice)
}
// ---------------------------------------------------------------------------------------------------------
@@ -2420,20 +2414,20 @@ pub mod accumulated {
/// Some(&[25, 35]),
/// &[2],
/// ).unwrap();
/// assert_eq!(dot(&x, &y, 1).unwrap(), expected);
/// assert_eq!(dot(&[x, y], 1).unwrap(), expected);
/// ```
pub fn dot<T: TensorType + Mul<Output = T> + Add<Output = T>>(
a: &Tensor<T>,
b: &Tensor<T>,
inputs: &[Tensor<T>; 2],
chunk_size: usize,
) -> Result<Tensor<T>, TensorError> {
if a.len() != b.len() {
if inputs[0].clone().len() != inputs[1].clone().len() {
return Err(TensorError::DimMismatch("dot".to_string()));
}
let (a, b): (Tensor<T>, Tensor<T>) = (inputs[0].clone(), inputs[1].clone());
let transcript: Tensor<T> = a
.iter()
.zip(b.iter())
.zip(b)
.chunks(chunk_size)
.into_iter()
.scan(T::zero().unwrap(), |acc, chunk| {

Some files were not shown because too many files have changed in this diff