Compare commits


1 Commit

Author: github-actions[bot]
SHA1: 18c7e35514
Message: ci: update version string in docs
Date: 2024-04-12 22:25:16 +00:00
24 changed files with 265 additions and 359 deletions


@@ -354,7 +354,7 @@ jobs:
prove-and-verify-tests:
runs-on: non-gpu
needs: [build, library-tests, docs]
needs: [build, library-tests, docs, python-tests, python-integration-tests]
steps:
- uses: actions/checkout@v4
- uses: actions-rs/toolchain@v1
@@ -394,18 +394,14 @@ jobs:
- name: Replace memory definition in nodejs
run: |
sed -i "3s|.*|imports['env'] = {memory: new WebAssembly.Memory({initial:20,maximum:65536,shared:true})}|" tests/wasm/nodejs/ezkl.js
- name: KZG prove and verify tests (public outputs + column overflow)
run: cargo nextest run --release --verbose tests::kzg_prove_and_verify_with_overflow_::w
- name: KZG prove and verify tests (public outputs + fixed params + column overflow)
run: cargo nextest run --release --verbose tests::kzg_prove_and_verify_with_overflow_fixed_params_
- name: KZG prove and verify tests (hashed inputs + column overflow)
run: cargo nextest run --release --verbose tests::kzg_prove_and_verify_with_overflow_hashed_inputs_
- name: KZG prove and verify tests (public outputs)
run: cargo nextest run --release --verbose tests::kzg_prove_and_verify_tight_lookup_::t
- name: IPA prove and verify tests
run: cargo nextest run --release --verbose tests::ipa_prove_and_verify_::t --test-threads 1
- name: IPA prove and verify tests (ipa outputs)
run: cargo nextest run --release --verbose tests::ipa_prove_and_verify_ipa_output
- name: KZG prove and verify tests (public outputs + column overflow)
run: cargo nextest run --release --verbose tests::kzg_prove_and_verify_with_overflow_::w
- name: KZG prove and verify tests single inner col
run: cargo nextest run --release --verbose tests::kzg_prove_and_verify_single_col
- name: KZG prove and verify tests triple inner col
@@ -416,6 +412,8 @@ jobs:
run: cargo nextest run --release --verbose tests::kzg_prove_and_verify_octuple_col --test-threads 8
- name: KZG prove and verify tests (kzg outputs)
run: cargo nextest run --release --verbose tests::kzg_prove_and_verify_kzg_output
- name: KZG prove and verify tests (public outputs + fixed params + column overflow)
run: cargo nextest run --release --verbose tests::kzg_prove_and_verify_with_overflow_fixed_params_
- name: KZG prove and verify tests (public outputs)
run: cargo nextest run --release --verbose tests::kzg_prove_and_verify_::t
- name: KZG prove and verify tests (public inputs)
@@ -547,6 +545,8 @@ jobs:
with:
crate: cargo-nextest
locked: true
- name: Download MNIST
run: sh data.sh
- name: Examples
run: cargo nextest run --release tests_examples
@@ -610,24 +610,6 @@ jobs:
python-integration-tests:
runs-on: large-self-hosted
services:
# Label used to access the service container
postgres:
# Docker Hub image
image: postgres
env:
POSTGRES_USER: ubuntu
POSTGRES_HOST_AUTH_METHOD: trust
# Set health checks to wait until postgres has started
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
-v /var/run/postgresql:/var/run/postgresql
ports:
# Maps tcp port 5432 on service container to the host
- 5432:5432
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v4
@@ -652,8 +634,6 @@ jobs:
run: python -m venv .env --clear; source .env/bin/activate; pip install -r requirements.txt; python -m ensurepip --upgrade
- name: Build python ezkl
run: source .env/bin/activate; unset CONDA_PREFIX; maturin develop --features python-bindings --release
- name: Postgres tutorials
run: source .env/bin/activate; cargo nextest run py_tests::tests::postgres_ --no-capture
- name: Tictactoe tutorials
run: source .env/bin/activate; cargo nextest run py_tests::tests::tictactoe_ --test-threads 1
# - name: authenticate-kaggle-cli
@@ -671,3 +651,5 @@ jobs:
run: source .env/bin/activate; cargo nextest run py_tests::tests::voice_
- name: NBEATS tutorial
run: source .env/bin/activate; cargo nextest run py_tests::tests::nbeats_
# - name: Postgres tutorials
# run: source .env/bin/activate; cargo nextest run py_tests::tests::postgres_ --test-threads 1
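The "Replace memory definition in nodejs" step above patches the generated wasm bindings in place. As a rough illustration, the `sed` one-liner is equivalent to the following Python sketch (the path and replacement string are taken verbatim from the step; the file handling itself is illustrative):

```python
# Rewrite line 3 of the generated nodejs bindings with a shared-memory import,
# mirroring the sed command in the workflow step above.
from pathlib import Path

path = Path("tests/wasm/nodejs/ezkl.js")
lines = path.read_text().splitlines()
lines[2] = ("imports['env'] = {memory: new WebAssembly.Memory("
            "{initial:20,maximum:65536,shared:true})}")
path.write_text("\n".join(lines) + "\n")
```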


@@ -31,21 +31,10 @@ jobs:
env:
RELEASE_TAG: ${{ github.ref_name }}
run: |
sed -i "s|\"@ezkljs/engine\": \".*\"|\"@ezkljs/engine\": \"${{ github.ref_name#v }}\"|" in-browser-evm-verifier/package.json
sed -i "s|\"@ezkljs/engine\": \".*\"|\"@ezkljs/engine\": \"${{ github.ref_name }}\"|" in-browser-evm-verifier/package.json
- name: Update the engine import in in-browser-evm-verifier to use @ezkljs/engine package instead of the local one;
run: |
sed -i "s|import { encodeVerifierCalldata } from '../nodejs/ezkl';|import { encodeVerifierCalldata } from '@ezkljs/engine';|" in-browser-evm-verifier/src/index.ts
- name: Fetch integrity
run: |
ENGINE_INTEGRITY=$(npm view @ezkljs/engine@${{ github.ref_name#v }} dist.integrity)
echo "ENGINE_INTEGRITY=$ENGINE_INTEGRITY" >> $GITHUB_ENV
- name: Update pnpm-lock.yaml versions and integrity
run: |
awk -v integrity="$ENGINE_INTEGRITY" -v tag="${{ github.ref_name#v }}" '
NR==30{$0=" specifier: \"" tag "\""}
NR==31{$0=" version: \"" tag "\""}
NR==400{$0=" /@ezkljs/engine@" tag ":"}
NR==401{$0=" resolution: {integrity: \"" integrity "\"}"} 1' in-browser-evm-verifier/pnpm-lock.yaml > temp.yaml && mv temp.yaml in-browser-evm-verifier/pnpm-lock.yaml
- name: Use pnpm 8
uses: pnpm/action-setup@v2
with:
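The removed "Update pnpm-lock.yaml versions and integrity" step patches the lockfile by absolute line number. A hedged Python equivalent of that awk script (the line numbers 30/31/400/401 and the `RELEASE_TAG`/`ENGINE_INTEGRITY` variables come from the steps above; everything else is illustrative):

```python
# Pin the @ezkljs/engine entries in pnpm-lock.yaml at fixed line numbers,
# as the removed awk step did.
import os

tag = os.environ["RELEASE_TAG"].lstrip("v")  # strip the leading "v" from the tag
integrity = os.environ["ENGINE_INTEGRITY"]
lock = "in-browser-evm-verifier/pnpm-lock.yaml"

with open(lock) as f:
    lines = f.read().splitlines()

lines[29] = f'      specifier: "{tag}"'                      # NR==30
lines[30] = f'      version: "{tag}"'                        # NR==31
lines[399] = f'  /@ezkljs/engine@{tag}:'                     # NR==400
lines[400] = f'    resolution: {{integrity: "{integrity}"}}' # NR==401

with open(lock, "w") as f:
    f.write("\n".join(lines) + "\n")
```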

.gitignore (vendored)

@@ -1,5 +1,6 @@
target
pkg
data
*.csv
!examples/notebooks/eth_price.csv
*.ipynb_checkpoints

data.sh (new executable file)

@@ -0,0 +1,11 @@
#! /bin/bash
mkdir data
cd data
wget http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz
wget http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz
wget http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz
wget http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz
gzip -d *.gz
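The new `data.sh` is what the workflow's "Download MNIST" step invokes (`sh data.sh`). For environments without `wget`/`gzip` on PATH, a hedged Python equivalent using only the URLs above:

```python
# Fetch and decompress the four MNIST archives into ./data, mirroring data.sh.
import gzip
import pathlib
import shutil
import urllib.request

base = "http://yann.lecun.com/exdb/mnist/"
files = [
    "train-images-idx3-ubyte.gz",
    "train-labels-idx1-ubyte.gz",
    "t10k-images-idx3-ubyte.gz",
    "t10k-labels-idx1-ubyte.gz",
]
pathlib.Path("data").mkdir(exist_ok=True)
for name in files:
    gz = pathlib.Path("data", name)
    urllib.request.urlretrieve(base + name, gz)
    with gzip.open(gz, "rb") as src, open(gz.with_suffix(""), "wb") as dst:
        shutil.copyfileobj(src, dst)
```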


@@ -1,4 +1,4 @@
ezkl==10.4.3
ezkl==10.3.2
sphinx
sphinx-rtd-theme
sphinxcontrib-napoleon


@@ -1,7 +1,7 @@
import ezkl
project = 'ezkl'
release = '10.4.3'
release = '10.3.2'
version = release


@@ -308,7 +308,6 @@ pub fn runconv() {
tst_lbl: _,
..
} = MnistBuilder::new()
.base_path("examples/data")
.label_format_digit()
.training_set_length(50_000)
.validation_set_length(10_000)

Binary file not shown.

Binary file not shown.


@@ -7,9 +7,9 @@
"## Mean of ERC20 transfer amounts\n",
"\n",
"This notebook shows how to calculate the mean of ERC20 transfer amounts, pulling data in from a Postgres database. First we install and get the necessary libraries running. \n",
"The first of which is [shovel](https://indexsupply.com/shovel/docs/#getting-started), which is a library that allows us to pull data from the Ethereum blockchain into a Postgres database.\n",
"The first of which is [e2pg](https://github.com/indexsupply/x/tree/main/docs/e2pg), which is a library that allows us to pull data from the Ethereum blockchain into a Postgres database.\n",
"\n",
"Make sure you install postgres if needed https://indexsupply.com/shovel/docs/#getting-started. \n",
"Make sure you install postgres if needed https://postgresapp.com/. \n",
"\n"
]
},
@@ -21,81 +21,23 @@
"source": [
"import os\n",
"import getpass\n",
"import json\n",
"import time\n",
"import subprocess\n",
"\n",
"\n",
"# swap out for the relevant linux/amd64, darwin/arm64, darwin/amd64, windows/amd64\n",
"os.system(\"curl -LO https://indexsupply.net/bin/1.0/linux/amd64/shovel\")\n",
"os.system(\"chmod +x shovel\")\n",
"os.system(\"curl -LO https://indexsupply.net/bin/main/linux/amd64/e2pg\")\n",
"os.system(\"chmod +x e2pg\")\n",
"\n",
"\n",
"os.environ[\"PG_URL\"] = \"postgres://\" + getpass.getuser() + \":@localhost:5432/shovel\"\n",
"\n",
"# create a config.json file with the following contents\n",
"config = {\n",
" \"pg_url\": \"$PG_URL\",\n",
" \"eth_sources\": [\n",
" {\"name\": \"mainnet\", \"chain_id\": 1, \"url\": \"https://ethereum-rpc.publicnode.com\"},\n",
" {\"name\": \"base\", \"chain_id\": 8453, \"url\": \"https://base-rpc.publicnode.com\"}\n",
" ],\n",
" \"integrations\": [{\n",
" \"name\": \"usdc_transfer\",\n",
" \"enabled\": True,\n",
" \"sources\": [{\"name\": \"mainnet\"}, {\"name\": \"base\"}],\n",
" \"table\": {\n",
" \"name\": \"usdc\",\n",
" \"columns\": [\n",
" {\"name\": \"log_addr\", \"type\": \"bytea\"},\n",
" {\"name\": \"block_num\", \"type\": \"numeric\"},\n",
" {\"name\": \"f\", \"type\": \"bytea\"},\n",
" {\"name\": \"t\", \"type\": \"bytea\"},\n",
" {\"name\": \"v\", \"type\": \"numeric\"}\n",
" ]\n",
" },\n",
" \"block\": [\n",
" {\"name\": \"block_num\", \"column\": \"block_num\"},\n",
" {\n",
" \"name\": \"log_addr\",\n",
" \"column\": \"log_addr\",\n",
" \"filter_op\": \"contains\",\n",
" \"filter_arg\": [\n",
" \"a0b86991c6218b36c1d19d4a2e9eb0ce3606eb48\",\n",
" \"833589fCD6eDb6E08f4c7C32D4f71b54bdA02913\"\n",
" ]\n",
" }\n",
" ],\n",
" \"event\": {\n",
" \"name\": \"Transfer\",\n",
" \"type\": \"event\",\n",
" \"anonymous\": False,\n",
" \"inputs\": [\n",
" {\"indexed\": True, \"name\": \"from\", \"type\": \"address\", \"column\": \"f\"},\n",
" {\"indexed\": True, \"name\": \"to\", \"type\": \"address\", \"column\": \"t\"},\n",
" {\"indexed\": False, \"name\": \"value\", \"type\": \"uint256\", \"column\": \"v\"}\n",
" ]\n",
" }\n",
" }]\n",
"}\n",
"\n",
"# write the config to a file\n",
"with open(\"config.json\", \"w\") as f:\n",
" f.write(json.dumps(config))\n",
"\n",
"os.environ[\"PG_URL\"] = \"postgresql://\" + getpass.getuser() + \":@localhost:5432/e2pg\"\n",
"os.environ[\"RLPS_URL\"] = \"https://1.rlps.indexsupply.net\"\n",
"\n",
"# print the two env variables\n",
"os.system(\"echo $PG_URL\")\n",
"os.system(\"echo $RLPS_URL\")\n",
"\n",
"os.system(\"createdb -h localhost -p 5432 shovel\")\n",
"\n",
"os.system(\"echo shovel is now installed. starting:\")\n",
"\n",
"command = [\"./shovel\", \"-config\", \"config.json\"]\n",
"subprocess.Popen(command)\n",
"\n",
"os.system(\"echo shovel started.\")\n",
"\n",
"time.sleep(5)\n",
"os.system(\"createdb -h localhost -p 5432 e2pg\")\n",
"# equivalent of nohup ./e2pg -reset -e $RLPS_URL -pg $PG_URL &\n",
"e2pg_process = os.system(\"nohup ./e2pg -e $RLPS_URL -pg $PG_URL &\")\n",
"\n"
]
},
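The two sides of this hunk index into different databases: one launches `shovel` writing a `usdc` table, the other `e2pg` writing `erc20_transfers`. As a hedged sanity check for the shovel side (psycopg2 and the connection details are assumptions; the query string is the one used later in the notebook):

```python
# Confirm shovel is writing rows into the usdc table defined in the config.
import getpass

import psycopg2

conn = psycopg2.connect(host="localhost", port=5432,
                        dbname="shovel", user=getpass.getuser())
with conn.cursor() as cur:
    cur.execute("SELECT v FROM usdc ORDER BY block_num DESC LIMIT 5")
    print(cur.fetchall())
```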
@@ -137,13 +79,11 @@
"import json\n",
"import os\n",
"\n",
"import logging\n",
"# # uncomment for more descriptive logging \n",
"FORMAT = '%(levelname)s %(name)s %(asctime)-15s %(filename)s:%(lineno)d %(message)s'\n",
"logging.basicConfig(format=FORMAT)\n",
"logging.getLogger().setLevel(logging.DEBUG)\n",
"\n",
"print(\"ezkl version: \", ezkl.__version__)"
"# import logging\n",
"# # # uncomment for more descriptive logging \n",
"# FORMAT = '%(levelname)s %(name)s %(asctime)-15s %(filename)s:%(lineno)d %(message)s'\n",
"# logging.basicConfig(format=FORMAT)\n",
"# logging.getLogger().setLevel(logging.DEBUG)"
]
},
{
@@ -236,7 +176,6 @@
},
"outputs": [],
"source": [
"import getpass\n",
"# make an input.json file from the df above\n",
"input_filename = os.path.join('input.json')\n",
"\n",
@@ -244,9 +183,9 @@
" \"host\": \"localhost\",\n",
" # make sure you replace this with your own username\n",
" \"user\": getpass.getuser(),\n",
" \"dbname\": \"shovel\",\n",
" \"dbname\": \"e2pg\",\n",
" \"password\": \"\",\n",
" \"query\": \"SELECT v FROM usdc ORDER BY block_num DESC LIMIT 5\",\n",
" \"query\": \"SELECT value FROM erc20_transfers ORDER BY block_number DESC LIMIT 5\",\n",
" \"port\": \"5432\",\n",
"})\n",
"\n",
@@ -255,7 +194,7 @@
"\n",
"\n",
" # Serialize data into file:\n",
"json.dump(pg_input_file, open(input_filename, 'w' ))\n"
"json.dump( pg_input_file, open(input_filename, 'w' ))\n"
]
},
{
@@ -271,9 +210,9 @@
" \"host\": \"localhost\",\n",
" # make sure you replace this with your own username\n",
" \"user\": getpass.getuser(),\n",
" \"dbname\": \"shovel\",\n",
" \"dbname\": \"e2pg\",\n",
" \"password\": \"\",\n",
" \"query\": \"SELECT v FROM usdc ORDER BY block_num DESC LIMIT 20\",\n",
" \"query\": \"SELECT value FROM erc20_transfers ORDER BY block_number DESC LIMIT 20\",\n",
" \"port\": \"5432\",\n",
"})\n",
"\n",
@@ -290,6 +229,22 @@
"**EZKL Workflow**"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"onnx_filename = os.path.join('lol.onnx')\n",
"compiled_filename = os.path.join('lol.compiled')\n",
"settings_filename = os.path.join('settings.json')\n",
"\n",
"ezkl.gen_settings(onnx_filename, settings_filename)\n",
"\n",
"ezkl.calibrate_settings(\n",
" input_filename, onnx_filename, settings_filename, \"resources\")"
]
},
{
"cell_type": "code",
"execution_count": null,
@@ -298,21 +253,10 @@
},
"outputs": [],
"source": [
"import subprocess\n",
"import os\n",
"# setup kzg params\n",
"params_path = os.path.join('kzg.params')\n",
"\n",
"onnx_filename = os.path.join('lol.onnx')\n",
"compiled_filename = os.path.join('lol.compiled')\n",
"settings_filename = os.path.join('settings.json')\n",
"\n",
"# Generate settings using ezkl\n",
"res = ezkl.gen_settings(onnx_filename, settings_filename)\n",
"\n",
"assert res == True\n",
"\n",
"res = ezkl.calibrate_settings(input_filename, onnx_filename, settings_filename, \"resources\")\n",
"\n",
"assert res == True"
"res = ezkl.get_srs(params_path, settings_filename)"
]
},
{
@@ -362,13 +306,16 @@
"source": [
"pk_path = os.path.join('test.pk')\n",
"vk_path = os.path.join('test.vk')\n",
"params_path = os.path.join('kzg.params')\n",
"\n",
"\n",
"# setup the proof\n",
"res = ezkl.setup(\n",
" compiled_filename,\n",
" vk_path,\n",
" pk_path\n",
" pk_path,\n",
" params_path,\n",
" settings_filename,\n",
" )\n",
"\n",
"assert res == True\n",
@@ -384,14 +331,11 @@
"metadata": {},
"outputs": [],
"source": [
"\n",
"witness_path = \"witness.json\"\n",
"\n",
"# generate the witness\n",
"res = ezkl.gen_witness(\n",
" input_filename,\n",
" compiled_filename,\n",
" witness_path\n",
" )\n"
"res = ezkl.gen_witness(input_filename, compiled_filename, witness_path)\n",
"assert os.path.isfile(witness_path)"
]
},
{
@@ -416,14 +360,73 @@
" compiled_filename,\n",
" pk_path,\n",
" proof_path,\n",
" \"single\"\n",
" params_path,\n",
" \"single\",\n",
" )\n",
"\n",
"\n",
"print(\"proved\")\n",
"\n",
"assert os.path.isfile(proof_path)\n",
"\n"
"\n",
"# verify\n",
"res = ezkl.verify(\n",
" proof_path,\n",
" settings_filename,\n",
" vk_path,\n",
" params_path,\n",
" )\n",
"\n",
"assert res == True\n",
"print(\"verified\")"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "W7tAa-DFAtvS"
},
"source": [
"# Part 2 (Using the ZK Computational Graph Onchain!)"
]
},
{
"cell_type": "markdown",
"metadata": {
"id": "8Ym91kaVAIB6"
},
"source": [
"**Now How Do We Do It Onchain?????**"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/",
"height": 339
},
"id": "fodkNgwS70FM",
"outputId": "827b5efd-f74f-44de-c114-861b3a86daf2"
},
"outputs": [],
"source": [
"# first we need to create evm verifier\n",
"print(vk_path)\n",
"print(params_path)\n",
"print(settings_filename)\n",
"\n",
"\n",
"abi_path = 'test.abi'\n",
"sol_code_path = 'test.sol'\n",
"\n",
"res = ezkl.create_evm_verifier(\n",
" vk_path,\n",
" params_path,\n",
" settings_filename,\n",
" sol_code_path,\n",
" abi_path,\n",
" )\n",
"assert res == True"
]
},
{
@@ -432,8 +435,51 @@
"metadata": {},
"outputs": [],
"source": [
"# kill all shovel process \n",
"os.system(\"pkill -f shovel\")"
"# Make sure anvil is running locally first\n",
"# run with $ anvil -p 3030\n",
"# we use the default anvil node here\n",
"import json\n",
"\n",
"address_path = os.path.join(\"address.json\")\n",
"\n",
"res = ezkl.deploy_evm(\n",
" address_path,\n",
" sol_code_path,\n",
" 'http://127.0.0.1:3030'\n",
")\n",
"\n",
"assert res == True\n",
"\n",
"with open(address_path, 'r') as file:\n",
" addr = file.read().rstrip()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# read the address from addr_path\n",
"addr = None\n",
"with open(address_path, 'r') as f:\n",
" addr = f.read()\n",
"\n",
"res = ezkl.verify_evm(\n",
" addr,\n",
" proof_path,\n",
" \"http://127.0.0.1:3030\"\n",
")\n",
"assert res == True"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"os.system(\"killall -9 e2pg\");"
]
}
],
@@ -455,7 +501,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.2"
"version": "3.9.13"
}
},
"nbformat": 4,


@@ -1,6 +1,6 @@
{
"name": "@ezkljs/verify",
"version": "v10.4.2",
"version": "0.0.0",
"publishConfig": {
"access": "public"
},
@@ -27,7 +27,7 @@
"@ethereumjs/util": "9.0.0",
"@ethereumjs/vm": "7.0.0",
"@ethersproject/abi": "5.7.0",
"@ezkljs/engine": "10.4.2",
"@ezkljs/engine": "^9.4.4",
"ethers": "6.7.1",
"json-bigint": "1.0.0"
},


@@ -27,8 +27,8 @@ dependencies:
specifier: 5.7.0
version: 5.7.0
'@ezkljs/engine':
specifier: "10.4.2"
version: "10.4.2"
specifier: ^9.4.4
version: 9.4.4
ethers:
specifier: 6.7.1
version: 6.7.1
@@ -397,8 +397,8 @@ packages:
'@ethersproject/strings': 5.7.0
dev: false
/@ezkljs/engine@10.4.2:
resolution: {integrity: "sha512-1GNB4vChbaQ1ALcYbEbM/AFoh4QWtswpzGCO/g9wL8Ep6NegM2gQP/uWICU7Utl0Lj1DncXomD7PUhFSXhtx8A=="}
/@ezkljs/engine@9.4.4:
resolution: {integrity: sha512-kNsTmDQa8mIiQ6yjJmBMwVgAAxh4nfs4NCtnewJifonyA8Mfhs+teXwwW8WhERRDoQPUofKO2pT8BPvV/XGIDA==}
dependencies:
'@types/json-bigint': 1.0.2
json-bigint: 1.0.0


@@ -44,11 +44,12 @@ impl PolyCommitChip {
/// Commit to the message using the KZG commitment scheme
pub fn commit<Scheme: CommitmentScheme<Scalar = Fp, Curve = G1Affine>>(
message: Vec<Scheme::Scalar>,
degree: u32,
num_unusable_rows: u32,
params: &Scheme::ParamsProver,
) -> Vec<G1Affine> {
let k = params.k();
let domain = halo2_proofs::poly::EvaluationDomain::new(2, k);
let domain = halo2_proofs::poly::EvaluationDomain::new(degree, k);
let n = 2_u64.pow(k) - num_unusable_rows as u64;
let num_poly = (message.len() / n as usize) + 1;
let mut poly = vec![domain.empty_lagrange(); num_poly];
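With this change `commit` takes the constraint-system degree instead of hard-coding an extended domain of 2, and the call sites elsewhere in this diff pass `vk.cs().degree()` alongside the blinding-row count. The row accounting itself is simple; a small Python sketch with hypothetical sizes:

```python
# Usable rows per polynomial are the 2^k domain minus the unusable (blinding)
# rows; the message is then split across (len // n) + 1 Lagrange polynomials,
# matching the Rust above. All concrete values here are hypothetical.
k = 17                  # params.k()
num_unusable_rows = 7   # (vk.cs().blinding_factors() + 1)
message_len = 250_000

n = 2**k - num_unusable_rows
num_poly = message_len // n + 1
print(f"{n} usable rows per poly, {num_poly} polys for {message_len} scalars")
```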


@@ -196,6 +196,7 @@ pub async fn run(command: Commands) -> Result<String, Box<dyn Error>> {
vk_path,
srs_path,
} => gen_witness(compiled_circuit, data, Some(output), vk_path, srs_path)
.await
.map(|e| serde_json::to_string(&e).unwrap()),
Commands::Mock { model, witness } => mock(model, witness),
#[cfg(not(target_arch = "wasm32"))]
@@ -636,7 +637,7 @@ pub(crate) fn table(model: PathBuf, run_args: RunArgs) -> Result<String, Box<dyn
Ok(String::new())
}
pub(crate) fn gen_witness(
pub(crate) async fn gen_witness(
compiled_circuit_path: PathBuf,
data: PathBuf,
output: Option<PathBuf>,
@@ -659,7 +660,7 @@ pub(crate) fn gen_witness(
};
#[cfg(not(target_arch = "wasm32"))]
let mut input = circuit.load_graph_input(&data)?;
let mut input = circuit.load_graph_input(&data).await?;
#[cfg(target_arch = "wasm32")]
let mut input = circuit.load_graph_input(&data)?;
@@ -1228,14 +1229,22 @@ pub(crate) fn calibrate(
);
if matches!(target, CalibrationTarget::Resources { col_overflow: true }) {
let lookup_log_rows = best_params.lookup_log_rows_with_blinding();
let module_log_row = best_params.module_constraint_logrows_with_blinding();
let instance_logrows = best_params.log2_total_instances_with_blinding();
let dynamic_lookup_logrows = best_params.dynamic_lookup_and_shuffle_logrows_with_blinding();
let mut reduction = std::cmp::max(lookup_log_rows, module_log_row);
reduction = std::cmp::max(reduction, instance_logrows);
reduction = std::cmp::max(reduction, dynamic_lookup_logrows);
let lookup_log_rows = ((best_params.run_args.lookup_range.1
- best_params.run_args.lookup_range.0) as f32)
.log2()
.ceil() as u32
+ 1;
let mut reduction = std::cmp::max(
(best_params
.model_instance_shapes
.iter()
.map(|x| x.iter().product::<usize>())
.sum::<usize>() as f32)
.log2()
.ceil() as u32
+ 1,
lookup_log_rows,
);
reduction = std::cmp::max(reduction, crate::graph::MIN_LOGROWS);
info!(
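The rewritten reduction recomputes the same quantity from raw settings: each term is a `ceil(log2(...)) + 1` over a table or instance size, and the final reduction is the max of those terms and the crate's logrow floor. In Python, with hypothetical inputs:

```python
# Hedged sketch of the reduction computed in `calibrate` above.
import math

lookup_range = (-32768, 32768)    # hypothetical run_args.lookup_range
instance_shapes = [[1, 3, 8, 8]]  # hypothetical model_instance_shapes
MIN_LOGROWS = 6                   # hypothetical value of crate::graph::MIN_LOGROWS

lookup_log_rows = math.ceil(math.log2(lookup_range[1] - lookup_range[0])) + 1
instance_logrows = math.ceil(
    math.log2(sum(math.prod(s) for s in instance_shapes))) + 1

reduction = max(instance_logrows, lookup_log_rows)
reduction = max(reduction, MIN_LOGROWS)
```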


@@ -21,6 +21,8 @@ use std::io::BufWriter;
use std::io::Read;
use std::panic::UnwindSafe;
#[cfg(not(target_arch = "wasm32"))]
use std::thread;
#[cfg(not(target_arch = "wasm32"))]
use tract_onnx::tract_core::{
tract_data::{prelude::Tensor as TractTensor, TVec},
value::TValue,
@@ -232,15 +234,21 @@ impl PostgresSource {
)
};
let mut client = Client::connect(&config, NoTls)?;
let mut res: Vec<pg_bigdecimal::PgNumeric> = Vec::new();
// extract rows from query
for row in client.query(&query, &[])? {
// extract features from row
for i in 0..row.len() {
res.push(row.get(i));
let res: Vec<pg_bigdecimal::PgNumeric> = thread::spawn(move || {
let mut client = Client::connect(&config, NoTls).unwrap();
let mut res: Vec<pg_bigdecimal::PgNumeric> = Vec::new();
// extract rows from query
for row in client.query(&query, &[]).unwrap() {
// extract features from row
for i in 0..row.len() {
res.push(row.get(i));
}
}
}
res
})
.join()
.map_err(|_| "failed to fetch data from postgres")?;
Ok(vec![res])
}
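Because `gen_witness` is now async, the blocking postgres client is moved onto a dedicated thread and joined, rather than running inline. A hedged Python analogue of the same pattern (psycopg2 and the table come from the notebook above; this is not the crate's code):

```python
# Run a blocking DB query on a worker thread so an async caller isn't blocked.
import asyncio
import getpass

import psycopg2

def fetch_rows(query: str) -> list:
    conn = psycopg2.connect(host="localhost", dbname="shovel",
                            user=getpass.getuser())
    with conn.cursor() as cur:
        cur.execute(query)
        # flatten each row into a single feature vector, as the Rust loop does
        return [value for row in cur.fetchall() for value in row]

async def load_input(query: str) -> list:
    return await asyncio.to_thread(fetch_rows, query)
```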


@@ -483,22 +483,7 @@ pub struct GraphSettings {
}
impl GraphSettings {
/// Calc the number of rows required for lookup tables
pub fn lookup_log_rows(&self) -> u32 {
((self.run_args.lookup_range.1 - self.run_args.lookup_range.0) as f32)
.log2()
.ceil() as u32
}
/// Calc the number of rows required for lookup tables
pub fn lookup_log_rows_with_blinding(&self) -> u32 {
((self.run_args.lookup_range.1 - self.run_args.lookup_range.0) as f32
+ RESERVED_BLINDING_ROWS as f32)
.log2()
.ceil() as u32
}
fn model_constraint_logrows_with_blinding(&self) -> u32 {
fn model_constraint_logrows(&self) -> u32 {
(self.num_rows as f64 + RESERVED_BLINDING_ROWS as f64)
.log2()
.ceil() as u32
@@ -510,31 +495,14 @@ impl GraphSettings {
.ceil() as u32
}
/// calculate the number of rows required for the dynamic lookup and shuffle
pub fn dynamic_lookup_and_shuffle_logrows_with_blinding(&self) -> u32 {
(self.total_dynamic_col_size as f64
+ self.total_shuffle_col_size as f64
+ RESERVED_BLINDING_ROWS as f64)
.log2()
.ceil() as u32
}
fn dynamic_lookup_and_shuffle_col_size(&self) -> usize {
self.total_dynamic_col_size + self.total_shuffle_col_size
}
/// calculate the number of rows required for the module constraints
pub fn module_constraint_logrows(&self) -> u32 {
fn module_constraint_logrows(&self) -> u32 {
(self.module_sizes.max_constraints() as f64).log2().ceil() as u32
}
/// calculate the number of rows required for the module constraints
pub fn module_constraint_logrows_with_blinding(&self) -> u32 {
(self.module_sizes.max_constraints() as f64 + RESERVED_BLINDING_ROWS as f64)
.log2()
.ceil() as u32
}
fn constants_logrows(&self) -> u32 {
(self.total_const_size as f64 / self.run_args.num_inner_cols as f64)
.log2()
@@ -561,14 +529,6 @@ impl GraphSettings {
std::cmp::max((sum as f64).log2().ceil() as u32, 1)
}
/// calculate the log2 of the total number of instances
pub fn log2_total_instances_with_blinding(&self) -> u32 {
let sum = self.total_instances().iter().sum::<usize>() + RESERVED_BLINDING_ROWS;
// max between 1 and the log2 of the sums
std::cmp::max((sum as f64).log2().ceil() as u32, 1)
}
/// save params to file
pub fn save(&self, path: &std::path::PathBuf) -> Result<(), std::io::Error> {
// buf writer
@@ -958,7 +918,7 @@ impl GraphCircuit {
///
#[cfg(not(target_arch = "wasm32"))]
pub fn load_graph_input(
pub async fn load_graph_input(
&mut self,
data: &GraphData,
) -> Result<Vec<Tensor<Fp>>, Box<dyn std::error::Error>> {
@@ -968,6 +928,7 @@ impl GraphCircuit {
debug!("input scales: {:?}", scales);
self.process_data_source(&data.input_data, shapes, scales, input_types)
.await
}
#[cfg(target_arch = "wasm32")]
@@ -991,7 +952,7 @@ impl GraphCircuit {
#[cfg(not(target_arch = "wasm32"))]
/// Process the data source for the model
fn process_data_source(
async fn process_data_source(
&mut self,
data: &DataSource,
shapes: Vec<Vec<usize>>,
@@ -1004,16 +965,8 @@ impl GraphCircuit {
for (i, shape) in shapes.iter().enumerate() {
per_item_scale.extend(vec![scales[i]; shape.iter().product::<usize>()]);
}
// start runtime and fetch data
let runtime = tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()?;
runtime.block_on(async {
self.load_on_chain_data(source.clone(), &shapes, per_item_scale)
.await
})
self.load_on_chain_data(source.clone(), &shapes, per_item_scale)
.await
}
DataSource::File(file_data) => {
self.load_file_data(file_data, &shapes, scales, input_types)
@@ -1173,7 +1126,7 @@ impl GraphCircuit {
);
// These are upper limits, going above these is wasteful, but they are not hard limits
let model_constraint_logrows = self.settings().model_constraint_logrows_with_blinding();
let model_constraint_logrows = self.settings().model_constraint_logrows();
let min_bits = self.table_size_logrows(safe_lookup_range, max_range_size)?;
let constants_logrows = self.settings().constants_logrows();
max_logrows = std::cmp::min(
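The helpers touched here all share one shape: take a region size, optionally pad it with the reserved blinding rows, and return `ceil(log2(...))`. A hedged Python rendering of the two variants (the constant's value is hypothetical):

```python
# The *_with_blinding variants add RESERVED_BLINDING_ROWS to the region size
# before taking ceil(log2(...)); the plain variants omit the headroom.
import math

RESERVED_BLINDING_ROWS = 7  # hypothetical value of the crate constant

def logrows(size: float) -> int:
    return math.ceil(math.log2(size))

num_rows = 10_000  # hypothetical settings.num_rows
with_blinding = logrows(num_rows + RESERVED_BLINDING_ROWS)
without_blinding = logrows(num_rows)
```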


@@ -314,6 +314,7 @@ impl GraphModules {
let commitments = inputs.iter().fold(vec![], |mut acc, x| {
let res = PolyCommitChip::commit::<Scheme>(
x.to_vec(),
vk.cs().degree() as u32,
(vk.cs().blinding_factors() + 1) as u32,
srs,
);


@@ -200,13 +200,13 @@ pub struct RunArgs {
/// Hand-written parser for graph variables, eg. batch_size=1
#[arg(short = 'V', long, value_parser = parse_key_val::<String, usize>, default_value = "batch_size->1", value_delimiter = ',')]
pub variables: Vec<(String, usize)>,
/// Flags whether inputs are public, private, hashed, fixed, kzgcommit
/// Flags whether inputs are public, private, hashed
#[arg(long, default_value = "private")]
pub input_visibility: Visibility,
/// Flags whether outputs are public, private, fixed, hashed, kzgcommit
/// Flags whether outputs are public, private, hashed
#[arg(long, default_value = "public")]
pub output_visibility: Visibility,
/// Flags whether params are fixed, private, hashed, kzgcommit
/// Flags whether params are public, private, hashed
#[arg(long, default_value = "private")]
pub param_visibility: Visibility,
#[arg(long, default_value = "false")]
@@ -248,12 +248,6 @@ impl Default for RunArgs {
impl RunArgs {
///
pub fn validate(&self) -> Result<(), Box<dyn std::error::Error>> {
if self.param_visibility == Visibility::Public {
return Err(
"params cannot be public instances, you are probably trying to use `fixed` or `kzgcommit`"
.into(),
);
}
if self.scale_rebase_multiplier < 1 {
return Err("scale_rebase_multiplier must be >= 1".into());
}


@@ -547,6 +547,7 @@ fn kzg_commit(
let output = PolyCommitChip::commit::<KZGCommitmentScheme<Bn256>>(
message,
vk.cs().degree() as u32,
(vk.cs().blinding_factors() + 1) as u32,
&srs,
);
@@ -605,6 +606,7 @@ fn ipa_commit(
let output = PolyCommitChip::commit::<IPACommitmentScheme<G1Affine>>(
message,
vk.cs().degree() as u32,
(vk.cs().blinding_factors() + 1) as u32,
&srs,
);
@@ -940,7 +942,7 @@ fn calibrate_settings(
///
#[pyfunction(signature = (
data=PathBuf::from(DEFAULT_DATA),
model=PathBuf::from(DEFAULT_COMPILED_CIRCUIT),
model=PathBuf::from(DEFAULT_MODEL),
output=PathBuf::from(DEFAULT_WITNESS),
vk_path=None,
srs_path=None,
@@ -952,8 +954,12 @@ fn gen_witness(
vk_path: Option<PathBuf>,
srs_path: Option<PathBuf>,
) -> PyResult<PyObject> {
let output =
crate::execute::gen_witness(model, data, output, vk_path, srs_path).map_err(|e| {
let output = Runtime::new()
.unwrap()
.block_on(crate::execute::gen_witness(
model, data, output, vk_path, srs_path,
))
.map_err(|e| {
let err_str = format!("Failed to run generate witness: {}", e);
PyRuntimeError::new_err(err_str)
})?;
@@ -976,7 +982,7 @@ fn gen_witness(
///
#[pyfunction(signature = (
witness=PathBuf::from(DEFAULT_WITNESS),
model=PathBuf::from(DEFAULT_COMPILED_CIRCUIT),
model=PathBuf::from(DEFAULT_MODEL),
))]
fn mock(witness: PathBuf, model: PathBuf) -> PyResult<bool> {
crate::execute::mock(model, witness).map_err(|e| {
@@ -1048,7 +1054,7 @@ fn mock_aggregate(
/// bool
///
#[pyfunction(signature = (
model=PathBuf::from(DEFAULT_COMPILED_CIRCUIT),
model=PathBuf::from(DEFAULT_MODEL),
vk_path=PathBuf::from(DEFAULT_VK),
pk_path=PathBuf::from(DEFAULT_PK),
srs_path=None,
@@ -1107,7 +1113,7 @@ fn setup(
///
#[pyfunction(signature = (
witness=PathBuf::from(DEFAULT_WITNESS),
model=PathBuf::from(DEFAULT_COMPILED_CIRCUIT),
model=PathBuf::from(DEFAULT_MODEL),
pk_path=PathBuf::from(DEFAULT_PK),
proof_path=None,
proof_type=ProofType::default(),
@@ -1166,21 +1172,26 @@ fn prove(
settings_path=PathBuf::from(DEFAULT_SETTINGS),
vk_path=PathBuf::from(DEFAULT_VK),
srs_path=None,
reduced_srs=DEFAULT_USE_REDUCED_SRS_FOR_VERIFICATION.parse::<bool>().unwrap(),
non_reduced_srs=DEFAULT_USE_REDUCED_SRS_FOR_VERIFICATION.parse::<bool>().unwrap(),
))]
fn verify(
proof_path: PathBuf,
settings_path: PathBuf,
vk_path: PathBuf,
srs_path: Option<PathBuf>,
reduced_srs: bool,
non_reduced_srs: bool,
) -> Result<bool, PyErr> {
crate::execute::verify(proof_path, settings_path, vk_path, srs_path, reduced_srs).map_err(
|e| {
let err_str = format!("Failed to run verify: {}", e);
PyRuntimeError::new_err(err_str)
},
)?;
crate::execute::verify(
proof_path,
settings_path,
vk_path,
srs_path,
non_reduced_srs,
)
.map_err(|e| {
let err_str = format!("Failed to run verify: {}", e);
PyRuntimeError::new_err(err_str)
})?;
Ok(true)
}
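From the Python side the bindings keep the same call shapes. A hedged usage sketch matching the `#[pyfunction]` signatures above (file names are placeholders):

```python
# Hedged usage of the bindings whose defaults and flags change in this diff.
import ezkl

# gen_witness: the Rust side is now async and wrapped in a tokio Runtime,
# but the Python call is unchanged (data, model, output).
ezkl.gen_witness("input.json", "model.compiled", "witness.json")

# verify: same positional arguments; only the keyword name of the final SRS
# flag differs between the two sides of this diff (reduced_srs / non_reduced_srs).
assert ezkl.verify("proof.json", "settings.json", "test.vk")
```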


@@ -1,4 +1,3 @@
use crate::circuit::modules::polycommit::PolyCommitChip;
use crate::circuit::modules::poseidon::spec::{PoseidonSpec, POSEIDON_RATE, POSEIDON_WIDTH};
use crate::circuit::modules::poseidon::PoseidonChip;
use crate::circuit::modules::Module;
@@ -148,44 +147,6 @@ pub fn floatToFelt(
)?))
}
/// Generate a kzg commitment.
#[wasm_bindgen]
#[allow(non_snake_case)]
pub fn kzgCommit(
message: wasm_bindgen::Clamped<Vec<u8>>,
vk: wasm_bindgen::Clamped<Vec<u8>>,
settings: wasm_bindgen::Clamped<Vec<u8>>,
params_ser: wasm_bindgen::Clamped<Vec<u8>>,
) -> Result<wasm_bindgen::Clamped<Vec<u8>>, JsError> {
let message: Vec<Fr> = serde_json::from_slice(&message[..])
.map_err(|e| JsError::new(&format!("Failed to deserialize message: {}", e)))?;
let mut reader = std::io::BufReader::new(&params_ser[..]);
let params: ParamsKZG<Bn256> =
halo2_proofs::poly::commitment::Params::<'_, G1Affine>::read(&mut reader)
.map_err(|e| JsError::new(&format!("Failed to deserialize params: {}", e)))?;
let mut reader = std::io::BufReader::new(&vk[..]);
let circuit_settings: GraphSettings = serde_json::from_slice(&settings[..])
.map_err(|e| JsError::new(&format!("Failed to deserialize settings: {}", e)))?;
let vk = VerifyingKey::<G1Affine>::read::<_, GraphCircuit>(
&mut reader,
halo2_proofs::SerdeFormat::RawBytes,
circuit_settings,
)
.map_err(|e| JsError::new(&format!("Failed to deserialize vk: {}", e)))?;
let output = PolyCommitChip::commit::<KZGCommitmentScheme<Bn256>>(
message,
(vk.cs().blinding_factors() + 1) as u32,
&params,
);
Ok(wasm_bindgen::Clamped(
serde_json::to_vec(&output).map_err(|e| JsError::new(&format!("{}", e)))?,
))
}
/// Converts a buffer to vector of 4 u64s representing a fixed point field element
#[wasm_bindgen]
#[allow(non_snake_case)]


@@ -899,7 +899,7 @@ mod native_tests {
seq!(N in 0..=45 {
#(#[test_case(WASM_TESTS[N])])*
fn kzg_prove_and_verify_with_overflow_(test: &str) {
fn prove_and_verify_with_overflow_(test: &str) {
crate::native_tests::init_binary();
// crate::native_tests::init_wasm();
let test_dir = TempDir::new(test).unwrap();
@@ -912,20 +912,7 @@ mod native_tests {
}
#(#[test_case(WASM_TESTS[N])])*
fn kzg_prove_and_verify_with_overflow_hashed_inputs_(test: &str) {
crate::native_tests::init_binary();
// crate::native_tests::init_wasm();
let test_dir = TempDir::new(test).unwrap();
env_logger::init();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
prove_and_verify(path, test.to_string(), "safe", "hashed", "private", "public", 1, None, true, "single", Commitments::KZG, 2);
#[cfg(not(feature = "icicle"))]
run_js_tests(path, test.to_string(), "testWasm", false);
// test_dir.close().unwrap();
}
#(#[test_case(WASM_TESTS[N])])*
fn kzg_prove_and_verify_with_overflow_fixed_params_(test: &str) {
fn prove_and_verify_with_overflow_fixed_params_(test: &str) {
crate::native_tests::init_binary();
// crate::native_tests::init_wasm();
let test_dir = TempDir::new(test).unwrap();


@@ -1,29 +1,21 @@
#[cfg(all(target_arch = "wasm32", target_os = "unknown"))]
#[cfg(test)]
mod wasm32 {
use ezkl::circuit::modules::polycommit::PolyCommitChip;
use ezkl::circuit::modules::poseidon::spec::{PoseidonSpec, POSEIDON_RATE, POSEIDON_WIDTH};
use ezkl::circuit::modules::poseidon::PoseidonChip;
use ezkl::circuit::modules::Module;
use ezkl::graph::modules::POSEIDON_LEN_GRAPH;
use ezkl::graph::GraphCircuit;
use ezkl::graph::{GraphSettings, GraphWitness};
use ezkl::graph::GraphWitness;
use ezkl::pfsys;
use ezkl::wasm::{
bufferToVecOfFelt, compiledCircuitValidation, encodeVerifierCalldata, feltToBigEndian,
feltToFloat, feltToInt, feltToLittleEndian, genPk, genVk, genWitness, inputValidation,
kzgCommit, pkValidation, poseidonHash, proofValidation, prove, settingsValidation,
srsValidation, u8_array_to_u128_le, verify, verifyAggr, vkValidation, witnessValidation,
pkValidation, poseidonHash, proofValidation, prove, settingsValidation, srsValidation,
u8_array_to_u128_le, verify, verifyAggr, vkValidation, witnessValidation,
};
use halo2_proofs::plonk::VerifyingKey;
use halo2_proofs::poly::commitment::CommitmentScheme;
use halo2_proofs::poly::kzg::commitment::KZGCommitmentScheme;
use halo2_proofs::poly::kzg::commitment::ParamsKZG;
use halo2_solidity_verifier::encode_calldata;
use halo2curves::bn256::Bn256;
use halo2curves::bn256::{Fr, G1Affine};
use snark_verifier::util::arithmetic::PrimeField;
use wasm_bindgen::JsError;
#[cfg(feature = "web")]
pub use wasm_bindgen_rayon::init_thread_pool;
use wasm_bindgen_test::*;
@@ -98,45 +90,6 @@ mod wasm32 {
assert_eq!(calldata, reference_calldata);
}
#[wasm_bindgen_test]
fn verify_kzg_commit() {
// create a vector of field elements Vec<Fr> and assign it to the message variable
let mut message: Vec<Fr> = vec![];
for i in 0..32 {
message.push(Fr::from(i as u64));
}
let message_ser = serde_json::to_vec(&message).unwrap();
let settings: GraphSettings = serde_json::from_slice(&SETTINGS).unwrap();
let mut reader = std::io::BufReader::new(SRS);
let params: ParamsKZG<Bn256> =
halo2_proofs::poly::commitment::Params::<'_, G1Affine>::read(&mut reader).unwrap();
let mut reader = std::io::BufReader::new(VK);
let vk = VerifyingKey::<G1Affine>::read::<_, GraphCircuit>(
&mut reader,
halo2_proofs::SerdeFormat::RawBytes,
settings.clone(),
)
.unwrap();
let commitment_ser = kzgCommit(
wasm_bindgen::Clamped(message_ser),
wasm_bindgen::Clamped(VK.to_vec()),
wasm_bindgen::Clamped(SETTINGS.to_vec()),
wasm_bindgen::Clamped(SRS.to_vec()),
)
.map_err(|_| "failed")
.unwrap();
let commitment: Vec<halo2curves::bn256::G1Affine> =
serde_json::from_slice(&commitment_ser[..]).unwrap();
let reference_commitment = PolyCommitChip::commit::<KZGCommitmentScheme<Bn256>>(
message,
(vk.cs().blinding_factors() + 1) as u32,
&params,
);
assert_eq!(commitment, reference_commitment);
}
#[wasm_bindgen_test]
async fn verify_field_serialization_roundtrip() {
for i in 0..32 {