Compare commits


19 Commits

Author  SHA1        Message  Date
dante   9daeae3d25  Update rust.yml  2024-08-18 09:28:49 -04:00
dante   592c940060  Update Cargo.toml  2024-08-17 19:29:30 -04:00
dante   5859c0f07e  Update Cargo.toml  2024-08-17 17:54:27 -04:00
dante   f78ac60551  Update Cargo.toml  2024-08-17 17:54:02 -04:00
dante   ab3b85351b  fix: undo cargo upgrade  2024-08-17 17:52:11 -04:00
dante   f276775333  fix: patch wasm  2024-08-16 12:15:06 -04:00
dante   7a45a4a3d7  patch name clashes  2024-08-16 11:31:47 -04:00
dante   7bd579de2f  chore: cache lookup tables  2024-08-16 11:15:54 -04:00
dante   0bfe0b53ba  Merge branch 'main' into ac/-patch-par-main  2024-08-16 10:57:41 -04:00
dante   134b54d32b  Update Cargo.toml  2024-08-15 12:42:45 -04:00
dante   beb5f12376  chore: use mimalloc  2024-08-15 12:40:55 -04:00
dante   65be3c84bb  Update Cargo.toml  2024-08-14 18:05:26 -04:00
dante   6f743c57d3  chore: parallelize prepare and commit  2024-08-13 15:06:47 -04:00
dante   ddb54c5a73  feat: precompute lookup cosets  2024-08-08 18:15:22 -04:00
dante   6e1f22a15b  log lack of cache  2024-08-08 10:44:41 -04:00
dante   da97323bde  feat: cache lookup tables  2024-08-08 09:12:40 -04:00
dante   55046feeb6  Update Cargo.toml  2024-08-07 23:42:19 -04:00
dante   d0d0596e58  chore: bump h2  2024-08-07 23:40:42 -04:00
dante   b78efdcbf4  fix: add required serde patches  2024-08-07 18:30:08 -04:00
15 changed files with 250 additions and 37 deletions

.github/workflows/rust.yml

@@ -327,8 +327,8 @@ jobs:
cd in-browser-evm-verifier
pnpm build:commonjs
cd ..
- name: Install solc
run: (hash svm 2>/dev/null || cargo install svm-rs) && svm install 0.8.20 && solc --version
# - name: Install solc
# run: (hash svm 2>/dev/null || cargo install svm-rs) && svm install 0.8.20 && solc --version
- name: Install Anvil
run: cargo install --git https://github.com/foundry-rs/foundry --rev 62cdea8ff9e6efef011f77e295823b5f2dbeb3a1 --locked anvil --force
- name: KZG prove and verify tests (EVM + VK rendered seperately)
@@ -536,8 +536,8 @@ jobs:
with:
crate: cargo-nextest
locked: true
- name: Install solc
run: (hash svm 2>/dev/null || cargo install svm-rs) && svm install 0.8.20 && solc --version
# - name: Install solc
# run: (hash svm 2>/dev/null || cargo install svm-rs) && svm install 0.8.20 && solc --version
- name: Install Anvil
run: cargo install --git https://github.com/foundry-rs/foundry --rev 62cdea8ff9e6efef011f77e295823b5f2dbeb3a1 --locked anvil --force
- name: KZG prove and verify aggr tests
@@ -575,8 +575,8 @@ jobs:
components: rustfmt, clippy
- name: Install cmake
run: sudo apt-get install -y cmake
- name: Install solc
run: (hash svm 2>/dev/null || cargo install svm-rs) && svm install 0.8.20 && solc --version
# - name: Install solc
# run: (hash svm 2>/dev/null || cargo install svm-rs) && svm install 0.8.20 && solc --version
- name: Setup Virtual Env and Install python dependencies
run: python -m venv .env --clear; source .env/bin/activate; pip install -r requirements.txt;
- name: Install Anvil
@@ -652,8 +652,8 @@ jobs:
with:
crate: cargo-nextest
locked: true
- name: Install solc
run: (hash svm 2>/dev/null || cargo install svm-rs) && svm install 0.8.20 && solc --version
# - name: Install solc
# run: (hash svm 2>/dev/null || cargo install svm-rs) && svm install 0.8.20 && solc --version
- name: Install Anvil
run: cargo install --git https://github.com/foundry-rs/foundry --rev 62cdea8ff9e6efef011f77e295823b5f2dbeb3a1 --locked anvil --force
- name: Install pip

Cargo.lock (generated)

@@ -1877,6 +1877,7 @@ dependencies = [
"log",
"maybe-rayon",
"metal",
"mimalloc",
"mnist",
"num",
"objc",
@@ -2938,6 +2939,16 @@ version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058"
[[package]]
name = "libmimalloc-sys"
version = "0.1.39"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23aa6811d3bd4deb8a84dde645f943476d13b248d818edcf8ce0b2f37f036b44"
dependencies = [
"cc",
"libc",
]
[[package]]
name = "libredox"
version = "0.0.1"
@@ -3139,6 +3150,15 @@ dependencies = [
"paste",
]
[[package]]
name = "mimalloc"
version = "0.1.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68914350ae34959d83f732418d51e2427a794055d0b9529f48259ac07af65633"
dependencies = [
"libmimalloc-sys",
]
[[package]]
name = "mime"
version = "0.3.17"

Cargo.toml

@@ -16,7 +16,7 @@ crate-type = ["cdylib", "rlib"]
[dependencies]
halo2_gadgets = { git = "https://github.com/zkonduit/halo2", branch = "ac/optional-selector-poly" }
halo2_proofs = { git = "https://github.com/zkonduit/halo2", branch = "ac/optional-selector-poly" }
halo2_proofs = { git = "https://github.com/zkonduit/halo2#098ac0ef3b29255e0e2524ecbb4e7e325ae6e7fd", package = "halo2_proofs" }
halo2curves = { git = "https://github.com/privacy-scaling-explorations/halo2curves", rev = "9fff22c", features = [
"derive_serde",
] }
@@ -89,6 +89,7 @@ colored = { version = "2.0.0", default_features = false, optional = true }
env_logger = { version = "0.10.0", default_features = false, optional = true }
chrono = "0.4.31"
sha256 = "1.4.0"
mimalloc = "0.1"
[target.'cfg(target_arch = "wasm32")'.dependencies]
getrandom = { version = "0.2.8", features = ["js"] }
@@ -197,6 +198,7 @@ mv-lookup = [
"snark-verifier/mv-lookup",
"halo2_solidity_verifier/mv-lookup",
]
# precompute-coset = ["halo2_proofs/precompute-coset"]
det-prove = []
icicle = ["halo2_proofs/icicle_gpu"]
empty-cmd = []

README.md

@@ -9,7 +9,6 @@ EZKL
<br>
</h1>
> Easy Zero-Knowledge Inference
[![Test](https://github.com/zkonduit/ezkl/workflows/Rust/badge.svg)](https://github.com/zkonduit/ezkl/actions?query=workflow%3ARust)

cla.md

@@ -10,12 +10,6 @@ This Contributor License Agreement (this "**Agreement**" or "**CLA**") sets fort
3. "**Work**" means any products owned or managed by Zkonduit.
## 2. Licenses.
1. Copyright License. Subject to the terms and conditions of this Agreement, You hereby grant to Zkonduit, and to recipients and users of any software distributed by Zkonduit, a perpetual, worldwide, non-exclusive, transferable, no-charge, royalty-free, irrevocable copyright license, with the right to sublicense these rights to multiple tiers of sublicensees, to reproduce, prepare derivative works of, display, perform, sublicense, and distribute Your Contributions and such derivative works.


@@ -1,4 +1,7 @@
// ignore file if compiling for wasm
#[global_allocator]
#[cfg(not(target_arch = "wasm32"))]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
#[cfg(not(target_arch = "wasm32"))]
use clap::{CommandFactory, Parser};


@@ -91,4 +91,7 @@ pub enum CircuitError {
/// Missing layout
#[error("missing layout for op: {0}")]
MissingLayout(String),
#[error("[io] {0}")]
/// IO error
IoError(#[from] std::io::Error),
}
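
The new IoError variant uses thiserror's #[from] derive, so a std::io::Error can now be bubbled up into CircuitError with the ? operator. A minimal, self-contained sketch of that mechanic follows; the DemoError enum and read_file function are illustrative, not the crate's own types:

use thiserror::Error;

#[derive(Debug, Error)]
enum DemoError {
    // mirrors the `#[error("[io] {0}")]` / `#[from]` pattern added above
    #[error("[io] {0}")]
    Io(#[from] std::io::Error),
}

fn read_file(path: &str) -> Result<String, DemoError> {
    // `?` converts std::io::Error into DemoError::Io via the derived From impl
    Ok(std::fs::read_to_string(path)?)
}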


@@ -135,6 +135,53 @@ impl LookupOp {
(-range, range)
}
/// as path
pub fn as_path(&self) -> String {
match self {
LookupOp::Abs => "abs".into(),
LookupOp::Ceil { scale } => format!("ceil_{}", scale),
LookupOp::Floor { scale } => format!("floor_{}", scale),
LookupOp::Round { scale } => format!("round_{}", scale),
LookupOp::RoundHalfToEven { scale } => format!("round_half_to_even_{}", scale),
LookupOp::Pow { scale, a } => format!("pow_{}_{}", scale, a),
LookupOp::KroneckerDelta => "kronecker_delta".into(),
LookupOp::Max { scale, a } => format!("max_{}_{}", scale, a),
LookupOp::Min { scale, a } => format!("min_{}_{}", scale, a),
LookupOp::Sign => "sign".into(),
LookupOp::LessThan { a } => format!("less_than_{}", a),
LookupOp::LessThanEqual { a } => format!("less_than_equal_{}", a),
LookupOp::GreaterThan { a } => format!("greater_than_{}", a),
LookupOp::GreaterThanEqual { a } => format!("greater_than_equal_{}", a),
LookupOp::Div { denom } => format!("div_{}", denom),
LookupOp::Cast { scale } => format!("cast_{}", scale),
LookupOp::Recip {
input_scale,
output_scale,
} => format!("recip_{}_{}", input_scale, output_scale),
LookupOp::ReLU => "relu".to_string(),
LookupOp::LeakyReLU { slope: a } => format!("leaky_relu_{}", a),
LookupOp::Sigmoid { scale } => format!("sigmoid_{}", scale),
LookupOp::Sqrt { scale } => format!("sqrt_{}", scale),
LookupOp::Rsqrt { scale } => format!("rsqrt_{}", scale),
LookupOp::Erf { scale } => format!("erf_{}", scale),
LookupOp::Exp { scale } => format!("exp_{}", scale),
LookupOp::Ln { scale } => format!("ln_{}", scale),
LookupOp::Cos { scale } => format!("cos_{}", scale),
LookupOp::ACos { scale } => format!("acos_{}", scale),
LookupOp::Cosh { scale } => format!("cosh_{}", scale),
LookupOp::ACosh { scale } => format!("acosh_{}", scale),
LookupOp::Sin { scale } => format!("sin_{}", scale),
LookupOp::ASin { scale } => format!("asin_{}", scale),
LookupOp::Sinh { scale } => format!("sinh_{}", scale),
LookupOp::ASinh { scale } => format!("asinh_{}", scale),
LookupOp::Tan { scale } => format!("tan_{}", scale),
LookupOp::ATan { scale } => format!("atan_{}", scale),
LookupOp::ATanh { scale } => format!("atanh_{}", scale),
LookupOp::Tanh { scale } => format!("tanh_{}", scale),
LookupOp::HardSwish { scale } => format!("hardswish_{}", scale),
}
}
/// Matches a [Op] to an operation in the `tensor::ops` module.
pub(crate) fn f<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
&self,
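
A quick illustration (not part of the diff) of what as_path returns for the field-less variants; written as a test in the same module it would pass as-is:

#[test]
fn as_path_examples() {
    // unit variants map to stable, filesystem-safe strings, which the
    // lookup-table cache in the next file uses as directory names
    assert_eq!(LookupOp::Sign.as_path(), "sign");
    assert_eq!(LookupOp::KroneckerDelta.as_path(), "kronecker_delta");
}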


@@ -15,6 +15,9 @@ use crate::{
tensor::{Tensor, TensorType},
};
#[cfg(not(target_arch = "wasm32"))]
use crate::execute::EZKL_REPO_PATH;
use crate::circuit::lookup::LookupOp;
/// The range of the lookup table.
@@ -25,6 +28,16 @@ pub const RANGE_MULTIPLIER: IntegerRep = 2;
/// The safety factor offset for the number of rows in the lookup table.
pub const RESERVED_BLINDING_ROWS_PAD: usize = 3;
#[cfg(not(target_arch = "wasm32"))]
lazy_static::lazy_static! {
/// an optional directory to read and write the lookup table cache
pub static ref LOOKUP_CACHE: String = format!("{}/cache", *EZKL_REPO_PATH);
}
/// The lookup table cache is disabled on wasm32 target.
#[cfg(target_arch = "wasm32")]
pub const LOOKUP_CACHE: &str = "";
#[derive(Debug, Clone)]
///
pub struct SelectorConstructor<F: PrimeField> {
@@ -137,6 +150,14 @@ pub fn num_cols_required(range_len: IntegerRep, col_size: usize) -> usize {
}
impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Table<F> {
fn name(&self) -> String {
format!(
"{}_{}_{}",
self.nonlinearity.as_path(),
self.range.0,
self.range.1
)
}
/// Configures the table.
pub fn configure(
cs: &mut ConstraintSystem<F>,
@@ -203,8 +224,51 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Table<F> {
let smallest = self.range.0;
let largest = self.range.1;
let inputs: Tensor<F> = Tensor::from(smallest..=largest).map(|x| integer_rep_to_felt(x));
let evals = self.nonlinearity.f(&[inputs.clone()])?;
let gen_table = || -> Result<(Tensor<F>, Tensor<F>), crate::tensor::TensorError> {
let inputs = Tensor::from(smallest..=largest)
.par_enum_map(|_, x| Ok::<_, crate::tensor::TensorError>(integer_rep_to_felt(x)))?;
let evals = self.nonlinearity.f(&[inputs.clone()])?;
Ok((inputs, evals.output))
};
let (inputs, evals) = if !LOOKUP_CACHE.is_empty() {
let cache = std::path::Path::new(&*LOOKUP_CACHE);
let cache_path = cache.join(self.name());
let input_path = cache_path.join("inputs");
let output_path = cache_path.join("outputs");
if cache_path.exists() {
log::info!("Loading lookup table from cache: {:?}", cache_path);
let (input_cache, output_cache) =
(Tensor::load(&input_path)?, Tensor::load(&output_path)?);
(input_cache, output_cache)
} else {
log::info!(
"Generating lookup table and saving to cache: {:?}",
cache_path
);
// mkdir -p cache_path
std::fs::create_dir_all(&cache_path).map_err(|e| {
CircuitError::TensorError(crate::tensor::TensorError::FileSaveError(
e.to_string(),
))
})?;
let (inputs, evals) = gen_table()?;
inputs.save(&input_path)?;
evals.save(&output_path)?;
(inputs, evals)
}
} else {
log::info!(
"Generating lookup table {} without cache",
self.nonlinearity.as_path()
);
gen_table()?
};
let chunked_inputs = inputs.chunks(self.col_size);
self.is_assigned = true;
@@ -236,7 +300,7 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Table<F> {
)?;
}
let output = evals.output[row_offset];
let output = evals[row_offset];
table.assign_cell(
|| format!("nl_o_col row {}", row_offset),
@@ -274,6 +338,11 @@ pub struct RangeCheck<F: PrimeField> {
}
impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RangeCheck<F> {
/// as path
pub fn as_path(&self) -> String {
format!("rangecheck_{}_{}", self.range.0, self.range.1)
}
/// get first_element of column
pub fn get_first_element(&self, chunk: usize) -> F {
let chunk = chunk as IntegerRep;
@@ -351,7 +420,32 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RangeCheck<F> {
let smallest = self.range.0;
let largest = self.range.1;
let inputs: Tensor<F> = Tensor::from(smallest..=largest).map(|x| integer_rep_to_felt(x));
let inputs: Tensor<F> = if !LOOKUP_CACHE.is_empty() {
let cache = std::path::Path::new(&*LOOKUP_CACHE);
let cache_path = cache.join(self.as_path());
let input_path = cache_path.join("inputs");
if cache_path.exists() {
log::info!("Loading range check table from cache: {:?}", cache_path);
Tensor::load(&input_path)?
} else {
log::info!(
"Generating range check table and saving to cache: {:?}",
cache_path
);
// mkdir -p cache_path
std::fs::create_dir_all(&cache_path)?;
let inputs = Tensor::from(smallest..=largest).map(|x| integer_rep_to_felt(x));
inputs.save(&input_path)?;
inputs
}
} else {
log::info!("Generating range check {} without cache", self.as_path());
Tensor::from(smallest..=largest).map(|x| integer_rep_to_felt(x))
};
let chunked_inputs = inputs.chunks(self.col_size);
self.is_assigned = true;
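
Both cache branches above (the lookup table and the range check) follow the same load-or-generate shape. A minimal, generic sketch of that pattern is below; the names load, generate, and save are illustrative stand-ins, not the crate's APIs:

use std::path::Path;

fn load_or_generate<T>(
    cache_dir: &Path,
    load: impl Fn(&Path) -> std::io::Result<T>,
    generate: impl Fn() -> T,
    save: impl Fn(&Path, &T) -> std::io::Result<()>,
) -> std::io::Result<T> {
    if cache_dir.exists() {
        // cache hit: reuse the previously generated table
        load(cache_dir)
    } else {
        // cache miss: generate, then persist for the next run (mkdir -p semantics)
        std::fs::create_dir_all(cache_dir)?;
        let value = generate();
        save(cache_dir, &value)?;
        Ok(value)
    }
}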


@@ -23,6 +23,7 @@
)]
// we allow this for our dynamic range based indexing scheme
#![allow(clippy::single_range_in_vec_init)]
#![feature(buf_read_has_data_left)]
#![feature(stmt_expr_attributes)]
//! A library for turning computational graphs, such as neural networks, into ZK-circuits.


@@ -76,7 +76,7 @@ pub fn init_logger() {
prefix_token(&record.level()),
// pretty print UTC time
chrono::Utc::now()
.format("%Y-%m-%d %H:%M:%S")
.format("%Y-%m-%d %H:%M:%S:%3f")
.to_string()
.bright_magenta(),
record.metadata().target(),
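
For reference, chrono's %3f specifier prints exactly three fractional-second digits with no leading dot, so the new format appends millisecond precision after a colon. A tiny sketch, assuming the crate's existing chrono dependency:

fn main() {
    // prints e.g. "2024-08-18 09:28:49:123"
    println!("{}", chrono::Utc::now().format("%Y-%m-%d %H:%M:%S:%3f"));
}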


@@ -558,7 +558,8 @@ where
+ PrimeField
+ FromUniformBytes<64>
+ WithSmallOrderMulGroup<3>,
Scheme::Curve: Serialize + DeserializeOwned,
Scheme::Curve: Serialize + DeserializeOwned + SerdeObject,
Scheme::ParamsProver: Send + Sync,
{
let strategy = Strategy::new(params.verifier_params());
let mut transcript = TranscriptWriterBuffer::<_, Scheme::Curve, _>::init(vec![]);


@@ -27,4 +27,10 @@ pub enum TensorError {
/// Unset visibility
#[error("unset visibility")]
UnsetVisibility,
/// File save error
#[error("save error: {0}")]
FileSaveError(String),
/// File load error
#[error("load error: {0}")]
FileLoadError(String),
}


@@ -18,6 +18,9 @@ use maybe_rayon::{
slice::ParallelSliceMut,
};
use serde::{Deserialize, Serialize};
use std::io::BufRead;
use std::io::Write;
use std::path::PathBuf;
pub use val::*;
pub use var::*;
@@ -41,6 +44,7 @@ use itertools::Itertools;
use metal::{Device, MTLResourceOptions, MTLSize};
use std::error::Error;
use std::fmt::Debug;
use std::io::Read;
use std::iter::Iterator;
use std::ops::{Add, Deref, DerefMut, Div, Mul, Neg, Range, Sub};
use std::{cmp::max, ops::Rem};
@@ -409,6 +413,45 @@ impl<'data, T: Clone + TensorType + std::marker::Send + std::marker::Sync>
}
}
impl<T: Clone + TensorType + PrimeField> Tensor<T> {
/// save to a file
pub fn save(&self, path: &PathBuf) -> Result<(), TensorError> {
let writer =
std::fs::File::create(path).map_err(|e| TensorError::FileSaveError(e.to_string()))?;
let mut buf_writer = std::io::BufWriter::new(writer);
self.inner.iter().map(|x| x.clone()).for_each(|x| {
let x = x.to_repr();
buf_writer.write_all(x.as_ref()).unwrap();
});
Ok(())
}
/// load from a file
pub fn load(path: &PathBuf) -> Result<Self, TensorError> {
let reader =
std::fs::File::open(path).map_err(|e| TensorError::FileLoadError(e.to_string()))?;
let mut buf_reader = std::io::BufReader::new(reader);
let mut inner = Vec::new();
while let Ok(true) = buf_reader.has_data_left() {
let mut repr = T::Repr::default();
match buf_reader.read_exact(repr.as_mut()) {
Ok(_) => {
inner.push(T::from_repr(repr).unwrap());
}
Err(_) => {
return Err(TensorError::FileLoadError(
"Failed to read tensor".to_string(),
))
}
}
}
Ok(Tensor::new(Some(&inner), &[inner.len()]).unwrap())
}
}
impl<T: Clone + TensorType> Tensor<T> {
/// Sets (copies) the tensor values to the provided ones.
pub fn new(values: Option<&[T]>, dims: &[usize]) -> Result<Self, TensorError> {
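
Tensor::save above writes each element's fixed-width to_repr bytes back to back, and Tensor::load reads them back in chunks of the same size. A self-contained sketch of that round-trip, using u64 little-endian bytes in place of field-element representations (the function name, path, and element type are illustrative):

use std::io::{Read, Write};

fn roundtrip(values: &[u64], path: &std::path::Path) -> std::io::Result<Vec<u64>> {
    // save: append each element as a fixed-size byte chunk
    let mut writer = std::io::BufWriter::new(std::fs::File::create(path)?);
    for v in values {
        writer.write_all(&v.to_le_bytes())?;
    }
    writer.flush()?;
    drop(writer);

    // load: read the file back and decode fixed-size chunks until EOF
    let mut bytes = Vec::new();
    std::fs::File::open(path)?.read_to_end(&mut bytes)?;
    Ok(bytes
        .chunks_exact(8)
        .map(|chunk| u64::from_le_bytes(chunk.try_into().unwrap()))
        .collect())
}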


@@ -509,7 +509,7 @@ mod native_tests {
use crate::native_tests::mock;
use crate::native_tests::accuracy_measurement;
use crate::native_tests::prove_and_verify;
use crate::native_tests::run_javascript_tests;
use crate::native_tests::run_js_tests;
use crate::native_tests::render_circuit;
use crate::native_tests::model_serialization_different_binaries;
use rand::Rng;
@@ -919,7 +919,7 @@ mod native_tests {
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
prove_and_verify(path, test.to_string(), "safe", "private", "private", "public", 1, None, true, "single", Commitments::KZG, 2);
#[cfg(not(feature = "icicle"))]
run_javascript_tests(path, test.to_string(), "testWasm", false);
run_js_tests(path, test.to_string(), "testWasm", false);
test_dir.close().unwrap();
}
@@ -932,7 +932,7 @@ mod native_tests {
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
prove_and_verify(path, test.to_string(), "safe", "hashed", "private", "public", 1, None, true, "single", Commitments::KZG, 2);
#[cfg(not(feature = "icicle"))]
run_javascript_tests(path, test.to_string(), "testWasm", false);
run_js_tests(path, test.to_string(), "testWasm", false);
test_dir.close().unwrap();
}
@@ -945,7 +945,7 @@ mod native_tests {
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
prove_and_verify(path, test.to_string(), "safe", "private", "fixed", "public", 1, None, true, "single", Commitments::KZG, 2);
#[cfg(not(feature = "icicle"))]
run_javascript_tests(path, test.to_string(), "testWasm", false);
run_js_tests(path, test.to_string(), "testWasm", false);
test_dir.close().unwrap();
}
@@ -992,7 +992,7 @@ mod native_tests {
use crate::native_tests::kzg_evm_aggr_prove_and_verify;
use tempdir::TempDir;
use crate::native_tests::Hardfork;
use crate::native_tests::run_javascript_tests;
use crate::native_tests::run_js_tests;
/// Currently only on chain inputs that return a non-negative value are supported.
const TESTS_ON_CHAIN_INPUT: [&str; 17] = [
@@ -1115,7 +1115,7 @@ mod native_tests {
let _anvil_child = crate::native_tests::start_anvil(false, Hardfork::Latest);
kzg_evm_prove_and_verify(2, path, test.to_string(), "private", "private", "public");
#[cfg(not(feature = "icicle"))]
run_javascript_tests(path, test.to_string(), "testBrowserEvmVerify", false);
run_js_tests(path, test.to_string(), "testBrowserEvmVerify", false);
test_dir.close().unwrap();
}
@@ -1128,7 +1128,7 @@ mod native_tests {
let _anvil_child = crate::native_tests::start_anvil(false, Hardfork::Latest);
kzg_evm_prove_and_verify_render_seperately(2, path, test.to_string(), "private", "private", "public");
#[cfg(not(feature = "icicle"))]
run_javascript_tests(path, test.to_string(), "testBrowserEvmVerify", true);
run_js_tests(path, test.to_string(), "testBrowserEvmVerify", true);
test_dir.close().unwrap();
}
@@ -1142,7 +1142,7 @@ mod native_tests {
let mut _anvil_child = crate::native_tests::start_anvil(false, Hardfork::Latest);
kzg_evm_prove_and_verify(2, path, test.to_string(), "hashed", "private", "private");
#[cfg(not(feature = "icicle"))]
run_javascript_tests(path, test.to_string(), "testBrowserEvmVerify", false);
run_js_tests(path, test.to_string(), "testBrowserEvmVerify", false);
test_dir.close().unwrap();
}
@@ -1159,7 +1159,7 @@ mod native_tests {
let mut _anvil_child = crate::native_tests::start_anvil(false, hardfork);
kzg_evm_prove_and_verify(2, path, test.to_string(), "polycommit", "private", "public");
#[cfg(not(feature = "icicle"))]
run_javascript_tests(path, test.to_string(), "testBrowserEvmVerify", false);
run_js_tests(path, test.to_string(), "testBrowserEvmVerify", false);
test_dir.close().unwrap();
}
@@ -1172,7 +1172,7 @@ mod native_tests {
let _anvil_child = crate::native_tests::start_anvil(false, Hardfork::Latest);
kzg_evm_prove_and_verify(2, path, test.to_string(), "private", "hashed", "public");
#[cfg(not(feature = "icicle"))]
run_javascript_tests(path, test.to_string(), "testBrowserEvmVerify", false);
run_js_tests(path, test.to_string(), "testBrowserEvmVerify", false);
test_dir.close().unwrap();
}
@@ -1185,7 +1185,7 @@ mod native_tests {
let _anvil_child = crate::native_tests::start_anvil(false, Hardfork::Latest);
kzg_evm_prove_and_verify(2, path, test.to_string(), "private", "private", "hashed");
#[cfg(not(feature = "icicle"))]
run_javascript_tests(path, test.to_string(), "testBrowserEvmVerify", false);
run_js_tests(path, test.to_string(), "testBrowserEvmVerify", false);
test_dir.close().unwrap();
}
@@ -1198,7 +1198,7 @@ mod native_tests {
let _anvil_child = crate::native_tests::start_anvil(false, Hardfork::Latest);
kzg_evm_prove_and_verify(2, path, test.to_string(), "private", "polycommit", "public");
#[cfg(not(feature = "icicle"))]
run_javascript_tests(path, test.to_string(), "testBrowserEvmVerify", false);
run_js_tests(path, test.to_string(), "testBrowserEvmVerify", false);
test_dir.close().unwrap();
}
@@ -1211,7 +1211,7 @@ mod native_tests {
let _anvil_child = crate::native_tests::start_anvil(false, Hardfork::Latest);
kzg_evm_prove_and_verify(2, path, test.to_string(), "private", "private", "polycommit");
#[cfg(not(feature = "icicle"))]
run_javascript_tests(path, test.to_string(), "testBrowserEvmVerify", false);
run_js_tests(path, test.to_string(), "testBrowserEvmVerify", false);
test_dir.close().unwrap();
}
@@ -1223,7 +1223,7 @@ mod native_tests {
let _anvil_child = crate::native_tests::start_anvil(false, Hardfork::Latest);
kzg_evm_prove_and_verify(2, path, test.to_string(), "polycommit", "polycommit", "polycommit");
#[cfg(not(feature = "icicle"))]
run_javascript_tests(path, test.to_string(), "testBrowserEvmVerify", false);
run_js_tests(path, test.to_string(), "testBrowserEvmVerify", false);
test_dir.close().unwrap();
}
@@ -2306,7 +2306,7 @@ mod native_tests {
}
// run js browser evm verify tests for a given example
fn run_javascript_tests(test_dir: &str, example_name: String, js_test: &str, vk: bool) {
fn run_js_tests(test_dir: &str, example_name: String, js_test: &str, vk: bool) {
let example = format!("--example={}", example_name);
let dir = format!("--dir={}", test_dir);
let mut args = vec!["run", "test", js_test, &example, &dir];