Compare commits

...

4 Commits

Author SHA1 Message Date
github-actions[bot]   92f2673d44   ci: update version string in docs                            2025-02-09 00:13:36 +00:00
rebustron             eb205d0c73   chore: fix typos in comments and docs (#934)                 2025-02-08 19:13:17 -05:00
dante                 db498f8d7c   docs: cat-dog example (#929)                                 2025-02-08 17:30:13 -05:00
Cypher Pepe           a363c91160   fix: broken links in polycommit.rs and poseidon.rs (#932)    2025-02-08 12:40:53 -05:00
10 changed files with 1170 additions and 14 deletions

View File

@@ -781,6 +781,8 @@ jobs:
run: python -m venv .env --clear; source .env/bin/activate; pip install -r requirements.txt; python -m ensurepip --upgrade
- name: Build python ezkl
run: source .env/bin/activate; unset CONDA_PREFIX; maturin develop --features python-bindings --profile=test-runs
- name: Cat and Dog notebook
run: source .env/bin/activate; cargo nextest run py_tests::tests::cat_and_dog_notebook_
- name: All notebooks
run: source .env/bin/activate; cargo nextest run py_tests::tests::run_notebook_ --test-threads 1
- name: Voice tutorial
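
For reference, the new notebook job can be reproduced locally in roughly the same way the workflow runs it. A minimal sketch, assuming a fresh virtualenv named .env in the repository root and cargo-nextest installed (not the exact CI environment):

# sketch: reproduce the "Cat and Dog notebook" step locally
python -m venv .env --clear
source .env/bin/activate
pip install -r requirements.txt
python -m ensurepip --upgrade
# build the python bindings into the active virtualenv
unset CONDA_PREFIX
maturin develop --features python-bindings --profile=test-runs
# run only the new cat-and-dog notebook test
cargo nextest run py_tests::tests::cat_and_dog_notebook_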

View File

@@ -1,7 +1,7 @@
import ezkl
project = 'ezkl'
-release = '0.0.0'
+release = '20.0.3'
version = release

File diff suppressed because it is too large

View File

@@ -0,0 +1,13 @@
# download test data
# check if first argument has been set
if [ ! -z "$1" ]; then
DATA_DIR=$1
else
DATA_DIR=data
fi
echo "Downloading data to $DATA_DIR"
if [ ! -d "$DATA_DIR/CATDOG" ]; then
kaggle datasets download tongpython/cat-and-dog -p $DATA_DIR/CATDOG --unzip
fi
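
The python test harness (see the test changes further below) calls this script with ~/data/catdog_data as the target directory. A minimal manual run, assuming the kaggle CLI is installed and authenticated, since the script shells out to kaggle datasets download:

# download into the directory the notebook tests expect
bash examples/notebooks/cat_and_dog_data.sh ~/data/catdog_data
# or omit the argument to download into ./data
bash examples/notebooks/cat_and_dog_data.sh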

View File

@@ -597,7 +597,7 @@ fn poseidon_hash(message: Vec<PyFelt>) -> PyResult<Vec<PyFelt>> {
/// Arguments
/// -------
/// message: list[str]
-/// List of field elements represnted as strings
+/// List of field elements represented as strings
///
/// vk_path: str
/// Path to the verification key
@@ -656,7 +656,7 @@ fn kzg_commit(
/// Arguments
/// -------
/// message: list[str]
-/// List of field elements represnted as strings
+/// List of field elements represented as strings
///
/// vk_path: str
/// Path to the verification key
@@ -1950,7 +1950,7 @@ fn deploy_da_evm(
/// does the verifier use data attestation ?
///
/// addr_vk: str
-/// The addess of the separate VK contract (if the verifier key is rendered as a separate contract)
+/// The address of the separate VK contract (if the verifier key is rendered as a separate contract)
/// Returns
/// -------
/// bool

View File

@@ -1,7 +1,7 @@
/*
An easy-to-use implementation of the Poseidon Hash in the form of a Halo2 Chip. While the Poseidon Hash function
is already implemented in halo2_gadgets, there is no wrapper chip that makes it easy to use in other circuits.
-Thanks to https://github.com/summa-dev/summa-solvency/blob/master/src/chips/poseidon/hash.rs for the inspiration (and also helping us understand how to use this).
+Thanks to https://github.com/summa-dev/summa-solvency/blob/master/zk_prover/src/chips/poseidon/hash.rs for the inspiration (and also helping us understand how to use this).
*/
use std::collections::HashMap;

View File

@@ -1,7 +1,7 @@
/*
An easy-to-use implementation of the Poseidon Hash in the form of a Halo2 Chip. While the Poseidon Hash function
is already implemented in halo2_gadgets, there is no wrapper chip that makes it easy to use in other circuits.
-Thanks to https://github.com/summa-dev/summa-solvency/blob/master/src/chips/poseidon/hash.rs for the inspiration (and also helping us understand how to use this).
+Thanks to https://github.com/summa-dev/summa-solvency/blob/master/zk_prover/src/chips/poseidon/hash.rs for the inspiration (and also helping us understand how to use this).
*/
pub mod poseidon_params;

View File

@@ -349,7 +349,7 @@ pub fn sqrt<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
.into()
};
claimed_output.reshape(input_dims)?;
-// force the output to be positive or zero, also implicitly checks that the ouput is in range
+// force the output to be positive or zero, also implicitly checks that the output is in range
let claimed_output = abs(config, region, &[claimed_output.clone()])?;
// rescaled input
let rescaled_input = pairwise(config, region, &[input.clone(), unit_scale], BaseOp::Mult)?;
@@ -1841,7 +1841,7 @@ pub(crate) fn get_missing_set_elements<
// get the difference between the two vectors
for eval in input_evals.iter() {
-// delete first occurence of that value
+// delete first occurrence of that value
if let Some(pos) = fullset_evals.iter().position(|x| x == eval) {
fullset_evals.remove(pos);
}
@@ -1869,7 +1869,7 @@ pub(crate) fn get_missing_set_elements<
region.increment(claimed_output.len());
// input and claimed output should be the shuffles of fullset
-// concatentate input and claimed output
+// concatenate input and claimed output
let input_and_claimed_output = input.concat(claimed_output.clone())?;
// assert that this is a permutation/shuffle
@@ -3396,7 +3396,7 @@ pub fn max_pool<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
/// Performs a deconvolution on the given input tensor.
/// # Examples
/// ```
-// // expected ouputs are taken from pytorch torch.nn.functional.conv_transpose2d
+// // expected outputs are taken from pytorch torch.nn.functional.conv_transpose2d
///
/// use ezkl::tensor::Tensor;
/// use ezkl::fieldutils::IntegerRep;
@@ -3624,7 +3624,7 @@ pub fn deconv<
/// Applies convolution over a ND tensor of shape C x H x D1...DN (and adds a bias).
/// ```
-/// // expected ouputs are taken from pytorch torch.nn.functional.conv2d
+/// // expected outputs are taken from pytorch torch.nn.functional.conv2d
///
/// use ezkl::tensor::Tensor;
/// use ezkl::fieldutils::IntegerRep;
@@ -3908,7 +3908,7 @@ pub(crate) fn rescale<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>
Ok(rescaled_inputs)
}
-/// Dummy (no contraints) reshape layout
+/// Dummy (no constraints) reshape layout
pub(crate) fn reshape<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
values: &[ValTensor<F>; 1],
new_dims: &[usize],
@@ -3918,7 +3918,7 @@ pub(crate) fn reshape<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>
Ok(t)
}
-/// Dummy (no contraints) move_axis layout
+/// Dummy (no constraints) move_axis layout
pub(crate) fn move_axis<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
values: &[ValTensor<F>; 1],
source: usize,

View File

@@ -49,6 +49,23 @@ mod py_tests {
std::env::set_var("VOICE_DATA_DIR", format!("{}", voice_data_dir));
}
fn download_catdog_data() {
let cat_and_dog_data_dir = shellexpand::tilde("~/data/catdog_data");
DOWNLOAD_VOICE_DATA.call_once(|| {
let status = Command::new("bash")
.args([
"examples/notebooks/cat_and_dog_data.sh",
&cat_and_dog_data_dir,
])
.status()
.expect("failed to execute process");
assert!(status.success());
});
// set CATDOG_DATA_DIR environment variable
std::env::set_var("CATDOG_DATA_DIR", format!("{}", cat_and_dog_data_dir));
}
fn setup_py_env() {
ENV_SETUP.call_once(|| {
// supposes that you have a virtualenv called .env and have run the following
@@ -225,6 +242,20 @@ mod py_tests {
anvil_child.kill().unwrap();
}
#[test]
fn cat_and_dog_notebook_() {
crate::py_tests::init_binary();
let mut anvil_child = crate::py_tests::start_anvil(false);
crate::py_tests::download_catdog_data();
let test_dir: TempDir = TempDir::new("cat_and_dog").unwrap();
let path = test_dir.path().to_str().unwrap();
crate::py_tests::mv_test_(path, "cat_and_dog.ipynb");
run_notebook(path, "cat_and_dog.ipynb");
test_dir.close().unwrap();
anvil_child.kill().unwrap();
}
#[test]
fn reusable_verifier_notebook_() {
crate::py_tests::init_binary();

View File

@@ -337,7 +337,7 @@ mod wasm32 {
// Run compiled circuit validation on onnx network (should fail)
let circuit = compiledCircuitValidation(wasm_bindgen::Clamped(NETWORK.to_vec()));
assert!(circuit.is_err());
-// Run compiled circuit validation on comiled network (should pass)
+// Run compiled circuit validation on compiled network (should pass)
let circuit = compiledCircuitValidation(wasm_bindgen::Clamped(NETWORK_COMPILED.to_vec()));
assert!(circuit.is_ok());
// Run input validation on witness (should fail)