Mirror of https://github.com/zkonduit/ezkl.git (synced 2026-01-13 16:27:59 -05:00)

Compare commits (4 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 94abf91400 |  |
|  | 4771192823 |  |
|  | a863ccc868 |  |
|  | 8e6ccc863d |  |
2  .github/workflows/rust.yml (vendored)
@@ -345,6 +345,8 @@ jobs:
         run: cargo nextest run --release --verbose tests_evm::kzg_evm_on_chain_input_kzg_output_kzg_params_prove_and_verify --test-threads 1
       - name: KZG prove and verify tests (EVM + on chain outputs & kzg inputs + params)
         run: cargo nextest run --release --verbose tests_evm::kzg_evm_on_chain_output_kzg_input_kzg_params_prove_and_verify --test-threads 1
+      - name: KZG prove and verify tests (EVM + on chain all kzg)
+        run: cargo nextest run --release --verbose tests_evm::kzg_evm_on_chain_all_kzg_params_prove_and_verify --test-threads 1
       - name: KZG prove and verify tests (EVM + on chain inputs & outputs hashes)
         run: cargo nextest run --release --verbose tests_evm::kzg_evm_on_chain_input_output_hashed_prove_and_verify --test-threads 1
       - name: KZG prove and verify tests (EVM)
@@ -203,6 +203,7 @@ det-prove = []
 icicle = ["halo2_proofs/icicle_gpu"]
 empty-cmd = []
 no-banner = []
+no-update = []
 metal = ["dep:metal", "dep:objc"]

 # icicle patch to 0.1.0 if feature icicle is enabled
@@ -93,9 +93,6 @@ contract LoadInstances {
     }
 }

-// Contract that checks that the COMMITMENT_KZG bytes is equal to the first part of the proof.
-pragma solidity ^0.8.0;
-
 // The kzg commitments of a given model, all aggregated into a single bytes array.
 // At solidity generation time, the commitments are hardcoded into the contract via the COMMITMENT_KZG constant.
 // It will be used to check that the proof commitments match the expected commitments.
@@ -163,7 +160,7 @@ contract SwapProofCommitments {
         }

         return equal; // Return true if the commitment comparison passed
-    }
+    } /// end checkKzgCommits
 }

 // This contract serves as a Data Attestation Verifier for the EZKL model.
@@ -1,4 +1,4 @@
-ezkl==11.3.3
+ezkl==11.4.2
 sphinx
 sphinx-rtd-theme
 sphinxcontrib-napoleon
@@ -1,7 +1,7 @@
 import ezkl

 project = 'ezkl'
-release = '11.3.3'
+release = '11.4.2'
 version = release
@@ -868,6 +868,7 @@ pub enum Commands {
         #[arg(long, value_hint = clap::ValueHint::Other)]
         addr_vk: Option<H160Flag>,
     },
+    #[cfg(not(feature = "no-update"))]
    /// Updates ezkl binary to version specified (or latest if not specified)
    Update {
        /// The version to update to
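The new `no-update` Cargo feature (added above) compiles the self-update command out entirely. A minimal sketch of the gating pattern, using a plain enum instead of ezkl's clap-derived `Commands` (names and the `Version` stand-in variant are illustrative only):

```rust
// Sketch only: a plain enum stands in for ezkl's clap-derived `Commands`.
#[derive(Debug)]
enum Commands {
    /// Stand-in for the many other ezkl subcommands.
    Version,
    /// Compiled in only when the `no-update` feature is NOT enabled.
    #[cfg(not(feature = "no-update"))]
    Update { version: Option<String> },
}

fn run(cmd: Commands) -> Result<String, String> {
    match cmd {
        Commands::Version => Ok("11.4.2".to_string()),
        // The match arm is gated the same way as the variant, so builds with
        // `no-update` enabled contain no update code path at all.
        #[cfg(not(feature = "no-update"))]
        Commands::Update { version } => Ok(format!(
            "updating to {}",
            version.unwrap_or_else(|| "latest".to_string())
        )),
    }
}

fn main() {
    println!("{}", run(Commands::Version).unwrap());
}
```

Gating both the variant and its handler keeps restricted builds (for example, packaged binaries that must not self-modify) free of dead update code.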
97  src/eth.rs
@@ -327,11 +327,7 @@ pub async fn setup_eth_backend(
         ProviderBuilder::new()
             .with_recommended_fillers()
             .signer(EthereumSigner::from(wallet))
-            .on_http(
-                endpoint
-                    .parse()
-                    .map_err(|_| EthError::UrlParse(endpoint.clone()))?,
-            ),
+            .on_http(endpoint.parse().map_err(|_| EthError::UrlParse(endpoint))?),
     );

     let chain_id = client.get_chain_id().await?;
@@ -354,8 +350,7 @@ pub async fn deploy_contract_via_solidity(
     let (abi, bytecode, runtime_bytecode) =
         get_contract_artifacts(sol_code_path, contract_name, runs).await?;

-    let factory =
-        get_sol_contract_factory(abi, bytecode, runtime_bytecode, client.clone(), None::<()>)?;
+    let factory = get_sol_contract_factory(abi, bytecode, runtime_bytecode, client, None::<()>)?;
     let contract = factory.deploy().await?;

     Ok(contract)
@@ -452,20 +447,30 @@ pub async fn deploy_da_verifier_via_solidity(
         }
     }

+    let (abi, bytecode, runtime_bytecode) =
+        get_contract_artifacts(sol_code_path, "DataAttestation", runs).await?;
+
     let (contract_addresses, call_data, decimals) = if !calls_to_accounts.is_empty() {
         parse_calls_to_accounts(calls_to_accounts)?
     } else {
-        return Err(EthError::OnChainDataSource);
+        // if calls_to_accounts is empty then we need to check that there is at least kzg visibility in the settings file
+        let kzg_visibility = settings.run_args.input_visibility.is_polycommit()
+            || settings.run_args.output_visibility.is_polycommit()
+            || settings.run_args.param_visibility.is_polycommit();
+        if !kzg_visibility {
+            return Err(EthError::OnChainDataSource);
+        }
+        let factory =
+            get_sol_contract_factory::<_, ()>(abi, bytecode, runtime_bytecode, client, None)?;
+        let contract = factory.deploy().await?;
+        return Ok(contract);
     };

-    let (abi, bytecode, runtime_bytecode) =
-        get_contract_artifacts(sol_code_path, "DataAttestation", runs).await?;
-
     let factory = get_sol_contract_factory(
         abi,
         bytecode,
         runtime_bytecode,
-        client.clone(),
+        client,
         Some((
             // address[] memory _contractAddresses,
             DynSeqToken(
@@ -506,7 +511,7 @@ pub async fn deploy_da_verifier_via_solidity(
             ),
             // uint8 _instanceOffset,
             WordToken(U256::from(contract_instance_offset as u32).into()),
-            //address _admin
+            // address _admin
             WordToken(client_address.into_word()),
         )),
     )?;
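The hunk above this one changes the data-attestation deployment so that an empty `calls_to_accounts` is no longer an immediate error: the verifier can still be deployed when at least one of the input, output, or parameter visibilities is `polycommit`. A self-contained sketch of that decision logic with stand-in types (the real `Visibility` and `RunArgs` live in ezkl's settings module, and the function name here is invented for illustration):

```rust
// Illustrative stand-ins for ezkl's settings types.
enum Visibility {
    Public,
    Hashed,
    Polycommit,
}

impl Visibility {
    fn is_polycommit(&self) -> bool {
        matches!(self, Visibility::Polycommit)
    }
}

struct RunArgs {
    input_visibility: Visibility,
    output_visibility: Visibility,
    param_visibility: Visibility,
}

/// Mirrors the new fallback: with no account calls, a DA contract may still be
/// deployed as long as at least one of input/output/params is polycommit;
/// otherwise the on-chain data source is rejected.
fn da_deployment_allowed(calls_to_accounts_is_empty: bool, run_args: &RunArgs) -> bool {
    if !calls_to_accounts_is_empty {
        return true; // account calls present: constructor args are built from them
    }
    run_args.input_visibility.is_polycommit()
        || run_args.output_visibility.is_polycommit()
        || run_args.param_visibility.is_polycommit()
}

fn main() {
    let args = RunArgs {
        input_visibility: Visibility::Public,
        output_visibility: Visibility::Polycommit,
        param_visibility: Visibility::Hashed,
    };
    assert!(da_deployment_allowed(true, &args));
    println!("commitments-only DA deployment is allowed for these settings");
}
```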
@@ -529,7 +534,7 @@ fn parse_calls_to_accounts(
     let mut call_data = vec![];
     let mut decimals: Vec<Vec<U256>> = vec![];
     for (i, val) in calls_to_accounts.iter().enumerate() {
-        let contract_address_bytes = hex::decode(val.address.clone())?;
+        let contract_address_bytes = hex::decode(&val.address)?;
         let contract_address = H160::from_slice(&contract_address_bytes);
         contract_addresses.push(contract_address);
         call_data.push(vec![]);
@@ -573,7 +578,7 @@ pub async fn update_account_calls(

     let (client, client_address) = setup_eth_backend(rpc_url, None).await?;

-    let contract = DataAttestation::new(addr, client.clone());
+    let contract = DataAttestation::new(addr, &client);

     info!("contract_addresses: {:#?}", contract_addresses);
@@ -804,7 +809,7 @@ pub async fn test_on_chain_data<M: 'static + Provider<Http<Client>, Ethereum>>(
     client: Arc<M>,
     data: &[Vec<FileSourceInner>],
 ) -> Result<Vec<CallsToAccount>, EthError> {
-    let (contract, decimals) = setup_test_contract(client.clone(), data).await?;
+    let (contract, decimals) = setup_test_contract(client, data).await?;

     // Get the encoded call data for each input
     let mut calldata = vec![];
@@ -836,10 +841,10 @@ pub async fn read_on_chain_inputs<M: 'static + Provider<Http<Client>, Ethereum>>
     let mut decimals = vec![];
     for on_chain_data in data {
         // Construct the address
-        let contract_address_bytes = hex::decode(on_chain_data.address.clone())?;
+        let contract_address_bytes = hex::decode(&on_chain_data.address)?;
         let contract_address = H160::from_slice(&contract_address_bytes);
         for (call_data, decimal) in &on_chain_data.call_data {
-            let call_data_bytes = hex::decode(call_data.clone())?;
+            let call_data_bytes = hex::decode(call_data)?;
             let input: TransactionInput = call_data_bytes.into();

             let tx = TransactionRequest::default()
@@ -866,8 +871,8 @@ pub async fn evm_quantize<M: 'static + Provider<Http<Client>, Ethereum>>(
 ) -> Result<Vec<Fr>, EthError> {
     let contract = QuantizeData::deploy(&client).await?;

-    let fetched_inputs = data.0.clone();
-    let decimals = data.1.clone();
+    let fetched_inputs = &data.0;
+    let decimals = &data.1;

     let fetched_inputs = fetched_inputs
         .iter()
@@ -943,7 +948,7 @@ fn get_sol_contract_factory<'a, M: 'static + Provider<Http<Client>, Ethereum>, T
         (None, false) => {
             return Err(EthError::NoConstructor);
         }
-        (None, true) => bytecode.clone(),
+        (None, true) => bytecode,
         (Some(_), _) => {
             let mut data = bytecode.to_vec();

@@ -955,7 +960,7 @@ fn get_sol_contract_factory<'a, M: 'static + Provider<Http<Client>, Ethereum>, T
         }
     };

-    Ok(CallBuilder::new_raw_deploy(client.clone(), data))
+    Ok(CallBuilder::new_raw_deploy(client, data))
 }
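`get_sol_contract_factory` now hands the provider straight to `CallBuilder::new_raw_deploy` without an extra clone, and, as the match above shows, the deploy payload is simply the creation bytecode with the ABI-encoded constructor arguments appended. A minimal sketch of that convention (function and variable names below are illustrative, not ezkl's):

```rust
// The EVM create payload is: creation bytecode ++ ABI-encoded constructor args.
fn build_deploy_data(creation_bytecode: &[u8], encoded_constructor_args: Option<&[u8]>) -> Vec<u8> {
    let mut data = creation_bytecode.to_vec();
    if let Some(args) = encoded_constructor_args {
        // In the diff, the args are alloy tokens (DynSeqToken / WordToken) that
        // the caller ABI-encodes before they are concatenated onto the bytecode.
        data.extend_from_slice(args);
    }
    data
}

fn main() {
    let bytecode = vec![0x60, 0x80, 0x60, 0x40]; // toy creation bytecode
    let args = vec![0u8; 32]; // one ABI word, e.g. an address constructor argument
    let data = build_deploy_data(&bytecode, Some(&args));
    assert_eq!(data.len(), bytecode.len() + 32);
    println!("deploy payload is {} bytes", data.len());
}
```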

 /// Compiles a solidity verifier contract and returns the abi, bytecode, and runtime bytecode
@@ -1030,7 +1035,7 @@ pub fn fix_da_sol(

     // fill in the quantization params and total calls
     // as constants to the contract to save on gas
-    if let Some(input_data) = input_data {
+    if let Some(input_data) = &input_data {
         let input_calls: usize = input_data.iter().map(|v| v.call_data.len()).sum();
         accounts_len = input_data.len();
         contract = contract.replace(
@@ -1038,7 +1043,7 @@ pub fn fix_da_sol(
             &format!("uint256 constant INPUT_CALLS = {};", input_calls),
         );
     }
-    if let Some(output_data) = output_data {
+    if let Some(output_data) = &output_data {
         let output_calls: usize = output_data.iter().map(|v| v.call_data.len()).sum();
         accounts_len += output_data.len();
         contract = contract.replace(
@@ -1048,8 +1053,9 @@ pub fn fix_da_sol(
     }
     contract = contract.replace("AccountCall[]", &format!("AccountCall[{}]", accounts_len));

-    if commitment_bytes.clone().is_some() && !commitment_bytes.clone().unwrap().is_empty() {
-        let commitment_bytes = commitment_bytes.unwrap();
+    // The case where a combination of on-chain data source + kzg commit is provided.
+    if commitment_bytes.is_some() && !commitment_bytes.as_ref().unwrap().is_empty() {
+        let commitment_bytes = commitment_bytes.as_ref().unwrap();
         let hex_string = hex::encode(commitment_bytes);
         contract = contract.replace(
             "bytes constant COMMITMENT_KZG = hex\"\";",
@@ -1064,5 +1070,44 @@ pub fn fix_da_sol(
         );
     }

+    // if both input and output data is none then we will only deploy the DataAttest contract, adding in the verifyWithDataAttestation function
+    if input_data.is_none()
+        && output_data.is_none()
+        && commitment_bytes.as_ref().is_some()
+        && !commitment_bytes.as_ref().unwrap().is_empty()
+    {
+        contract = contract.replace(
+            "contract SwapProofCommitments {",
+            "contract DataAttestation {",
+        );
+
+        // Remove everything past the end of the checkKzgCommits function
+        if let Some(pos) = contract.find(" } /// end checkKzgCommits") {
+            contract.truncate(pos);
+            contract.push('}');
+        }
+
+        // Add the Solidity function below checkKzgCommits
+        contract.push_str(
+            r#"
+    function verifyWithDataAttestation(
+        address verifier,
+        bytes calldata encoded
+    ) public view returns (bool) {
+        require(verifier.code.length > 0, "Address: call to non-contract");
+        require(checkKzgCommits(encoded), "Invalid KZG commitments");
+        // static call the verifier contract to verify the proof
+        (bool success, bytes memory returndata) = verifier.staticcall(encoded);
+
+        if (success) {
+            return abi.decode(returndata, (bool));
+        } else {
+            revert("low-level call to verifier failed");
+        }
+    }
+}"#,
+        );
+    }

     Ok(contract)
 }
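`fix_da_sol` treats the generated Solidity as a string template: for the commitments-only case it renames `SwapProofCommitments` to `DataAttestation`, truncates everything after the ` } /// end checkKzgCommits` marker seen in the Solidity template above, and appends a `verifyWithDataAttestation` wrapper that static-calls the verifier. A toy reproduction of the string manipulation, on a stub template rather than ezkl's real one:

```rust
// Toy reproduction of the templating steps; the template below is a stub.
fn main() {
    let mut contract = String::from(concat!(
        "contract SwapProofCommitments {\n",
        "    function checkKzgCommits(bytes calldata encoded) internal view returns (bool) {\n",
        "        return true; // commitment comparison elided in this stub\n",
        "    } /// end checkKzgCommits\n",
        "    // helpers that are dropped in the commitments-only case\n",
        "}\n",
    ));

    // 1. Rename the contract for the commitments-only deployment.
    contract = contract.replace("contract SwapProofCommitments {", "contract DataAttestation {");

    // 2. Drop everything after the end-of-function marker, then re-close the function.
    if let Some(pos) = contract.find(" } /// end checkKzgCommits") {
        contract.truncate(pos);
        contract.push('}');
    }

    // 3. Append the wrapper block (a comment stands in for the Solidity the real code adds).
    contract.push_str("\n    // verifyWithDataAttestation(...) would be appended here\n}\n");

    println!("{contract}");
}
```

The marker comment is what makes step 2 cheap: a single `find` locates the end of `checkKzgCommits` without any Solidity parsing.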
@@ -506,10 +506,12 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
             )
             .await
         }
+        #[cfg(not(feature = "no-update"))]
         Commands::Update { version } => update_ezkl_binary(&version).map(|e| e.to_string()),
     }
 }

+#[cfg(not(feature = "no-update"))]
 /// Assert that the version is valid
 fn assert_version_is_valid(version: &str) -> Result<(), EZKLError> {
     let err_string = "Invalid version string. Must be in the format v0.0.0";
@@ -527,8 +529,10 @@ fn assert_version_is_valid(version: &str) -> Result<(), EZKLError> {
     Ok(())
 }

+#[cfg(not(feature = "no-update"))]
 const INSTALL_BYTES: &[u8] = include_bytes!("../install_ezkl_cli.sh");

+#[cfg(not(feature = "no-update"))]
 fn update_ezkl_binary(version: &Option<String>) -> Result<String, EZKLError> {
     // run the install script with the version
     let install_script = std::str::from_utf8(INSTALL_BYTES)?;
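Before the embedded `install_ezkl_cli.sh` script runs, `assert_version_is_valid` rejects anything that is not in the `v0.0.0` form described by its error string. A hedged sketch of such a check; the real function returns a `Result<(), EZKLError>` and may accept additional forms:

```rust
// Sketch of a `v0.0.0` format check; not ezkl's exact implementation.
fn version_is_valid(version: &str) -> bool {
    let Some(rest) = version.strip_prefix('v') else {
        return false;
    };
    let parts: Vec<&str> = rest.split('.').collect();
    parts.len() == 3
        && parts
            .iter()
            .all(|p| !p.is_empty() && p.chars().all(|c| c.is_ascii_digit()))
}

fn main() {
    assert!(version_is_valid("v11.4.2"));
    assert!(!version_is_valid("11.4.2"));
    assert!(!version_is_valid("v11.4"));
    println!("version format checks passed");
}
```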
@@ -41,9 +41,9 @@ pub enum GraphError {
     /// Error when attempting to rescale an operation
     #[error("failed to rescale inputs for {0}")]
     RescalingError(String),
-    /// Error when attempting to load a model from a file
-    #[error("failed to load model")]
-    ModelLoad(#[from] std::io::Error),
+    /// Reading a file failed
+    #[error("[io] ({0}) {1}")]
+    ReadWriteFileError(String, String),
     /// Model serialization error
     #[error("failed to ser/deser model: {0}")]
     ModelSerialize(#[from] bincode::Error),
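The blanket `ModelLoad(#[from] std::io::Error)` variant is replaced by `ReadWriteFileError(String, String)`, which carries the offending path together with the io error text. The graph hunks that follow all apply the same `map_err` shape; here is a stripped-down version of the pattern, without the `thiserror` derive the real enum uses:

```rust
use std::io::Read;

// Simplified stand-in for the relevant GraphError variant.
#[derive(Debug)]
enum GraphError {
    ReadWriteFileError(String, String),
}

fn read_to_string(path: &std::path::Path) -> Result<String, GraphError> {
    // Each io operation is mapped into an error that names the path involved.
    let mut file = std::fs::File::open(path)
        .map_err(|e| GraphError::ReadWriteFileError(path.display().to_string(), e.to_string()))?;
    let mut buf = String::new();
    file.read_to_string(&mut buf)
        .map_err(|e| GraphError::ReadWriteFileError(path.display().to_string(), e.to_string()))?;
    Ok(buf)
}

fn main() {
    // A missing file now reports which path failed instead of a bare io error.
    match read_to_string(std::path::Path::new("does_not_exist.json")) {
        Ok(_) => println!("unexpectedly found the file"),
        Err(e) => println!("{:?}", e),
    }
}
```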
@@ -485,18 +485,25 @@ impl GraphData {

     /// Load the model input from a file
     pub fn from_path(path: std::path::PathBuf) -> Result<Self, GraphError> {
-        let reader = std::fs::File::open(path)?;
+        let reader = std::fs::File::open(&path).map_err(|e| {
+            GraphError::ReadWriteFileError(path.display().to_string(), e.to_string())
+        })?;
         let mut reader = BufReader::with_capacity(*EZKL_BUF_CAPACITY, reader);
         let mut buf = String::new();
-        reader.read_to_string(&mut buf)?;
+        reader.read_to_string(&mut buf).map_err(|e| {
+            GraphError::ReadWriteFileError(path.display().to_string(), e.to_string())
+        })?;
         let graph_input = serde_json::from_str(&buf)?;
         Ok(graph_input)
     }

     /// Save the model input to a file
     pub fn save(&self, path: std::path::PathBuf) -> Result<(), GraphError> {
+        let file = std::fs::File::create(path.clone()).map_err(|e| {
+            GraphError::ReadWriteFileError(path.display().to_string(), e.to_string())
+        })?;
         // buf writer
-        let writer = BufWriter::with_capacity(*EZKL_BUF_CAPACITY, std::fs::File::create(path)?);
+        let writer = BufWriter::with_capacity(*EZKL_BUF_CAPACITY, file);
         serde_json::to_writer(writer, self)?;
         Ok(())
     }
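Besides the path-aware error mapping, the new `save` creates the file once up front and hands that same handle to the `BufWriter`, rather than calling `File::create` inline where a failure would surface as a bare io error. A small sketch of the resulting write path, with a local constant standing in for `*EZKL_BUF_CAPACITY` and plain bytes standing in for the serde serialization:

```rust
use std::io::{BufWriter, Write};

const BUF_CAPACITY: usize = 8 * 1024; // stand-in for *EZKL_BUF_CAPACITY

fn save_json(path: &std::path::Path, value: &str) -> std::io::Result<()> {
    // Create the file once; any error here can be reported with the path attached.
    let file = std::fs::File::create(path)?;
    // Wrap the already-open handle in a buffered writer with an explicit capacity.
    let mut writer = BufWriter::with_capacity(BUF_CAPACITY, file);
    writer.write_all(value.as_bytes())?;
    writer.flush()
}

fn main() -> std::io::Result<()> {
    let path = std::env::temp_dir().join("ezkl_save_sketch.json");
    save_json(&path, "{\"input_data\": []}")?;
    std::fs::remove_file(&path)
}
```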
@@ -267,7 +267,9 @@ impl GraphWitness {

     /// Load the model input from a file
     pub fn from_path(path: std::path::PathBuf) -> Result<Self, GraphError> {
-        let file = std::fs::File::open(path.clone())?;
+        let file = std::fs::File::open(path.clone()).map_err(|e| {
+            GraphError::ReadWriteFileError(path.display().to_string(), e.to_string())
+        })?;

         let reader = std::io::BufReader::with_capacity(*EZKL_BUF_CAPACITY, file);
         serde_json::from_reader(reader).map_err(|e| e.into())
@@ -275,9 +277,11 @@ impl GraphWitness {

     /// Save the model input to a file
     pub fn save(&self, path: std::path::PathBuf) -> Result<(), GraphError> {
+        let file = std::fs::File::create(path.clone()).map_err(|e| {
+            GraphError::ReadWriteFileError(path.display().to_string(), e.to_string())
+        })?;
         // use buf writer
-        let writer =
-            std::io::BufWriter::with_capacity(*EZKL_BUF_CAPACITY, std::fs::File::create(path)?);
+        let writer = std::io::BufWriter::with_capacity(*EZKL_BUF_CAPACITY, file);

         serde_json::to_writer(writer, &self).map_err(|e| e.into())
     }
@@ -640,7 +644,9 @@ impl GraphCircuit {
     }
     ///
     pub fn save(&self, path: std::path::PathBuf) -> Result<(), GraphError> {
-        let f = std::fs::File::create(path)?;
+        let f = std::fs::File::create(&path).map_err(|e| {
+            GraphError::ReadWriteFileError(path.display().to_string(), e.to_string())
+        })?;
         let writer = std::io::BufWriter::with_capacity(*EZKL_BUF_CAPACITY, f);
         bincode::serialize_into(writer, &self)?;
         Ok(())
@@ -649,7 +655,9 @@ impl GraphCircuit {
     ///
     pub fn load(path: std::path::PathBuf) -> Result<Self, GraphError> {
         // read bytes from file
-        let f = std::fs::File::open(path)?;
+        let f = std::fs::File::open(&path).map_err(|e| {
+            GraphError::ReadWriteFileError(path.display().to_string(), e.to_string())
+        })?;
         let reader = std::io::BufReader::with_capacity(*EZKL_BUF_CAPACITY, f);
         let result: GraphCircuit = bincode::deserialize_from(reader)?;
@@ -483,7 +483,9 @@ impl Model {

     ///
     pub fn save(&self, path: PathBuf) -> Result<(), GraphError> {
-        let f = std::fs::File::create(path)?;
+        let f = std::fs::File::create(&path).map_err(|e| {
+            GraphError::ReadWriteFileError(path.display().to_string(), e.to_string())
+        })?;
         let writer = std::io::BufWriter::new(f);
         bincode::serialize_into(writer, &self)?;
         Ok(())
@@ -492,10 +494,16 @@ impl Model {
     ///
     pub fn load(path: PathBuf) -> Result<Self, GraphError> {
         // read bytes from file
-        let mut f = std::fs::File::open(&path)?;
-        let metadata = fs::metadata(&path)?;
+        let mut f = std::fs::File::open(&path).map_err(|e| {
+            GraphError::ReadWriteFileError(path.display().to_string(), e.to_string())
+        })?;
+        let metadata = fs::metadata(&path).map_err(|e| {
+            GraphError::ReadWriteFileError(path.display().to_string(), e.to_string())
+        })?;
         let mut buffer = vec![0; metadata.len() as usize];
-        f.read_exact(&mut buffer)?;
+        f.read_exact(&mut buffer).map_err(|e| {
+            GraphError::ReadWriteFileError(path.display().to_string(), e.to_string())
+        })?;
         let result = bincode::deserialize(&buffer)?;
         Ok(result)
     }
@@ -975,8 +983,11 @@ impl Model {
     ) -> Result<Vec<Vec<Tensor<f32>>>, GraphError> {
         use tract_onnx::tract_core::internal::IntoArcTensor;

-        let (model, _) =
-            Model::load_onnx_using_tract(&mut std::fs::File::open(model_path)?, run_args)?;
+        let mut file = std::fs::File::open(model_path).map_err(|e| {
+            GraphError::ReadWriteFileError(model_path.display().to_string(), e.to_string())
+        })?;
+
+        let (model, _) = Model::load_onnx_using_tract(&mut file, run_args)?;

         let datum_types: Vec<DatumType> = model
             .input_outlets()?
@@ -1005,7 +1016,10 @@ impl Model {
     /// * `params` - A [GraphSettings] struct holding parsed CLI arguments.
     #[cfg(not(target_arch = "wasm32"))]
     pub fn from_run_args(run_args: &RunArgs, model: &std::path::Path) -> Result<Self, GraphError> {
-        Model::new(&mut std::fs::File::open(model)?, run_args)
+        let mut file = std::fs::File::open(model).map_err(|e| {
+            GraphError::ReadWriteFileError(model.display().to_string(), e.to_string())
+        })?;
+        Model::new(&mut file, run_args)
     }

     /// Configures a model for the circuit
@@ -1066,6 +1066,15 @@ mod native_tests {
             kzg_evm_on_chain_input_prove_and_verify(path, test.to_string(), "file", "on-chain", "polycommit", "public", "polycommit");
             test_dir.close().unwrap();
         }
+        #(#[test_case(TESTS_ON_CHAIN_INPUT[N])])*
+        fn kzg_evm_on_chain_all_kzg_params_prove_and_verify_(test: &str) {
+            crate::native_tests::init_binary();
+            let test_dir = TempDir::new(test).unwrap();
+            let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
+            let _anvil_child = crate::native_tests::start_anvil(true, Hardfork::Latest);
+            kzg_evm_on_chain_input_prove_and_verify(path, test.to_string(), "file", "file", "polycommit", "polycommit", "polycommit");
+            test_dir.close().unwrap();
+        }
     });
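The new `kzg_evm_on_chain_all_kzg_params_prove_and_verify_` case reuses the same harness with `polycommit` visibility for inputs, params, and outputs. These generated tests drive the release CLI binary directly and assert on its exit status; a hedged sketch of that pattern (the paths and argument values below are placeholders, not the exact test invocation):

```rust
use std::process::Command;

// The generated tests resolve the release binary under CARGO_TARGET_DIR and
// check the exit status of each CLI invocation.
fn run_ezkl(target_dir: &str, args: &[&str]) -> bool {
    match Command::new(format!("{}/release/ezkl", target_dir))
        .args(args)
        .status()
    {
        Ok(status) => status.success(),
        Err(_) => false, // binary not built here; the real tests `.expect()` instead
    }
}

fn main() {
    let target_dir = std::env::var("CARGO_TARGET_DIR").unwrap_or_else(|_| "./target".to_string());
    // Placeholder arguments; in the all-kzg flow below, gen-witness is also given
    // --vk-path so the commitments needed by the DA contract land in the witness.
    let ok = run_ezkl(
        &target_dir,
        &["gen-witness", "-D", "input.json", "-M", "network.compiled", "-O", "witness.json"],
    );
    println!("ezkl exited successfully: {ok}");
}
```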
@@ -2330,7 +2339,6 @@ mod native_tests {

         let model_path = format!("{}/{}/network.compiled", test_dir, example_name);
         let settings_path = format!("{}/{}/settings.json", test_dir, example_name);

-        init_params(settings_path.clone().into());

         let data_path = format!("{}/{}/input.json", test_dir, example_name);
@@ -2342,62 +2350,6 @@ mod native_tests {
         let test_input_source = format!("--input-source={}", input_source);
         let test_output_source = format!("--output-source={}", output_source);

-        // load witness
-        let witness: GraphWitness = GraphWitness::from_path(witness_path.clone().into()).unwrap();
-        let mut input: GraphData = GraphData::from_path(data_path.clone().into()).unwrap();
-
-        if input_visibility == "hashed" {
-            let hashes = witness.processed_inputs.unwrap().poseidon_hash.unwrap();
-            input.input_data = DataSource::File(
-                hashes
-                    .iter()
-                    .map(|h| vec![FileSourceInner::Field(*h)])
-                    .collect(),
-            );
-        }
-        if output_visibility == "hashed" {
-            let hashes = witness.processed_outputs.unwrap().poseidon_hash.unwrap();
-            input.output_data = Some(DataSource::File(
-                hashes
-                    .iter()
-                    .map(|h| vec![FileSourceInner::Field(*h)])
-                    .collect(),
-            ));
-        } else {
-            input.output_data = Some(DataSource::File(
-                witness
-                    .pretty_elements
-                    .unwrap()
-                    .rescaled_outputs
-                    .iter()
-                    .map(|o| {
-                        o.iter()
-                            .map(|f| FileSourceInner::Float(f.parse().unwrap()))
-                            .collect()
-                    })
-                    .collect(),
-            ));
-        }
-
-        input.save(data_path.clone().into()).unwrap();
-
-        let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
-            .args([
-                "setup-test-evm-data",
-                "-D",
-                data_path.as_str(),
-                "-M",
-                &model_path,
-                "--test-data",
-                test_on_chain_data_path.as_str(),
-                rpc_arg.as_str(),
-                test_input_source.as_str(),
-                test_output_source.as_str(),
-            ])
-            .status()
-            .expect("failed to execute process");
-        assert!(status.success());
-
         let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
             .args([
                 "setup",
@@ -2412,6 +2364,82 @@ mod native_tests {
             .expect("failed to execute process");
         assert!(status.success());

+        // generate the witness, passing the vk path to generate the necessary kzg commits
+        let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
+            .args([
+                "gen-witness",
+                "-D",
+                &data_path,
+                "-M",
+                &model_path,
+                "-O",
+                &witness_path,
+                "--vk-path",
+                &format!("{}/{}/key.vk", test_dir, example_name),
+            ])
+            .status()
+            .expect("failed to execute process");
+        assert!(status.success());
+
+        // load witness
+        let witness: GraphWitness = GraphWitness::from_path(witness_path.clone().into()).unwrap();
+        // print out the witness
+        println!("WITNESS: {:?}", witness);
+        let mut input: GraphData = GraphData::from_path(data_path.clone().into()).unwrap();
+        if input_source != "file" || output_source != "file" {
+            println!("on chain input");
+            if input_visibility == "hashed" {
+                let hashes = witness.processed_inputs.unwrap().poseidon_hash.unwrap();
+                input.input_data = DataSource::File(
+                    hashes
+                        .iter()
+                        .map(|h| vec![FileSourceInner::Field(*h)])
+                        .collect(),
+                );
+            }
+            if output_visibility == "hashed" {
+                let hashes = witness.processed_outputs.unwrap().poseidon_hash.unwrap();
+                input.output_data = Some(DataSource::File(
+                    hashes
+                        .iter()
+                        .map(|h| vec![FileSourceInner::Field(*h)])
+                        .collect(),
+                ));
+            } else {
+                input.output_data = Some(DataSource::File(
+                    witness
+                        .pretty_elements
+                        .unwrap()
+                        .rescaled_outputs
+                        .iter()
+                        .map(|o| {
+                            o.iter()
+                                .map(|f| FileSourceInner::Float(f.parse().unwrap()))
+                                .collect()
+                        })
+                        .collect(),
+                ));
+            }
+            input.save(data_path.clone().into()).unwrap();
+
+            let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
+                .args([
+                    "setup-test-evm-data",
+                    "-D",
+                    data_path.as_str(),
+                    "-M",
+                    &model_path,
+                    "--test-data",
+                    test_on_chain_data_path.as_str(),
+                    rpc_arg.as_str(),
+                    test_input_source.as_str(),
+                    test_output_source.as_str(),
+                ])
+                .status()
+                .expect("failed to execute process");
+            assert!(status.success());
+        }

         let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
             .args([
                 "prove",
@@ -2502,13 +2530,19 @@ mod native_tests {
             .expect("failed to execute process");
         assert!(status.success());

+        let deploy_evm_data_path = if input_source != "file" || output_source != "file" {
+            test_on_chain_data_path.clone()
+        } else {
+            data_path.clone()
+        };
+
         let addr_path_da_arg = format!("--addr-path={}/{}/addr_da.txt", test_dir, example_name);
         let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
             .args([
                 "deploy-evm-da",
                 format!("--settings-path={}", settings_path).as_str(),
                 "-D",
-                test_on_chain_data_path.as_str(),
+                deploy_evm_data_path.as_str(),
                 "--sol-code-path",
                 sol_arg.as_str(),
                 rpc_arg.as_str(),
@@ -2546,40 +2580,42 @@ mod native_tests {
            .status()
            .expect("failed to execute process");
        assert!(status.success());
        // Create a new set of test on chain data
        let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
            .args([
                "setup-test-evm-data",
        // Create a new set of test on chain data only for the on-chain input source
        if input_source != "file" || output_source != "file" {
            let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
                .args([
                    "setup-test-evm-data",
                    "-D",
                    data_path.as_str(),
                    "-M",
                    &model_path,
                    "--test-data",
                    test_on_chain_data_path.as_str(),
                    rpc_arg.as_str(),
                    test_input_source.as_str(),
                    test_output_source.as_str(),
                ])
                .status()
                .expect("failed to execute process");

            assert!(status.success());

            let deployed_addr_arg = format!("--addr={}", addr_da);

            let args: Vec<&str> = vec![
                "test-update-account-calls",
                deployed_addr_arg.as_str(),
                "-D",
                data_path.as_str(),
                "-M",
                &model_path,
                "--test-data",
                test_on_chain_data_path.as_str(),
                rpc_arg.as_str(),
                test_input_source.as_str(),
                test_output_source.as_str(),
            ])
            .status()
            .expect("failed to execute process");
            ];
            let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
                .args(&args)
                .status()
                .expect("failed to execute process");

            assert!(status.success());

            let deployed_addr_arg = format!("--addr={}", addr_da);

            let args = vec![
                "test-update-account-calls",
                deployed_addr_arg.as_str(),
                "-D",
                test_on_chain_data_path.as_str(),
                rpc_arg.as_str(),
            ];
            let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
                .args(&args)
                .status()
                .expect("failed to execute process");

            assert!(status.success());
            assert!(status.success());
        }
        // As sanity check, add example that should fail.
        let args = vec![
            "verify-evm",