Compare commits


57 Commits

Author SHA1 Message Date
Ethan
41d3233223 h2 sol verifier package update. 2024-10-28 20:01:00 +08:00
Ethan
cda4b2dacd merge "main" into "verifier-router" 2024-10-28 19:30:03 +08:00
Ethan
1ddbce537f ci testing 2024-10-25 22:31:38 +08:00
Ethan Cemer
5faa56f359 Merge branch 'zkonduit:main' into verifier-router 2024-10-11 02:15:14 -05:00
Ethan
97c1397fe6 verifier manager contract. 2024-10-08 20:47:58 +08:00
Ethan
e3051b79cd *bit flip fuzzing tests 2024-10-04 17:24:57 +08:00
Ethan Cemer
fe960767df Merge branch 'main' into reusable-vk-nb 2024-09-19 20:28:15 -05:00
Ethan
d4ebf8f120 skip lstm_large in overflow b/c too big to fit on chain. 2024-09-10 13:48:36 -05:00
Ethan
6788a9c726 *comprehensive test coverage for reusable verifier 2024-09-09 17:00:42 -05:00
Ethan
9fd3799ed1 *expand reusable verifier test examples 2024-09-07 17:46:32 -05:00
Ethan
c9351c0607 *reduce extcodecopy call by 1 2024-09-05 20:10:16 -05:00
Ethan Cemer
a2a8488d67 Merge branch 'main' into reusable-vk-nb 2024-09-05 15:40:34 -05:00
Ethan
9908f5c214 *add col overflow testing for reusable verifier. 2024-09-04 21:45:29 -05:00
Ethan Cemer
66ae2f7bac Merge branch 'main' into reusable-vk-nb 2024-09-02 18:02:09 -05:00
Ethan
4d18a5a903 *rmv create_evm_vk cmd
*test reusable verifier after h2 curve updates
2024-09-02 18:01:40 -05:00
Ethan
cd1860246c main lock 2024-08-29 16:43:16 -04:00
Ethan
ad59186eb6 Merge branch 'main' into reusable-vk-nb 2024-08-29 16:41:16 -04:00
Ethan
472a505063 *update separate vk contract name 2024-08-14 13:07:37 -04:00
Ethan
1588c9735e *update lock 2024-08-13 20:22:52 -04:00
Ethan
9d876d5bf9 MV lookup packed. 2024-08-09 17:34:22 -04:00
Ethan
fd29794cdd hardcode coeff_ptr 2024-08-08 13:55:10 -04:00
Ethan
a616fbb759 packed permutation evals and challenge data. 2024-08-07 16:34:39 -04:00
Ethan
f1fe01952f Merge branch 'main' into reusable-vk-nb 2024-08-05 10:39:39 -04:00
Ethan
23f71873ce *update lock 2024-08-05 10:37:06 -04:00
Ethan
31168b0a99 *fully reusable verifier 2024-08-03 01:10:26 -05:00
Ethan
35bb286d40 *coeff_sums_computation 2024-08-01 21:11:47 -05:00
Ethan
d5944d36fe *r_evals_computation 2024-08-01 15:18:17 -05:00
Ethan
3df63d540d coeff_computations. 2024-07-30 19:18:23 -05:00
Ethan
779f82e0dc *vanish computations pcs 2024-07-29 20:35:10 -05:00
Ethan
889db3a6fe *update lock. 2024-07-29 18:16:33 -05:00
Ethan
fab08bbe9d *update cargo lock 2024-07-29 14:08:16 -05:00
Ethan
72f1892d54 *update lock 2024-07-29 06:18:31 -05:00
Ethan
f3e531c3e7 *update lock 2024-07-28 21:52:29 -05:00
Ethan
00f8dd42f6 *update lock
*revert to svm 0.8.20
2024-07-28 18:06:56 -05:00
Ethan
099726b245 update lock. 2024-07-26 22:14:41 -05:00
Ethan
d5f18495de *update lock. 2024-07-26 15:10:41 -05:00
Ethan
add04f6090 Merge branch 'main' into reusable-vk-nb 2024-07-26 15:10:03 -05:00
Ethan
6b71bdc920 use latest version of solc 2024-07-23 15:01:42 -05:00
Ethan
3e5153db9f *update lock 2024-07-23 14:59:49 -05:00
Ethan
a1dd82a3c1 *update lock 2024-07-19 18:20:26 -05:00
Ethan
f6acf241c9 Merge branch 'main' into reusable-vk-nb 2024-07-19 18:18:49 -05:00
Ethan
dbe812b88d *update lock 2024-07-12 22:14:13 -05:00
Ethan
36188ab542 *comment out JS tests for reusable verifier CI tests 2024-07-12 16:00:46 -05:00
Ethan Cemer
cd9d7c3d50 Merge branch 'main' into reusable-vk-nb 2024-07-12 15:58:16 -05:00
Ethan Cemer
f5ae49e1c5 Merge branch 'main' into reusable-vk-nb 2024-07-11 22:48:11 -05:00
Ethan
f25b420429 *update lock 2024-07-11 22:47:42 -05:00
Ethan
f59aaf80c5 *update lock 2024-07-11 15:24:28 -05:00
Ethan
257e275773 *update lock 2024-07-11 00:23:19 -05:00
Ethan
2fe0eb4b27 *update lock 2024-07-06 23:05:48 -05:00
Ethan
bdad19b83c *update lock 2024-07-03 21:00:58 -05:00
Ethan
a17aad064b *update lock 2024-07-02 21:43:38 -05:00
Ethan
985205ae40 *update lock
*hardcode sample inputs for reusable verifiers nb
2024-07-02 00:49:34 -05:00
Ethan Cemer
b08dc28ed5 Merge branch 'main' into reusable-vk-nb 2024-07-01 17:08:23 -05:00
Ethan
b3997cd325 lazy static import 2024-06-28 09:36:18 -05:00
Ethan
83cb957299 *fix stuck integration tests. 2024-06-27 19:46:05 -05:00
Ethan
c92be15b81 *update lockfile 2024-06-26 16:11:06 -05:00
Ethan
6924797e48 *reusable verifier example nb 2024-06-25 16:10:24 -05:00
27 changed files with 949 additions and 226 deletions

.gitignore vendored (1 line changed)

@@ -9,6 +9,7 @@ pkg
!AttestData.sol
!VerifierBase.sol
!LoadInstances.sol
!VerifierManager.sol
*.pf
*.vk
*.pk

Cargo.lock generated (18 lines changed)

@@ -2397,7 +2397,7 @@ dependencies = [
[[package]]
name = "halo2_solidity_verifier"
version = "0.1.0"
source = "git+https://github.com/alexander-camuto/halo2-solidity-verifier?branch=ac/update-h2-curves#eede1db7f3e599112bd1186e9d1913286bdcb539"
source = "git+https://github.com/alexander-camuto/halo2-solidity-verifier?branch=vka-log#c319e229ad677ee4c7d95bdae45c2958350cfd14"
dependencies = [
"askama",
"blake2b_simd",
@@ -2543,6 +2543,12 @@ dependencies = [
"allocator-api2",
]
[[package]]
name = "hashbrown"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb"
[[package]]
name = "heck"
version = "0.4.1"
@@ -2811,12 +2817,12 @@ dependencies = [
[[package]]
name = "indexmap"
version = "2.2.5"
version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b0b929d511467233429c45a44ac1dcaa21ba0f5ba11e4879e6ed28ddb4f9df4"
checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da"
dependencies = [
"equivalent",
"hashbrown 0.14.3",
"hashbrown 0.15.0",
]
[[package]]
@@ -5628,9 +5634,9 @@ checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e"
[[package]]
name = "tower-service"
version = "0.3.2"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52"
checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3"
[[package]]
name = "tracing"


@@ -19,8 +19,11 @@ crate-type = ["cdylib", "rlib", "staticlib"]
halo2_gadgets = { git = "https://github.com/zkonduit/halo2", branch = "ac/optional-selector-poly" }
halo2curves = { git = "https://github.com/privacy-scaling-explorations/halo2curves", rev = "b753a832e92d5c86c5c997327a9cf9de86a18851", features = [
"derive_serde",
"derive_serde",
] }
halo2_proofs = { git = "https://github.com/zkonduit/halo2", package = "halo2_proofs", branch = "ac/cache-lookup-commitments", features = [
"circuit-params",
] }
halo2_proofs = { git = "https://github.com/zkonduit/halo2", package = "halo2_proofs", branch = "ac/cache-lookup-commitments", features = ["circuit-params"] }
rand = { version = "0.8", default-features = false }
itertools = { version = "0.10.3", default-features = false }
clap = { version = "4.5.3", features = ["derive"], optional = true }
@@ -33,9 +36,9 @@ halo2_wrong_ecc = { git = "https://github.com/zkonduit/halo2wrong", branch = "ac
snark-verifier = { git = "https://github.com/zkonduit/snark-verifier", branch = "ac/chunked-mv-lookup", features = [
"derive_serde",
] }
halo2_solidity_verifier = { git = "https://github.com/alexander-camuto/halo2-solidity-verifier", branch = "ac/update-h2-curves", optional = true }
maybe-rayon = { version = "0.1.1", default-features = false }
bincode = { version = "1.3.3", default-features = false }
halo2_solidity_verifier = { git = "https://github.com/alexander-camuto/halo2-solidity-verifier", branch = "vka-log", optional = true }
maybe-rayon = { version = "0.1.1", default_features = false }
bincode = { version = "1.3.3", default_features = false }
unzip-n = "0.1.2"
num = "0.4.1"
portable-atomic = { version = "1.6.0", optional = true }
@@ -43,10 +46,7 @@ tosubcommand = { git = "https://github.com/zkonduit/enum_to_subcommand", package
semver = { version = "1.0.22", optional = true }
[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
serde_json = { version = "1.0.97", features = [
"float_roundtrip",
"raw_value",
] }
serde_json = { version = "1.0.97", features = ["float_roundtrip", "raw_value"] }
# evm related deps
alloy = { git = "https://github.com/alloy-rs/alloy", version = "0.1.0", rev = "5fbf57bac99edef9d8475190109a7ea9fb7e5e83", features = [
@@ -56,22 +56,39 @@ alloy = { git = "https://github.com/alloy-rs/alloy", version = "0.1.0", rev = "5
"rpc-types-eth",
"signer-wallet",
"node-bindings",
], optional = true }
foundry-compilers = { version = "0.4.1", features = [
"svm-solc",
], optional = true }
foundry-compilers = { version = "0.4.1", features = ["svm-solc"], optional = true }
ethabi = { version = "18", optional = true }
indicatif = { version = "0.17.5", features = ["rayon"], optional = true }
gag = { version = "1.0.0", default-features = false, optional = true }
instant = { version = "0.1" }
reqwest = { version = "0.12.4", default-features = false, features = ["default-tls", "multipart", "stream"], optional = true }
reqwest = { version = "0.12.4", default-features = false, features = [
"default-tls",
"multipart",
"stream",
], optional = true }
openssl = { version = "0.10.55", features = ["vendored"], optional = true }
tokio-postgres = { version = "0.7.10", optional = true }
pg_bigdecimal = { version = "0.1.5", optional = true }
lazy_static = { version = "1.4.0", optional = true }
colored_json = { version = "3.0.1", default-features = false, optional = true }
regex = { version = "1", default-features = false, optional = true }
tokio = { version = "1.35.0", default-features = false, features = ["macros", "rt-multi-thread"], optional = true }
pyo3 = { version = "0.21.2", features = ["extension-module", "abi3-py37", "macros"], default-features = false, optional = true }
pyo3-asyncio = { git = "https://github.com/jopemachine/pyo3-asyncio/", branch="migration-pyo3-0.21", features = ["attributes", "tokio-runtime"], default-features = false, optional = true }
tokio = { version = "1.35.0", default-features = false, features = [
"macros",
"rt-multi-thread",
], optional = true }
pyo3 = { version = "0.21.2", features = [
"extension-module",
"abi3-py37",
"macros",
], default-features = false, optional = true }
pyo3-asyncio = { git = "https://github.com/jopemachine/pyo3-asyncio/", branch = "migration-pyo3-0.21", features = [
"attributes",
"tokio-runtime",
], default-features = false, optional = true }
pyo3-log = { version = "0.10.0", default-features = false, optional = true }
tract-onnx = { git = "https://github.com/sonos/tract/", rev = "40c64319291184814d9fea5fdf4fa16f5a4f7116", default-features = false, optional = true }
tabled = { version = "0.12.0", optional = true }
@@ -196,7 +213,13 @@ required-features = ["ios-bindings", "uuid", "camino", "uniffi_bindgen"]
[features]
web = ["wasm-bindgen-rayon"]
default = ["ezkl", "mv-lookup", "precompute-coset", "no-banner", "parallel-poly-read"]
default = [
"ezkl",
"mv-lookup",
"precompute-coset",
"no-banner",
"parallel-poly-read",
]
onnx = ["dep:tract-onnx"]
python-bindings = ["pyo3", "pyo3-log", "pyo3-asyncio"]
ios-bindings = ["mv-lookup", "precompute-coset", "parallel-poly-read", "uniffi"]
@@ -230,7 +253,10 @@ ezkl = [
"dep:clap",
"dep:tosubcommand",
]
parallel-poly-read = ["halo2_proofs/circuit-params", "halo2_proofs/parallel-poly-read"]
parallel-poly-read = [
"halo2_proofs/circuit-params",
"halo2_proofs/parallel-poly-read",
]
mv-lookup = [
"halo2_proofs/mv-lookup",
"snark-verifier/mv-lookup",
@@ -259,4 +285,3 @@ rustflags = ["-C", "relocation-model=pic"]
lto = "fat"
codegen-units = 1
# panic = "abort"


@@ -0,0 +1,146 @@
[
{
"inputs": [
{
"internalType": "address",
"name": "owner",
"type": "address"
}
],
"name": "OwnableInvalidOwner",
"type": "error"
},
{
"inputs": [
{
"internalType": "address",
"name": "account",
"type": "address"
}
],
"name": "OwnableUnauthorizedAccount",
"type": "error"
},
{
"anonymous": false,
"inputs": [
{
"indexed": false,
"internalType": "address",
"name": "addr",
"type": "address"
}
],
"name": "DeployedVerifier",
"type": "event"
},
{
"anonymous": false,
"inputs": [
{
"indexed": true,
"internalType": "address",
"name": "previousOwner",
"type": "address"
},
{
"indexed": true,
"internalType": "address",
"name": "newOwner",
"type": "address"
}
],
"name": "OwnershipTransferred",
"type": "event"
},
{
"inputs": [
{
"internalType": "bytes",
"name": "bytecode",
"type": "bytes"
}
],
"name": "deployVerifier",
"outputs": [
{
"internalType": "address",
"name": "addr",
"type": "address"
}
],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [],
"name": "owner",
"outputs": [
{
"internalType": "address",
"name": "",
"type": "address"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [
{
"internalType": "bytes",
"name": "bytecode",
"type": "bytes"
}
],
"name": "precomputeAddress",
"outputs": [
{
"internalType": "address",
"name": "",
"type": "address"
}
],
"stateMutability": "view",
"type": "function"
},
{
"inputs": [],
"name": "renounceOwnership",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "newOwner",
"type": "address"
}
],
"name": "transferOwnership",
"outputs": [],
"stateMutability": "nonpayable",
"type": "function"
},
{
"inputs": [
{
"internalType": "address",
"name": "",
"type": "address"
}
],
"name": "verifierAddresses",
"outputs": [
{
"internalType": "bool",
"name": "",
"type": "bool"
}
],
"stateMutability": "view",
"type": "function"
}
]


@@ -0,0 +1,184 @@
// SPDX-License-Identifier: MIT
pragma solidity 0.8.20;
// lib/openzeppelin-contracts/contracts/utils/Context.sol
// OpenZeppelin Contracts (last updated v5.0.1) (utils/Context.sol)
/**
* @dev Provides information about the current execution context, including the
* sender of the transaction and its data. While these are generally available
* via msg.sender and msg.data, they should not be accessed in such a direct
* manner, since when dealing with meta-transactions the account sending and
* paying for execution may not be the actual sender (as far as an application
* is concerned).
*
* This contract is only required for intermediate, library-like contracts.
*/
abstract contract Context {
function _msgSender() internal view virtual returns (address) {
return msg.sender;
}
function _msgData() internal view virtual returns (bytes calldata) {
return msg.data;
}
function _contextSuffixLength() internal view virtual returns (uint256) {
return 0;
}
}
// lib/openzeppelin-contracts/contracts/access/Ownable.sol
// OpenZeppelin Contracts (last updated v5.0.0) (access/Ownable.sol)
/**
* @dev Contract module which provides a basic access control mechanism, where
* there is an account (an owner) that can be granted exclusive access to
* specific functions.
*
* The initial owner is set to the address provided by the deployer. This can
* later be changed with {transferOwnership}.
*
* This module is used through inheritance. It will make available the modifier
* `onlyOwner`, which can be applied to your functions to restrict their use to
* the owner.
*/
abstract contract Ownable is Context {
/// set the owner initially to be the anvil test account
address private _owner = 0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266;
/**
* @dev The caller account is not authorized to perform an operation.
*/
error OwnableUnauthorizedAccount(address account);
/**
* @dev The owner is not a valid owner account. (e.g. `address(0)`)
*/
error OwnableInvalidOwner(address owner);
event OwnershipTransferred(
address indexed previousOwner,
address indexed newOwner
);
/**
* @dev Initializes the contract setting the address provided by the deployer as the initial owner.
*/
constructor() {
_transferOwnership(msg.sender);
}
/**
* @dev Throws if called by any account other than the owner.
*/
modifier onlyOwner() {
_checkOwner();
_;
}
/**
* @dev Returns the address of the current owner.
*/
function owner() public view virtual returns (address) {
return _owner;
}
/**
* @dev Throws if the sender is not the owner.
*/
function _checkOwner() internal view virtual {
if (owner() != _msgSender()) {
revert OwnableUnauthorizedAccount(_msgSender());
}
}
/**
* @dev Leaves the contract without owner. It will not be possible to call
* `onlyOwner` functions. Can only be called by the current owner.
*
* NOTE: Renouncing ownership will leave the contract without an owner,
* thereby disabling any functionality that is only available to the owner.
*/
function renounceOwnership() public virtual onlyOwner {
_transferOwnership(address(0));
}
/**
* @dev Transfers ownership of the contract to a new account (`newOwner`).
* Can only be called by the current owner.
*/
function transferOwnership(address newOwner) public virtual onlyOwner {
if (newOwner == address(0)) {
revert OwnableInvalidOwner(address(0));
}
_transferOwnership(newOwner);
}
/**
* @dev Transfers ownership of the contract to a new account (`newOwner`).
* Internal function without access restriction.
*/
function _transferOwnership(address newOwner) internal virtual {
address oldOwner = _owner;
_owner = newOwner;
emit OwnershipTransferred(oldOwner, newOwner);
}
}
// interface for the reusable verifier.
interface Halo2VerifierReusable {
function verifyProof(
address vkArtifact,
bytes calldata proof,
uint256[] calldata instances
) external returns (bool);
}
// Manages the deployment of all EZKL reusable verifiers (ezkl version specific) and verifying key artifacts (circuit specific), and
// routes proof verifications to the correct VKA and associated reusable verifier.
// Helps to prevent the deployment of duplicate verifiers.
contract EZKLVerifierManager is Ownable {
/// @dev Mapping that checks if a given reusable verifier has been deployed
mapping(address => bool) public verifierAddresses;
event DeployedVerifier(address addr);
// 1. Compute the address of the verifier to be deployed
function precomputeAddress(
bytes memory bytecode
) public view returns (address) {
bytes32 hash = keccak256(
abi.encodePacked(
bytes1(0xff),
address(this),
uint(0),
keccak256(bytecode)
)
);
return address(uint160(uint(hash)));
}
// 2. Deploy the reusable verifier using create2
/// @param bytecode The bytecode of the reusable verifier to deploy
function deployVerifier(
bytes memory bytecode
) public returns (address addr) {
assembly {
addr := create2(
0x0, // value, hardcode to 0
add(bytecode, 0x20),
mload(bytecode),
0x0 // salt, hardcode to 0
)
if iszero(extcodesize(addr)) {
revert(0, 0)
}
}
verifierAddresses[addr] = true;
emit DeployedVerifier(addr);
}
}
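Because deployVerifier uses create2 with a zero salt, a verifier's address is a pure function of the manager's address and the verifier init code, which is what lets the manager deduplicate deployments and precompute addresses off-chain. Below is a minimal off-chain sketch of the same EIP-1014 derivation in Rust, assuming the tiny_keccak crate; the function names here are hypothetical illustrations, not part of this PR.

use tiny_keccak::{Hasher, Keccak};

fn keccak256(data: &[u8]) -> [u8; 32] {
    let mut hasher = Keccak::v256();
    hasher.update(data);
    let mut out = [0u8; 32];
    hasher.finalize(&mut out);
    out
}

// EIP-1014: address = keccak256(0xff ++ deployer ++ salt ++ keccak256(init_code))[12..]
fn precompute_create2_address(deployer: &[u8; 20], init_code: &[u8]) -> [u8; 20] {
    let mut preimage = Vec::with_capacity(1 + 20 + 32 + 32);
    preimage.push(0xff);
    preimage.extend_from_slice(deployer);
    preimage.extend_from_slice(&[0u8; 32]); // salt hardcoded to zero, as in deployVerifier
    preimage.extend_from_slice(&keccak256(init_code));
    let hash = keccak256(&preimage);
    let mut addr = [0u8; 20];
    addr.copy_from_slice(&hash[12..]);
    addr // should match EZKLVerifierManager.precomputeAddress for the same bytecode
}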


@@ -1,4 +1,4 @@
ezkl==14.2.0
ezkl==0.0.0
sphinx
sphinx-rtd-theme
sphinxcontrib-napoleon


@@ -1,7 +1,7 @@
import ezkl
project = 'ezkl'
release = '14.2.0'
release = '0.0.0'
version = release


@@ -177,7 +177,7 @@ impl<'source> FromPyObject<'source> for Tolerance {
#[derive(Clone, Debug, Default)]
pub struct DynamicLookups {
/// [Selector]s generated when configuring the layer. We use a [BTreeMap] as we expect to configure many dynamic lookup ops.
pub lookup_selectors: BTreeMap<(usize, usize), Selector>,
pub lookup_selectors: BTreeMap<(usize, (usize, usize)), Selector>,
/// Selectors for the dynamic lookup tables
pub table_selectors: Vec<Selector>,
/// Inputs:
@@ -209,7 +209,7 @@ impl DynamicLookups {
#[derive(Clone, Debug, Default)]
pub struct Shuffles {
/// [Selector]s generated when configuring the layer. We use a [BTreeMap] as we expect to configure many dynamic lookup ops.
pub input_selectors: BTreeMap<(usize, usize), Selector>,
pub input_selectors: BTreeMap<(usize, (usize, usize)), Selector>,
/// Selectors for the dynamic lookup tables
pub reference_selectors: Vec<Selector>,
/// Inputs:
@@ -646,57 +646,73 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> BaseConfig<F> {
}
for t in tables.iter() {
if !t.is_advice() || t.num_blocks() > 1 || t.num_inner_cols() > 1 {
if !t.is_advice() || t.num_inner_cols() > 1 {
return Err(CircuitError::WrongDynamicColumnType(t.name().to_string()));
}
}
// assert all tables have the same number of inner columns
if tables
.iter()
.map(|t| t.num_blocks())
.collect::<Vec<_>>()
.windows(2)
.any(|w| w[0] != w[1])
{
return Err(CircuitError::WrongDynamicColumnType(
"tables inner cols".to_string(),
));
}
let one = Expression::Constant(F::ONE);
let s_ltable = cs.complex_selector();
for q in 0..tables[0].num_blocks() {
let s_ltable = cs.complex_selector();
for x in 0..lookups[0].num_blocks() {
for y in 0..lookups[0].num_inner_cols() {
let s_lookup = cs.complex_selector();
for x in 0..lookups[0].num_blocks() {
for y in 0..lookups[0].num_inner_cols() {
let s_lookup = cs.complex_selector();
cs.lookup_any("lookup", |cs| {
let s_lookupq = cs.query_selector(s_lookup);
let mut expression = vec![];
let s_ltableq = cs.query_selector(s_ltable);
let mut lookup_queries = vec![one.clone()];
cs.lookup_any("lookup", |cs| {
let s_lookupq = cs.query_selector(s_lookup);
let mut expression = vec![];
let s_ltableq = cs.query_selector(s_ltable);
let mut lookup_queries = vec![one.clone()];
for lookup in lookups {
lookup_queries.push(match lookup {
VarTensor::Advice { inner: advices, .. } => {
cs.query_advice(advices[x][y], Rotation(0))
}
_ => unreachable!(),
});
}
for lookup in lookups {
lookup_queries.push(match lookup {
VarTensor::Advice { inner: advices, .. } => {
cs.query_advice(advices[x][y], Rotation(0))
}
_ => unreachable!(),
});
}
let mut table_queries = vec![one.clone()];
for table in tables {
table_queries.push(match table {
VarTensor::Advice { inner: advices, .. } => {
cs.query_advice(advices[0][0], Rotation(0))
}
_ => unreachable!(),
});
}
let mut table_queries = vec![one.clone()];
for table in tables {
table_queries.push(match table {
VarTensor::Advice { inner: advices, .. } => {
cs.query_advice(advices[q][0], Rotation(0))
}
_ => unreachable!(),
});
}
let lhs = lookup_queries.into_iter().map(|c| c * s_lookupq.clone());
let rhs = table_queries.into_iter().map(|c| c * s_ltableq.clone());
expression.extend(lhs.zip(rhs));
let lhs = lookup_queries.into_iter().map(|c| c * s_lookupq.clone());
let rhs = table_queries.into_iter().map(|c| c * s_ltableq.clone());
expression.extend(lhs.zip(rhs));
expression
});
self.dynamic_lookups
.lookup_selectors
.entry((x, y))
.or_insert(s_lookup);
expression
});
self.dynamic_lookups
.lookup_selectors
.entry((q, (x, y)))
.or_insert(s_lookup);
}
}
self.dynamic_lookups.table_selectors.push(s_ltable);
}
self.dynamic_lookups.table_selectors.push(s_ltable);
// if we haven't previously initialized the input/output, do so now
if self.dynamic_lookups.tables.is_empty() {
@@ -729,57 +745,72 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> BaseConfig<F> {
}
for t in references.iter() {
if !t.is_advice() || t.num_blocks() > 1 || t.num_inner_cols() > 1 {
if !t.is_advice() || t.num_inner_cols() > 1 {
return Err(CircuitError::WrongDynamicColumnType(t.name().to_string()));
}
}
// assert all tables have the same number of blocks
if references
.iter()
.map(|t| t.num_blocks())
.collect::<Vec<_>>()
.windows(2)
.any(|w| w[0] != w[1])
{
return Err(CircuitError::WrongDynamicColumnType(
"references inner cols".to_string(),
));
}
let one = Expression::Constant(F::ONE);
let s_reference = cs.complex_selector();
for q in 0..references[0].num_blocks() {
let s_reference = cs.complex_selector();
for x in 0..inputs[0].num_blocks() {
for y in 0..inputs[0].num_inner_cols() {
let s_input = cs.complex_selector();
for x in 0..inputs[0].num_blocks() {
for y in 0..inputs[0].num_inner_cols() {
let s_input = cs.complex_selector();
cs.lookup_any("lookup", |cs| {
let s_inputq = cs.query_selector(s_input);
let mut expression = vec![];
let s_referenceq = cs.query_selector(s_reference);
let mut input_queries = vec![one.clone()];
cs.lookup_any("lookup", |cs| {
let s_inputq = cs.query_selector(s_input);
let mut expression = vec![];
let s_referenceq = cs.query_selector(s_reference);
let mut input_queries = vec![one.clone()];
for input in inputs {
input_queries.push(match input {
VarTensor::Advice { inner: advices, .. } => {
cs.query_advice(advices[x][y], Rotation(0))
}
_ => unreachable!(),
});
}
for input in inputs {
input_queries.push(match input {
VarTensor::Advice { inner: advices, .. } => {
cs.query_advice(advices[x][y], Rotation(0))
}
_ => unreachable!(),
});
}
let mut ref_queries = vec![one.clone()];
for reference in references {
ref_queries.push(match reference {
VarTensor::Advice { inner: advices, .. } => {
cs.query_advice(advices[0][0], Rotation(0))
}
_ => unreachable!(),
});
}
let mut ref_queries = vec![one.clone()];
for reference in references {
ref_queries.push(match reference {
VarTensor::Advice { inner: advices, .. } => {
cs.query_advice(advices[q][0], Rotation(0))
}
_ => unreachable!(),
});
}
let lhs = input_queries.into_iter().map(|c| c * s_inputq.clone());
let rhs = ref_queries.into_iter().map(|c| c * s_referenceq.clone());
expression.extend(lhs.zip(rhs));
let lhs = input_queries.into_iter().map(|c| c * s_inputq.clone());
let rhs = ref_queries.into_iter().map(|c| c * s_referenceq.clone());
expression.extend(lhs.zip(rhs));
expression
});
self.shuffles
.input_selectors
.entry((x, y))
.or_insert(s_input);
expression
});
self.shuffles
.input_selectors
.entry((q, (x, y)))
.or_insert(s_input);
}
}
self.shuffles.reference_selectors.push(s_reference);
}
self.shuffles.reference_selectors.push(s_reference);
// if we haven't previously initialized the input/output, do so now
if self.shuffles.references.is_empty() {


@@ -979,8 +979,16 @@ pub(crate) fn dynamic_lookup<F: PrimeField + TensorType + PartialOrd + std::hash
let (lookup_0, lookup_1) = (lookups[0].clone(), lookups[1].clone());
let (table_0, table_1) = (tables[0].clone(), tables[1].clone());
let table_0 = region.assign_dynamic_lookup(&config.dynamic_lookups.tables[0], &table_0)?;
let _table_1 = region.assign_dynamic_lookup(&config.dynamic_lookups.tables[1], &table_1)?;
let (table_0, flush_len_0) =
region.assign_dynamic_lookup(&config.dynamic_lookups.tables[0], &table_0)?;
let (_table_1, flush_len_1) =
region.assign_dynamic_lookup(&config.dynamic_lookups.tables[1], &table_1)?;
if flush_len_0 != flush_len_1 {
return Err(CircuitError::MismatchedLookupTableLength(
flush_len_0,
flush_len_1,
));
}
let table_len = table_0.len();
trace!("assigning tables took: {:?}", start.elapsed());
@@ -1005,13 +1013,21 @@ pub(crate) fn dynamic_lookup<F: PrimeField + TensorType + PartialOrd + std::hash
trace!("assigning lookup index took: {:?}", start.elapsed());
let mut lookup_block = 0;
if !region.is_dummy() {
(0..table_len)
.map(|i| {
let table_selector = config.dynamic_lookups.table_selectors[0];
let (_, _, z) = config.dynamic_lookups.tables[0]
.cartesian_coord(region.combined_dynamic_shuffle_coord() + i);
let (x, _, z) = config.dynamic_lookups.tables[0]
.cartesian_coord(region.combined_dynamic_shuffle_coord() + i + flush_len_0);
if lookup_block != x {
lookup_block = x;
}
let table_selector = config.dynamic_lookups.table_selectors[lookup_block];
region.enable(Some(&table_selector), z)?;
Ok(())
})
.collect::<Result<Vec<_>, CircuitError>>()?;
@@ -1023,20 +1039,23 @@ pub(crate) fn dynamic_lookup<F: PrimeField + TensorType + PartialOrd + std::hash
.map(|i| {
let (x, y, z) =
config.dynamic_lookups.inputs[0].cartesian_coord(region.linear_coord() + i);
let lookup_selector = config
.dynamic_lookups
.lookup_selectors
.get(&(x, y))
.get(&(lookup_block, (x, y)))
.ok_or(CircuitError::MissingSelectors(format!("{:?}", (x, y))))?;
region.enable(Some(lookup_selector), z)?;
// region.enable(Some(lookup_selector), z)?;
Ok(())
})
.collect::<Result<Vec<_>, CircuitError>>()?;
}
region.increment_dynamic_lookup_col_coord(table_len);
region.increment_dynamic_lookup_col_coord(table_len + flush_len_0);
region.increment_dynamic_lookup_index(1);
region.increment(lookup_len);
@@ -1064,22 +1083,33 @@ pub(crate) fn shuffles<F: PrimeField + TensorType + PartialOrd + std::hash::Hash
));
}
let reference = region.assign_shuffle(&config.shuffles.references[0], &reference)?;
let (reference, flush_len_ref) =
region.assign_shuffle(&config.shuffles.references[0], &reference)?;
let reference_len = reference.len();
// now create a vartensor of constants for the shuffle index
let index = create_constant_tensor(F::from(shuffle_index as u64), reference_len);
let index = region.assign_shuffle(&config.shuffles.references[1], &index)?;
let (index, flush_len_index) = region.assign_shuffle(&config.shuffles.references[1], &index)?;
if flush_len_index != flush_len_ref {
return Err(CircuitError::MismatchedShuffleLength(
flush_len_index,
flush_len_ref,
));
}
let input = region.assign(&config.shuffles.inputs[0], &input)?;
region.assign(&config.shuffles.inputs[1], &index)?;
let mut shuffle_block = 0;
if !region.is_dummy() {
(0..reference_len)
.map(|i| {
let ref_selector = config.shuffles.reference_selectors[0];
let (_, _, z) = config.shuffles.references[0]
.cartesian_coord(region.combined_dynamic_shuffle_coord() + i);
let (x, _, z) = config.shuffles.references[0]
.cartesian_coord(region.combined_dynamic_shuffle_coord() + i + flush_len_ref);
shuffle_block = x;
let ref_selector = config.shuffles.reference_selectors[shuffle_block];
region.enable(Some(&ref_selector), z)?;
Ok(())
})
@@ -1095,7 +1125,7 @@ pub(crate) fn shuffles<F: PrimeField + TensorType + PartialOrd + std::hash::Hash
let input_selector = config
.shuffles
.input_selectors
.get(&(x, y))
.get(&(shuffle_block, (x, y)))
.ok_or(CircuitError::MissingSelectors(format!("{:?}", (x, y))))?;
region.enable(Some(input_selector), z)?;
@@ -1105,7 +1135,7 @@ pub(crate) fn shuffles<F: PrimeField + TensorType + PartialOrd + std::hash::Hash
.collect::<Result<Vec<_>, CircuitError>>()?;
}
region.increment_shuffle_col_coord(reference_len);
region.increment_shuffle_col_coord(reference_len + flush_len_ref);
region.increment_shuffle_index(1);
region.increment(reference_len);


@@ -255,7 +255,7 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> Constant<F> {
self.raw_values = Tensor::new(None, &[0]).unwrap();
}
///
/// Pre-assign a value
pub fn pre_assign(&mut self, val: ValTensor<F>) {
self.pre_assigned_val = Some(val)
}


@@ -180,6 +180,7 @@ pub struct RegionCtx<'a, F: PrimeField + TensorType + PartialOrd + std::hash::Ha
statistics: RegionStatistics,
settings: RegionSettings,
assigned_constants: ConstantsMap<F>,
max_dynamic_input_len: usize,
}
impl<'a, F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RegionCtx<'a, F> {
@@ -193,11 +194,16 @@ impl<'a, F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RegionCtx<'a
self.settings.legs
}
/// get the max dynamic input len
pub fn max_dynamic_input_len(&self) -> usize {
self.max_dynamic_input_len
}
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]
///
pub fn debug_report(&self) {
log::debug!(
"(rows={}, coord={}, constants={}, max_lookup_inputs={}, min_lookup_inputs={}, max_range_size={}, dynamic_lookup_col_coord={}, shuffle_col_coord={})",
"(rows={}, coord={}, constants={}, max_lookup_inputs={}, min_lookup_inputs={}, max_range_size={}, dynamic_lookup_col_coord={}, shuffle_col_coord={}, max_dynamic_input_len={})",
self.row().to_string().blue(),
self.linear_coord().to_string().yellow(),
self.total_constants().to_string().red(),
@@ -205,7 +211,9 @@ impl<'a, F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RegionCtx<'a
self.min_lookup_inputs().to_string().green(),
self.max_range_size().to_string().green(),
self.dynamic_lookup_col_coord().to_string().green(),
self.shuffle_col_coord().to_string().green());
self.shuffle_col_coord().to_string().green(),
self.max_dynamic_input_len().to_string().green()
);
}
///
@@ -223,6 +231,11 @@ impl<'a, F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RegionCtx<'a
self.dynamic_lookup_index.index += n;
}
/// increment the max dynamic input len
pub fn update_max_dynamic_input_len(&mut self, n: usize) {
self.max_dynamic_input_len = self.max_dynamic_input_len.max(n);
}
///
pub fn increment_dynamic_lookup_col_coord(&mut self, n: usize) {
self.dynamic_lookup_index.col_coord += n;
@@ -274,6 +287,7 @@ impl<'a, F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RegionCtx<'a
statistics: RegionStatistics::default(),
settings: RegionSettings::all_true(decomp_base, decomp_legs),
assigned_constants: HashMap::new(),
max_dynamic_input_len: 0,
}
}
@@ -310,6 +324,7 @@ impl<'a, F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RegionCtx<'a
statistics: RegionStatistics::default(),
settings,
assigned_constants: HashMap::new(),
max_dynamic_input_len: 0,
}
}
@@ -331,6 +346,7 @@ impl<'a, F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RegionCtx<'a
statistics: RegionStatistics::default(),
settings,
assigned_constants: HashMap::new(),
max_dynamic_input_len: 0,
}
}
@@ -583,9 +599,12 @@ impl<'a, F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RegionCtx<'a
&mut self,
var: &VarTensor,
values: &ValTensor<F>,
) -> Result<ValTensor<F>, CircuitError> {
) -> Result<(ValTensor<F>, usize), CircuitError> {
self.update_max_dynamic_input_len(values.len());
if let Some(region) = &self.region {
Ok(var.assign(
Ok(var.assign_exact_column(
&mut region.borrow_mut(),
self.combined_dynamic_shuffle_coord(),
values,
@@ -596,7 +615,11 @@ impl<'a, F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RegionCtx<'a
let values_map = values.create_constants_map_iterator();
self.assigned_constants.par_extend(values_map);
}
Ok(values.clone())
let flush_len = var.get_column_flush(self.combined_dynamic_shuffle_coord(), values)?;
// flush_len is the number of rows skipped so the values stay within a single column
Ok((values.clone(), flush_len))
}
}
@@ -605,7 +628,7 @@ impl<'a, F: PrimeField + TensorType + PartialOrd + std::hash::Hash> RegionCtx<'a
&mut self,
var: &VarTensor,
values: &ValTensor<F>,
) -> Result<ValTensor<F>, CircuitError> {
) -> Result<(ValTensor<F>, usize), CircuitError> {
self.assign_dynamic_lookup(var, values)
}


@@ -1516,7 +1516,7 @@ mod add_w_shape_casting {
// parameters
let a = Tensor::from((0..LEN).map(|i| Value::known(F::from(i as u64 + 1))));
let b = Tensor::from((0..1).map(|i| Value::known(F::from(i as u64 + 1))));
let b = Tensor::from((0..1).map(|i| Value::known(F::from(i + 1))));
let circuit = MyCircuit::<F> {
inputs: [ValTensor::from(a), ValTensor::from(b)],


@@ -95,6 +95,9 @@ pub const DEFAULT_USE_REDUCED_SRS_FOR_VERIFICATION: &str = "false";
pub const DEFAULT_ONLY_RANGE_CHECK_REBASE: &str = "false";
/// Default commitment
pub const DEFAULT_COMMITMENT: &str = "kzg";
// TODO: In prod this will be the same across all chains we deploy to using the EZKL multisig create2 deployment.
/// Default address of the verifier manager.
pub const DEFAULT_VERIFIER_MANAGER_ADDRESS: &str = "0xdc64a140aa3e981100a9beca4e685f962f0cf6c9";
#[cfg(feature = "python-bindings")]
/// Converts TranscriptType into a PyObject (Required for TranscriptType to be compatible with Python)
@@ -187,11 +190,13 @@ pub enum ContractType {
/// Deploys a verifier contract tailored to the circuit and not reusable
Verifier {
/// Whether to deploy a reusable verifier. This can reduce state bloat on-chain since you need only deploy a verifying key artifact (vka) for a given circuit which is significantly smaller than the verifier contract (up to 4 times smaller for large circuits)
/// Can also be used as an alternative to aggregation for verifiers that are otherwise too large to fit on-chain.
/// Can also be used as an alternative to aggregation for verifiers that are otherwise too large to fit on-chain.
reusable: bool,
},
/// Deploys a verifying key artifact that the reusable verifier loads into memory during runtime. Encodes the circuit specific data that was otherwise hardcoded onto the stack.
VerifyingKeyArtifact,
/// Manages the deployments of all reusable verifiers and verifying key artifacts. Routes all verification transactions to the correct artifacts.
VerifierManager
}
impl Default for ContractType {
@@ -215,6 +220,7 @@ impl std::fmt::Display for ContractType {
reusable: false,
} => "verifier".to_string(),
ContractType::VerifyingKeyArtifact => "vka".to_string(),
ContractType::VerifierManager => "manager".to_string()
}
)
}
@@ -232,16 +238,16 @@ impl From<&str> for ContractType {
"verifier" => ContractType::Verifier { reusable: false },
"verifier/reusable" => ContractType::Verifier { reusable: true },
"vka" => ContractType::VerifyingKeyArtifact,
"manager" => ContractType::VerifierManager,
_ => {
log::error!("Invalid value for ContractType");
log::warn!("Defaulting to verifier");
ContractType::default()
}
},
}
}
}
#[derive(Debug, Copy, Clone, Serialize, Deserialize, PartialEq, PartialOrd)]
/// wrapper for H160 to make it easy to parse into flag vals
pub struct H160Flag {
@@ -876,6 +882,14 @@ pub enum Commands {
/// Private secp256K1 key in hex format, 64 chars, no 0x prefix, of the account signing transactions. If None the private key will be generated by Anvil
#[arg(short = 'P', long, value_hint = clap::ValueHint::Other)]
private_key: Option<String>,
/// Deployed verifier manager contract's address
/// Used to facilitate reusable verifier and vk artifact deployment
#[arg(long, value_hint = clap::ValueHint::Other)]
addr_verifier_manager: Option<H160Flag>,
/// Deployed reusable verifier contract's address
/// Used to facilitate reusable verifier and vk artifact deployment
#[arg(long, value_hint = clap::ValueHint::Other)]
addr_reusable_verifier: Option<H160Flag>,
/// Contract type to be deployed
#[arg(long = "contract-type", short = 'C', default_value = DEFAULT_CONTRACT_DEPLOYMENT_TYPE, value_hint = clap::ValueHint::Other)]
contract: ContractType,


@@ -31,7 +31,7 @@ use alloy::transports::{RpcError, TransportErrorKind};
use foundry_compilers::artifacts::Settings as SolcSettings;
use foundry_compilers::error::{SolcError, SolcIoError};
use foundry_compilers::Solc;
use halo2_solidity_verifier::encode_calldata;
use halo2_solidity_verifier::{encode_calldata, encode_deploy};
use halo2curves::bn256::{Fr, G1Affine};
use halo2curves::group::ff::PrimeField;
use itertools::Itertools;
@@ -213,6 +213,16 @@ abigen!(
}
);
// The bytecode here was generated from running solc compiler version 0.8.20 with optimization enabled and runs param set to 1.
abigen!(
#[allow(missing_docs)]
#[sol(
rpc,
bytecode = "60806040525f80546001600160a01b03191673f39fd6e51aad88f6f4ce6ab8827279cfffb92266179055348015610034575f80fd5b5061003e33610043565b610092565b5f80546001600160a01b038381166001600160a01b0319831681178455604051919092169283917f8be0079c531659141344cd1fd0a4f28419497f9722a3daafe3b4186f6b6457e09190a35050565b6103dc8061009f5f395ff3fe608060405234801561000f575f80fd5b5060043610610060575f3560e01c80635717ecef146100645780635d34fd561461009b578063715018a6146100bb5780637a33ac87146100c55780638da5cb5b14610125578063f2fde38b1461012d575b5f80fd5b6100866100723660046102a7565b60016020525f908152604090205460ff1681565b60405190151581526020015b60405180910390f35b6100ae6100a93660046102e8565b610140565b6040516100929190610392565b6100c36101bf565b005b6100ae6100d33660046102e8565b8051602091820120604080516001600160f81b0319818501523060601b6001600160601b03191660218201525f6035820152605580820193909352815180820390930183526075019052805191012090565b6100ae6101d2565b6100c361013b3660046102a7565b6101e0565b5f610149610226565b5f8251602084015ff59050803b61015e575f80fd5b6001600160a01b0381165f90815260016020819052604091829020805460ff19169091179055517f27bf8213352a1c07513a54703c920b9e437940154edead05874c43279acf166c906101b2908390610392565b60405180910390a1919050565b6101c7610226565b6101d05f610258565b565b5f546001600160a01b031690565b6101e8610226565b6001600160a01b03811661021a575f604051631e4fbdf760e01b81526004016102119190610392565b60405180910390fd5b61022381610258565b50565b3361022f6101d2565b6001600160a01b0316146101d0573360405163118cdaa760e01b81526004016102119190610392565b5f80546001600160a01b038381166001600160a01b0319831681178455604051919092169283917f8be0079c531659141344cd1fd0a4f28419497f9722a3daafe3b4186f6b6457e09190a35050565b5f602082840312156102b7575f80fd5b81356001600160a01b03811681146102cd575f80fd5b9392505050565b634e487b7160e01b5f52604160045260245ffd5b5f602082840312156102f8575f80fd5b81356001600160401b038082111561030e575f80fd5b818401915084601f830112610321575f80fd5b813581811115610333576103336102d4565b604051601f8201601f19908116603f0116810190838211818310171561035b5761035b6102d4565b81604052828152876020848701011115610373575f80fd5b826020860160208301375f928101602001929092525095945050505050565b6001600160a01b039190911681526020019056fea26469706673582212201d85104628b308554b775f612650220008f8e318f66dc4ace466d82d70bae4e264736f6c63430008140033"
)]
EZKLVerifierManager,
"./abis/EZKLVerifierManager.json"
);
#[derive(Debug, thiserror::Error)]
pub enum EthError {
#[error("a transport error occurred: {0}")]
@@ -352,6 +362,99 @@ pub async fn deploy_contract_via_solidity(
Ok(contract)
}
pub async fn deploy_vka(
sol_code_path: PathBuf,
rpc_url: Option<&str>,
runs: usize,
private_key: Option<&str>,
contract_name: &str,
verifier_manager: H160,
reusable_verifier: H160,
) -> Result<H160, EthError> {
let (client, _) = setup_eth_backend(rpc_url, private_key).await?;
// Create an instance of the EZKLVerifierManager contract
let verifier_manager_contract = EZKLVerifierManager::new(verifier_manager, client.clone());
// Get the bytecode of the contract to be deployed
let (_, bytecode, _run_time_bytecode) =
get_contract_artifacts(sol_code_path.clone(), contract_name, runs).await?;
// Check if the reusable verifier is already deployed
let deployed_verifier: bool = verifier_manager_contract
.verifierAddresses(reusable_verifier)
.call()
.await?
._0;
if deployed_verifier == false {
panic!("The reusable verifier for this VKA has not been deployed yet.");
}
let encoded = encode_deploy(&bytecode);
debug!("encoded: {:#?}", hex::encode(&encoded));
let input: TransactionInput = encoded.into();
let tx = TransactionRequest::default()
.to(reusable_verifier)
.input(input);
debug!("transaction {:#?}", tx);
let result = client.call(&tx).await;
if let Err(e) = result {
return Err(EvmVerificationError::SolidityExecution(e.to_string()).into());
}
// Now send the tx
let _ = client.send_transaction(tx).await?;
let result = result?;
debug!("result: {:#?}", result.to_vec());
let contract = H160::from_slice(&result.to_vec()[12..32]);
return Ok(contract);
}
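// Note: deploy_vka simulates the deployment first with an eth_call (`client.call`)
// so it can read the would-be VKA address out of the return data, and only then
// broadcasts the identical transaction with `send_transaction`. The transaction is
// addressed to the reusable verifier itself, with calldata produced by
// `encode_deploy`, so the artifact is created on-chain by the verifier's deploy path.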
pub async fn deploy_reusable_verifier(
sol_code_path: PathBuf,
rpc_url: Option<&str>,
runs: usize,
private_key: Option<&str>,
contract_name: &str,
verifier_manager: H160,
) -> Result<H160, EthError> {
let (client, _) = setup_eth_backend(rpc_url, private_key).await?;
// Create an instance of the EZKLVerifierManager contract
let verifier_manager_contract = EZKLVerifierManager::new(verifier_manager, client.clone());
// Get the bytecode of the contract to be deployed
let (_, bytecode, _run_time_bytecode) =
get_contract_artifacts(sol_code_path.clone(), contract_name, runs).await?;
// Deploy the contract using the EZKLVerifierManager
let output = verifier_manager_contract
.deployVerifier(bytecode.clone().into())
.call()
.await?;
let out = verifier_manager_contract
.precomputeAddress(bytecode.clone().into())
.call()
.await?;
// assert that out == output
assert_eq!(out._0, output.addr);
// Get the deployed contract address from the simulated call output
let contract = output.addr;
let _ = verifier_manager_contract
.deployVerifier(bytecode.into())
.send()
.await?;
return Ok(contract);
}
///
pub async fn deploy_da_verifier_via_solidity(
settings_path: PathBuf,


@@ -410,24 +410,46 @@ pub async fn run(command: Commands) -> Result<String, EZKLError> {
commitment.into(),
)
.map(|e| serde_json::to_string(&e).unwrap()),
#[cfg(not(target_arch = "wasm32"))]
Commands::DeployEvm {
sol_code_path,
rpc_url,
addr_path,
optimizer_runs,
private_key,
addr_verifier_manager,
addr_reusable_verifier,
contract,
} => {
// if contract type is either verifier/reusable
match contract {
ContractType::Verifier { reusable: true } => {
if addr_verifier_manager.is_none() {
panic!("Must pass a verifier manager address for reusable verifier")
}
}
ContractType::VerifyingKeyArtifact => {
if addr_verifier_manager.is_none() || addr_reusable_verifier.is_none() {
panic!(
"Must pass a verifier manager address and reusable verifier address for verifying key artifact"
)
}
}
_ => {}
};
deploy_evm(
sol_code_path.unwrap_or(DEFAULT_SOL_CODE.into()),
rpc_url,
addr_path.unwrap_or(DEFAULT_CONTRACT_ADDRESS.into()),
optimizer_runs,
private_key,
addr_verifier_manager.map(|s| s.into()),
addr_reusable_verifier.map(|s| s.into()),
contract,
)
.await
}
#[cfg(not(target_arch = "wasm32"))]
Commands::DeployEvmDataAttestation {
data,
settings_path,
@@ -671,10 +693,10 @@ pub(crate) async fn get_srs_cmd(
let srs_uri = format!("{}{}", PUBLIC_SRS_URL, k);
let mut reader = Cursor::new(fetch_srs(&srs_uri).await?);
// check the SRS
let pb = init_spinner();
pb.set_message("Validating SRS (this may take a while) ...");
let pb = init_spinner();
pb.set_message("Validating SRS (this may take a while) ...");
let params = ParamsKZG::<Bn256>::read(&mut reader)?;
pb.finish_with_message("SRS validated.");
pb.finish_with_message("SRS validated.");
info!("Saving SRS to disk...");
let mut file = std::fs::File::create(get_srs_path(k, srs_path.clone(), commitment))?;
@@ -727,7 +749,7 @@ pub(crate) async fn gen_witness(
None
};
let mut input = circuit.load_graph_input(&data).await?;
let mut input = circuit.load_graph_input(&data).await?;
#[cfg(any(not(feature = "ezkl"), target_arch = "wasm32"))]
let mut input = circuit.load_graph_input(&data)?;
@@ -1203,6 +1225,7 @@ pub(crate) async fn calibrate(
num_rows: new_settings.num_rows,
total_assignments: new_settings.total_assignments,
total_const_size: new_settings.total_const_size,
total_dynamic_col_size: new_settings.total_dynamic_col_size,
..settings.clone()
};
@@ -1320,7 +1343,9 @@ pub(crate) async fn calibrate(
let lookup_log_rows = best_params.lookup_log_rows_with_blinding();
let module_log_row = best_params.module_constraint_logrows_with_blinding();
let instance_logrows = best_params.log2_total_instances_with_blinding();
let dynamic_lookup_logrows = best_params.dynamic_lookup_and_shuffle_logrows_with_blinding();
let dynamic_lookup_logrows =
best_params.min_dynamic_lookup_and_shuffle_logrows_with_blinding();
let range_check_logrows = best_params.range_check_log_rows_with_blinding();
let mut reduction = std::cmp::max(lookup_log_rows, module_log_row);
@@ -1414,6 +1439,7 @@ pub(crate) async fn create_evm_verifier(
Ok(String::new())
}
#[cfg(not(target_arch = "wasm32"))]
pub(crate) async fn create_evm_vka(
vk_path: PathBuf,
srs_path: Option<PathBuf>,
@@ -1442,9 +1468,20 @@ pub(crate) async fn create_evm_vka(
num_instance,
);
let vk_solidity = generator.render_separately()?.1;
let (reusable_verifier, vk_solidity) = generator.render_separately()?;
File::create(sol_code_path.clone())?.write_all(vk_solidity.as_bytes())?;
// Remove the first line of vk_solidity (license identifier). Same license identifier for all contracts in this .sol
let vk_solidity = vk_solidity
.lines()
.skip(1)
.collect::<Vec<&str>>()
.join("\n");
// We store both contracts in the same file.
// We need to do this so that, during the deployment transaction, the
// verifier manager links the VKA to the correct reusable_verifier.
let combined_solidity = format!("{}\n\n{}", reusable_verifier, vk_solidity);
File::create(sol_code_path.clone())?.write_all(combined_solidity.as_bytes())?;
// fetch abi of the contract
let (abi, _, _) = get_contract_artifacts(sol_code_path, "Halo2VerifyingArtifact", 0).await?;
@@ -1562,21 +1599,51 @@ pub(crate) async fn deploy_evm(
addr_path: PathBuf,
runs: usize,
private_key: Option<String>,
verifier_manager: Option<alloy::primitives::Address>,
reusable_verifier: Option<alloy::primitives::Address>,
contract: ContractType,
) -> Result<String, EZKLError> {
use crate::eth::{deploy_reusable_verifier, deploy_vka};
let contract_name = match contract {
ContractType::Verifier { reusable: false } => "Halo2Verifier",
ContractType::Verifier { reusable: true } => "Halo2VerifierReusable",
ContractType::VerifyingKeyArtifact => "Halo2VerifyingArtifact",
ContractType::VerifierManager => "EZKLVerifierManager",
};
let contract_address = if contract_name == "Halo2VerifierReusable" {
// Use VerifierManager to deploy the contract
deploy_reusable_verifier(
sol_code_path,
rpc_url.as_deref(),
runs,
private_key.as_deref(),
contract_name,
verifier_manager.unwrap(),
)
.await?
} else if contract_name == "Halo2VerifyingArtifact" {
deploy_vka(
sol_code_path,
rpc_url.as_deref(),
runs,
private_key.as_deref(),
contract_name,
verifier_manager.unwrap(),
reusable_verifier.unwrap(),
)
.await?
} else {
deploy_contract_via_solidity(
sol_code_path,
rpc_url.as_deref(),
runs,
private_key.as_deref(),
contract_name,
)
.await?
};
let contract_address = deploy_contract_via_solidity(
sol_code_path,
rpc_url.as_deref(),
runs,
private_key.as_deref(),
contract_name,
)
.await?;
info!("Contract deployed at: {:#?}", contract_address);
@@ -2018,7 +2085,7 @@ pub(crate) fn mock_aggregate(
}
}
// proof aggregation
let pb = {
let pb = {
let pb = init_spinner();
pb.set_message("Aggregating (may take a while)...");
pb
@@ -2029,7 +2096,7 @@ pub(crate) fn mock_aggregate(
let prover = halo2_proofs::dev::MockProver::run(logrows, &circuit, vec![circuit.instances()])
.map_err(|e| ExecutionError::MockProverError(e.to_string()))?;
prover.verify().map_err(ExecutionError::VerifyError)?;
pb.finish_with_message("Done.");
pb.finish_with_message("Done.");
Ok(String::new())
}
@@ -2123,7 +2190,7 @@ pub(crate) fn aggregate(
}
// proof aggregation
let pb = {
let pb = {
let pb = init_spinner();
pb.set_message("Aggregating (may take a while)...");
pb
@@ -2272,7 +2339,7 @@ pub(crate) fn aggregate(
);
snark.save(&proof_path)?;
pb.finish_with_message("Done.");
pb.finish_with_message("Done.");
Ok(snark)
}


@@ -408,6 +408,8 @@ pub struct GraphSettings {
pub total_const_size: usize,
/// total dynamic column size
pub total_dynamic_col_size: usize,
/// max dynamic column input length
pub max_dynamic_input_len: usize,
/// number of dynamic lookups
pub num_dynamic_lookups: usize,
/// number of shuffles
@@ -485,6 +487,13 @@ impl GraphSettings {
.ceil() as u32
}
/// calculate the number of rows required for the dynamic lookup and shuffle
pub fn min_dynamic_lookup_and_shuffle_logrows_with_blinding(&self) -> u32 {
(self.max_dynamic_input_len as f64 + RESERVED_BLINDING_ROWS as f64)
.log2()
.ceil() as u32
}
fn dynamic_lookup_and_shuffle_col_size(&self) -> usize {
self.total_dynamic_col_size + self.total_shuffle_col_size
}
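A worked instance of the new sizing rule: the dynamic lookup and shuffle region is now sized by the largest single dynamic input rather than the total dynamic column size. The sketch below is standalone and assumes a RESERVED_BLINDING_ROWS value of 6 purely for illustration; the real constant is defined elsewhere in the crate.

const RESERVED_BLINDING_ROWS: usize = 6; // illustrative value, not the crate's actual constant

fn min_dynamic_lookup_and_shuffle_logrows(max_dynamic_input_len: usize) -> u32 {
    // same shape as GraphSettings::min_dynamic_lookup_and_shuffle_logrows_with_blinding
    ((max_dynamic_input_len + RESERVED_BLINDING_ROWS) as f64)
        .log2()
        .ceil() as u32
}

fn main() {
    // a 1000-row max dynamic input needs ceil(log2(1006)) = 10 logrows
    assert_eq!(min_dynamic_lookup_and_shuffle_logrows(1000), 10);
}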


@@ -103,6 +103,8 @@ pub struct DummyPassRes {
pub num_rows: usize,
/// num dynamic lookups
pub num_dynamic_lookups: usize,
/// max dynamic lookup input len
pub max_dynamic_input_len: usize,
/// dynamic lookup col size
pub dynamic_lookup_col_coord: usize,
/// num shuffles
@@ -360,6 +362,14 @@ impl NodeType {
NodeType::SubGraph { .. } => SupportedOp::Unknown(Unknown),
}
}
/// check if it is a softmax
pub fn is_softmax(&self) -> bool {
match self {
NodeType::Node(n) => n.is_softmax(),
NodeType::SubGraph { .. } => false,
}
}
}
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
@@ -562,6 +572,7 @@ impl Model {
num_rows: res.num_rows,
total_assignments: res.linear_coord,
required_lookups: res.lookup_ops.into_iter().collect(),
max_dynamic_input_len: res.max_dynamic_input_len,
required_range_checks: res.range_checks.into_iter().collect(),
model_output_scales: self.graph.get_output_scales()?,
model_input_scales: self.graph.get_input_scales(),
@@ -1465,6 +1476,7 @@ impl Model {
let res = DummyPassRes {
num_rows: region.row(),
linear_coord: region.linear_coord(),
max_dynamic_input_len: region.max_dynamic_input_len(),
total_const_size: region.total_constants(),
lookup_ops: region.used_lookups(),
range_checks: region.used_range_checks(),


@@ -623,6 +623,15 @@ impl Node {
num_uses,
})
}
/// check if it is a softmax node
pub fn is_softmax(&self) -> bool {
if let SupportedOp::Hybrid(HybridOp::Softmax { .. }) = self.opkind {
true
} else {
false
}
}
}
#[cfg(all(feature = "ezkl", not(target_arch = "wasm32")))]


@@ -443,7 +443,7 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> ModelVars<F> {
let dynamic_lookup =
VarTensor::new_advice(cs, logrows, 1, dynamic_lookup_and_shuffle_size);
if dynamic_lookup.num_blocks() > 1 {
panic!("dynamic lookup or shuffle should only have one block");
warn!("dynamic lookup has {} blocks", dynamic_lookup.num_blocks());
};
advices.push(dynamic_lookup);
}


@@ -541,7 +541,7 @@ impl<F: PrimeField + TensorType + PartialOrd + std::hash::Hash> ValTensor<F> {
let mut is_empty = true;
x.map(|_| is_empty = false);
if is_empty {
return Ok::<_, TensorError>(vec![Value::<F>::unknown(); n + 1]);
Ok::<_, TensorError>(vec![Value::<F>::unknown(); n + 1])
} else {
let mut res = vec![Value::unknown(); n + 1];
let mut int_rep = 0;


@@ -396,6 +396,53 @@ impl VarTensor {
Ok(res)
}
/// Helper function to get the remaining size of the column
pub fn get_column_flush<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
&self,
offset: usize,
values: &ValTensor<F>,
) -> Result<usize, halo2_proofs::plonk::Error> {
if values.len() > self.col_size() {
error!("Values are too large for the column");
return Err(halo2_proofs::plonk::Error::Synthesis);
}
// this can only be called on columns that have a single inner column
if self.num_inner_cols() != 1 {
error!("This function can only be called on columns with a single inner column");
return Err(halo2_proofs::plonk::Error::Synthesis);
}
// check if the values fit in the remaining space of the column
let current_cartesian = self.cartesian_coord(offset);
let final_cartesian = self.cartesian_coord(offset + values.len());
let mut flush_len = 0;
if current_cartesian.0 != final_cartesian.0 {
debug!("Values overflow the column, flushing to next column");
// flush_len is the number of rows left unused in the current column
flush_len += self.col_size() - current_cartesian.2;
}
Ok(flush_len)
}
/// Assigns [ValTensor] to the columns of the inner tensor, ensuring the values land in a single column without overflowing into the next.
/// So for instance if we are assigning 10 values starting at index 18 of a column of length 20, we skip the last 2 rows of the current column and start from the beginning of the next column.
pub fn assign_exact_column<F: PrimeField + TensorType + PartialOrd + std::hash::Hash>(
&self,
region: &mut Region<F>,
offset: usize,
values: &ValTensor<F>,
constants: &mut ConstantsMap<F>,
) -> Result<(ValTensor<F>, usize), halo2_proofs::plonk::Error> {
let flush_len = self.get_column_flush(offset, values)?;
let assigned_vals = self.assign(region, offset + flush_len, values, constants)?;
Ok((assigned_vals, flush_len))
}
/// Assigns specific values (`ValTensor`) to the columns of the inner tensor but allows for column wrapping for accumulated operations.
/// Duplication occurs by copying the last cell of the column to the first cell next column and creating a copy constraint between the two.
pub fn dummy_assign_with_duplication<
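The flush rule above can be checked in isolation. Here is a minimal standalone sketch of the same arithmetic; the free function is hypothetical, since in the PR this logic lives on VarTensor and works in cartesian coordinates.

fn column_flush(col_size: usize, offset: usize, len: usize) -> usize {
    // if the run of `len` values starting at `offset` would cross a column
    // boundary, flush (skip) the rows remaining in the current column
    if offset / col_size != (offset + len) / col_size {
        col_size - (offset % col_size)
    } else {
        0
    }
}

fn main() {
    // assigning 10 values at index 18 of a 20-row column skips the last 2 rows
    assert_eq!(column_flush(20, 18, 10), 2);
    // a run that fits entirely within the current column needs no flush
    assert_eq!(column_flush(20, 5, 10), 0);
}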

Binary file not shown.

File diff suppressed because one or more lines are too long


@@ -33,6 +33,7 @@
"total_assignments": 92,
"total_const_size": 3,
"total_dynamic_col_size": 0,
"max_dynamic_input_len": 0,
"num_dynamic_lookups": 0,
"num_shuffles": 0,
"total_shuffle_col_size": 0,


@@ -1000,13 +1000,21 @@ mod native_tests {
use crate::native_tests::run_js_tests;
use ezkl::logger::init_logger;
use crate::native_tests::lazy_static;
use std::sync::Once;
// Global variables to store the shared reusable verifier address and the anvil instance
lazy_static! {
// address of the deployed reusable verifier, shared across tests
static ref REUSABLE_VERIFIER_ADDR: std::sync::Mutex<Option<String>> = std::sync::Mutex::new(None);
static ref ANVIL_INSTANCE: std::sync::Mutex<Option<std::process::Child>> = std::sync::Mutex::new(None);
}
static INIT: Once = Once::new();
fn initialize() {
INIT.call_once(|| {
let anvil_child = crate::native_tests::start_anvil(false, Hardfork::Latest);
*ANVIL_INSTANCE.lock().unwrap() = Some(anvil_child);
});
}
/// Currently only on chain inputs that return a non-negative value are supported.
const TESTS_ON_CHAIN_INPUT: [&str; 17] = [
@@ -1121,6 +1129,7 @@ mod native_tests {
seq!(N in 0..=93 {
#(#[test_case(TESTS[N])])*
fn kzg_evm_prove_and_verify_reusable_verifier_(test: &str) {
initialize();
crate::native_tests::init_binary();
let test_dir = TempDir::new(test).unwrap();
let path = test_dir.path().to_str().unwrap(); crate::native_tests::mv_test_(path, test);
@@ -1128,28 +1137,18 @@ mod native_tests {
init_logger();
log::error!("Running kzg_evm_prove_and_verify_reusable_verifier_ for test: {}", test);
// default vis
let reusable_verifier_address: String = kzg_evm_prove_and_verify_reusable_verifier(2, path, test.to_string(), "private", "private", "public", &mut REUSABLE_VERIFIER_ADDR.lock().unwrap(), false);
kzg_evm_prove_and_verify_reusable_verifier(2, path, test.to_string(), "private", "private", "public", false);
// public/public vis
let reusable_verifier_address: String = kzg_evm_prove_and_verify_reusable_verifier(2, path, test.to_string(), "public", "private", "public", &mut Some(reusable_verifier_address), false);
kzg_evm_prove_and_verify_reusable_verifier(2, path, test.to_string(), "public", "private", "public", false);
// hashed input
let reusable_verifier_address: String = kzg_evm_prove_and_verify_reusable_verifier(2, path, test.to_string(), "hashed", "private", "public", &mut Some(reusable_verifier_address), false);
match REUSABLE_VERIFIER_ADDR.try_lock() {
Ok(mut addr) => {
*addr = Some(reusable_verifier_address.clone());
log::error!("Reusing the same verifeir deployed at address: {}", reusable_verifier_address);
}
Err(_) => {
log::error!("Failed to acquire lock on REUSABLE_VERIFIER_ADDR");
}
}
kzg_evm_prove_and_verify_reusable_verifier(2, path, test.to_string(), "hashed", "private", "public", false);
test_dir.close().unwrap();
}
#(#[test_case(TESTS[N])])*
fn kzg_evm_prove_and_verify_reusable_verifier_with_overflow_(test: &str) {
initialize();
// verifier too big to fit on chain with overflow calibration target
if test == "1l_eltwise_div" || test == "lenet_5" || test == "ltsf" || test == "lstm_large" {
return;
@@ -1161,24 +1160,13 @@ mod native_tests {
init_logger();
log::error!("Running kzg_evm_prove_and_verify_reusable_verifier_with_overflow_ for test: {}", test);
// default vis
let reusable_verifier_address: String = kzg_evm_prove_and_verify_reusable_verifier(2, path, test.to_string(), "private", "private", "public", &mut REUSABLE_VERIFIER_ADDR.lock().unwrap(), true);
kzg_evm_prove_and_verify_reusable_verifier(2, path, test.to_string(), "private", "private", "public", true);
// public/public vis
let reusable_verifier_address: String = kzg_evm_prove_and_verify_reusable_verifier(2, path, test.to_string(), "public", "private", "public", &mut Some(reusable_verifier_address), true);
kzg_evm_prove_and_verify_reusable_verifier(2, path, test.to_string(), "public", "private", "public", true);
// hashed input
let reusable_verifier_address: String = kzg_evm_prove_and_verify_reusable_verifier(2, path, test.to_string(), "hashed", "private", "public", &mut Some(reusable_verifier_address), true);
match REUSABLE_VERIFIER_ADDR.try_lock() {
Ok(mut addr) => {
*addr = Some(reusable_verifier_address.clone());
log::error!("Reusing the same verifeir deployed at address: {}", reusable_verifier_address);
}
Err(_) => {
log::error!("Failed to acquire lock on REUSABLE_VERIFIER_ADDR");
}
}
kzg_evm_prove_and_verify_reusable_verifier(2, path, test.to_string(), "hashed", "private", "public", true);
test_dir.close().unwrap();
}
});
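For readers unfamiliar with the harness: seq! (from the seq-macro crate) stamps out one #[test_case] attribute per index, so every entry in TESTS becomes its own named test. A minimal standalone sketch of the pattern, with a three-entry table standing in for the real TESTS array:

use seq_macro::seq;
use test_case::test_case;

const TESTS: [&str; 3] = ["1l_relu", "1l_sigmoid", "1l_div"];

seq!(N in 0..=2 {
    // expands to one #[test_case(...)] per index, yielding three distinct tests
    #(#[test_case(TESTS[N])])*
    fn runs_example(test: &str) {
        assert!(!test.is_empty());
    }
});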
@@ -2228,9 +2216,8 @@ mod native_tests {
input_visibility: &str,
param_visibility: &str,
output_visibility: &str,
reusable_verifier_address: &mut Option<String>,
overflow: bool,
) -> String {
) {
let anvil_url = ANVIL_URL.as_str();
prove_and_verify(
@@ -2253,57 +2240,82 @@ mod native_tests {
let vk_arg = format!("{}/{}/key.vk", test_dir, example_name);
let rpc_arg = format!("--rpc-url={}", anvil_url);
// addr path for verifier manager contract
let addr_path_arg = format!("--addr-path={}/{}/addr.txt", test_dir, example_name);
let verifier_manager_arg: String;
let settings_arg = format!("--settings-path={}", settings_path);
// reusable verifier sol_arg
let sol_arg = format!("--sol-code-path={}/{}/kzg.sol", test_dir, example_name);
// if the reusable verifier address is not set, create the verifier
let deployed_addr_arg = match reusable_verifier_address {
Some(addr) => addr.clone(),
None => {
// create the reusable verifier
let args = vec![
"create-evm-verifier",
"--vk-path",
&vk_arg,
&settings_arg,
&sol_arg,
"--reusable",
];
// create the reusable verifier
let args = vec![
"create-evm-verifier",
"--vk-path",
&vk_arg,
&settings_arg,
&sol_arg,
"--reusable",
];
let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
.args(&args)
.status()
.expect("failed to execute process");
assert!(status.success());
let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
.args(&args)
.status()
.expect("failed to execute process");
assert!(status.success());
// deploy the verifier
let args = vec![
"deploy-evm",
rpc_arg.as_str(),
addr_path_arg.as_str(),
sol_arg.as_str(),
"-C=verifier/reusable",
];
// deploy the verifier manager
let args = vec![
"deploy-evm",
rpc_arg.as_str(),
addr_path_arg.as_str(),
// set the sol code path to be contracts/VerifierManager.sol relative to root
"--sol-code-path=contracts/VerifierManager.sol",
"-C=manager",
];
let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
.args(&args)
.status()
.expect("failed to execute process");
assert!(status.success());
let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
.args(&args)
.status()
.expect("failed to execute process");
assert!(status.success());
// read in the address
let addr =
std::fs::read_to_string(format!("{}/{}/addr.txt", test_dir, example_name))
.expect("failed to read address file");
// read in the address of the verifier manager
let addr = std::fs::read_to_string(format!("{}/{}/addr.txt", test_dir, example_name))
.expect("failed to read address file");
let deployed_addr_arg = format!("--addr-verifier={}", addr);
// set the reusable verifier address
*reusable_verifier_address = Some(addr);
deployed_addr_arg
}
verifier_manager_arg = format!("--addr-verifier-manager={}", addr);
// deploy the reusable verifier through the verifier manager and record its address
let rv_addr = {
// addr path for the reusable verifier (rv) contract
let addr_path_arg = format!("--addr-path={}/{}/addr_rv.txt", test_dir, example_name);
// deploy the reusable verifier via the verifier router.
let args = vec![
"deploy-evm",
rpc_arg.as_str(),
addr_path_arg.as_str(),
sol_arg.as_str(),
verifier_manager_arg.as_str(),
"-C=verifier/reusable",
];
let status = Command::new(format!("{}/release/ezkl", *CARGO_TARGET_DIR))
.args(&args)
.status()
.expect("failed to execute process");
assert!(status.success());
// read in the address of the reusable verifier
let addr =
std::fs::read_to_string(format!("{}/{}/addr_rv.txt", test_dir, example_name))
.expect("failed to read address file");
addr
};
let addr_path_arg_vk = format!("--addr-path={}/{}/addr_vk.txt", test_dir, example_name);
let sol_arg_vk: String = format!("--sol-code-path={}/{}/vk.sol", test_dir, example_name);
// create the verifier
let addr_path_arg_vk = format!("--addr-path={}/{}/addr_vk.txt", test_dir, example_name);
let sol_arg_vk: String = format!("--sol-code-path={}/{}/vk.sol", test_dir, example_name);
// create the verifier
@@ -2321,11 +2333,15 @@ mod native_tests {
.expect("failed to execute process");
assert!(status.success());
// deploy the vka
let rv_addr_arg = format!("--addr-reusable-verifier={}", rv_addr);
// deploy the vka via the "DeployVKA" command on the reusable verifier
let args = vec![
"deploy-evm",
rpc_arg.as_str(),
addr_path_arg_vk.as_str(),
verifier_manager_arg.as_str(),
rv_addr_arg.as_str(),
sol_arg_vk.as_str(),
"-C=vka",
];
@@ -2355,6 +2371,8 @@ mod native_tests {
assert!(status.success());
let deployed_addr_arg = format!("--addr-verifier={}", rv_addr);
// now verify the proof
let pf_arg = format!("{}/{}/proof.pf", test_dir, example_name);
let args = vec![
@@ -2414,9 +2432,6 @@ mod native_tests {
i
);
}
// Return the deployed address arg for the reusable verifier
deployed_addr_arg
}
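Taken together, the refactored helper drives the ezkl CLI through four deployment steps. The sketch below condenses them in order, using only the subcommands and flags that appear in this diff; the paths and addresses are illustrative, and an ezkl binary on PATH is assumed.

use std::process::Command;

// run one ezkl CLI invocation and assert it succeeded
fn run(args: &[&str]) {
    let status = Command::new("ezkl")
        .args(args)
        .status()
        .expect("failed to execute process");
    assert!(status.success());
}

fn main() {
    // 1. render the reusable verifier contract from the verifying key
    run(&["create-evm-verifier", "--vk-path", "key.vk",
        "--settings-path=settings.json", "--sol-code-path=kzg.sol", "--reusable"]);
    // 2. deploy the verifier manager
    run(&["deploy-evm", "--rpc-url=http://localhost:8545", "--addr-path=addr.txt",
        "--sol-code-path=contracts/VerifierManager.sol", "-C=manager"]);
    // 3. deploy the reusable verifier through the manager (manager address read back from addr.txt)
    run(&["deploy-evm", "--rpc-url=http://localhost:8545", "--addr-path=addr_rv.txt",
        "--sol-code-path=kzg.sol", "--addr-verifier-manager=0xMANAGER", "-C=verifier/reusable"]);
    // 4. deploy the VK artifact via the reusable verifier's deployVKA
    run(&["deploy-evm", "--rpc-url=http://localhost:8545", "--addr-path=addr_vk.txt",
        "--addr-verifier-manager=0xMANAGER", "--addr-reusable-verifier=0xRV",
        "--sol-code-path=vk.sol", "-C=vka"]);
    // the proof is then verified against the reusable verifier via --addr-verifier=0xRV
}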
// run js browser evm verify tests for a given example


@@ -1 +1 @@
[{"type":"function","name":"verifyProof","inputs":[{"internalType":"bytes","name":"proof","type":"bytes"},{"internalType":"uint256[]","name":"instances","type":"uint256[]"}],"outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable"}]
[{"type":"function","name":"deployVKA","inputs":[{"name":"bytecode","type":"bytes","internalType":"bytes"}],"outputs":[{"name":"addr","type":"address","internalType":"address"}],"stateMutability":"nonpayable"},{"type":"function","name":"precomputeAddress","inputs":[{"name":"bytecode","type":"bytes","internalType":"bytes"}],"outputs":[{"name":"","type":"address","internalType":"address"}],"stateMutability":"view"},{"type":"function","name":"verifyProof","inputs":[{"name":"vk","type":"address","internalType":"address"},{"name":"proof","type":"bytes","internalType":"bytes"},{"name":"instances","type":"uint256[]","internalType":"uint256[]"}],"outputs":[{"name":"","type":"bool","internalType":"bool"}],"stateMutability":"nonpayable"},{"type":"function","name":"vkaLog","inputs":[{"name":"","type":"address","internalType":"address"}],"outputs":[{"name":"","type":"bool","internalType":"bool"}],"stateMutability":"view"},{"type":"event","name":"DeployedVKArtifact","inputs":[{"name":"vka","type":"address","indexed":false,"internalType":"address"}],"anonymous":false},{"type":"error","name":"UnloggedVka","inputs":[{"name":"vka","type":"address","internalType":"address"}]}]

vk.abi

@@ -1 +1 @@
[{"type":"constructor","inputs":[]}]
[{"type":"constructor","inputs":[],"stateMutability":"nonpayable"}]