Mirror of https://github.com/privacy-scaling-explorations/snark-verifier.git (synced 2026-01-09 14:27:54 -05:00)
feat: add snark-verifier-sdk (#38)
* feat: add snark-verifier-sdk
* chore: remove config and add example dir
* feat: add "derive_serde" feature to `snark-verifier`, so that `Snark` can derive `Serialize`
* fix: remove leftover artifacts
* feat: add back feature `halo2_circuit_params` and fix clippy
* fix: consider for all targets

Co-authored-by: Jonathan Wang <jonathanpwang@users.noreply.github.com>
Co-authored-by: Han <tinghan0110@gmail.com>
@@ -1,4 +1,5 @@
[workspace]
members = [
    "snark-verifier",
    "snark-verifier-sdk"
]
52  snark-verifier-sdk/Cargo.toml  Normal file
@@ -0,0 +1,52 @@
[package]
name = "snark-verifier-sdk"
version = "0.1.1"
edition = "2021"

[dependencies]
itertools = "0.10.3"
lazy_static = "1.4.0"
num-bigint = "0.4.3"
num-integer = "0.1.45"
num-traits = "0.2.15"
rand = "0.8"
rand_chacha = "0.3.1"
hex = "0.4"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
bincode = "1.3.3"
ark-std = { version = "0.3.0", features = ["print-trace"], optional = true }

snark-verifier = { path = "../snark-verifier", default-features = false }

# system_halo2
halo2_proofs = { git = "https://github.com/privacy-scaling-explorations/halo2", tag = "v2023_04_20" } # not optional for now
halo2curves = { git = 'https://github.com/privacy-scaling-explorations/halo2curves', tag = "0.3.2" } # must be same version as in halo2_proofs

# loader_halo2
halo2_wrong_ecc = { git = "https://github.com/privacy-scaling-explorations/halo2wrong", tag = "v2023_04_20", package = "ecc", optional = true }
poseidon = { git = "https://github.com/privacy-scaling-explorations/poseidon", tag = "v2023_04_20", optional = true }

# loader_evm
ethereum-types = { version = "0.14", default-features = false, features = ["std"], optional = true }

[dev-dependencies]
ark-std = { version = "0.3.0", features = ["print-trace"] }
paste = "1.0.7"
pprof = { version = "0.11", features = ["criterion", "flamegraph"] }
criterion = "0.4"
criterion-macro = "0.4"

[features]
default = ["loader_halo2", "loader_evm", "derive_serde", "display"]
display = ["dep:ark-std"]
loader_evm = ["snark-verifier/loader_evm", "dep:ethereum-types"]
loader_halo2 = ["snark-verifier/system_halo2", "snark-verifier/loader_halo2", "dep:halo2_wrong_ecc", "dep:poseidon"]
parallel = ["snark-verifier/parallel"]
derive_serde = ["snark-verifier/derive_serde", "halo2curves/derive_serde"]
halo2_circuit_params = ["snark-verifier/halo2_circuit_params"]

[[bench]]
name = "standard_plonk"
required-features = ["loader_halo2"]
harness = false
25  snark-verifier-sdk/README.md  Normal file
@@ -0,0 +1,25 @@
# Snark Verifier SDK

So that generated files end up in the correct locations, run all commands from the crate directory:

```bash
cd snark-verifier-sdk
```

To run the standard PLONK example:

```bash
cargo run --example standard_plonk --release
```

If the `loader_evm` feature is enabled, this also generates Yul code for the verifier contract and simulates a transaction call to that contract, with the generated proof as calldata, using revm.

This example is essentially the same as [`evm-verifier-with-accumulator`](../snark-verifier/examples/evm-verifier-with-accumulator.rs), except that it uses this SDK and uses SHPLONK as the polynomial multi-open scheme instead of GWC (the multi-open scheme from the original PLONK paper).

To run the standard PLONK benchmark:

```bash
cargo bench --bench standard_plonk
```

These examples/benches generate unsafe trusted setups in the `./params` folder. They also cache proving keys and certain snarks.
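For orientation, the flow that `examples/standard_plonk.rs` (added below) implements looks roughly like the sketch that follows. This is a minimal outline under stated assumptions, not a drop-in program: `C` stands for any application circuit implementing `CircuitExt<Fr>`, and the degrees (8 for the application layer, 22 for the aggregation layer) and file paths simply mirror the example.

```rust
use halo2_proofs::halo2curves::bn256::Fr;
use snark_verifier_sdk::{
    evm::{evm_verify, gen_evm_proof_shplonk, gen_evm_verifier_shplonk},
    gen_pk,
    halo2::{aggregation::AggregationCircuit, gen_snark_shplonk, gen_srs},
    CircuitExt, SHPLONK,
};
use std::path::Path;

fn run<C: CircuitExt<Fr>>(app_circuits: [C; 3]) {
    // 1. Application layer: one snark per application circuit (degree 8 here).
    let params_app = gen_srs(8);
    let snarks = app_circuits.map(|circuit| {
        let pk = gen_pk(&params_app, &circuit, Some(Path::new("./examples/app.pk")));
        gen_snark_shplonk(&params_app, &pk, circuit, None::<&str>)
    });

    // 2. Aggregation layer: verify the three snarks inside one circuit (degree 22 here).
    let params = gen_srs(22);
    let agg_circuit = AggregationCircuit::<SHPLONK>::new(&params, snarks);
    let pk = gen_pk(&params, &agg_circuit, Some(Path::new("./examples/agg.pk")));

    // 3. EVM layer: produce a Keccak-transcript proof plus a Yul verifier, then simulate the call.
    let num_instance = agg_circuit.num_instance();
    let instances = agg_circuit.instances();
    let proof = gen_evm_proof_shplonk(&params, &pk, agg_circuit, instances.clone());
    let deployment_code = gen_evm_verifier_shplonk::<AggregationCircuit<SHPLONK>>(
        &params,
        pk.get_vk(),
        num_instance,
        Some(Path::new("./examples/standard_plonk.yul")),
    );
    evm_verify(deployment_code, instances, proof);
}
```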
193  snark-verifier-sdk/benches/standard_plonk.rs  Normal file
@@ -0,0 +1,193 @@
|
||||
use ark_std::{end_timer, start_timer};
|
||||
use criterion::{criterion_group, criterion_main};
|
||||
use criterion::{BenchmarkId, Criterion};
|
||||
use halo2_proofs::halo2curves as halo2_curves;
|
||||
use halo2_proofs::{
|
||||
halo2curves::bn256::Bn256,
|
||||
poly::{commitment::Params, kzg::commitment::ParamsKZG},
|
||||
};
|
||||
use pprof::criterion::{Output, PProfProfiler};
|
||||
use rand::rngs::OsRng;
|
||||
use snark_verifier_sdk::halo2::gen_srs;
|
||||
use snark_verifier_sdk::SHPLONK;
|
||||
use snark_verifier_sdk::{
|
||||
gen_pk,
|
||||
halo2::{aggregation::AggregationCircuit, gen_snark_shplonk},
|
||||
Snark,
|
||||
};
|
||||
use std::path::Path;
|
||||
|
||||
mod application {
|
||||
use super::halo2_curves::bn256::Fr;
|
||||
use halo2_proofs::{
|
||||
circuit::{Layouter, SimpleFloorPlanner, Value},
|
||||
plonk::{Advice, Circuit, Column, ConstraintSystem, Error, Fixed, Instance},
|
||||
poly::Rotation,
|
||||
};
|
||||
use rand::RngCore;
|
||||
use snark_verifier_sdk::CircuitExt;
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
pub struct StandardPlonkConfig {
|
||||
a: Column<Advice>,
|
||||
b: Column<Advice>,
|
||||
c: Column<Advice>,
|
||||
q_a: Column<Fixed>,
|
||||
q_b: Column<Fixed>,
|
||||
q_c: Column<Fixed>,
|
||||
q_ab: Column<Fixed>,
|
||||
constant: Column<Fixed>,
|
||||
#[allow(dead_code)]
|
||||
instance: Column<Instance>,
|
||||
}
|
||||
|
||||
impl StandardPlonkConfig {
|
||||
fn configure(meta: &mut ConstraintSystem<Fr>) -> Self {
|
||||
let [a, b, c] = [(); 3].map(|_| meta.advice_column());
|
||||
let [q_a, q_b, q_c, q_ab, constant] = [(); 5].map(|_| meta.fixed_column());
|
||||
let instance = meta.instance_column();
|
||||
|
||||
[a, b, c].map(|column| meta.enable_equality(column));
|
||||
|
||||
meta.create_gate(
|
||||
"q_a·a + q_b·b + q_c·c + q_ab·a·b + constant + instance = 0",
|
||||
|meta| {
|
||||
let [a, b, c] =
|
||||
[a, b, c].map(|column| meta.query_advice(column, Rotation::cur()));
|
||||
let [q_a, q_b, q_c, q_ab, constant] = [q_a, q_b, q_c, q_ab, constant]
|
||||
.map(|column| meta.query_fixed(column, Rotation::cur()));
|
||||
let instance = meta.query_instance(instance, Rotation::cur());
|
||||
Some(
|
||||
q_a * a.clone()
|
||||
+ q_b * b.clone()
|
||||
+ q_c * c
|
||||
+ q_ab * a * b
|
||||
+ constant
|
||||
+ instance,
|
||||
)
|
||||
},
|
||||
);
|
||||
|
||||
StandardPlonkConfig {
|
||||
a,
|
||||
b,
|
||||
c,
|
||||
q_a,
|
||||
q_b,
|
||||
q_c,
|
||||
q_ab,
|
||||
constant,
|
||||
instance,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
pub struct StandardPlonk(Fr);
|
||||
|
||||
impl StandardPlonk {
|
||||
pub fn rand<R: RngCore>(mut rng: R) -> Self {
|
||||
Self(Fr::from(rng.next_u32() as u64))
|
||||
}
|
||||
}
|
||||
|
||||
impl CircuitExt<Fr> for StandardPlonk {
|
||||
fn num_instance(&self) -> Vec<usize> {
|
||||
vec![1]
|
||||
}
|
||||
|
||||
fn instances(&self) -> Vec<Vec<Fr>> {
|
||||
vec![vec![self.0]]
|
||||
}
|
||||
}
|
||||
|
||||
impl Circuit<Fr> for StandardPlonk {
|
||||
type Config = StandardPlonkConfig;
|
||||
type FloorPlanner = SimpleFloorPlanner;
|
||||
#[cfg(feature = "halo2_circuit_params")]
|
||||
type Params = ();
|
||||
|
||||
fn without_witnesses(&self) -> Self {
|
||||
Self::default()
|
||||
}
|
||||
|
||||
fn configure(meta: &mut ConstraintSystem<Fr>) -> Self::Config {
|
||||
meta.set_minimum_degree(4);
|
||||
StandardPlonkConfig::configure(meta)
|
||||
}
|
||||
|
||||
fn synthesize(
|
||||
&self,
|
||||
config: Self::Config,
|
||||
mut layouter: impl Layouter<Fr>,
|
||||
) -> Result<(), Error> {
|
||||
layouter.assign_region(
|
||||
|| "",
|
||||
|mut region| {
|
||||
region.assign_advice(|| "", config.a, 0, || Value::known(self.0))?;
|
||||
region.assign_fixed(|| "", config.q_a, 0, || Value::known(-Fr::one()))?;
|
||||
region.assign_advice(|| "", config.a, 1, || Value::known(-Fr::from(5u64)))?;
|
||||
for (idx, column) in (1..).zip([
|
||||
config.q_a,
|
||||
config.q_b,
|
||||
config.q_c,
|
||||
config.q_ab,
|
||||
config.constant,
|
||||
]) {
|
||||
region.assign_fixed(
|
||||
|| "",
|
||||
column,
|
||||
1,
|
||||
|| Value::known(Fr::from(idx as u64)),
|
||||
)?;
|
||||
}
|
||||
let a = region.assign_advice(|| "", config.a, 2, || Value::known(Fr::one()))?;
|
||||
a.copy_advice(|| "", &mut region, config.b, 3)?;
|
||||
a.copy_advice(|| "", &mut region, config.c, 4)?;
|
||||
|
||||
Ok(())
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn gen_application_snark(params: &ParamsKZG<Bn256>) -> Snark {
|
||||
let circuit = application::StandardPlonk::rand(OsRng);
|
||||
|
||||
let pk = gen_pk(params, &circuit, Some(Path::new("./benches/app.pk")));
|
||||
gen_snark_shplonk(params, &pk, circuit, None::<&str>)
|
||||
}
|
||||
|
||||
fn bench(c: &mut Criterion) {
|
||||
let params_app = gen_srs(8);
|
||||
let snarks = [(); 3].map(|_| gen_application_snark(¶ms_app));
|
||||
|
||||
let params = gen_srs(22);
|
||||
let agg_circuit = AggregationCircuit::<SHPLONK>::new(¶ms, snarks.clone());
|
||||
|
||||
let start0 = start_timer!(|| "gen vk & pk");
|
||||
let pk = gen_pk(¶ms, &agg_circuit, Some(Path::new("./benches/agg.pk")));
|
||||
end_timer!(start0);
|
||||
|
||||
let mut group = c.benchmark_group("plonk-prover");
|
||||
group.sample_size(10);
|
||||
group.bench_with_input(
|
||||
BenchmarkId::new("standard-plonk-agg", params.k()),
|
||||
&(¶ms, &pk, &snarks),
|
||||
|b, &(params, pk, snarks)| {
|
||||
b.iter(|| {
|
||||
let agg_circuit = AggregationCircuit::<SHPLONK>::new(params, snarks.clone());
|
||||
gen_snark_shplonk(params, pk, agg_circuit, None::<&str>)
|
||||
})
|
||||
},
|
||||
);
|
||||
group.finish();
|
||||
}
|
||||
|
||||
criterion_group! {
|
||||
name = benches;
|
||||
config = Criterion::default().with_profiler(PProfProfiler::new(10, Output::Flamegraph(None)));
|
||||
targets = bench
|
||||
}
|
||||
criterion_main!(benches);
|
||||
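A note on re-running this benchmark: `gen_pk` first calls `read_pk` on the given path and only regenerates the key if that fails, so `./benches/app.pk` and `./benches/agg.pk` are created on the first run and reused afterwards. A hedged sketch of loading the cached aggregation key directly (assuming the default feature set, i.e. without `halo2_circuit_params`, and that the benchmark has already been run once):

```rust
use halo2_proofs::{halo2curves::bn256::G1Affine, plonk::ProvingKey};
use snark_verifier_sdk::{halo2::aggregation::AggregationCircuit, read_pk, SHPLONK};
use std::{io, path::Path};

/// Load the aggregation proving key cached by a previous benchmark run.
/// Returns an io::Error until `cargo bench --bench standard_plonk` has run once.
fn load_cached_agg_pk() -> io::Result<ProvingKey<G1Affine>> {
    read_pk::<AggregationCircuit<SHPLONK>>(Path::new("./benches/agg.pk"))
}
```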
196  snark-verifier-sdk/examples/standard_plonk.rs  Normal file
@@ -0,0 +1,196 @@
|
||||
use ark_std::{end_timer, start_timer};
|
||||
use halo2_proofs::halo2curves as halo2_curves;
|
||||
use halo2_proofs::plonk::Circuit;
|
||||
use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG};
|
||||
use rand::rngs::OsRng;
|
||||
use snark_verifier_sdk::evm::{evm_verify, gen_evm_proof_shplonk, gen_evm_verifier_shplonk};
|
||||
use snark_verifier_sdk::halo2::gen_srs;
|
||||
use snark_verifier_sdk::{
|
||||
gen_pk,
|
||||
halo2::{aggregation::AggregationCircuit, gen_snark_shplonk},
|
||||
Snark,
|
||||
};
|
||||
use snark_verifier_sdk::{CircuitExt, SHPLONK};
|
||||
use std::path::Path;
|
||||
|
||||
mod application {
|
||||
use super::halo2_curves::bn256::Fr;
|
||||
use halo2_proofs::{
|
||||
circuit::{Layouter, SimpleFloorPlanner, Value},
|
||||
plonk::{Advice, Circuit, Column, ConstraintSystem, Error, Fixed, Instance},
|
||||
poly::Rotation,
|
||||
};
|
||||
use rand::RngCore;
|
||||
use snark_verifier_sdk::CircuitExt;
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
pub struct StandardPlonkConfig {
|
||||
a: Column<Advice>,
|
||||
b: Column<Advice>,
|
||||
c: Column<Advice>,
|
||||
q_a: Column<Fixed>,
|
||||
q_b: Column<Fixed>,
|
||||
q_c: Column<Fixed>,
|
||||
q_ab: Column<Fixed>,
|
||||
constant: Column<Fixed>,
|
||||
#[allow(dead_code)]
|
||||
instance: Column<Instance>,
|
||||
}
|
||||
|
||||
impl StandardPlonkConfig {
|
||||
fn configure(meta: &mut ConstraintSystem<Fr>) -> Self {
|
||||
let [a, b, c] = [(); 3].map(|_| meta.advice_column());
|
||||
let [q_a, q_b, q_c, q_ab, constant] = [(); 5].map(|_| meta.fixed_column());
|
||||
let instance = meta.instance_column();
|
||||
|
||||
[a, b, c].map(|column| meta.enable_equality(column));
|
||||
|
||||
meta.create_gate(
|
||||
"q_a·a + q_b·b + q_c·c + q_ab·a·b + constant + instance = 0",
|
||||
|meta| {
|
||||
let [a, b, c] =
|
||||
[a, b, c].map(|column| meta.query_advice(column, Rotation::cur()));
|
||||
let [q_a, q_b, q_c, q_ab, constant] = [q_a, q_b, q_c, q_ab, constant]
|
||||
.map(|column| meta.query_fixed(column, Rotation::cur()));
|
||||
let instance = meta.query_instance(instance, Rotation::cur());
|
||||
Some(
|
||||
q_a * a.clone()
|
||||
+ q_b * b.clone()
|
||||
+ q_c * c
|
||||
+ q_ab * a * b
|
||||
+ constant
|
||||
+ instance,
|
||||
)
|
||||
},
|
||||
);
|
||||
|
||||
StandardPlonkConfig {
|
||||
a,
|
||||
b,
|
||||
c,
|
||||
q_a,
|
||||
q_b,
|
||||
q_c,
|
||||
q_ab,
|
||||
constant,
|
||||
instance,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
pub struct StandardPlonk(Fr);
|
||||
|
||||
impl StandardPlonk {
|
||||
pub fn rand<R: RngCore>(mut rng: R) -> Self {
|
||||
Self(Fr::from(rng.next_u32() as u64))
|
||||
}
|
||||
}
|
||||
|
||||
impl CircuitExt<Fr> for StandardPlonk {
|
||||
fn num_instance(&self) -> Vec<usize> {
|
||||
vec![1]
|
||||
}
|
||||
|
||||
fn instances(&self) -> Vec<Vec<Fr>> {
|
||||
vec![vec![self.0]]
|
||||
}
|
||||
}
|
||||
|
||||
impl Circuit<Fr> for StandardPlonk {
|
||||
type Config = StandardPlonkConfig;
|
||||
type FloorPlanner = SimpleFloorPlanner;
|
||||
#[cfg(feature = "halo2_circuit_params")]
|
||||
type Params = ();
|
||||
|
||||
fn without_witnesses(&self) -> Self {
|
||||
Self::default()
|
||||
}
|
||||
|
||||
fn configure(meta: &mut ConstraintSystem<Fr>) -> Self::Config {
|
||||
meta.set_minimum_degree(4);
|
||||
StandardPlonkConfig::configure(meta)
|
||||
}
|
||||
|
||||
fn synthesize(
|
||||
&self,
|
||||
config: Self::Config,
|
||||
mut layouter: impl Layouter<Fr>,
|
||||
) -> Result<(), Error> {
|
||||
layouter.assign_region(
|
||||
|| "",
|
||||
|mut region| {
|
||||
region.assign_advice(|| "", config.a, 0, || Value::known(self.0))?;
|
||||
region.assign_fixed(|| "", config.q_a, 0, || Value::known(-Fr::one()))?;
|
||||
region.assign_advice(|| "", config.a, 1, || Value::known(-Fr::from(5u64)))?;
|
||||
for (idx, column) in (1..).zip([
|
||||
config.q_a,
|
||||
config.q_b,
|
||||
config.q_c,
|
||||
config.q_ab,
|
||||
config.constant,
|
||||
]) {
|
||||
region.assign_fixed(
|
||||
|| "",
|
||||
column,
|
||||
1,
|
||||
|| Value::known(Fr::from(idx as u64)),
|
||||
)?;
|
||||
}
|
||||
let a = region.assign_advice(|| "", config.a, 2, || Value::known(Fr::one()))?;
|
||||
a.copy_advice(|| "", &mut region, config.b, 3)?;
|
||||
a.copy_advice(|| "", &mut region, config.c, 4)?;
|
||||
|
||||
Ok(())
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn gen_application_snark(params: &ParamsKZG<Bn256>) -> Snark {
|
||||
let circuit = application::StandardPlonk::rand(OsRng);
|
||||
|
||||
let pk = gen_pk(params, &circuit, Some(Path::new("./examples/app.pk")));
|
||||
gen_snark_shplonk(params, &pk, circuit, None::<&str>)
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let params_app = gen_srs(8);
|
||||
let snarks = [(); 3].map(|_| gen_application_snark(¶ms_app));
|
||||
|
||||
let params = gen_srs(22);
|
||||
let agg_circuit = AggregationCircuit::<SHPLONK>::new(¶ms, snarks);
|
||||
|
||||
let start0 = start_timer!(|| "gen vk & pk");
|
||||
let pk = gen_pk(
|
||||
¶ms,
|
||||
&agg_circuit.without_witnesses(),
|
||||
Some(Path::new("./examples/agg.pk")),
|
||||
);
|
||||
end_timer!(start0);
|
||||
|
||||
std::fs::remove_file("./examples/agg.snark").unwrap_or_default();
|
||||
let _snark = gen_snark_shplonk(
|
||||
¶ms,
|
||||
&pk,
|
||||
agg_circuit.clone(),
|
||||
Some(Path::new("./examples/agg.snark")),
|
||||
);
|
||||
|
||||
#[cfg(feature = "loader_evm")]
|
||||
{
|
||||
// do one more time to verify
|
||||
let num_instances = agg_circuit.num_instance();
|
||||
let instances = agg_circuit.instances();
|
||||
let proof_calldata = gen_evm_proof_shplonk(¶ms, &pk, agg_circuit, instances.clone());
|
||||
|
||||
let deployment_code = gen_evm_verifier_shplonk::<AggregationCircuit<SHPLONK>>(
|
||||
¶ms,
|
||||
pk.get_vk(),
|
||||
num_instances,
|
||||
Some(Path::new("./examples/standard_plonk.yul")),
|
||||
);
|
||||
evm_verify(deployment_code, instances, proof_calldata);
|
||||
}
|
||||
}
|
||||
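The example above removes any stale `./examples/agg.snark` and then writes the freshly generated aggregation snark there. With the default `derive_serde` feature the whole `Snark` round-trips through `bincode`, so a later run, or a different binary, can reload it instead of re-proving. A small sketch under that assumption (the caller's crate also needs a `bincode` dependency to name the error type):

```rust
use snark_verifier_sdk::{halo2::read_snark, Snark};

/// Reload the cached aggregation snark written by examples/standard_plonk.rs.
/// Note: read_snark does not record which multi-open scheme (SHPLONK vs GWC)
/// produced the proof; the caller has to keep track of that.
fn load_cached_snark() -> Result<Snark, bincode::Error> {
    read_snark("./examples/agg.snark")
}
```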
207  snark-verifier-sdk/src/evm.rs  Normal file
@@ -0,0 +1,207 @@
|
||||
use crate::{GWC, SHPLONK};
|
||||
|
||||
use super::{CircuitExt, PlonkVerifier};
|
||||
#[cfg(feature = "display")]
|
||||
use ark_std::{end_timer, start_timer};
|
||||
use ethereum_types::Address;
|
||||
use halo2_proofs::{
|
||||
halo2curves::bn256::{Bn256, Fq, Fr, G1Affine},
|
||||
plonk::{create_proof, verify_proof, Circuit, ProvingKey, VerifyingKey},
|
||||
poly::{
|
||||
commitment::{ParamsProver, Prover, Verifier},
|
||||
kzg::{
|
||||
commitment::{KZGCommitmentScheme, ParamsKZG},
|
||||
msm::DualMSM,
|
||||
multiopen::{ProverGWC, ProverSHPLONK, VerifierGWC, VerifierSHPLONK},
|
||||
strategy::{AccumulatorStrategy, GuardKZG},
|
||||
},
|
||||
VerificationStrategy,
|
||||
},
|
||||
transcript::{TranscriptReadBuffer, TranscriptWriterBuffer},
|
||||
};
|
||||
use itertools::Itertools;
|
||||
use rand::{rngs::StdRng, SeedableRng};
|
||||
pub use snark_verifier::loader::evm::encode_calldata;
|
||||
use snark_verifier::{
|
||||
loader::evm::{compile_yul, EvmLoader, ExecutorBuilder},
|
||||
pcs::{
|
||||
kzg::{KzgAccumulator, KzgAsVerifyingKey, KzgDecidingKey, KzgSuccinctVerifyingKey},
|
||||
AccumulationDecider, AccumulationScheme, PolynomialCommitmentScheme,
|
||||
},
|
||||
system::halo2::{compile, transcript::evm::EvmTranscript, Config},
|
||||
verifier::SnarkVerifier,
|
||||
};
|
||||
use std::{fs, io, path::Path, rc::Rc};
|
||||
|
||||
/// Generates a proof for EVM verification using either the SHPLONK or GWC multi-open scheme. Uses Keccak for Fiat-Shamir.
|
||||
pub fn gen_evm_proof<'params, C, P, V>(
|
||||
params: &'params ParamsKZG<Bn256>,
|
||||
pk: &'params ProvingKey<G1Affine>,
|
||||
circuit: C,
|
||||
instances: Vec<Vec<Fr>>,
|
||||
) -> Vec<u8>
|
||||
where
|
||||
C: Circuit<Fr>,
|
||||
P: Prover<'params, KZGCommitmentScheme<Bn256>>,
|
||||
V: Verifier<
|
||||
'params,
|
||||
KZGCommitmentScheme<Bn256>,
|
||||
Guard = GuardKZG<'params, Bn256>,
|
||||
MSMAccumulator = DualMSM<'params, Bn256>,
|
||||
>,
|
||||
{
|
||||
let instances = instances
|
||||
.iter()
|
||||
.map(|instances| instances.as_slice())
|
||||
.collect_vec();
|
||||
|
||||
#[cfg(feature = "display")]
|
||||
let proof_time = start_timer!(|| "Create EVM proof");
|
||||
let rng = StdRng::from_entropy();
|
||||
let proof = {
|
||||
let mut transcript = TranscriptWriterBuffer::<_, G1Affine, _>::init(Vec::new());
|
||||
create_proof::<KZGCommitmentScheme<Bn256>, P, _, _, EvmTranscript<_, _, _, _>, _>(
|
||||
params,
|
||||
pk,
|
||||
&[circuit],
|
||||
&[instances.as_slice()],
|
||||
rng,
|
||||
&mut transcript,
|
||||
)
|
||||
.unwrap();
|
||||
transcript.finalize()
|
||||
};
|
||||
#[cfg(feature = "display")]
|
||||
end_timer!(proof_time);
|
||||
|
||||
let accept = {
|
||||
let mut transcript = TranscriptReadBuffer::<_, G1Affine, _>::init(proof.as_slice());
|
||||
VerificationStrategy::<_, V>::finalize(
|
||||
verify_proof::<_, V, _, EvmTranscript<_, _, _, _>, _>(
|
||||
params.verifier_params(),
|
||||
pk.get_vk(),
|
||||
AccumulatorStrategy::new(params.verifier_params()),
|
||||
&[instances.as_slice()],
|
||||
&mut transcript,
|
||||
)
|
||||
.unwrap(),
|
||||
)
|
||||
};
|
||||
assert!(accept);
|
||||
|
||||
proof
|
||||
}
|
||||
|
||||
pub fn gen_evm_proof_gwc<'params, C: Circuit<Fr>>(
|
||||
params: &'params ParamsKZG<Bn256>,
|
||||
pk: &'params ProvingKey<G1Affine>,
|
||||
circuit: C,
|
||||
instances: Vec<Vec<Fr>>,
|
||||
) -> Vec<u8> {
|
||||
gen_evm_proof::<C, ProverGWC<_>, VerifierGWC<_>>(params, pk, circuit, instances)
|
||||
}
|
||||
|
||||
pub fn gen_evm_proof_shplonk<'params, C: Circuit<Fr>>(
|
||||
params: &'params ParamsKZG<Bn256>,
|
||||
pk: &'params ProvingKey<G1Affine>,
|
||||
circuit: C,
|
||||
instances: Vec<Vec<Fr>>,
|
||||
) -> Vec<u8> {
|
||||
gen_evm_proof::<C, ProverSHPLONK<_>, VerifierSHPLONK<_>>(params, pk, circuit, instances)
|
||||
}
|
||||
|
||||
pub fn gen_evm_verifier<C, AS>(
|
||||
params: &ParamsKZG<Bn256>,
|
||||
vk: &VerifyingKey<G1Affine>,
|
||||
num_instance: Vec<usize>,
|
||||
path: Option<&Path>,
|
||||
) -> Vec<u8>
|
||||
where
|
||||
C: CircuitExt<Fr>,
|
||||
AS: PolynomialCommitmentScheme<
|
||||
G1Affine,
|
||||
Rc<EvmLoader>,
|
||||
VerifyingKey = KzgSuccinctVerifyingKey<G1Affine>,
|
||||
Output = KzgAccumulator<G1Affine, Rc<EvmLoader>>,
|
||||
> + AccumulationScheme<
|
||||
G1Affine,
|
||||
Rc<EvmLoader>,
|
||||
VerifyingKey = KzgAsVerifyingKey,
|
||||
Accumulator = KzgAccumulator<G1Affine, Rc<EvmLoader>>,
|
||||
> + AccumulationDecider<G1Affine, Rc<EvmLoader>, DecidingKey = KzgDecidingKey<Bn256>>,
|
||||
{
|
||||
let protocol = compile(
|
||||
params,
|
||||
vk,
|
||||
Config::kzg()
|
||||
.with_num_instance(num_instance.clone())
|
||||
.with_accumulator_indices(C::accumulator_indices()),
|
||||
);
|
||||
// deciding key
|
||||
let dk = (params.get_g()[0], params.g2(), params.s_g2()).into();
|
||||
|
||||
let loader = EvmLoader::new::<Fq, Fr>();
|
||||
let protocol = protocol.loaded(&loader);
|
||||
let mut transcript = EvmTranscript::<_, Rc<EvmLoader>, _, _>::new(&loader);
|
||||
|
||||
let instances = transcript.load_instances(num_instance);
|
||||
let proof =
|
||||
PlonkVerifier::<AS>::read_proof(&dk, &protocol, &instances, &mut transcript).unwrap();
|
||||
PlonkVerifier::<AS>::verify(&dk, &protocol, &instances, &proof).unwrap();
|
||||
|
||||
let yul_code = loader.yul_code();
|
||||
let byte_code = compile_yul(&yul_code);
|
||||
if let Some(path) = path {
|
||||
path.parent()
|
||||
.and_then(|dir| fs::create_dir_all(dir).ok())
|
||||
.unwrap();
|
||||
fs::write(path, yul_code).unwrap();
|
||||
}
|
||||
byte_code
|
||||
}
|
||||
|
||||
pub fn gen_evm_verifier_gwc<C: CircuitExt<Fr>>(
|
||||
params: &ParamsKZG<Bn256>,
|
||||
vk: &VerifyingKey<G1Affine>,
|
||||
num_instance: Vec<usize>,
|
||||
path: Option<&Path>,
|
||||
) -> Vec<u8> {
|
||||
gen_evm_verifier::<C, GWC>(params, vk, num_instance, path)
|
||||
}
|
||||
|
||||
pub fn gen_evm_verifier_shplonk<C: CircuitExt<Fr>>(
|
||||
params: &ParamsKZG<Bn256>,
|
||||
vk: &VerifyingKey<G1Affine>,
|
||||
num_instance: Vec<usize>,
|
||||
path: Option<&Path>,
|
||||
) -> Vec<u8> {
|
||||
gen_evm_verifier::<C, SHPLONK>(params, vk, num_instance, path)
|
||||
}
|
||||
|
||||
pub fn evm_verify(deployment_code: Vec<u8>, instances: Vec<Vec<Fr>>, proof: Vec<u8>) {
|
||||
let calldata = encode_calldata(&instances, &proof);
|
||||
let success = {
|
||||
let mut evm = ExecutorBuilder::default()
|
||||
.with_gas_limit(u64::MAX.into())
|
||||
.build();
|
||||
|
||||
let caller = Address::from_low_u64_be(0xfe);
|
||||
let verifier = evm
|
||||
.deploy(caller, deployment_code.into(), 0.into())
|
||||
.address
|
||||
.unwrap();
|
||||
let result = evm.call_raw(caller, verifier, calldata.into(), 0.into());
|
||||
|
||||
dbg!(result.gas_used);
|
||||
|
||||
!result.reverted
|
||||
};
|
||||
assert!(success);
|
||||
}
|
||||
|
||||
pub fn write_calldata(instances: &[Vec<Fr>], proof: &[u8], path: &Path) -> io::Result<String> {
|
||||
let calldata = encode_calldata(instances, proof);
|
||||
let calldata = hex::encode(calldata);
|
||||
fs::write(path, &calldata)?;
|
||||
Ok(calldata)
|
||||
}
|
||||
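Everything in this module has both a SHPLONK and a GWC variant, and the calldata can be persisted with `write_calldata` for use outside the in-process revm simulation. A rough sketch combining the GWC helpers (the circuit, proving key, and params are assumed to come from the caller, and the output file names are placeholders):

```rust
use halo2_proofs::{
    halo2curves::bn256::{Bn256, Fr, G1Affine},
    plonk::ProvingKey,
    poly::kzg::commitment::ParamsKZG,
};
use snark_verifier_sdk::{
    evm::{evm_verify, gen_evm_proof_gwc, gen_evm_verifier_gwc, write_calldata},
    CircuitExt,
};
use std::path::Path;

fn prove_and_export<C: CircuitExt<Fr>>(
    params: &ParamsKZG<Bn256>,
    pk: &ProvingKey<G1Affine>,
    circuit: C,
) {
    let num_instance = circuit.num_instance();
    let instances = circuit.instances();
    let proof = gen_evm_proof_gwc(params, pk, circuit, instances.clone());

    // Yul verifier bytecode, also written to disk for inspection.
    let deployment_code = gen_evm_verifier_gwc::<C>(
        params,
        pk.get_vk(),
        num_instance,
        Some(Path::new("verifier.yul")),
    );

    // Hex-encoded calldata that a real transaction could carry.
    write_calldata(&instances, &proof, Path::new("calldata.hex")).unwrap();

    evm_verify(deployment_code, instances, proof);
}
```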
316  snark-verifier-sdk/src/halo2.rs  Normal file
@@ -0,0 +1,316 @@
|
||||
#![allow(unused_imports)]
|
||||
use super::{read_instances, write_instances, CircuitExt, PlonkSuccinctVerifier, Snark};
|
||||
use ark_std::{end_timer, start_timer};
|
||||
use halo2_proofs::{
|
||||
circuit::Layouter,
|
||||
halo2curves::{
|
||||
bn256::{Bn256, Fr, G1Affine},
|
||||
group::ff::Field,
|
||||
},
|
||||
plonk::{
|
||||
create_proof, keygen_vk, verify_proof, Circuit, ConstraintSystem, Error, ProvingKey,
|
||||
VerifyingKey,
|
||||
},
|
||||
poly::{
|
||||
commitment::{ParamsProver, Prover, Verifier},
|
||||
kzg::{
|
||||
commitment::{KZGCommitmentScheme, ParamsKZG},
|
||||
msm::DualMSM,
|
||||
multiopen::{ProverGWC, ProverSHPLONK, VerifierGWC, VerifierSHPLONK},
|
||||
strategy::{AccumulatorStrategy, GuardKZG},
|
||||
},
|
||||
VerificationStrategy,
|
||||
},
|
||||
};
|
||||
use halo2curves::CurveAffine;
|
||||
use itertools::Itertools;
|
||||
use lazy_static::lazy_static;
|
||||
use poseidon::Spec as PoseidonSpec;
|
||||
use rand::{rngs::StdRng, SeedableRng};
|
||||
use rand_chacha::ChaCha20Rng;
|
||||
use snark_verifier::{
|
||||
cost::CostEstimation,
|
||||
loader::native::NativeLoader,
|
||||
pcs::{
|
||||
kzg::{KzgAccumulator, KzgAsVerifyingKey, KzgSuccinctVerifyingKey},
|
||||
AccumulationScheme, PolynomialCommitmentScheme, Query,
|
||||
},
|
||||
system::halo2::{compile, Config},
|
||||
util::transcript::TranscriptWrite,
|
||||
verifier::plonk::PlonkProof,
|
||||
};
|
||||
use std::{
|
||||
env::var,
|
||||
fs::{self, File},
|
||||
io::{BufReader, BufWriter},
|
||||
marker::PhantomData,
|
||||
path::Path,
|
||||
};
|
||||
|
||||
pub mod aggregation;
|
||||
|
||||
// Different Poseidon parameters can be set based on usage and security level
|
||||
const T: usize = 5; // 3;
|
||||
const RATE: usize = T - 1;
|
||||
const R_F: usize = 8;
|
||||
const R_P: usize = 60; // 57;
|
||||
|
||||
pub type PoseidonTranscript<L, S> =
|
||||
snark_verifier::system::halo2::transcript::halo2::PoseidonTranscript<
|
||||
G1Affine,
|
||||
L,
|
||||
S,
|
||||
T,
|
||||
RATE,
|
||||
R_F,
|
||||
R_P,
|
||||
>;
|
||||
|
||||
lazy_static! {
|
||||
/// The Poseidon spec recomputes its matrices and round constants each time it is constructed, so it is expensive to create.
|
||||
/// We use lazy_static to create it only once and then clone as needed.
|
||||
pub static ref POSEIDON_SPEC: PoseidonSpec<Fr, T, RATE> = PoseidonSpec::new(R_F, R_P);
|
||||
}
|
||||
|
||||
/// Attempts to read the SRS from `./params/kzg_bn254_{k}.srs`, or from `{dir}/kzg_bn254_{k}.srs` if the `PARAMS_DIR` env var is specified, creating the file if it does not exist.
|
||||
/// * `k`: degree that expresses the size of the circuit (i.e., 2<sup>k</sup> is the number of rows in the circuit)
|
||||
/// * `setup`: a function that creates the srs
|
||||
pub fn read_or_create_srs<'a, C: CurveAffine, P: ParamsProver<'a, C>>(
|
||||
k: u32,
|
||||
setup: impl Fn(u32) -> P,
|
||||
) -> P {
|
||||
let dir = var("PARAMS_DIR").unwrap_or_else(|_| "./params".to_string());
|
||||
let path = format!("{dir}/kzg_bn254_{k}.srs");
|
||||
match File::open(path.as_str()) {
|
||||
Ok(f) => {
|
||||
#[cfg(feature = "display")]
|
||||
println!("read params from {path}");
|
||||
let mut reader = BufReader::new(f);
|
||||
P::read(&mut reader).unwrap()
|
||||
}
|
||||
Err(_) => {
|
||||
#[cfg(feature = "display")]
|
||||
println!("creating params for {k}");
|
||||
fs::create_dir_all(dir).unwrap();
|
||||
let params = setup(k);
|
||||
params
|
||||
.write(&mut BufWriter::new(File::create(path).unwrap()))
|
||||
.unwrap();
|
||||
params
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Generates the SRS for the KZG scheme and writes it to `./params/kzg_bn254_{k}.srs`, or to `{dir}/kzg_bn254_{k}.srs` if the `PARAMS_DIR` env var is specified, creating the file if it does not exist.
|
||||
/// * `k`: degree that expresses the size of the circuit (i.e., 2<sup>k</sup> is the number of rows in the circuit)
|
||||
pub fn gen_srs(k: u32) -> ParamsKZG<Bn256> {
|
||||
read_or_create_srs::<G1Affine, _>(k, |k| {
|
||||
ParamsKZG::<Bn256>::setup(k, ChaCha20Rng::from_seed(Default::default()))
|
||||
})
|
||||
}
|
||||
|
||||
/// Generates a native proof using either the SHPLONK or GWC multi-open scheme. Uses Poseidon for Fiat-Shamir.
|
||||
///
|
||||
/// Caches the instances and proof if `path = Some(instance_path, proof_path)` is specified.
|
||||
pub fn gen_proof<'params, C, P, V>(
|
||||
// TODO: pass Option<&'params ParamsKZG<Bn256>> but hard to get lifetimes to work with `Cow`
|
||||
params: &'params ParamsKZG<Bn256>,
|
||||
pk: &ProvingKey<G1Affine>,
|
||||
circuit: C,
|
||||
instances: Vec<Vec<Fr>>,
|
||||
path: Option<(impl AsRef<Path>, impl AsRef<Path>)>,
|
||||
) -> Vec<u8>
|
||||
where
|
||||
C: Circuit<Fr>,
|
||||
P: Prover<'params, KZGCommitmentScheme<Bn256>>,
|
||||
V: Verifier<
|
||||
'params,
|
||||
KZGCommitmentScheme<Bn256>,
|
||||
Guard = GuardKZG<'params, Bn256>,
|
||||
MSMAccumulator = DualMSM<'params, Bn256>,
|
||||
>,
|
||||
{
|
||||
if let Some((instance_path, proof_path)) = &path {
|
||||
let proof_path = proof_path.as_ref();
|
||||
let cached_instances = read_instances(instance_path.as_ref());
|
||||
if matches!(cached_instances, Ok(tmp) if tmp == instances) && proof_path.exists() {
|
||||
#[cfg(feature = "display")]
|
||||
let read_time = start_timer!(|| format!("Reading proof from {proof_path:?}"));
|
||||
|
||||
let proof = fs::read(proof_path).unwrap();
|
||||
|
||||
#[cfg(feature = "display")]
|
||||
end_timer!(read_time);
|
||||
return proof;
|
||||
}
|
||||
}
|
||||
|
||||
let instances = instances.iter().map(Vec::as_slice).collect_vec();
|
||||
|
||||
#[cfg(feature = "display")]
|
||||
let proof_time = start_timer!(|| "Create proof");
|
||||
|
||||
let mut transcript =
|
||||
PoseidonTranscript::<NativeLoader, _>::from_spec(vec![], POSEIDON_SPEC.clone());
|
||||
let rng = StdRng::from_entropy();
|
||||
create_proof::<_, P, _, _, _, _>(params, pk, &[circuit], &[&instances], rng, &mut transcript)
|
||||
.unwrap();
|
||||
let proof = transcript.finalize();
|
||||
|
||||
#[cfg(feature = "display")]
|
||||
end_timer!(proof_time);
|
||||
|
||||
// validate proof before caching
|
||||
assert!({
|
||||
let mut transcript_read =
|
||||
PoseidonTranscript::<NativeLoader, &[u8]>::from_spec(&proof[..], POSEIDON_SPEC.clone());
|
||||
VerificationStrategy::<_, V>::finalize(
|
||||
verify_proof::<_, V, _, _, _>(
|
||||
params.verifier_params(),
|
||||
pk.get_vk(),
|
||||
AccumulatorStrategy::new(params.verifier_params()),
|
||||
&[instances.as_slice()],
|
||||
&mut transcript_read,
|
||||
)
|
||||
.unwrap(),
|
||||
)
|
||||
});
|
||||
|
||||
if let Some((instance_path, proof_path)) = path {
|
||||
write_instances(&instances, instance_path);
|
||||
fs::write(proof_path, &proof).unwrap();
|
||||
}
|
||||
|
||||
proof
|
||||
}
|
||||
|
||||
/// Generates a native proof using the original PLONK (GWC '19) multi-open scheme. Uses Poseidon for Fiat-Shamir.
|
||||
///
|
||||
/// Caches the instances and proof if `path = Some(instance_path, proof_path)` is specified.
|
||||
pub fn gen_proof_gwc<C: Circuit<Fr>>(
|
||||
params: &ParamsKZG<Bn256>,
|
||||
pk: &ProvingKey<G1Affine>,
|
||||
circuit: C,
|
||||
instances: Vec<Vec<Fr>>,
|
||||
path: Option<(&Path, &Path)>,
|
||||
) -> Vec<u8> {
|
||||
gen_proof::<C, ProverGWC<_>, VerifierGWC<_>>(params, pk, circuit, instances, path)
|
||||
}
|
||||
|
||||
/// Generates a native proof using SHPLONK multi-open scheme. Uses Poseidon for Fiat-Shamir.
|
||||
///
|
||||
/// Caches the instances and proof if `path` is specified.
|
||||
pub fn gen_proof_shplonk<C: Circuit<Fr>>(
|
||||
params: &ParamsKZG<Bn256>,
|
||||
pk: &ProvingKey<G1Affine>,
|
||||
circuit: C,
|
||||
instances: Vec<Vec<Fr>>,
|
||||
path: Option<(&Path, &Path)>,
|
||||
) -> Vec<u8> {
|
||||
gen_proof::<C, ProverSHPLONK<_>, VerifierSHPLONK<_>>(params, pk, circuit, instances, path)
|
||||
}
|
||||
|
||||
/// Generates a SNARK using either SHPLONK or GWC multi-open scheme. Uses Poseidon for Fiat-Shamir.
|
||||
///
|
||||
/// If `path` is specified, first tries to deserialize the entire SNARK from it; otherwise the newly generated SNARK is serialized into `path`.
|
||||
/// Serialization is done using `bincode`.
|
||||
pub fn gen_snark<'params, ConcreteCircuit, P, V>(
|
||||
params: &'params ParamsKZG<Bn256>,
|
||||
pk: &ProvingKey<G1Affine>,
|
||||
circuit: ConcreteCircuit,
|
||||
path: Option<impl AsRef<Path>>,
|
||||
) -> Snark
|
||||
where
|
||||
ConcreteCircuit: CircuitExt<Fr>,
|
||||
P: Prover<'params, KZGCommitmentScheme<Bn256>>,
|
||||
V: Verifier<
|
||||
'params,
|
||||
KZGCommitmentScheme<Bn256>,
|
||||
Guard = GuardKZG<'params, Bn256>,
|
||||
MSMAccumulator = DualMSM<'params, Bn256>,
|
||||
>,
|
||||
{
|
||||
#[cfg(feature = "derive_serde")]
|
||||
if let Some(path) = &path {
|
||||
if let Ok(snark) = read_snark(path) {
|
||||
return snark;
|
||||
}
|
||||
}
|
||||
let protocol = compile(
|
||||
params,
|
||||
pk.get_vk(),
|
||||
Config::kzg()
|
||||
.with_num_instance(circuit.num_instance())
|
||||
.with_accumulator_indices(ConcreteCircuit::accumulator_indices()),
|
||||
);
|
||||
|
||||
let instances = circuit.instances();
|
||||
#[cfg(feature = "derive_serde")]
|
||||
let proof = gen_proof::<ConcreteCircuit, P, V>(
|
||||
params,
|
||||
pk,
|
||||
circuit,
|
||||
instances.clone(),
|
||||
None::<(&str, &str)>,
|
||||
);
|
||||
// If we can't serialize the entire snark, at least serialize the proof
|
||||
#[cfg(not(feature = "derive_serde"))]
|
||||
let proof = {
|
||||
let path = path.map(|path| {
|
||||
let path = path.as_ref().to_str().unwrap();
|
||||
(format!("{path}.instances"), format!("{path}.proof"))
|
||||
});
|
||||
let paths = path
|
||||
.as_ref()
|
||||
.map(|path| (Path::new(&path.0), Path::new(&path.1)));
|
||||
gen_proof::<ConcreteCircuit, P, V>(params, pk, circuit, instances.clone(), paths)
|
||||
};
|
||||
|
||||
let snark = Snark::new(protocol, instances, proof);
|
||||
#[cfg(feature = "derive_serde")]
|
||||
if let Some(path) = &path {
|
||||
let f = File::create(path).unwrap();
|
||||
#[cfg(feature = "display")]
|
||||
let write_time = start_timer!(|| "Write SNARK");
|
||||
bincode::serialize_into(f, &snark).unwrap();
|
||||
#[cfg(feature = "display")]
|
||||
end_timer!(write_time);
|
||||
}
|
||||
#[allow(clippy::let_and_return)]
|
||||
snark
|
||||
}
|
||||
|
||||
/// Generates a SNARK using GWC multi-open scheme. Uses Poseidon for Fiat-Shamir.
|
||||
///
|
||||
/// If `path` is specified, first tries to deserialize the entire SNARK from it; otherwise the newly generated SNARK is serialized into `path`.
|
||||
/// Serialization is done using `bincode`.
|
||||
pub fn gen_snark_gwc<ConcreteCircuit: CircuitExt<Fr>>(
|
||||
params: &ParamsKZG<Bn256>,
|
||||
pk: &ProvingKey<G1Affine>,
|
||||
circuit: ConcreteCircuit,
|
||||
path: Option<impl AsRef<Path>>,
|
||||
) -> Snark {
|
||||
gen_snark::<ConcreteCircuit, ProverGWC<_>, VerifierGWC<_>>(params, pk, circuit, path)
|
||||
}
|
||||
|
||||
/// Generates a SNARK using SHPLONK multi-open scheme. Uses Poseidon for Fiat-Shamir.
|
||||
///
|
||||
/// If `path` is specified, first tries to deserialize the entire SNARK from it; otherwise the newly generated SNARK is serialized into `path`.
|
||||
/// Serialization is done using `bincode`.
|
||||
pub fn gen_snark_shplonk<ConcreteCircuit: CircuitExt<Fr>>(
|
||||
params: &ParamsKZG<Bn256>,
|
||||
pk: &ProvingKey<G1Affine>,
|
||||
circuit: ConcreteCircuit,
|
||||
path: Option<impl AsRef<Path>>,
|
||||
) -> Snark {
|
||||
gen_snark::<ConcreteCircuit, ProverSHPLONK<_>, VerifierSHPLONK<_>>(params, pk, circuit, path)
|
||||
}
|
||||
|
||||
/// Tries to deserialize a SNARK from the specified `path` using `bincode`.
|
||||
///
|
||||
/// WARNING: The user must keep track of whether the SNARK was generated using the GWC or SHPLONK multi-open scheme.
|
||||
#[cfg(feature = "derive_serde")]
|
||||
pub fn read_snark(path: impl AsRef<Path>) -> Result<Snark, bincode::Error> {
|
||||
let f = File::open(path).map_err(Box::<bincode::ErrorKind>::from)?;
|
||||
bincode::deserialize_from(f)
|
||||
}
|
||||
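`gen_srs` is deliberately deterministic (it seeds ChaCha20 with a fixed default seed), which is why the README calls these trusted setups unsafe; `read_or_create_srs` and the `PARAMS_DIR` env var are the hooks for pointing the cache at a real, downloaded SRS instead. A small sketch of redirecting the cache directory (the directory name here is arbitrary):

```rust
use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG};
use snark_verifier_sdk::halo2::gen_srs;

fn srs_from_custom_dir(k: u32) -> ParamsKZG<Bn256> {
    // read_or_create_srs (and hence gen_srs) looks for {PARAMS_DIR}/kzg_bn254_{k}.srs,
    // falling back to ./params/kzg_bn254_{k}.srs when the variable is unset.
    std::env::set_var("PARAMS_DIR", "./my_params");
    gen_srs(k)
}
```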
436  snark-verifier-sdk/src/halo2/aggregation.rs  Normal file
@@ -0,0 +1,436 @@
|
||||
// This is mostly a generalization of `snark_verifier::examples::evm-verifier-with-accumulators`
|
||||
use super::PlonkSuccinctVerifier;
|
||||
use crate::{SnarkWitness, BITS, LIMBS};
|
||||
use halo2_proofs::{
|
||||
circuit::{AssignedCell, Layouter, SimpleFloorPlanner, Value},
|
||||
halo2curves::bn256::{Bn256, Fr, G1Affine},
|
||||
plonk::{self, Circuit, ConstraintSystem, Selector},
|
||||
poly::{commitment::ParamsProver, kzg::commitment::ParamsKZG},
|
||||
};
|
||||
use halo2_wrong_ecc::{
|
||||
integer::rns::Rns,
|
||||
maingate::{
|
||||
MainGate, MainGateConfig, MainGateInstructions, RangeChip, RangeConfig, RangeInstructions,
|
||||
RegionCtx,
|
||||
},
|
||||
EccConfig,
|
||||
};
|
||||
use halo2curves::{bn256::Fq, ff::PrimeField};
|
||||
use itertools::Itertools;
|
||||
use rand::{rngs::StdRng, SeedableRng};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use snark_verifier::util::arithmetic::fe_to_limbs;
|
||||
use snark_verifier::{
|
||||
loader::{self, halo2::EccInstructions, native::NativeLoader},
|
||||
pcs::{
|
||||
kzg::{
|
||||
KzgAccumulator, KzgAsProvingKey, KzgAsVerifyingKey, KzgSuccinctVerifyingKey,
|
||||
LimbsEncodingInstructions,
|
||||
},
|
||||
AccumulationScheme, AccumulationSchemeProver, PolynomialCommitmentScheme,
|
||||
},
|
||||
verifier::SnarkVerifier,
|
||||
};
|
||||
use std::{fs::File, marker::PhantomData, path::Path, rc::Rc};
|
||||
|
||||
use super::{CircuitExt, PoseidonTranscript, Snark, POSEIDON_SPEC};
|
||||
|
||||
pub type Svk = KzgSuccinctVerifyingKey<G1Affine>;
|
||||
pub type BaseFieldEccChip = halo2_wrong_ecc::BaseFieldEccChip<G1Affine, LIMBS, BITS>;
|
||||
pub type Halo2Loader<'a> = loader::halo2::Halo2Loader<'a, G1Affine, BaseFieldEccChip>;
|
||||
|
||||
#[allow(clippy::type_complexity)]
|
||||
/// Core function used in `synthesize` to aggregate multiple `snarks`.
|
||||
///
|
||||
/// Returns the assigned instances of previous snarks and the new final pair that needs to be verified in a pairing check.
|
||||
/// For each previous snark, we concatenate all instances into a single vector. We return a vector of vectors,
|
||||
/// one vector per snark, for convenience.
|
||||
///
|
||||
/// # Assumptions
|
||||
/// * `snarks` is not empty
|
||||
pub fn aggregate<'a, AS>(
|
||||
svk: &Svk,
|
||||
loader: &Rc<Halo2Loader<'a>>,
|
||||
snarks: &[SnarkWitness],
|
||||
as_proof: Value<&'_ [u8]>,
|
||||
) -> (
|
||||
Vec<Vec<<BaseFieldEccChip as EccInstructions<'a, G1Affine>>::AssignedCell>>, // this is Vec<Vec<AssignedCell<Fr, Fr>>>, but we note what the actual trait type is for future reference
|
||||
KzgAccumulator<G1Affine, Rc<Halo2Loader<'a>>>,
|
||||
)
|
||||
where
|
||||
AS: PolynomialCommitmentScheme<
|
||||
G1Affine,
|
||||
Rc<Halo2Loader<'a>>,
|
||||
VerifyingKey = Svk,
|
||||
Output = KzgAccumulator<G1Affine, Rc<Halo2Loader<'a>>>,
|
||||
> + AccumulationScheme<
|
||||
G1Affine,
|
||||
Rc<Halo2Loader<'a>>,
|
||||
Accumulator = KzgAccumulator<G1Affine, Rc<Halo2Loader<'a>>>,
|
||||
VerifyingKey = KzgAsVerifyingKey,
|
||||
>,
|
||||
{
|
||||
assert!(!snarks.is_empty(), "trying to aggregate 0 snarks");
|
||||
let assign_instances = |instances: &[Vec<Value<Fr>>]| {
|
||||
instances
|
||||
.iter()
|
||||
.map(|instances| {
|
||||
instances
|
||||
.iter()
|
||||
.map(|instance| loader.assign_scalar(*instance))
|
||||
.collect_vec()
|
||||
})
|
||||
.collect_vec()
|
||||
};
|
||||
|
||||
let mut previous_instances = Vec::with_capacity(snarks.len());
|
||||
let mut accumulators = snarks
|
||||
.iter()
|
||||
.flat_map(|snark| {
|
||||
let protocol = snark.protocol.loaded(loader);
|
||||
let instances = assign_instances(&snark.instances);
|
||||
// read the transcript and perform Fiat-Shamir
|
||||
// run through verification computation and produce the final pair `succinct`
|
||||
let mut transcript = PoseidonTranscript::<Rc<Halo2Loader>, _>::from_spec(
|
||||
loader,
|
||||
snark.proof(),
|
||||
POSEIDON_SPEC.clone(),
|
||||
);
|
||||
let proof = PlonkSuccinctVerifier::<AS>::read_proof(
|
||||
svk,
|
||||
&protocol,
|
||||
&instances,
|
||||
&mut transcript,
|
||||
)
|
||||
.unwrap();
|
||||
let accumulator =
|
||||
PlonkSuccinctVerifier::<AS>::verify(svk, &protocol, &instances, &proof).unwrap();
|
||||
|
||||
previous_instances.push(
|
||||
instances
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.map(|scalar| scalar.into_assigned())
|
||||
.collect(),
|
||||
);
|
||||
|
||||
accumulator
|
||||
})
|
||||
.collect_vec();
|
||||
|
||||
let accumulator = if accumulators.len() > 1 {
|
||||
let mut transcript = PoseidonTranscript::<Rc<Halo2Loader>, _>::from_spec(
|
||||
loader,
|
||||
as_proof,
|
||||
POSEIDON_SPEC.clone(),
|
||||
);
|
||||
let proof = <AS as AccumulationScheme<_, _>>::read_proof(
|
||||
&Default::default(),
|
||||
&accumulators,
|
||||
&mut transcript,
|
||||
)
|
||||
.unwrap();
|
||||
<AS as AccumulationScheme<_, _>>::verify(&Default::default(), &accumulators, &proof)
|
||||
.unwrap()
|
||||
} else {
|
||||
accumulators.pop().unwrap()
|
||||
};
|
||||
|
||||
(previous_instances, accumulator)
|
||||
}
|
||||
|
||||
/// `AS` should be the [`AccumulationScheme`] and [`PolynomialCommitmentScheme`] used to create `snarks`.
|
||||
/// Many things will fail if `AS` does not match how `snarks` were actually created.
|
||||
///
|
||||
/// In practice, `AS` is either `SHPLONK` or `GWC`.
|
||||
#[derive(Clone)]
|
||||
pub struct AggregationCircuit<AS> {
|
||||
svk: Svk,
|
||||
pub snarks: Vec<SnarkWitness>,
|
||||
instances: Vec<Fr>,
|
||||
as_proof: Value<Vec<u8>>,
|
||||
_as: PhantomData<AS>,
|
||||
}
|
||||
|
||||
impl<AS> AggregationCircuit<AS>
|
||||
// without unstable rust, I don't know how to make this where clause go away...
|
||||
where
|
||||
for<'a> AS: PolynomialCommitmentScheme<
|
||||
G1Affine,
|
||||
Rc<Halo2Loader<'a>>,
|
||||
VerifyingKey = Svk,
|
||||
Output = KzgAccumulator<G1Affine, Rc<Halo2Loader<'a>>>,
|
||||
> + AccumulationScheme<
|
||||
G1Affine,
|
||||
Rc<Halo2Loader<'a>>,
|
||||
Accumulator = KzgAccumulator<G1Affine, Rc<Halo2Loader<'a>>>,
|
||||
VerifyingKey = KzgAsVerifyingKey,
|
||||
> + PolynomialCommitmentScheme<
|
||||
G1Affine,
|
||||
NativeLoader,
|
||||
VerifyingKey = Svk,
|
||||
Output = KzgAccumulator<G1Affine, NativeLoader>,
|
||||
> + AccumulationScheme<
|
||||
G1Affine,
|
||||
NativeLoader,
|
||||
Accumulator = KzgAccumulator<G1Affine, NativeLoader>,
|
||||
VerifyingKey = KzgAsVerifyingKey,
|
||||
> + AccumulationSchemeProver<G1Affine, ProvingKey = KzgAsProvingKey<G1Affine>>,
|
||||
{
|
||||
/// Given snarks, this creates a circuit and runs the `GateThreadBuilder` to verify all the snarks.
|
||||
/// By default, the returned circuit has public instances equal to the limbs of the pair of elliptic curve points, referred to as the `accumulator`, that need to be verified in a final pairing check.
|
||||
///
|
||||
/// The user can optionally modify the circuit after calling this function to add more instances to `assigned_instances` to expose.
|
||||
///
|
||||
/// Warning: will fail silently if `snarks` were created using a different multi-open scheme than `AS`
|
||||
/// where `AS` can be either [`crate::SHPLONK`] or [`crate::GWC`] (for original PLONK multi-open scheme)
|
||||
pub fn new(params: &ParamsKZG<Bn256>, snarks: impl IntoIterator<Item = Snark>) -> Self {
|
||||
let svk: Svk = params.get_g()[0].into();
|
||||
let snarks = snarks.into_iter().collect_vec();
|
||||
|
||||
// TODO: the snarks can probably store these accumulators
|
||||
let accumulators = snarks
|
||||
.iter()
|
||||
.flat_map(|snark| {
|
||||
let mut transcript_read = PoseidonTranscript::<NativeLoader, &[u8]>::from_spec(
|
||||
snark.proof(),
|
||||
POSEIDON_SPEC.clone(),
|
||||
);
|
||||
let proof = PlonkSuccinctVerifier::<AS>::read_proof(
|
||||
&svk,
|
||||
&snark.protocol,
|
||||
&snark.instances,
|
||||
&mut transcript_read,
|
||||
)
|
||||
.unwrap();
|
||||
PlonkSuccinctVerifier::<AS>::verify(&svk, &snark.protocol, &snark.instances, &proof)
|
||||
.unwrap()
|
||||
})
|
||||
.collect_vec();
|
||||
|
||||
let (accumulator, as_proof) = {
|
||||
let mut transcript_write = PoseidonTranscript::<NativeLoader, Vec<u8>>::from_spec(
|
||||
vec![],
|
||||
POSEIDON_SPEC.clone(),
|
||||
);
|
||||
let rng = StdRng::from_entropy();
|
||||
let accumulator = AS::create_proof(
|
||||
&Default::default(),
|
||||
&accumulators,
|
||||
&mut transcript_write,
|
||||
rng,
|
||||
)
|
||||
.unwrap();
|
||||
(accumulator, transcript_write.finalize())
|
||||
};
|
||||
|
||||
let KzgAccumulator { lhs, rhs } = accumulator;
|
||||
let instances = [lhs.x, lhs.y, rhs.x, rhs.y]
|
||||
.map(fe_to_limbs::<_, _, LIMBS, BITS>)
|
||||
.concat();
|
||||
|
||||
Self {
|
||||
svk,
|
||||
snarks: snarks.into_iter().map_into().collect(),
|
||||
instances,
|
||||
as_proof: Value::known(as_proof),
|
||||
_as: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn as_proof(&self) -> Value<&[u8]> {
|
||||
self.as_proof.as_ref().map(|proof| proof.as_slice())
|
||||
}
|
||||
|
||||
pub fn instance(&self) -> &[Fr] {
|
||||
&self.instances
|
||||
}
|
||||
|
||||
/// In a single Halo2 region, aggregates previous snarks but does not expose public instances.
|
||||
///
|
||||
/// Returns `(accumulator_limbs, prev_instances)` as `AssignedCell`s.
|
||||
///
|
||||
/// The `accumulator_limbs` **must** be exposed as public instances.
|
||||
/// One can create a wrapper circuit around `Self` to expose more instances from `prev_instances` as necessary.
|
||||
///
|
||||
/// # Assumptions
|
||||
/// * RangeChip lookup table has already been loaded
|
||||
#[allow(clippy::type_complexity)]
|
||||
pub fn aggregation_region(
|
||||
&self,
|
||||
config: AggregationConfig,
|
||||
layouter: &mut impl Layouter<Fr>,
|
||||
) -> Result<(Vec<AssignedCell<Fr, Fr>>, Vec<Vec<AssignedCell<Fr, Fr>>>), plonk::Error> {
|
||||
layouter.assign_region(
|
||||
|| "",
|
||||
|region| {
|
||||
let ctx = RegionCtx::new(region, 0);
|
||||
|
||||
let ecc_chip = config.ecc_chip();
|
||||
let loader = Halo2Loader::new(ecc_chip, ctx);
|
||||
let (prev_instances, accumulator) =
|
||||
aggregate::<AS>(&self.svk, &loader, &self.snarks, self.as_proof());
|
||||
|
||||
let accumulator_limbs = [accumulator.lhs, accumulator.rhs]
|
||||
.iter()
|
||||
.map(|ec_point| {
|
||||
loader
|
||||
.ecc_chip()
|
||||
.assign_ec_point_to_limbs(&mut loader.ctx_mut(), ec_point.assigned())
|
||||
})
|
||||
.collect::<Result<Vec<_>, plonk::Error>>()?
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.collect_vec();
|
||||
|
||||
Ok((accumulator_limbs, prev_instances))
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct AggregationConfig {
|
||||
main_gate_config: MainGateConfig,
|
||||
range_config: RangeConfig,
|
||||
}
|
||||
|
||||
impl AggregationConfig {
|
||||
pub fn configure<F: PrimeField>(
|
||||
meta: &mut ConstraintSystem<F>,
|
||||
composition_bits: Vec<usize>,
|
||||
overflow_bits: Vec<usize>,
|
||||
) -> Self {
|
||||
let main_gate_config = MainGate::<F>::configure(meta);
|
||||
let range_config =
|
||||
RangeChip::<F>::configure(meta, &main_gate_config, composition_bits, overflow_bits);
|
||||
AggregationConfig {
|
||||
main_gate_config,
|
||||
range_config,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn main_gate(&self) -> MainGate<Fr> {
|
||||
MainGate::new(self.main_gate_config.clone())
|
||||
}
|
||||
|
||||
pub fn range_chip(&self) -> RangeChip<Fr> {
|
||||
RangeChip::new(self.range_config.clone())
|
||||
}
|
||||
|
||||
pub fn ecc_chip(&self) -> BaseFieldEccChip {
|
||||
BaseFieldEccChip::new(EccConfig::new(
|
||||
self.range_config.clone(),
|
||||
self.main_gate_config.clone(),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl<AS> Circuit<Fr> for AggregationCircuit<AS>
|
||||
// without unstable rust, I don't know how to make this where clause go away...
|
||||
where
|
||||
for<'a> AS: PolynomialCommitmentScheme<
|
||||
G1Affine,
|
||||
Rc<Halo2Loader<'a>>,
|
||||
VerifyingKey = Svk,
|
||||
Output = KzgAccumulator<G1Affine, Rc<Halo2Loader<'a>>>,
|
||||
> + AccumulationScheme<
|
||||
G1Affine,
|
||||
Rc<Halo2Loader<'a>>,
|
||||
Accumulator = KzgAccumulator<G1Affine, Rc<Halo2Loader<'a>>>,
|
||||
VerifyingKey = KzgAsVerifyingKey,
|
||||
> + PolynomialCommitmentScheme<
|
||||
G1Affine,
|
||||
NativeLoader,
|
||||
VerifyingKey = Svk,
|
||||
Output = KzgAccumulator<G1Affine, NativeLoader>,
|
||||
> + AccumulationScheme<
|
||||
G1Affine,
|
||||
NativeLoader,
|
||||
Accumulator = KzgAccumulator<G1Affine, NativeLoader>,
|
||||
VerifyingKey = KzgAsVerifyingKey,
|
||||
> + AccumulationSchemeProver<G1Affine, ProvingKey = KzgAsProvingKey<G1Affine>>,
|
||||
{
|
||||
type Config = AggregationConfig;
|
||||
type FloorPlanner = SimpleFloorPlanner;
|
||||
#[cfg(feature = "halo2_circuit_params")]
|
||||
type Params = ();
|
||||
|
||||
fn without_witnesses(&self) -> Self {
|
||||
Self {
|
||||
svk: self.svk,
|
||||
snarks: self
|
||||
.snarks
|
||||
.iter()
|
||||
.map(SnarkWitness::without_witnesses)
|
||||
.collect(),
|
||||
instances: Vec::new(),
|
||||
as_proof: Value::unknown(),
|
||||
_as: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
fn configure(meta: &mut ConstraintSystem<Fr>) -> Self::Config {
|
||||
AggregationConfig::configure(
|
||||
meta,
|
||||
vec![BITS / LIMBS],
|
||||
Rns::<Fq, Fr, LIMBS, BITS>::construct().overflow_lengths(),
|
||||
)
|
||||
}
|
||||
|
||||
fn synthesize(
|
||||
&self,
|
||||
config: Self::Config,
|
||||
mut layouter: impl Layouter<Fr>,
|
||||
) -> Result<(), plonk::Error> {
|
||||
let main_gate = config.main_gate();
|
||||
let range_chip = config.range_chip();
|
||||
range_chip.load_table(&mut layouter)?;
|
||||
|
||||
let (accumulator_limbs, _) = self.aggregation_region(config, &mut layouter)?;
|
||||
|
||||
for (row, limb) in accumulator_limbs.into_iter().enumerate() {
|
||||
main_gate.expose_public(layouter.namespace(|| ""), limb, row)?;
|
||||
}
|
||||
// @dev: one could expose more instances here if necessary
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl<AS> CircuitExt<Fr> for AggregationCircuit<AS>
|
||||
// without unstable rust, I don't know how to make this where clause go away...
|
||||
where
|
||||
for<'a> AS: PolynomialCommitmentScheme<
|
||||
G1Affine,
|
||||
Rc<Halo2Loader<'a>>,
|
||||
VerifyingKey = Svk,
|
||||
Output = KzgAccumulator<G1Affine, Rc<Halo2Loader<'a>>>,
|
||||
> + AccumulationScheme<
|
||||
G1Affine,
|
||||
Rc<Halo2Loader<'a>>,
|
||||
Accumulator = KzgAccumulator<G1Affine, Rc<Halo2Loader<'a>>>,
|
||||
VerifyingKey = KzgAsVerifyingKey,
|
||||
> + PolynomialCommitmentScheme<
|
||||
G1Affine,
|
||||
NativeLoader,
|
||||
VerifyingKey = Svk,
|
||||
Output = KzgAccumulator<G1Affine, NativeLoader>,
|
||||
> + AccumulationScheme<
|
||||
G1Affine,
|
||||
NativeLoader,
|
||||
Accumulator = KzgAccumulator<G1Affine, NativeLoader>,
|
||||
VerifyingKey = KzgAsVerifyingKey,
|
||||
> + AccumulationSchemeProver<G1Affine, ProvingKey = KzgAsProvingKey<G1Affine>>,
|
||||
{
|
||||
fn num_instance(&self) -> Vec<usize> {
|
||||
vec![self.instances.len()]
|
||||
}
|
||||
|
||||
fn instances(&self) -> Vec<Vec<Fr>> {
|
||||
vec![self.instances.clone()]
|
||||
}
|
||||
|
||||
fn accumulator_indices() -> Option<Vec<(usize, usize)>> {
|
||||
Some((0..4 * LIMBS).map(|idx| (0, idx)).collect())
|
||||
}
|
||||
}
|
||||
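Before paying for keygen at degree 22, it can be worth sanity-checking an `AggregationCircuit` with halo2's `MockProver`; its only public column carries the 4·LIMBS accumulator limbs reported by `accumulator_indices`. A hedged sketch (the degree is for illustration only, and mock proving at this size is slow):

```rust
use halo2_proofs::{dev::MockProver, halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG};
use snark_verifier_sdk::{halo2::aggregation::AggregationCircuit, CircuitExt, Snark, SHPLONK};

fn mock_check(params: &ParamsKZG<Bn256>, snarks: Vec<Snark>) {
    let agg_circuit = AggregationCircuit::<SHPLONK>::new(params, snarks);
    // instances() returns one column: the accumulator limbs exposed by synthesize.
    let instances = agg_circuit.instances();
    MockProver::run(22, &agg_circuit, instances)
        .unwrap()
        .assert_satisfied();
}
```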
243  snark-verifier-sdk/src/lib.rs  Normal file
@@ -0,0 +1,243 @@
|
||||
#[cfg(feature = "display")]
|
||||
use ark_std::{end_timer, start_timer};
|
||||
use halo2_proofs::{
|
||||
circuit::Value,
|
||||
halo2curves::{
|
||||
bn256::{Bn256, Fr, G1Affine},
|
||||
group::ff::Field,
|
||||
},
|
||||
plonk::{keygen_pk, keygen_vk, Circuit, ProvingKey, Selector},
|
||||
poly::kzg::commitment::ParamsKZG,
|
||||
SerdeFormat,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
#[cfg(feature = "derive_serde")]
|
||||
use serde::{Deserialize, Serialize};
|
||||
pub use snark_verifier::loader::native::NativeLoader;
|
||||
use snark_verifier::{
|
||||
pcs::kzg::{Bdfg21, Gwc19, KzgAs, LimbsEncoding},
|
||||
verifier::{self, plonk::PlonkProtocol},
|
||||
};
|
||||
use std::{
|
||||
fs::{self, File},
|
||||
io::{self, BufReader, BufWriter},
|
||||
path::Path,
|
||||
};
|
||||
|
||||
#[cfg(feature = "loader_evm")]
|
||||
pub mod evm;
|
||||
#[cfg(feature = "loader_halo2")]
|
||||
pub mod halo2;
|
||||
|
||||
pub const LIMBS: usize = 4;
|
||||
pub const BITS: usize = 68;
|
||||
|
||||
/// AS stands for accumulation scheme.
|
||||
/// AS can be either `Kzg<Bn256, Gwc19>` (the original PLONK KZG multi-open) or `Kzg<Bn256, Bdfg21>` (SHPLONK)
|
||||
pub type PlonkVerifier<AS> = verifier::plonk::PlonkVerifier<AS, LimbsEncoding<LIMBS, BITS>>;
|
||||
pub type PlonkSuccinctVerifier<AS> =
|
||||
verifier::plonk::PlonkSuccinctVerifier<AS, LimbsEncoding<LIMBS, BITS>>;
|
||||
pub type SHPLONK = KzgAs<Bn256, Bdfg21>;
|
||||
pub type GWC = KzgAs<Bn256, Gwc19>;
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
#[cfg_attr(feature = "derive_serde", derive(Serialize, Deserialize))]
|
||||
pub struct Snark {
|
||||
pub protocol: PlonkProtocol<G1Affine>,
|
||||
pub instances: Vec<Vec<Fr>>,
|
||||
pub proof: Vec<u8>,
|
||||
}
|
||||
|
||||
impl Snark {
|
||||
pub fn new(protocol: PlonkProtocol<G1Affine>, instances: Vec<Vec<Fr>>, proof: Vec<u8>) -> Self {
|
||||
Self {
|
||||
protocol,
|
||||
instances,
|
||||
proof,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn proof(&self) -> &[u8] {
|
||||
&self.proof[..]
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Snark> for SnarkWitness {
|
||||
fn from(snark: Snark) -> Self {
|
||||
Self {
|
||||
protocol: snark.protocol,
|
||||
instances: snark
|
||||
.instances
|
||||
.into_iter()
|
||||
.map(|instances| instances.into_iter().map(Value::known).collect_vec())
|
||||
.collect(),
|
||||
proof: Value::known(snark.proof),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct SnarkWitness {
|
||||
protocol: PlonkProtocol<G1Affine>,
|
||||
instances: Vec<Vec<Value<Fr>>>,
|
||||
proof: Value<Vec<u8>>,
|
||||
}
|
||||
|
||||
impl SnarkWitness {
|
||||
fn without_witnesses(&self) -> Self {
|
||||
SnarkWitness {
|
||||
protocol: self.protocol.clone(),
|
||||
instances: self
|
||||
.instances
|
||||
.iter()
|
||||
.map(|instances| vec![Value::unknown(); instances.len()])
|
||||
.collect(),
|
||||
proof: Value::unknown(),
|
||||
}
|
||||
}
|
||||
|
||||
fn proof(&self) -> Value<&[u8]> {
|
||||
self.proof.as_ref().map(Vec::as_slice)
|
||||
}
|
||||
}
|
||||
|
||||
pub trait CircuitExt<F: Field>: Circuit<F> {
|
||||
/// Return the number of instances of the circuit.
|
||||
/// This may depend on extra circuit parameters but NOT on private witnesses.
|
||||
fn num_instance(&self) -> Vec<usize>;
|
||||
|
||||
fn instances(&self) -> Vec<Vec<F>>;
|
||||
|
||||
fn accumulator_indices() -> Option<Vec<(usize, usize)>> {
|
||||
None
|
||||
}
|
||||
|
||||
/// Output the simple selector columns (before selector compression) of the circuit
|
||||
fn selectors(_: &Self::Config) -> Vec<Selector> {
|
||||
vec![]
|
||||
}
|
||||
}
|
||||
|
||||
pub fn read_pk<C: Circuit<Fr>>(
|
||||
path: &Path,
|
||||
#[cfg(feature = "halo2_circuit_params")] param: C::Params,
|
||||
) -> io::Result<ProvingKey<G1Affine>> {
|
||||
let f = File::open(path)?;
|
||||
#[cfg(feature = "display")]
|
||||
let read_time = start_timer!(|| format!("Reading pkey from {path:?}"));
|
||||
|
||||
// BufReader is indeed MUCH faster than Read
|
||||
let mut bufreader = BufReader::new(f);
|
||||
// But it's even faster to load the whole file into memory first and then process,
|
||||
// HOWEVER this requires twice as much memory to initialize
|
||||
// let initial_buffer_size = f.metadata().map(|m| m.len() as usize + 1).unwrap_or(0);
|
||||
// let mut bufreader = Vec::with_capacity(initial_buffer_size);
|
||||
// f.read_to_end(&mut bufreader)?;
|
||||
let pk = ProvingKey::read::<_, C>(
|
||||
&mut bufreader,
|
||||
SerdeFormat::RawBytes,
|
||||
#[cfg(feature = "halo2_circuit_params")]
|
||||
param,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
#[cfg(feature = "display")]
|
||||
end_timer!(read_time);
|
||||
|
||||
Ok(pk)
|
||||
}
|
||||
|
||||
#[allow(clippy::let_and_return)]
pub fn gen_pk<C: Circuit<Fr>>(
    params: &ParamsKZG<Bn256>, // TODO: read pk without params
    circuit: &C,
    path: Option<&Path>,
) -> ProvingKey<G1Affine> {
    if let Some(path) = path {
        if let Ok(pk) = read_pk::<C>(
            path,
            #[cfg(feature = "halo2_circuit_params")]
            circuit.params(),
        ) {
            return pk;
        }
    }
    #[cfg(feature = "display")]
    let pk_time = start_timer!(|| "Generating vkey & pkey");

    let vk = keygen_vk(params, circuit).unwrap();
    let pk = keygen_pk(params, vk, circuit).unwrap();

    #[cfg(feature = "display")]
    end_timer!(pk_time);

    if let Some(path) = path {
        #[cfg(feature = "display")]
        let write_time = start_timer!(|| format!("Writing pkey to {path:?}"));

        path.parent()
            .and_then(|dir| fs::create_dir_all(dir).ok())
            .unwrap();
        let mut f = BufWriter::new(File::create(path).unwrap());
        pk.write(&mut f, SerdeFormat::RawBytesUnchecked).unwrap();

        #[cfg(feature = "display")]
        end_timer!(write_time);
    }
    pk
}

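// Illustrative sketch (not part of this commit): typical `gen_pk` usage. The first call
// runs keygen and caches the key at `path`; later calls are served from disk via `read_pk`.
// The file path is hypothetical.
fn example_load_or_gen_pk<C: CircuitExt<Fr>>(
    params: &ParamsKZG<Bn256>,
    circuit: &C,
) -> ProvingKey<G1Affine> {
    gen_pk(params, circuit, Some(Path::new("./data/example_circuit.pk")))
}
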
pub fn read_instances(path: impl AsRef<Path>) -> Result<Vec<Vec<Fr>>, bincode::Error> {
    let f = File::open(path)?;
    let reader = BufReader::new(f);
    let instances: Vec<Vec<[u8; 32]>> = bincode::deserialize_from(reader)?;
    instances
        .into_iter()
        .map(|instance_column| {
            instance_column
                .iter()
                .map(|bytes| {
                    Option::from(Fr::from_bytes(bytes)).ok_or_else(|| {
                        Box::new(bincode::ErrorKind::Custom(
                            "Invalid finite field point".to_owned(),
                        ))
                    })
                })
                .collect::<Result<Vec<_>, _>>()
        })
        .collect()
}

pub fn write_instances(instances: &[&[Fr]], path: impl AsRef<Path>) {
    let instances: Vec<Vec<[u8; 32]>> = instances
        .iter()
        .map(|instance_column| instance_column.iter().map(|x| x.to_bytes()).collect_vec())
        .collect_vec();
    let f = BufWriter::new(File::create(path).unwrap());
    bincode::serialize_into(f, &instances).unwrap();
}

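// Illustrative sketch (not part of this commit): round-trip public instances through disk
// with the helpers above. The path is hypothetical and its parent directory must exist.
fn example_instances_roundtrip() {
    let instances: Vec<Vec<Fr>> = vec![vec![Fr::from(1u64), Fr::from(42u64)]];
    // `write_instances` expects one `&[Fr]` slice per instance column.
    let columns: Vec<&[Fr]> = instances.iter().map(|column| column.as_slice()).collect();
    write_instances(&columns, "./data/instances.bin");
    let read_back = read_instances("./data/instances.bin").unwrap();
    assert_eq!(read_back, instances);
}
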
#[cfg(feature = "zkevm")]
|
||||
mod zkevm {
|
||||
use super::CircuitExt;
|
||||
use eth_types::Field;
|
||||
use zkevm_circuits::{evm_circuit::EvmCircuit, state_circuit::StateCircuit};
|
||||
|
||||
impl<F: Field> CircuitExt<F> for EvmCircuit<F> {
|
||||
fn instances(&self) -> Vec<Vec<F>> {
|
||||
vec![]
|
||||
}
|
||||
fn num_instance(&self) -> Vec<usize> {
|
||||
vec![]
|
||||
}
|
||||
}
|
||||
|
||||
impl<F: Field> CircuitExt<F> for StateCircuit<F> {
|
||||
fn instances(&self) -> Vec<Vec<F>> {
|
||||
vec![]
|
||||
}
|
||||
fn num_instance(&self) -> Vec<usize> {
|
||||
vec![]
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -30,6 +30,9 @@ revm = { version = "= 2.3.1", optional = true }
halo2_wrong_ecc = { git = "https://github.com/privacy-scaling-explorations/halo2wrong", tag = "v2023_04_20", package = "ecc", optional = true }
poseidon = { git = "https://github.com/privacy-scaling-explorations/poseidon", tag = "v2023_04_20", optional = true }

# derive_serde
serde = { version = "1.0", features = ["derive"], optional = true }

[dev-dependencies]
rand_chacha = "0.3.1"
paste = "1.0.7"
@@ -55,6 +58,7 @@ system_halo2 = ["dep:halo2_proofs"]

# features of halo2
halo2_circuit_params = ["halo2_proofs?/circuit-params", "halo2_wrong_ecc?/circuit-params"]
derive_serde = ["dep:serde"]

[[example]]
name = "evm-verifier"

@@ -341,6 +341,15 @@ impl<'a, C: CurveAffine, EccChip: EccInstructions<'a, C>> Scalar<'a, C, EccChip>
        Ref::map(self.value.borrow(), Value::assigned)
    }

    /// If the scalar is already assigned, returns it as [`EccInstructions::AssignedScalar`].
    /// Otherwise the scalar is a constant, so the loader assigns the constant and returns
    /// the assigned scalar.
    pub fn into_assigned(self) -> EccChip::AssignedScalar {
        match self.value.into_inner() {
            Value::Constant(constant) => self.loader.assign_const_scalar(constant),
            Value::Assigned(assigned) => assigned,
        }
    }

    fn value(&self) -> Ref<Value<C::Scalar, EccChip::AssignedScalar>> {
        self.value.borrow()
    }

@@ -15,6 +15,7 @@ use crate::{
    Error,
};
use halo2_proofs::{circuit::Value, transcript::EncodedChallenge};
use poseidon::Spec;
use std::{
    io::{self, Read, Write},
    rc::Rc,
@@ -71,6 +72,20 @@ where
            buf,
        }
    }

    /// Initialize [`PoseidonTranscript`] from a precomputed spec of round constants and MDS matrix because computing the constants is expensive.
    pub fn from_spec(
        loader: &Rc<Halo2Loader<'a, C, EccChip>>,
        stream: Value<R>,
        spec: Spec<C::Scalar, T, RATE>,
    ) -> Self {
        let buf = Poseidon::from_spec(loader, spec);
        Self {
            loader: loader.clone(),
            stream,
            buf,
        }
    }
}

impl<'a, C, R, EccChip, const T: usize, const RATE: usize, const R_F: usize, const R_P: usize>
@@ -171,6 +186,16 @@ where
            buf: Poseidon::new(&NativeLoader, R_F, R_P),
        }
    }

    /// Initialize [`PoseidonTranscript`] from a precomputed spec of round constants and MDS matrix because computing the constants is expensive.
    pub fn from_spec(stream: S, spec: Spec<C::Scalar, T, RATE>) -> Self {
        let buf = Poseidon::from_spec(&NativeLoader, spec);
        Self {
            loader: NativeLoader,
            stream,
            buf,
        }
    }
}

impl<C: CurveAffine, S, const T: usize, const RATE: usize, const R_F: usize, const R_P: usize>

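// Illustrative sketch (not part of this commit): the intended pattern is to derive the
// Poseidon `Spec` once, since computing the round constants and MDS matrix is the costly
// step, and then hand clones of it to every transcript through `from_spec`, e.g.
//
//     lazy_static! {
//         // T, RATE, R_F, R_P are placeholder parameters chosen by the caller.
//         static ref POSEIDON_SPEC: Spec<Fr, T, RATE> = Spec::new(R_F, R_P);
//     }
//     let transcript = PoseidonTranscript::from_spec(proof_stream, POSEIDON_SPEC.clone());
//
// `Spec::new(R_F, R_P)` is assumed to be the constructor exposed by the pinned `poseidon`
// crate; `proof_stream` is a placeholder for whatever reader or writer the transcript wraps.
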
@@ -91,6 +91,7 @@ pub fn root_of_unity<F: PrimeField>(k: usize) -> F {

/// Rotation on a group.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "derive_serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Rotation(pub i32);

impl Rotation {
@@ -118,6 +119,7 @@ impl From<i32> for Rotation {

/// 2-adicity multiplicative domain
#[derive(Clone, Debug)]
#[cfg_attr(feature = "derive_serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Domain<F: PrimeField> {
    /// Log size of the domain.
    pub k: usize,

@@ -134,6 +134,19 @@ impl<F: FromUniformBytes<64>, L: LoadedScalar<F>, const T: usize, const RATE: us
        }
    }

    /// Same as `new`, but uses the given `spec` instead of creating a new one.
    pub fn from_spec(loader: &L::Loader, spec: Spec<F, T, RATE>) -> Self {
        Self {
            spec,
            state: State::new(
                poseidon::State::default()
                    .words()
                    .map(|state| loader.load_const(&state)),
            ),
            buf: Vec::new(),
        }
    }

    /// Store given `elements` into buffer.
    pub fn update(&mut self, elements: &[L]) {
        self.buf.extend_from_slice(elements);

@@ -16,13 +16,28 @@ use std::{

/// Protocol specifying configuration of a PLONK.
#[derive(Clone, Debug)]
#[cfg_attr(feature = "derive_serde", derive(serde::Serialize, serde::Deserialize))]
pub struct PlonkProtocol<C, L = NativeLoader>
where
    C: CurveAffine,
    L: Loader<C>,
{
    #[cfg_attr(
        feature = "derive_serde",
        serde(bound(
            serialize = "C::Scalar: serde::Serialize",
            deserialize = "C::Scalar: serde::Deserialize<'de>"
        ))
    )]
    /// Working domain.
    pub domain: Domain<C::Scalar>,
    #[cfg_attr(
        feature = "derive_serde",
        serde(bound(
            serialize = "L::LoadedEcPoint: serde::Serialize",
            deserialize = "L::LoadedEcPoint: serde::Deserialize<'de>"
        ))
    )]
    /// Commitments of preprocessed polynomials.
    pub preprocessed: Vec<L::LoadedEcPoint>,
    /// Number of instances in each instance polynomial.
@@ -37,6 +52,13 @@ where
    pub queries: Vec<Query>,
    /// Structure of quotient polynomial.
    pub quotient: QuotientPolynomial<C::Scalar>,
    #[cfg_attr(
        feature = "derive_serde",
        serde(bound(
            serialize = "L::LoadedScalar: serde::Serialize",
            deserialize = "L::LoadedScalar: serde::Deserialize<'de>"
        ))
    )]
    /// Prover and verifier common initial state to write to transcript if any.
    pub transcript_initial_state: Option<L::LoadedScalar>,
    /// Instance polynomials committing key if any.
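// Illustrative sketch (not part of this commit): with `derive_serde` enabled (and
// `halo2curves/derive_serde`, so curve points are serializable), a natively loaded
// protocol can be cached on disk by a consuming crate, e.g.
//
//     // `protocol: PlonkProtocol<G1Affine>`; `bincode` is a dependency of the consuming
//     // crate, not of `snark-verifier` itself.
//     let bytes = bincode::serialize(&protocol)?;
//     let restored: PlonkProtocol<G1Affine> = bincode::deserialize(&bytes)?;
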
@@ -167,6 +189,7 @@ mod halo2 {
}

#[derive(Clone, Copy, Debug)]
#[cfg_attr(feature = "derive_serde", derive(serde::Serialize, serde::Deserialize))]
pub enum CommonPolynomial {
    Identity,
    Lagrange(i32),
@@ -261,6 +284,7 @@ where
}

#[derive(Clone, Debug)]
#[cfg_attr(feature = "derive_serde", derive(serde::Serialize, serde::Deserialize))]
pub struct QuotientPolynomial<F: Clone> {
    pub chunk_degree: usize,
    // Note that `num_chunk` might be larger than necessary, due to the degree
@@ -276,6 +300,7 @@ impl<F: Clone> QuotientPolynomial<F> {
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg_attr(feature = "derive_serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Query {
    pub poly: usize,
    pub rotation: Rotation,
@@ -291,6 +316,7 @@ impl Query {
}

#[derive(Clone, Debug)]
#[cfg_attr(feature = "derive_serde", derive(serde::Serialize, serde::Deserialize))]
pub enum Expression<F> {
    Constant(F),
    CommonPolynomial(CommonPolynomial),
@@ -501,6 +527,7 @@ fn merge_left_right<T: Ord>(a: Option<BTreeSet<T>>, b: Option<BTreeSet<T>>) -> O
}

#[derive(Clone, Copy, Debug)]
#[cfg_attr(feature = "derive_serde", derive(serde::Serialize, serde::Deserialize))]
pub enum LinearizationStrategy {
    /// Older linearization strategy of GWC19, which has linearization
    /// polynomial that doesn't evaluate to 0, and requires prover to send extra
@@ -513,6 +540,7 @@ pub enum LinearizationStrategy {
}

#[derive(Clone, Debug, Default)]
#[cfg_attr(feature = "derive_serde", derive(serde::Serialize, serde::Deserialize))]
pub struct InstanceCommittingKey<C> {
    pub bases: Vec<C>,
    pub constant: Option<C>,
