Add plonky3 support (#1158)

plonky3 backend integration for witness columns only
This commit is contained in:
Thibaut Schaeffer
2024-06-26 22:05:05 +02:00
committed by GitHub
parent 924b9b0660
commit 01acfd736e
18 changed files with 638 additions and 29 deletions

View File

@@ -15,6 +15,7 @@ members = [
"pil-analyzer",
"pipeline",
"pilopt",
"plonky3",
"asm-to-pil",
"backend",
"ast",

View File

@@ -11,6 +11,7 @@ repository = { workspace = true }
default = []
halo2 = ["dep:halo2_proofs", "dep:halo2_curves", "dep:snark-verifier", "dep:halo2_solidity_verifier"]
estark-polygon = ["dep:pil-stark-prover"]
plonky3 = ["dep:powdr-plonky3"]
[dependencies]
powdr-ast.workspace = true
@@ -19,6 +20,8 @@ powdr-pil-analyzer.workspace = true
powdr-executor.workspace = true
powdr-parser-util.workspace = true
powdr-plonky3 = { path = "../plonky3", optional = true }
starky = { git = "https://github.com/0xEigenLabs/eigen-zkvm.git", rev = "cf405b2e2cecb8567cfd083a55936b71722276d5" }
pil-stark-prover = { git = "https://github.com/powdr-labs/pil-stark-prover.git", rev = "769b1153f3ae2d7cbab4c8acf33865ed13f8a823", optional = true }

View File

@@ -3,6 +3,8 @@
mod estark;
#[cfg(feature = "halo2")]
mod halo2;
#[cfg(feature = "plonky3")]
mod plonky3;
mod composite;
@@ -30,6 +32,9 @@ pub enum BackendType {
EStarkStarky,
#[strum(serialize = "estark-dump")]
EStarkDump,
#[cfg(feature = "plonky3")]
#[strum(serialize = "plonky3")]
Plonky3,
}
pub type BackendOptions = String;
@@ -52,6 +57,8 @@ impl BackendType {
BackendType::EStarkPolygon => Box::new(estark::polygon_wrapper::Factory),
BackendType::EStarkStarky => Box::new(estark::starky_wrapper::Factory),
BackendType::EStarkDump => Box::new(estark::DumpFactory),
#[cfg(feature = "plonky3")]
BackendType::Plonky3 => Box::new(plonky3::Factory),
}
}
}

View File

@@ -0,0 +1,53 @@
use std::{io, path::Path};
use powdr_ast::analyzed::Analyzed;
use powdr_executor::witgen::WitgenCallback;
use powdr_number::FieldElement;
use powdr_plonky3::Plonky3Prover;
use crate::{Backend, BackendFactory, BackendOptions, Error, Proof};
/// Factory that builds plonky3 backend instances for the backend registry.
pub(crate) struct Factory;
impl<T: FieldElement> BackendFactory<T> for Factory {
fn create<'a>(
&self,
pil: &'a Analyzed<T>,
_fixed: &'a [(String, Vec<T>)],
_output_dir: Option<&'a Path>,
setup: Option<&mut dyn io::Read>,
verification_key: Option<&mut dyn io::Read>,
verification_app_key: Option<&mut dyn io::Read>,
_: BackendOptions,
) -> Result<Box<dyn crate::Backend<'a, T> + 'a>, Error> {
if setup.is_some() {
return Err(Error::NoSetupAvailable);
}
if verification_key.is_some() {
return Err(Error::NoVerificationAvailable);
}
if verification_app_key.is_some() {
return Err(Error::NoAggregationAvailable);
}
Ok(Box::new(Plonky3Prover::new(pil)))
}
}
impl<'a, T: FieldElement> Backend<'a, T> for Plonky3Prover<'a, T> {
fn verify(&self, proof: &[u8], instances: &[Vec<T>]) -> Result<(), Error> {
Ok(self.verify(proof, instances)?)
}
fn prove(
&self,
witness: &[(String, Vec<T>)],
prev_proof: Option<Proof>,
witgen_callback: WitgenCallback<T>,
) -> Result<Proof, Error> {
if prev_proof.is_some() {
return Err(Error::NoAggregationAvailable);
}
Ok(self.prove(witness, witgen_callback)?)
}
}

View File

@@ -0,0 +1,3 @@
# Plonky3
powdr partially supports [plonky3](https://github.com/Plonky3/Plonky3) with the Goldilocks field. Progress is tracked [here](https://github.com/powdr-labs/powdr/issues/1468).

View File

@@ -11,6 +11,7 @@ default-run = "powdr"
[features]
default = [] # halo2 is disabled by default
halo2 = ["powdr-backend/halo2", "powdr-pipeline/halo2"]
plonky3 = ["powdr-backend/plonky3"]
estark-polygon = ["powdr-backend/estark-polygon", "powdr-pipeline/estark-polygon"]
[dependencies]

View File

@@ -1,5 +1,4 @@
use std::collections::{BTreeMap, HashMap};
use std::rc::Rc;
use std::sync::Arc;
use powdr_ast::analyzed::{
@@ -49,15 +48,15 @@ impl<T, F> QueryCallback<T> for F where F: Fn(&str) -> Result<Option<T>, String>
#[derive(Clone)]
pub struct WitgenCallback<T> {
analyzed: Rc<Analyzed<T>>,
fixed_col_values: Rc<Vec<(String, Vec<T>)>>,
analyzed: Arc<Analyzed<T>>,
fixed_col_values: Arc<Vec<(String, Vec<T>)>>,
query_callback: Arc<dyn QueryCallback<T>>,
}
impl<T: FieldElement> WitgenCallback<T> {
pub fn new(
analyzed: Rc<Analyzed<T>>,
fixed_col_values: Rc<Vec<(String, Vec<T>)>>,
analyzed: Arc<Analyzed<T>>,
fixed_col_values: Arc<Vec<(String, Vec<T>)>>,
query_callback: Option<Arc<dyn QueryCallback<T>>>,
) -> Self {
let query_callback = query_callback.unwrap_or_else(|| Arc::new(unused_query_callback()));

View File

@@ -10,6 +10,7 @@ repository = { workspace = true }
[features]
default = [] # halo2 is disabled by default
halo2 = ["powdr-backend/halo2"]
plonky3 = ["powdr-backend/plonky3"]
estark-polygon = ["powdr-backend/estark-polygon"]
[dependencies]

View File

@@ -5,7 +5,6 @@ use std::{
io::{self, BufReader},
marker::Send,
path::{Path, PathBuf},
rc::Rc,
sync::Arc,
time::Instant,
};
@@ -63,11 +62,11 @@ pub struct Artifacts<T: FieldElement> {
/// An analyzed .pil file, with all dependencies imported, potentially from other files.
analyzed_pil: Option<Analyzed<T>>,
/// An optimized .pil file.
optimized_pil: Option<Rc<Analyzed<T>>>,
optimized_pil: Option<Arc<Analyzed<T>>>,
/// Fully evaluated fixed columns.
fixed_cols: Option<Rc<Columns<T>>>,
fixed_cols: Option<Arc<Columns<T>>>,
/// Generated witnesses.
witness: Option<Rc<Columns<T>>>,
witness: Option<Arc<Columns<T>>>,
/// The proof (if successful).
proof: Option<Proof>,
}
@@ -373,7 +372,7 @@ impl<T: FieldElement> Pipeline<T> {
Ok(Pipeline {
artifact: Artifacts {
optimized_pil: Some(Rc::new(analyzed)),
optimized_pil: Some(Arc::new(analyzed)),
..Default::default()
},
name,
@@ -394,7 +393,7 @@ impl<T: FieldElement> Pipeline<T> {
Pipeline {
artifact: Artifacts {
fixed_cols: Some(Rc::new(fixed)),
fixed_cols: Some(Arc::new(fixed)),
..self.artifact
},
..self
@@ -414,7 +413,7 @@ impl<T: FieldElement> Pipeline<T> {
Pipeline {
artifact: Artifacts {
witness: Some(Rc::new(witness)),
witness: Some(Arc::new(witness)),
..self.artifact
},
..self
@@ -430,7 +429,7 @@ impl<T: FieldElement> Pipeline<T> {
}
Pipeline {
artifact: Artifacts {
witness: Some(Rc::new(witness)),
witness: Some(Arc::new(witness)),
..self.artifact
},
..self
@@ -777,7 +776,7 @@ impl<T: FieldElement> Pipeline<T> {
Ok(self.artifact.analyzed_pil.as_ref().unwrap())
}
pub fn compute_optimized_pil(&mut self) -> Result<Rc<Analyzed<T>>, Vec<String>> {
pub fn compute_optimized_pil(&mut self) -> Result<Arc<Analyzed<T>>, Vec<String>> {
if let Some(ref optimized_pil) = self.artifact.optimized_pil {
return Ok(optimized_pil.clone());
}
@@ -790,16 +789,16 @@ impl<T: FieldElement> Pipeline<T> {
self.maybe_write_pil(&optimized, "_opt")?;
self.maybe_write_pil_object(&optimized, "_opt")?;
self.artifact.optimized_pil = Some(Rc::new(optimized));
self.artifact.optimized_pil = Some(Arc::new(optimized));
Ok(self.artifact.optimized_pil.as_ref().unwrap().clone())
}
pub fn optimized_pil(&self) -> Result<Rc<Analyzed<T>>, Vec<String>> {
pub fn optimized_pil(&self) -> Result<Arc<Analyzed<T>>, Vec<String>> {
Ok(self.artifact.optimized_pil.as_ref().unwrap().clone())
}
pub fn compute_fixed_cols(&mut self) -> Result<Rc<Columns<T>>, Vec<String>> {
pub fn compute_fixed_cols(&mut self) -> Result<Arc<Columns<T>>, Vec<String>> {
if let Some(ref fixed_cols) = self.artifact.fixed_cols {
return Ok(fixed_cols.clone());
}
@@ -813,16 +812,16 @@ impl<T: FieldElement> Pipeline<T> {
self.maybe_write_constants(&fixed_cols)?;
self.log(&format!("Took {}", start.elapsed().as_secs_f32()));
self.artifact.fixed_cols = Some(Rc::new(fixed_cols));
self.artifact.fixed_cols = Some(Arc::new(fixed_cols));
Ok(self.artifact.fixed_cols.as_ref().unwrap().clone())
}
pub fn fixed_cols(&self) -> Result<Rc<Columns<T>>, Vec<String>> {
pub fn fixed_cols(&self) -> Result<Arc<Columns<T>>, Vec<String>> {
Ok(self.artifact.fixed_cols.as_ref().unwrap().clone())
}
pub fn compute_witness(&mut self) -> Result<Rc<Columns<T>>, Vec<String>> {
pub fn compute_witness(&mut self) -> Result<Arc<Columns<T>>, Vec<String>> {
if let Some(ref witness) = self.artifact.witness {
return Ok(witness.clone());
}
@@ -849,12 +848,12 @@ impl<T: FieldElement> Pipeline<T> {
self.maybe_write_witness(&fixed_cols, &witness)?;
self.artifact.witness = Some(Rc::new(witness));
self.artifact.witness = Some(Arc::new(witness));
Ok(self.artifact.witness.as_ref().unwrap().clone())
}
pub fn witness(&self) -> Result<Rc<Columns<T>>, Vec<String>> {
pub fn witness(&self) -> Result<Arc<Columns<T>>, Vec<String>> {
Ok(self.artifact.witness.as_ref().unwrap().clone())
}
@@ -1083,11 +1082,13 @@ impl<T: FieldElement> Pipeline<T> {
.as_ref()
.map(|path| BufReader::new(fs::File::open(path).unwrap()));
let mut vkey_file = if let Some(ref path) = self.arguments.vkey_file {
BufReader::new(fs::File::open(path).unwrap())
} else {
panic!("Verification key should have been provided for verification")
};
let mut vkey_file = self
.arguments
.vkey_file
.as_ref()
.map(fs::File::open)
.map(Result::unwrap)
.map(BufReader::new);
let pil = self.compute_optimized_pil()?;
let fixed_cols = self.compute_fixed_cols()?;
@@ -1100,7 +1101,9 @@ impl<T: FieldElement> Pipeline<T> {
setup_file
.as_mut()
.map(|file| file as &mut dyn std::io::Read),
Some(&mut vkey_file),
vkey_file
.as_mut()
.map(|file| file as &mut dyn std::io::Read),
// We shouldn't need the app verification key for this
None,
self.arguments.backend_options.clone(),

View File

@@ -175,6 +175,35 @@ pub fn gen_halo2_proof(file_name: &str, inputs: Vec<Bn254Field>) {
#[cfg(not(feature = "halo2"))]
pub fn gen_halo2_proof(_file_name: &str, _inputs: Vec<Bn254Field>) {}
#[cfg(feature = "plonky3")]
/// Full pipeline round-trip with the plonky3 backend: computes a proof for
/// the given test file and verifies it against the pipeline's public values.
pub fn test_plonky3(file_name: &str, inputs: Vec<GoldilocksField>) {
    let tmp_dir = mktemp::Temp::new_dir().unwrap();
    let mut pipeline = Pipeline::default()
        .with_tmp_output(&tmp_dir)
        .from_file(resolve_test_file(file_name))
        .with_prover_inputs(inputs)
        .with_backend(powdr_backend::BackendType::Plonky3, None);

    // Generate a proof
    let proof = pipeline.compute_proof().cloned().unwrap();

    // Collect the public values in declaration order for verification.
    let publics: Vec<GoldilocksField> = pipeline
        .publics()
        .clone()
        .unwrap()
        .iter()
        .map(|(_name, v)| *v)
        .collect();

    pipeline.verify(&proof, &[publics]).unwrap();
}
// No-op stand-in so callers compile when the `plonky3` feature is disabled.
#[cfg(not(feature = "plonky3"))]
pub fn test_plonky3(_: &str, _: Vec<GoldilocksField>) {}

// NOTE(review): there is no `#[cfg(feature = "plonky3")]` counterpart for
// `gen_plonky3_proof` visible here — confirm whether this stub is still needed.
#[cfg(not(feature = "plonky3"))]
pub fn gen_plonky3_proof(_: &str, _: Vec<GoldilocksField>) {}
/// Returns the analyzed PIL containing only the std library.
pub fn std_analyzed<T: FieldElement>() -> Analyzed<T> {
let mut pipeline = Pipeline::default().from_asm_string(String::new(), None);

View File

@@ -4,9 +4,10 @@ use powdr_number::GoldilocksField;
use powdr_pipeline::test_util::{
assert_proofs_fail_for_invalid_witnesses, assert_proofs_fail_for_invalid_witnesses_estark,
assert_proofs_fail_for_invalid_witnesses_halo2,
assert_proofs_fail_for_invalid_witnesses_pilcom, gen_estark_proof, test_halo2,
assert_proofs_fail_for_invalid_witnesses_pilcom, gen_estark_proof, test_halo2, test_plonky3,
verify_test_file,
};
use test_log::test;
pub fn verify_pil(file_name: &str, inputs: Vec<GoldilocksField>) {
@@ -238,6 +239,12 @@ fn halo_without_lookup() {
gen_estark_proof(f, Default::default());
}
#[test]
fn add() {
    // End-to-end plonky3 proof for a simple addition constraint.
    let f = "pil/add.pil";
    test_plonky3(f, Default::default());
}
#[test]
fn simple_div() {
let f = "pil/simple_div.pil";

36
plonky3/Cargo.toml Normal file
View File

@@ -0,0 +1,36 @@
[package]
name = "powdr-plonky3"
version.workspace = true
edition.workspace = true
license.workspace = true
homepage.workspace = true
repository.workspace = true
[dependencies]
powdr-ast.workspace = true
powdr-number.workspace = true
rand = "0.8.5"
powdr-analysis = { path = "../analysis" }
powdr-executor = { path = "../executor" }
serde_json = "1.0.116"
p3-air = { git = "https://github.com/Plonky3/Plonky3.git", rev = "7e4f7af" }
p3-matrix = { git = "https://github.com/Plonky3/Plonky3.git", rev = "7e4f7af" }
p3-field = { git = "https://github.com/Plonky3/Plonky3.git", rev = "7e4f7af" }
p3-uni-stark = { git = "https://github.com/Plonky3/Plonky3.git", rev = "7e4f7af" }
p3-commit = { git = "https://github.com/Plonky3/Plonky3.git", rev = "7e4f7af", features = ["test-utils"] }
p3-poseidon2 = { git = "https://github.com/Plonky3/Plonky3.git", rev = "7e4f7af" }
p3-poseidon = { git = "https://github.com/Plonky3/Plonky3.git", rev = "7e4f7af" }
p3-fri = { git = "https://github.com/Plonky3/Plonky3.git", rev = "7e4f7af" }
p3-mds = { git = "https://github.com/Plonky3/Plonky3.git", rev = "7e4f7af" }
p3-merkle-tree = { git = "https://github.com/Plonky3/Plonky3.git", rev = "7e4f7af" }
p3-goldilocks = { git = "https://github.com/Plonky3/Plonky3.git", rev = "7e4f7af" }
p3-symmetric = { git = "https://github.com/Plonky3/Plonky3.git", rev = "7e4f7af" }
p3-dft = { git = "https://github.com/Plonky3/Plonky3.git", rev = "7e4f7af" }
p3-challenger = { git = "https://github.com/Plonky3/Plonky3.git", rev = "7e4f7af" }
p3-util = { git = "https://github.com/Plonky3/Plonky3.git", rev = "7e4f7af" }
lazy_static = "1.4.0"
rand_chacha = "0.3.1"
[dev-dependencies]
powdr-pipeline.workspace = true

View File

@@ -0,0 +1,199 @@
//! A plonky3 adapter for powdr
use std::any::TypeId;
use p3_air::{Air, AirBuilder, BaseAir};
use p3_field::AbstractField;
use p3_goldilocks::Goldilocks;
use p3_matrix::{dense::RowMajorMatrix, MatrixRowSlices};
use powdr_ast::analyzed::{
AlgebraicBinaryOperation, AlgebraicBinaryOperator, AlgebraicExpression,
AlgebraicUnaryOperation, AlgebraicUnaryOperator, Analyzed, IdentityKind, PolynomialType,
};
use powdr_executor::witgen::WitgenCallback;
use powdr_number::{FieldElement, GoldilocksField, LargeInt};
pub type Val = p3_goldilocks::Goldilocks;
/// A plonky3 AIR built from an analyzed powdr PIL (witness columns only).
pub(crate) struct PowdrCircuit<'a, T> {
    /// The analyzed PIL
    analyzed: &'a Analyzed<T>,
    /// The value of the witness columns, if set
    witness: Option<&'a [(String, Vec<T>)]>,
    /// Callback to augment the witness in the later stages
    _witgen_callback: Option<WitgenCallback<T>>,
}
impl<'a, T: FieldElement> PowdrCircuit<'a, T> {
    /// Flattens the witness columns into a row-major trace matrix: for each
    /// row, the value of every committed column in declaration order.
    ///
    /// Panics if the witness has not been set or the degree is unknown.
    pub fn generate_trace_rows(&self) -> RowMajorMatrix<Goldilocks> {
        let columns = self.witness();
        let degree = self.analyzed.degree.unwrap() as usize;

        // Preallocate the full trace and fill it row by row.
        let mut values = Vec::with_capacity(degree * columns.len());
        for row in 0..degree {
            for (_, column) in columns {
                values.push(cast_to_goldilocks(column[row]));
            }
        }

        RowMajorMatrix::new(values, self.width())
    }
}
/// Converts a powdr field element into a plonky3 Goldilocks value.
///
/// Only `GoldilocksField` is supported; the `TypeId` assertion guards
/// against accidentally instantiating this with another field type.
pub fn cast_to_goldilocks<T: FieldElement>(v: T) -> Val {
    assert_eq!(TypeId::of::<T>(), TypeId::of::<GoldilocksField>());
    Val::from_canonical_u64(v.to_integer().try_into_u64().unwrap())
}
impl<'a, T: FieldElement> PowdrCircuit<'a, T> {
    /// Creates a circuit from an analyzed PIL, rejecting features that this
    /// backend does not support yet: fixed columns, public declarations and
    /// multi-stage (stage > 0) witnesses.
    pub(crate) fn new(analyzed: &'a Analyzed<T>) -> Self {
        if analyzed.constant_count() > 0 {
            unimplemented!("Fixed columns are not supported in Plonky3");
        }

        if !analyzed.public_declarations.is_empty() {
            unimplemented!("Public declarations are not supported in Plonky3");
        }

        if analyzed
            .definitions
            .iter()
            .any(|(_, (s, _))| matches!(s.stage, Some(stage) if stage > 0))
        {
            unimplemented!("Multi-stage proving is not supported in Plonky3")
        }

        Self {
            analyzed,
            witness: None,
            _witgen_callback: None,
        }
    }

    /// Returns the witness columns.
    ///
    /// Panics if the witness has not been set via [`Self::with_witness`].
    fn witness(&self) -> &'a [(String, Vec<T>)] {
        self.witness.as_ref().unwrap()
    }

    /// Attaches the witness column values; their count must match the number
    /// of committed polynomials in the PIL.
    pub(crate) fn with_witness(self, witness: &'a [(String, Vec<T>)]) -> Self {
        assert_eq!(witness.len(), self.analyzed.commitment_count());
        Self {
            witness: Some(witness),
            ..self
        }
    }

    /// Attaches a callback for later-stage witness generation (currently
    /// stored but not used by this backend).
    pub(crate) fn with_witgen_callback(self, witgen_callback: WitgenCallback<T>) -> Self {
        Self {
            _witgen_callback: Some(witgen_callback),
            ..self
        }
    }

    /// Conversion to plonky3 expression
    ///
    /// Recursively translates a powdr algebraic expression into a plonky3
    /// `AirBuilder` expression over the main trace `matrix`.
    fn to_plonky3_expr<AB: AirBuilder<F = Val>>(
        &self,
        e: &AlgebraicExpression<T>,
        matrix: &AB::M,
    ) -> AB::Expr {
        let res = match e {
            AlgebraicExpression::Reference(r) => {
                let poly_id = r.poly_id;

                // `next` references read from the following trace row.
                let row = match r.next {
                    true => matrix.row_slice(1),
                    false => matrix.row_slice(0),
                };

                // witness columns indexes are unchanged, fixed ones are offset by `commitment_count`
                let index = match poly_id.ptype {
                    PolynomialType::Committed => {
                        assert!(
                            r.poly_id.id < self.analyzed.commitment_count() as u64,
                            "Plonky3 expects `poly_id` to be contiguous"
                        );
                        r.poly_id.id as usize
                    }
                    PolynomialType::Constant => {
                        unreachable!(
                            "fixed columns are not supported, should have been checked earlier"
                        )
                    }
                    PolynomialType::Intermediate => {
                        unreachable!("intermediate polynomials should have been inlined")
                    }
                };

                row[index].into()
            }
            AlgebraicExpression::PublicReference(_) => unimplemented!(
                "public references are not supported inside algebraic expressions in plonky3"
            ),
            AlgebraicExpression::Number(n) => AB::Expr::from(cast_to_goldilocks(*n)),
            AlgebraicExpression::BinaryOperation(AlgebraicBinaryOperation { left, op, right }) => {
                let left = self.to_plonky3_expr::<AB>(left, matrix);
                let right = self.to_plonky3_expr::<AB>(right, matrix);

                match op {
                    AlgebraicBinaryOperator::Add => left + right,
                    AlgebraicBinaryOperator::Sub => left - right,
                    AlgebraicBinaryOperator::Mul => left * right,
                    AlgebraicBinaryOperator::Pow => {
                        unreachable!("exponentiations should have been evaluated")
                    }
                }
            }
            AlgebraicExpression::UnaryOperation(AlgebraicUnaryOperation { op, expr }) => {
                let expr: <AB as AirBuilder>::Expr = self.to_plonky3_expr::<AB>(expr, matrix);

                match op {
                    AlgebraicUnaryOperator::Minus => -expr,
                }
            }
            AlgebraicExpression::Challenge(challenge) => {
                unimplemented!("Challenge API for {challenge:?} not accessible in plonky3")
            }
        };
        res
    }
}
impl<'a, T: FieldElement> BaseAir<Val> for PowdrCircuit<'a, T> {
    /// Number of trace columns: only committed (witness) columns, since
    /// fixed columns are rejected in `PowdrCircuit::new`.
    fn width(&self) -> usize {
        assert_eq!(self.analyzed.constant_count(), 0);
        self.analyzed.commitment_count()
    }

    /// Preprocessed (fixed) traces are not supported by this backend.
    fn preprocessed_trace(&self) -> Option<RowMajorMatrix<Val>> {
        unimplemented!()
    }
}
impl<'a, T: FieldElement, AB: AirBuilder<F = Val>> Air<AB> for PowdrCircuit<'a, T> {
    /// Registers one zero-constraint per polynomial identity of the PIL.
    /// Lookups, permutations and connections are not supported.
    fn eval(&self, builder: &mut AB) {
        let matrix = builder.main();

        for identity in &self
            .analyzed
            .identities_with_inlined_intermediate_polynomials()
        {
            match identity.kind {
                IdentityKind::Polynomial => {
                    // A polynomial identity carries its expression in the
                    // left selector; both expression lists must be empty.
                    assert_eq!(identity.left.expressions.len(), 0);
                    assert_eq!(identity.right.expressions.len(), 0);
                    assert!(identity.right.selector.is_none());

                    let left = self
                        .to_plonky3_expr::<AB>(identity.left.selector.as_ref().unwrap(), &matrix);

                    builder.assert_zero(left);
                }
                IdentityKind::Plookup => unimplemented!("Plonky3 does not support plookup"),
                IdentityKind::Permutation => {
                    unimplemented!("Plonky3 does not support permutations")
                }
                IdentityKind::Connect => unimplemented!("Plonky3 does not support connections"),
            }
        }
    }
}

4
plonky3/src/lib.rs Normal file
View File

@@ -0,0 +1,4 @@
mod circuit_builder;
mod prover;
pub use prover::Plonky3Prover;

159
plonky3/src/prover/mod.rs Normal file
View File

@@ -0,0 +1,159 @@
//! A plonky3 prover using FRI and Poseidon
mod params;
use powdr_ast::analyzed::Analyzed;
use powdr_executor::witgen::WitgenCallback;
use p3_uni_stark::{prove, verify, Proof};
use powdr_number::{FieldElement, KnownField};
use crate::circuit_builder::{cast_to_goldilocks, PowdrCircuit};
use self::params::{get_challenger, get_config};
/// A plonky3 prover/verifier bound to a fixed analyzed PIL.
#[derive(Clone)]
pub struct Plonky3Prover<'a, T> {
    /// The analyzed PIL
    analyzed: &'a Analyzed<T>,
}
impl<'a, T> Plonky3Prover<'a, T> {
    /// Creates a prover for the given analyzed PIL.
    pub fn new(analyzed: &'a Analyzed<T>) -> Self {
        Self { analyzed }
    }
}
impl<'a, T: FieldElement> Plonky3Prover<'a, T> {
    /// Generates a proof for the given witness columns and returns it as
    /// JSON bytes.
    ///
    /// Only the Goldilocks field is supported (asserted at runtime). As a
    /// sanity check, the freshly generated proof is verified before being
    /// returned; a failure there indicates a bug in the circuit translation
    /// and is reported as an `Err` instead of panicking, matching the
    /// `Result<_, String>` contract of this function.
    pub fn prove(
        &self,
        witness: &[(String, Vec<T>)],
        witgen_callback: WitgenCallback<T>,
    ) -> Result<Vec<u8>, String> {
        assert_eq!(T::known_field(), Some(KnownField::GoldilocksField));

        let circuit = PowdrCircuit::new(self.analyzed)
            .with_witgen_callback(witgen_callback)
            .with_witness(witness);

        // No public values are supported yet.
        let publics = vec![];

        let trace = circuit.generate_trace_rows();

        let config = get_config(self.analyzed.degree());

        let mut challenger = get_challenger();

        let proof = prove(&config, &circuit, &mut challenger, trace, &publics);

        // Sanity check with a fresh challenger (Fiat-Shamir transcripts must
        // start from the same state on both sides).
        let mut challenger = get_challenger();
        verify(&config, &circuit, &mut challenger, &proof, &publics)
            .map_err(|e| format!("Self-verification of generated proof failed: {e:?}"))?;

        Ok(serde_json::to_vec(&proof).unwrap())
    }

    /// Verifies a serialized proof against the given public instances.
    ///
    /// The instances are flattened and cast to Goldilocks values.
    pub fn verify(&self, proof: &[u8], instances: &[Vec<T>]) -> Result<(), String> {
        let proof: Proof<_> = serde_json::from_slice(proof)
            .map_err(|e| format!("Failed to deserialize proof: {e}"))?;

        let publics = instances
            .iter()
            .flatten()
            .map(|v| cast_to_goldilocks(*v))
            .collect();

        let config = get_config(self.analyzed.degree());

        let mut challenger = get_challenger();

        verify(
            &config,
            &PowdrCircuit::new(self.analyzed),
            &mut challenger,
            &proof,
            &publics,
        )
        .map_err(|e| format!("Failed to verify proof: {e:?}"))
    }
}
#[cfg(test)]
mod tests {
    use powdr_number::GoldilocksField;
    use powdr_pipeline::Pipeline;

    use crate::Plonky3Prover;

    /// Prove and verify execution
    ///
    /// Runs the full pipeline on a PIL string and proves it with plonky3.
    fn run_test_goldilocks(pil: &str) {
        let mut pipeline = Pipeline::<GoldilocksField>::default().from_pil_string(pil.to_string());

        let pil = pipeline.compute_optimized_pil().unwrap();
        let witness_callback = pipeline.witgen_callback().unwrap();
        let witness = pipeline.compute_witness().unwrap();

        let proof = Plonky3Prover::new(&pil).prove(&witness, witness_callback);
        assert!(proof.is_ok());
    }

    // Public declarations are unsupported and must panic.
    #[test]
    #[should_panic = "not implemented"]
    fn publics() {
        let content = "namespace Global(8); pol witness x; x * (x - 1) = 0; public out = x(7);";
        run_test_goldilocks(content);
    }

    // plonky3 rejects a trace with zero columns.
    #[test]
    #[should_panic = "assertion failed: width >= 1"]
    fn empty() {
        let content = "namespace Global(8);";
        run_test_goldilocks(content);
    }

    // The happy path: a single polynomial identity over witness columns.
    #[test]
    fn add() {
        let content = r#"
        namespace Add(8);
            col witness x;
            col witness y;
            col witness z;
            x + y = z;
        "#;
        run_test_goldilocks(content);
    }

    // Challenges require multi-stage proving, which is unsupported.
    #[test]
    #[should_panic = "not implemented"]
    fn challenge() {
        let content = r#"
        let N: int = 8;
        
        namespace std::prover(N);
            let challenge = [];
            enum Query {
                Hint(int)
            }
        
        namespace Global(N); 
            let beta: expr = std::prover::challenge(0, 42); 
            col witness stage(0) x;
            col witness stage(1) y;
            x = y + beta;
        "#;
        run_test_goldilocks(content);
    }

    // Fixed columns are unsupported and must panic.
    #[test]
    #[should_panic = "not implemented"]
    fn polynomial_identity() {
        let content = "namespace Global(8); pol fixed z = [1, 2]*; pol witness a; a = z + 1;";
        run_test_goldilocks(content);
    }

    // Lookups are unsupported and must panic.
    #[test]
    #[should_panic = "not implemented"]
    fn lookup() {
        let content = "namespace Global(8); pol fixed z = [0, 1]*; pol witness a; a in z;";
        run_test_goldilocks(content);
    }
}

View File

@@ -0,0 +1,98 @@
//! The concrete parameters used in the prover
//! Inspired from [this example](https://github.com/Plonky3/Plonky3/blob/6a1b0710fdf85136d0fdd645b92933615867740a/keccak-air/examples/prove_goldilocks_keccak.rs#L57)
use lazy_static::lazy_static;
use p3_challenger::DuplexChallenger;
use p3_commit::ExtensionMmcs;
use p3_dft::Radix2DitParallel;
use p3_field::{extension::BinomialExtensionField, Field};
use p3_fri::{FriConfig, TwoAdicFriPcs};
use p3_goldilocks::MdsMatrixGoldilocks;
use p3_merkle_tree::FieldMerkleTreeMmcs;
use p3_poseidon::Poseidon;
use p3_symmetric::{PaddingFreeSponge, TruncatedPermutation};
use p3_uni_stark::StarkConfig;
use p3_util::log2_ceil_usize;
use rand::{distributions::Standard, Rng, SeedableRng};
use crate::circuit_builder::Val;
// Degree of the field extension used for FRI challenges.
const D: usize = 2;
type Challenge = BinomialExtensionField<Val, D>;

// Poseidon permutation parameters (state width and S-box exponent).
const WIDTH: usize = 8;
const ALPHA: u64 = 7;
type Perm = Poseidon<Val, MdsMatrixGoldilocks, WIDTH, ALPHA>;

// Sponge hash built on the permutation: absorb RATE, squeeze OUT elements.
const RATE: usize = 4;
const OUT: usize = 4;
type Hash = PaddingFreeSponge<Perm, WIDTH, RATE, OUT>;

// 2-to-1 compression for the Merkle tree, CHUNK elements per input.
const N: usize = 2;
const CHUNK: usize = 4;
type Compress = TruncatedPermutation<Perm, N, CHUNK, WIDTH>;

// Merkle commitment scheme over base-field values.
const DIGEST_ELEMS: usize = 4;
type ValMmcs = FieldMerkleTreeMmcs<
    <Val as Field>::Packing,
    <Val as Field>::Packing,
    Hash,
    Compress,
    DIGEST_ELEMS,
>;

type Challenger = DuplexChallenger<Val, Perm, WIDTH>;
type ChallengeMmcs = ExtensionMmcs<Val, Challenge, ValMmcs>;
type Dft = Radix2DitParallel;
type Pcs = TwoAdicFriPcs<Val, Dft, ValMmcs, ChallengeMmcs>;
type Config = StarkConfig<Pcs, Challenge, Challenger>;

// Poseidon round structure and derived number of round constants.
const HALF_NUM_FULL_ROUNDS: usize = 4;
const NUM_PARTIAL_ROUNDS: usize = 22;

// FRI soundness parameters.
const FRI_LOG_BLOWUP: usize = 1;
const FRI_NUM_QUERIES: usize = 100;
const FRI_PROOF_OF_WORK_BITS: usize = 16;

const NUM_ROUNDS: usize = 2 * HALF_NUM_FULL_ROUNDS + NUM_PARTIAL_ROUNDS;
const NUM_CONSTANTS: usize = WIDTH * NUM_ROUNDS;

// Fixed seed so prover and verifier derive identical round constants.
const RNG_SEED: u64 = 42;
lazy_static! {
    // Shared Poseidon permutation; round constants are drawn from a ChaCha8
    // RNG with a fixed seed so that prover and verifier agree.
    static ref PERM: Perm = Perm::new(
        HALF_NUM_FULL_ROUNDS,
        NUM_PARTIAL_ROUNDS,
        rand_chacha::ChaCha8Rng::seed_from_u64(RNG_SEED)
            .sample_iter(Standard)
            .take(NUM_CONSTANTS)
            .collect(),
        MdsMatrixGoldilocks,
    );
}
/// Creates a fresh Fiat-Shamir challenger based on the shared permutation.
/// Prover and verifier must each start from a fresh challenger.
pub fn get_challenger() -> Challenger {
    Challenger::new(PERM.clone())
}
/// Builds the STARK configuration (FRI + Poseidon Merkle commitments) for a
/// trace with `degree` rows.
pub fn get_config(degree: u64) -> StarkConfig<Pcs, Challenge, Challenger> {
    let sponge = Hash::new(PERM.clone());
    let compressor = Compress::new(PERM.clone());
    let value_mmcs = ValMmcs::new(sponge, compressor);

    // FRI commits over the extension field via the same Merkle scheme.
    let fri_config = FriConfig {
        log_blowup: FRI_LOG_BLOWUP,
        num_queries: FRI_NUM_QUERIES,
        proof_of_work_bits: FRI_PROOF_OF_WORK_BITS,
        mmcs: ChallengeMmcs::new(value_mmcs.clone()),
    };

    Config::new(Pcs::new(
        log2_ceil_usize(degree as usize),
        Dft {},
        value_mmcs,
        fri_config,
    ))
}

View File

@@ -19,6 +19,7 @@ powdr-riscv-executor.workspace = true
[features]
default = ["halo2"]
plonky3 = ["powdr-backend/plonky3"]
halo2 = ["powdr-backend/halo2", "powdr-pipeline/halo2"]
estark-polygon = ["powdr-backend/estark-polygon", "powdr-pipeline/estark-polygon", "powdr-riscv/estark-polygon"]

5
test_data/pil/add.pil Normal file
View File

@@ -0,0 +1,5 @@
namespace Add(8);
let a;
let b;
let c;
a + b = c;