Mirror of https://github.com/0xPolygonZero/plonky2.git, synced 2026-01-10 05:38:01 -05:00
Add constraints binding (#1679)
* Add constraints binding
* Add ctl vars to constraint binding. Cleanup
* Cleanup comments
* Turn from_proof into from_opening_set for CtlCheckVars
* Tiny cleanup
* Fix poly_evals number of quotient targets
* Fix no std
* Apply comments
* Address comments
* Clippy
* Simulate polynomial values for constraint evaluation
* Apply most comments
* Make 50 modular
* Apply comments
* Apply comments
@@ -265,6 +265,16 @@ impl<F: RichField + Extendable<D>, H: AlgebraicHasher<F>, const D: usize>
        self.get_n_challenges(builder, D).try_into().unwrap()
    }

    pub fn get_n_extension_challenges(
        &mut self,
        builder: &mut CircuitBuilder<F, D>,
        n: usize,
    ) -> Vec<ExtensionTarget<D>> {
        (0..n)
            .map(|_| self.get_extension_challenge(builder))
            .collect()
    }

    /// Absorb any buffered inputs. After calling this, the input buffer will be empty, and the
    /// output buffer will be full.
    fn absorb_buffered_inputs(&mut self, builder: &mut CircuitBuilder<F, D>) {
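The helper added here draws `n` extension challenges in a loop, and the line above it shows that one extension challenge is assembled from `D` base-field challenges. A dependency-free sketch of that accounting, with a counter standing in for a real duplex challenger (the `MockChallenger` type and `D = 2` below are illustrative assumptions, not part of the patch):

// Illustrative sketch only: a stand-in challenger showing how n extension
// challenges decompose into n * D base-field draws.
struct MockChallenger {
    counter: u64,
}

impl MockChallenger {
    // One base-field challenge (a real challenger would squeeze a sponge here).
    fn get_challenge(&mut self) -> u64 {
        self.counter += 1;
        self.counter
    }
    // One degree-D extension challenge = D base-field challenges.
    fn get_extension_challenge<const D: usize>(&mut self) -> [u64; D] {
        core::array::from_fn(|_| self.get_challenge())
    }
    // n extension challenges, mirroring the loop in the patch.
    fn get_n_extension_challenges<const D: usize>(&mut self, n: usize) -> Vec<[u64; D]> {
        (0..n).map(|_| self.get_extension_challenge::<D>()).collect()
    }
}

fn main() {
    let mut ch = MockChallenger { counter: 0 };
    let zetas = ch.get_n_extension_challenges::<2>(3);
    assert_eq!(zetas.len(), 3);
    assert_eq!(ch.counter, 6); // 3 extension challenges consumed 3 * D = 6 draws.
}

The real `RecursiveChallenger` squeezes sponge outputs rather than a counter, but the n * D draw count is the same.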
@@ -99,7 +99,7 @@ impl StarkConfig {
    }

    /// Observes this [`StarkConfig`] for the given [`Challenger`].
    pub(crate) fn observe<F: RichField, H: Hasher<F>>(&self, challenger: &mut Challenger<F, H>) {
    pub fn observe<F: RichField, H: Hasher<F>>(&self, challenger: &mut Challenger<F, H>) {
        challenger.observe_element(F::from_canonical_usize(self.security_bits));
        challenger.observe_element(F::from_canonical_usize(self.num_challenges));
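Widening `observe` from `pub(crate)` to `pub` lets systems outside this crate absorb the STARK configuration into their own transcript before any proof data. A toy transcript showing why that observation order matters (the hash and constants are illustrative stand-ins, not plonky2's `Challenger`):

// Sketch, not the real Challenger: parameters are absorbed before proof data,
// so a prover cannot pick `num_challenges` after seeing the challenges.
#[derive(Default)]
struct Transcript(Vec<u64>);

impl Transcript {
    fn observe_element(&mut self, x: u64) {
        self.0.push(x);
    }
    // Toy "challenge": any deterministic function of everything observed so far.
    fn get_challenge(&self) -> u64 {
        self.0
            .iter()
            .fold(0xcbf29ce484222325u64, |h, &x| (h ^ x).wrapping_mul(0x100000001b3))
    }
}

fn main() {
    let (security_bits, num_challenges) = (100u64, 2u64);
    let mut t = Transcript::default();
    // Mirrors StarkConfig::observe: config parameters first...
    t.observe_element(security_bits);
    t.observe_element(num_challenges);
    // ...so later challenges depend on them.
    let alpha = t.get_challenge();

    // A transcript with a different config yields a different challenge.
    let mut t2 = Transcript::default();
    t2.observe_element(security_bits);
    t2.observe_element(3);
    assert_ne!(alpha, t2.get_challenge());
}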
@@ -449,7 +449,7 @@ impl<'a, F: RichField + Extendable<D>, const D: usize>
        total_num_helper_columns: usize,
        num_helper_ctl_columns: &[usize],
    ) -> Vec<Self> {
        // Get all cross-table lookup polynomial openings for the provided STARK proof.
        // Get all cross-table lookup polynomial openings for the provided STARK opening set.
        let ctl_zs = {
            let auxiliary_polys = proof
                .openings
@@ -647,7 +647,7 @@ pub struct CtlCheckVarsTarget<F: Field, const D: usize> {
}

impl<'a, F: Field, const D: usize> CtlCheckVarsTarget<F, D> {
    /// Circuit version of `from_proofs`, for a single STARK.
    /// Circuit version of `from_proof`, for a single STARK.
    pub fn from_proof(
        table: TableIdx,
        proof: &StarkProofTarget<D>,
@@ -657,19 +657,20 @@ impl<'a, F: Field, const D: usize> CtlCheckVarsTarget<F, D> {
        total_num_helper_columns: usize,
        num_helper_ctl_columns: &[usize],
    ) -> Vec<Self> {
        // Get all cross-table lookup polynomial openings for each STARK proof.
        // Get all cross-table lookup polynomial openings.
        let ctl_zs = {
            let openings = &proof.openings;
            let ctl_zs = openings
            let ctl_zs = proof
                .openings
                .auxiliary_polys
                .as_ref()
                .expect("We cannot have CTls without auxiliary polynomials.")
                .expect("We cannot have CTLs without auxiliary polynomials.")
                .iter()
                .skip(num_lookup_columns);
            let ctl_zs_next = openings
            let ctl_zs_next = proof
                .openings
                .auxiliary_polys_next
                .as_ref()
                .expect("We cannot have CTls without auxiliary polynomials.")
                .expect("We cannot have CTLs without auxiliary polynomials.")
                .iter()
                .skip(num_lookup_columns);
            ctl_zs.zip(ctl_zs_next).collect::<Vec<_>>()
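The `skip`/`zip` pattern above assumes a fixed layout of the auxiliary openings, lookup helper columns first and CTL columns after them, opened at both `zeta` and `g * zeta`. A dependency-free sketch of that pairing (all sizes below are made up for illustration):

fn main() {
    // Assumed layout: [lookup helpers | ctl columns], opened at zeta
    // (auxiliary_polys) and at g * zeta (auxiliary_polys_next).
    let num_lookup_columns = 2;
    let auxiliary_polys = vec![10u64, 11, 20, 21, 22];
    let auxiliary_polys_next = vec![110u64, 111, 120, 121, 122];

    // Mirrors the patch: skip the lookup columns, then pair each CTL opening
    // at zeta with the matching opening at g * zeta.
    let ctl_zs: Vec<(u64, u64)> = auxiliary_polys
        .iter()
        .skip(num_lookup_columns)
        .zip(auxiliary_polys_next.iter().skip(num_lookup_columns))
        .map(|(&a, &b)| (a, b))
        .collect();

    assert_eq!(ctl_zs, vec![(20, 120), (21, 121), (22, 122)]);
}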
@@ -1,5 +1,10 @@
#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
use core::cmp::min;

use plonky2::field::extension::Extendable;
use plonky2::field::polynomial::PolynomialCoeffs;
use plonky2::field::types::Field;
use plonky2::fri::proof::{FriProof, FriProofTarget};
use plonky2::fri::prover::final_poly_coeff_len;
use plonky2::fri::FriParams;
@@ -10,13 +15,17 @@ use plonky2::iop::challenger::{Challenger, RecursiveChallenger};
use plonky2::iop::target::Target;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::config::{AlgebraicHasher, GenericConfig};
use plonky2_util::log2_ceil;

use crate::config::StarkConfig;
use crate::cross_table_lookup::{CtlCheckVars, CtlCheckVarsTarget};
use crate::lookup::{
    get_grand_product_challenge_set, get_grand_product_challenge_set_target,
    GrandProductChallengeSet,
};
use crate::proof::*;
use crate::stark::Stark;
use crate::vanishing_poly::{compute_eval_vanishing_poly, compute_eval_vanishing_poly_circuit};

/// Generates challenges for a STARK proof from a challenger and given
/// all the arguments needed to update the challenger state.
@@ -25,12 +34,15 @@ use crate::proof::*;
/// or not by the challenger. Observing it here could be redundant in a
/// multi-STARK system where trace caps would have already been observed
/// before proving individually each STARK.
fn get_challenges<F, C, const D: usize>(
fn get_challenges<F, C, S: Stark<F, D>, const D: usize>(
    stark: &S,
    public_inputs: &[F],
    challenger: &mut Challenger<F, C::Hasher>,
    challenges: Option<&GrandProductChallengeSet<F>>,
    trace_cap: Option<&MerkleCap<F, C::Hasher>>,
    auxiliary_polys_cap: Option<&MerkleCap<F, C::Hasher>>,
    quotient_polys_cap: Option<&MerkleCap<F, C::Hasher>>,
    ctl_vars: Option<&[CtlCheckVars<F, F::Extension, F::Extension, D>]>,
    openings: &StarkOpeningSet<F, D>,
    commit_phase_merkle_caps: &[MerkleCap<F, C::Hasher>],
    final_poly: &PolynomialCoeffs<F::Extension>,
@@ -64,6 +76,90 @@ where
        challenger.observe_cap(cap);
    }

    let num_lookup_columns = stark.num_lookup_helper_columns(config);
    let lookup_challenges = if stark.uses_lookups() {
        Some(
            lookup_challenge_set
                .as_ref()
                .unwrap()
                .challenges
                .iter()
                .map(|ch| ch.beta)
                .collect::<Vec<_>>(),
        )
    } else {
        None
    };

    // Before computing the quotient polynomial, we use grinding to "bind" the constraints with high probability.
    // To do so, we get random challenges to represent the trace, auxiliary and CTL polynomials.
    // We evaluate the constraints using those random values and combine them with `stark_alphas_prime`.
    // Then, the challenger observes the resulting evaluations, so that the constraints are bound to `stark_alphas`
    // (the challenges used in the quotient polynomials).
    let stark_alphas_prime = challenger.get_n_challenges(num_challenges);

    // First power unreachable by the constraints.
    let pow_degree = core::cmp::max(2, stark.constraint_degree() + 1);

    let poly_evals = get_dummy_polys::<F, C, D>(
        challenger,
        S::COLUMNS,
        openings.auxiliary_polys.as_ref().map_or(0, |aux| aux.len()),
        pow_degree,
    );

    // Get dummy ctl_vars.
    let total_num_ctl_polys: usize = ctl_vars
        .map(|ctls| ctls.iter().map(|ctl| ctl.helper_columns.len()).sum())
        .unwrap_or_default();
    let ctl_vars_poly_evals = ctl_vars.map(|ctl_z| {
        let mut start_index = 0;
        ctl_z
            .iter()
            .enumerate()
            .map(|(i, ctl_check_vars)| {
                let num_ctl_helper_cols = ctl_check_vars.helper_columns.len();
                let helper_columns =
                    poly_evals.auxiliary_polys.as_ref().unwrap()[num_lookup_columns + start_index
                        ..num_lookup_columns + start_index + num_ctl_helper_cols]
                        .to_vec();

                let ctl_vars = CtlCheckVars::<F, F::Extension, F::Extension, D> {
                    helper_columns,
                    local_z: poly_evals.auxiliary_polys.as_ref().unwrap()
                        [num_lookup_columns + total_num_ctl_polys + i],
                    next_z: poly_evals.auxiliary_polys_next.as_ref().unwrap()
                        [num_lookup_columns + total_num_ctl_polys + i],
                    challenges: ctl_check_vars.challenges,
                    columns: ctl_check_vars.columns.clone(),
                    filter: ctl_check_vars.filter.clone(),
                };

                start_index += num_ctl_helper_cols;

                ctl_vars
            })
            .collect::<Vec<_>>()
    });

    let zeta_prime = challenger.get_extension_challenge();

    // Bind constraints.
    let constraint_evals = compute_eval_vanishing_poly::<F, S, D>(
        stark,
        &poly_evals,
        ctl_vars_poly_evals.as_deref(),
        lookup_challenges.as_ref(),
        &stark.lookups(),
        public_inputs,
        stark_alphas_prime.clone(),
        zeta_prime,
        degree_bits,
        num_lookup_columns,
    );

    challenger.observe_extension_elements(&constraint_evals);

    let stark_alphas = challenger.get_n_challenges(num_challenges);

    if let Some(quotient_polys_cap) = quotient_polys_cap {
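The comment block above is the core of this PR. The transcript squeezes throwaway challenges (`stark_alphas_prime`, `zeta_prime`), evaluates the constraints at simulated random openings, absorbs those evaluations, and only then squeezes the `stark_alphas` used for the quotient polynomial, so the final challenges are bound to the constraint set. A dependency-free sketch of that ordering (the toy transcript, field, and constraint are illustrative stand-ins for plonky2's `Challenger` and the STARK's constraint evaluation, not the real types):

// Toy transcript over u64 "field" elements; the real code uses plonky2's Challenger.
#[derive(Default)]
struct Transcript(Vec<u64>);

impl Transcript {
    fn observe(&mut self, xs: &[u64]) {
        self.0.extend_from_slice(xs);
    }
    fn challenge(&mut self) -> u64 {
        // Toy squeeze: a hash of everything observed so far, then re-absorbed.
        let c = self.0.iter().fold(0xcbf29ce484222325u64, |h, &x| {
            (h ^ x).wrapping_mul(0x100000001b3)
        });
        self.0.push(c);
        c
    }
}

// Stand-in for the STARK constraint set: one toy degree-2 constraint batch.
fn eval_constraints(trace: &[u64], alpha: u64) -> u64 {
    trace.iter().fold(0u64, |acc, &t| {
        acc.wrapping_mul(alpha).wrapping_add(t.wrapping_mul(t))
    })
}

fn main() {
    let mut t = Transcript::default();
    // 1. Throwaway challenges standing in for stark_alphas_prime / zeta_prime.
    let alpha_prime = t.challenge();
    // 2. Random values simulating the trace/auxiliary/CTL openings.
    let dummy_trace: Vec<u64> = (0..4).map(|_| t.challenge()).collect();
    // 3. Evaluate and absorb the constraint evaluations: the binding step.
    let binding = eval_constraints(&dummy_trace, alpha_prime);
    t.observe(&[binding]);
    // 4. Only now squeeze the alphas used for the quotient polynomial; they
    //    depend on `binding`, so they are bound to the constraint evaluations.
    let stark_alphas = t.challenge();
    println!("alpha' = {alpha_prime}, alphas = {stark_alphas}");
}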
@@ -102,6 +198,63 @@ where
    }
}

/// Simulate the trace, ctl, and auxiliary polynomials using dummy values. This is used to bind the constraints before committing to the quotient polynomial.
fn get_dummy_polys<F, C, const D: usize>(
    challenger: &mut Challenger<F, C::Hasher>,
    num_trace_polys: usize,
    num_aux_polys: usize,
    pow_degree: usize,
) -> StarkOpeningSet<F, D>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
    let log_pow_degree = log2_ceil(pow_degree);
    let num_extension_powers = core::cmp::max(1, 50 / log_pow_degree - 1);

    let total_num_dummy_extension_evals = num_trace_polys * 2 + num_aux_polys * 2; // for auxiliary_polys and auxiliary_polys_next

    // Get extension field challenges that will simulate the trace, ctl, and auxiliary polynomials.
    // Since sampling challenges for all polynomials might be heavy, we sample enough challenges {c_i}_i and use:
    // c_i, c_i^{pow_degree}, ..., c_i^{pow_degree * 50} as simulated values.
    let simulating_zetas = challenger
        .get_n_extension_challenges(total_num_dummy_extension_evals.div_ceil(num_extension_powers));

    // For each zeta in zetas, we compute the powers z^{(constraint_degree + 1)^i} for i = 0..num_extension_powers.
    let nb_dummy_per_zeta = min(num_extension_powers + 1, total_num_dummy_extension_evals);
    let dummy_extension_evals = simulating_zetas
        .into_iter()
        .flat_map(|zeta: F::Extension| {
            core::iter::successors(Some(zeta), move |prev| {
                Some(prev.exp_u64(pow_degree as u64))
            })
            .take(nb_dummy_per_zeta)
        })
        .collect::<Vec<_>>();

    let next_values_start = num_trace_polys;
    let auxiliary_polys_start = num_trace_polys * 2;
    let auxiliary_polys_next_start = auxiliary_polys_start + num_aux_polys;
    let is_aux_polys = num_aux_polys > 0;

    StarkOpeningSet {
        local_values: dummy_extension_evals[..next_values_start].to_vec(),
        next_values: dummy_extension_evals[next_values_start..auxiliary_polys_start].to_vec(),
        auxiliary_polys: if is_aux_polys {
            Some(dummy_extension_evals[auxiliary_polys_start..auxiliary_polys_next_start].to_vec())
        } else {
            None
        },
        auxiliary_polys_next: if is_aux_polys {
            Some(dummy_extension_evals[auxiliary_polys_next_start..].to_vec())
        } else {
            None
        },
        ctl_zs_first: None,   // Unused in the constraints.
        quotient_polys: None, // We don't need to simulate the quotient polynomials.
    }
}
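`get_dummy_polys` stretches a few sampled extension challenges into many simulated openings by taking successive `pow_degree`-th powers of each challenge; the constant in `50 / log_pow_degree - 1` is the one the commit message's "Make 50 modular" item refers to. A dependency-free sketch of the counting and the power chain, over the small prime field F_97 (the modulus and all sizes are illustrative assumptions, not plonky2's extension field):

// Sketch over F_97; the real code works in plonky2's degree-D field extension.
const P: u64 = 97;

fn exp_u64(mut base: u64, mut e: u64) -> u64 {
    let mut acc = 1u64;
    while e > 0 {
        if e & 1 == 1 {
            acc = acc * base % P;
        }
        base = base * base % P;
        e >>= 1;
    }
    acc
}

fn main() {
    // Mirrors the patch: pow_degree = max(2, constraint_degree + 1).
    let constraint_degree = 3usize;
    let pow_degree = 2usize.max(constraint_degree + 1); // 4, first power unreachable by the constraints
    // Inline stand-in for plonky2_util::log2_ceil: log2_ceil(4) = 2.
    let log_pow_degree = usize::BITS as usize - (pow_degree - 1).leading_zeros() as usize;
    let num_extension_powers = 1usize.max(50 / log_pow_degree - 1); // 24

    // Say we need 10 trace + 4 aux openings, each at both zeta and g * zeta:
    let total_num_dummy_extension_evals = 10 * 2 + 4 * 2; // 28
    let num_zetas = total_num_dummy_extension_evals.div_ceil(num_extension_powers);
    let nb_dummy_per_zeta = (num_extension_powers + 1).min(total_num_dummy_extension_evals);

    // Each sampled zeta is stretched into zeta, zeta^4, zeta^16, ... as `successors` does.
    let zeta = 5u64;
    let chain: Vec<u64> =
        std::iter::successors(Some(zeta), |&prev| Some(exp_u64(prev, pow_degree as u64)))
            .take(nb_dummy_per_zeta)
            .collect();

    assert_eq!(num_zetas, 2); // 2 sampled challenges cover all 28 simulated openings
    assert_eq!(chain[1], exp_u64(zeta, 4));
    assert_eq!(chain[2], exp_u64(zeta, 16));
}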
impl<F, C, const D: usize> StarkProof<F, C, D>
where
    F: RichField + Extendable<D>,
@@ -114,10 +267,13 @@ where
    /// Multi-STARK systems may already observe individual trace caps
    /// ahead of proving each table, and hence may ignore observing
    /// again the cap when generating individual challenges.
    pub fn get_challenges(
    pub fn get_challenges<S: Stark<F, D>>(
        &self,
        stark: &S,
        public_inputs: &[F],
        challenger: &mut Challenger<F, C::Hasher>,
        challenges: Option<&GrandProductChallengeSet<F>>,
        ctl_vars: Option<&[CtlCheckVars<F, F::Extension, F::Extension, D>]>,
        ignore_trace_cap: bool,
        config: &StarkConfig,
        verifier_circuit_fri_params: Option<FriParams>,
@@ -144,12 +300,15 @@ where
            Some(trace_cap)
        };

        get_challenges::<F, C, D>(
        get_challenges::<F, C, S, D>(
            stark,
            public_inputs,
            challenger,
            challenges,
            trace_cap,
            auxiliary_polys_cap.as_ref(),
            quotient_polys_cap.as_ref(),
            ctl_vars,
            openings,
            commit_phase_merkle_caps,
            final_poly,
@@ -173,18 +332,23 @@ where
    /// Multi-STARK systems may already observe individual trace caps
    /// ahead of proving each table, and hence may ignore observing
    /// again the cap when generating individual challenges.
    pub fn get_challenges(
    pub fn get_challenges<S: Stark<F, D>>(
        &self,
        stark: &S,
        challenger: &mut Challenger<F, C::Hasher>,
        challenges: Option<&GrandProductChallengeSet<F>>,
        ctl_vars: Option<&[CtlCheckVars<F, F::Extension, F::Extension, D>]>,
        ignore_trace_cap: bool,
        config: &StarkConfig,
        verifier_circuit_fri_params: Option<FriParams>,
    ) -> StarkProofChallenges<F, D> {
        challenger.observe_elements(&self.public_inputs);
        self.proof.get_challenges(
        self.proof.get_challenges::<S>(
            stark,
            &self.public_inputs,
            challenger,
            challenges,
            ctl_vars,
            ignore_trace_cap,
            config,
            verifier_circuit_fri_params,
@@ -194,17 +358,22 @@ where
/// Circuit version of `get_challenges`, with the same flexibility around
/// `trace_cap` being passed as an `Option`.
fn get_challenges_target<F, C, const D: usize>(
fn get_challenges_target<F, C, S: Stark<F, D>, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    stark: &S,
    public_inputs: &[Target],
    challenger: &mut RecursiveChallenger<F, C::Hasher, D>,
    challenges: Option<&GrandProductChallengeSet<Target>>,
    trace_cap: Option<&MerkleCapTarget>,
    auxiliary_polys_cap: Option<&MerkleCapTarget>,
    quotient_polys_cap: Option<&MerkleCapTarget>,
    ctl_vars: Option<&[CtlCheckVarsTarget<F, D>]>,
    openings: &StarkOpeningSetTarget<D>,
    commit_phase_merkle_caps: &[MerkleCapTarget],
    final_poly: &PolynomialCoeffsExtTarget<D>,
    pow_witness: Target,
    degree_bits: usize,
    degree_bits_target: Target,
    config: &StarkConfig,
) -> StarkProofChallengesTarget<D>
where
@@ -233,6 +402,86 @@ where
        challenger.observe_cap(cap);
    }

    let num_lookup_columns = stark.num_lookup_helper_columns(config);
    let lookup_challenges = stark.uses_lookups().then(|| {
        lookup_challenge_set
            .as_ref()
            .unwrap()
            .challenges
            .iter()
            .map(|ch| ch.beta)
            .collect::<Vec<_>>()
    });

    // Before computing the quotient polynomial, we use grinding to "bind" the constraints with high probability.
    // To do so, we get random challenges to represent the trace, auxiliary and CTL polynomials.
    // We evaluate the constraints using those random values and combine them with `stark_alphas_prime`.
    // Then, the challenger observes the resulting evaluations, so that the constraints are bound to `stark_alphas`
    // (the challenges used in the quotient polynomials).
    let stark_alphas_prime = challenger.get_n_challenges(builder, num_challenges);

    let pow_degree = core::cmp::max(2, stark.constraint_degree() + 1);
    let poly_evals = get_dummy_polys_circuit::<F, C, D>(
        builder,
        challenger,
        S::COLUMNS,
        openings.auxiliary_polys.as_ref().map_or(0, |aux| aux.len()),
        pow_degree,
    );

    // Get dummy ctl_vars.
    let total_num_ctl_polys: usize = ctl_vars
        .map(|ctls| ctls.iter().map(|ctl| ctl.helper_columns.len()).sum())
        .unwrap_or_default();
    let ctl_vars_poly_evals = ctl_vars.map(|ctl_z| {
        let mut start_index = 0;
        ctl_z
            .iter()
            .enumerate()
            .map(|(i, ctl_check_vars)| {
                let num_ctl_helper_cols = ctl_check_vars.helper_columns.len();
                let helper_columns =
                    poly_evals.auxiliary_polys.as_ref().unwrap()[num_lookup_columns + start_index
                        ..num_lookup_columns + start_index + num_ctl_helper_cols]
                        .to_vec();

                let ctl_vars = CtlCheckVarsTarget::<F, D> {
                    helper_columns,
                    local_z: poly_evals.auxiliary_polys.as_ref().unwrap()
                        [num_lookup_columns + total_num_ctl_polys + i],
                    next_z: poly_evals.auxiliary_polys_next.as_ref().unwrap()
                        [num_lookup_columns + total_num_ctl_polys + i],
                    challenges: ctl_check_vars.challenges,
                    columns: ctl_check_vars.columns.clone(),
                    filter: ctl_check_vars.filter.clone(),
                };

                start_index += num_ctl_helper_cols;

                ctl_vars
            })
            .collect::<Vec<_>>()
    });

    let zeta_prime = challenger.get_extension_challenge(builder);

    // Bind constraints.
    let constraint_evals = compute_eval_vanishing_poly_circuit::<F, S, D>(
        builder,
        stark,
        &poly_evals,
        ctl_vars_poly_evals.as_deref(),
        lookup_challenges.as_ref(),
        public_inputs,
        stark_alphas_prime.clone(),
        zeta_prime,
        degree_bits,
        degree_bits_target,
        num_lookup_columns,
    );

    challenger.observe_extension_elements(&constraint_evals);

    let stark_alphas = challenger.get_n_challenges(builder, num_challenges);

    if let Some(cap) = quotient_polys_cap {
@@ -257,6 +506,71 @@ where
    }
}

// Simulate the trace, ctl, and auxiliary polynomials using dummy values. This is used to bind the constraints before committing to the quotient polynomial.
fn get_dummy_polys_circuit<F, C, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    challenger: &mut RecursiveChallenger<F, C::Hasher, D>,
    num_trace_polys: usize,
    num_aux_polys: usize,
    pow_degree: usize,
) -> StarkOpeningSetTarget<D>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    C::Hasher: AlgebraicHasher<F>,
{
    let log_pow_degree = log2_ceil(pow_degree);
    let num_extension_powers = core::cmp::max(1, 50 / log_pow_degree - 1);

    let total_num_dummy_extension_evals = num_trace_polys * 2 + num_aux_polys * 2; // for auxiliary_polys and auxiliary_polys_next

    // Get extension field challenges that will simulate the trace, ctl, and auxiliary polynomials.
    // Since sampling challenges for all polynomials might be heavy, we sample enough challenges {c_i}_i and use:
    // c_i, c_i^{pow_degree}, ..., c_i^{pow_degree * 50} as simulated values.
    let simulating_zetas = challenger.get_n_extension_challenges(
        builder,
        total_num_dummy_extension_evals.div_ceil(num_extension_powers),
    );

    // For each zeta in zetas, we compute the powers z^{(constraint_degree + 1)^i} for i = 0..num_extension_powers.
    let nb_dummy_per_zeta = min(num_extension_powers + 1, total_num_dummy_extension_evals);
    let dummy_extension_evals = simulating_zetas
        .into_iter()
        .flat_map(|zeta| {
            let mut powers = Vec::with_capacity(num_extension_powers);
            powers.push(zeta);
            let mut pow_val = zeta;
            for _ in 1..nb_dummy_per_zeta {
                pow_val = builder.exp_u64_extension(pow_val, pow_degree as u64);
                powers.push(pow_val);
            }
            powers
        })
        .collect::<Vec<_>>();

    let next_values_start = num_trace_polys;
    let auxiliary_polys_start = num_trace_polys * 2;
    let auxiliary_polys_next_start = auxiliary_polys_start + num_aux_polys;
    let is_aux_polys = num_aux_polys > 0;

    StarkOpeningSetTarget {
        local_values: dummy_extension_evals[..next_values_start].to_vec(),
        next_values: dummy_extension_evals[next_values_start..auxiliary_polys_start].to_vec(),
        auxiliary_polys: if is_aux_polys {
            Some(dummy_extension_evals[auxiliary_polys_start..auxiliary_polys_next_start].to_vec())
        } else {
            None
        },
        auxiliary_polys_next: if is_aux_polys {
            Some(dummy_extension_evals[auxiliary_polys_next_start..].to_vec())
        } else {
            None
        },
        ctl_zs_first: None,
        quotient_polys: None, // We don't need to simulate the quotient polynomials.
    }
}
impl<const D: usize> StarkProofTarget<D> {
    /// Creates all Fiat-Shamir `Target` challenges used in the STARK proof.
    /// For a single STARK system, the `ignore_trace_cap` boolean should
@@ -265,11 +579,15 @@ impl<const D: usize> StarkProofTarget<D> {
    /// Multi-STARK systems may already observe individual trace caps
    /// ahead of proving each table, and hence may ignore observing
    /// again the cap when generating individual challenges.
    pub fn get_challenges<F, C>(
    pub fn get_challenges<F, C, S: Stark<F, D>>(
        &self,
        builder: &mut CircuitBuilder<F, D>,
        stark: &S,
        public_inputs: &[Target],
        challenger: &mut RecursiveChallenger<F, C::Hasher, D>,
        challenges: Option<&GrandProductChallengeSet<Target>>,
        ctl_vars: Option<&[CtlCheckVarsTarget<F, D>]>,
        degree_bits: usize,
        ignore_trace_cap: bool,
        config: &StarkConfig,
    ) -> StarkProofChallengesTarget<D>
@@ -299,17 +617,22 @@ impl<const D: usize> StarkProofTarget<D> {
            Some(trace_cap)
        };

        get_challenges_target::<F, C, D>(
        get_challenges_target::<F, C, S, D>(
            builder,
            stark,
            public_inputs,
            challenger,
            challenges,
            trace_cap,
            auxiliary_polys_cap.as_ref(),
            quotient_polys_cap.as_ref(),
            ctl_vars,
            openings,
            commit_phase_merkle_caps,
            final_poly,
            *pow_witness,
            degree_bits,
            self.degree_bits,
            config,
        )
    }
@@ -323,11 +646,14 @@ impl<const D: usize> StarkProofWithPublicInputsTarget<D> {
    /// Multi-STARK systems may already observe individual trace caps
    /// ahead of proving each table, and hence may ignore observing
    /// again the cap when generating individual challenges.
    pub fn get_challenges<F, C>(
    pub fn get_challenges<F, C, S: Stark<F, D>>(
        &self,
        stark: &S,
        builder: &mut CircuitBuilder<F, D>,
        challenger: &mut RecursiveChallenger<F, C::Hasher, D>,
        challenges: Option<&GrandProductChallengeSet<Target>>,
        ctl_vars: Option<&[CtlCheckVarsTarget<F, D>]>,
        degree_bits: usize,
        ignore_trace_cap: bool,
        config: &StarkConfig,
    ) -> StarkProofChallengesTarget<D>
@@ -337,7 +663,16 @@ impl<const D: usize> StarkProofWithPublicInputsTarget<D> {
        C::Hasher: AlgebraicHasher<F>,
    {
        challenger.observe_elements(&self.public_inputs);
        self.proof
            .get_challenges::<F, C>(builder, challenger, challenges, ignore_trace_cap, config)
        self.proof.get_challenges::<F, C, S>(
            builder,
            stark,
            &self.public_inputs,
            challenger,
            challenges,
            ctl_vars,
            degree_bits,
            ignore_trace_cap,
            config,
        )
    }
}
@@ -83,6 +83,7 @@ impl<const D: usize> StarkProofTarget<D> {
        if let Some(poly) = &self.quotient_polys_cap {
            buffer.write_target_merkle_cap(poly)?;
        }

        buffer.write_target_fri_proof(&self.opening_proof)?;
        self.openings.to_buffer(buffer)?;
        Ok(())
@@ -2,7 +2,7 @@

#[cfg(not(feature = "std"))]
use alloc::vec::Vec;
use core::iter::once;
use core::iter::{once, successors};

use anyhow::{ensure, Result};
use itertools::Itertools;
@@ -34,7 +34,7 @@ use crate::lookup::{
};
use crate::proof::{StarkOpeningSet, StarkProof, StarkProofWithPublicInputs};
use crate::stark::Stark;
use crate::vanishing_poly::eval_vanishing_poly;
use crate::vanishing_poly::{compute_eval_vanishing_poly, eval_vanishing_poly};

/// From a STARK trace, computes a STARK proof to attest its correctness.
pub fn prove<F, C, S, const D: usize>(
@@ -120,6 +120,8 @@ where
/// - all the required polynomial and FRI argument openings.
/// - individual `ctl_data` and common `ctl_challenges` if the STARK is part
/// of a multi-STARK system.
///
/// /!\ Note that this method does not observe the `config`, and it assumes the `config` has already been observed.
pub fn prove_with_commitment<F, C, S, const D: usize>(
    stark: &S,
    config: &StarkConfig,
@@ -143,6 +145,11 @@ where
    let fri_params = config.fri_params(degree_bits);
    let rate_bits = config.fri_config.rate_bits;
    let cap_height = config.fri_config.cap_height;

    let num_ctl_polys = ctl_data
        .map(|data| data.num_ctl_helper_polys())
        .unwrap_or_default();

    assert!(
        fri_params.total_arities() <= degree_bits + rate_bits - cap_height,
        "FRI total reduction arity is too large.",
@@ -229,11 +236,7 @@ where
        challenger.observe_cap(cap);
    }

    let alphas = challenger.get_n_challenges(config.num_challenges);

    let num_ctl_polys = ctl_data
        .map(|data| data.num_ctl_helper_polys())
        .unwrap_or_default();
    let alphas_prime = challenger.get_n_challenges(config.num_challenges);

    // This is an expensive check, hence is only run when `debug_assertions` are enabled.
    #[cfg(debug_assertions)]
@@ -246,13 +249,126 @@ where
            lookup_challenges.as_ref(),
            &lookups,
            ctl_data,
            alphas.clone(),
            alphas_prime.clone(),
            degree_bits,
            num_lookup_columns,
            &num_ctl_polys,
        );
    }

    let g = F::primitive_root_of_unity(degree_bits);

    // Before computing the quotient polynomial, we use grinding to "bind" the constraints with high probability.
    // To do so, we get random challenges to represent the trace, auxiliary and CTL polynomials.
    // We evaluate the constraints using those random values and combine them with `stark_alphas_prime`.
    // Then, the challenger observes the resulting evaluations, so that the constraints are bound to `stark_alphas`
    // (the challenges used in the quotient polynomials).
    let is_aux_polys = auxiliary_polys_commitment.is_some();
    let num_auxiliary_polys = auxiliary_polys_commitment
        .as_ref()
        .map_or(0, |p| p.polynomials.len());
    let total_num_ctl_polys = num_ctl_polys.iter().sum::<usize>();

    // Get the number of challenges we need to simulate the trace, auxiliary polynomials and CTL polynomials.
    let total_num_dummy_extension_evals =
        trace_commitment.polynomials.len() * 2 + num_auxiliary_polys * 2; // for auxiliary_polys and auxiliary_polys_next

    // First power unreachable by the constraints.
    let pow_degree = core::cmp::max(2, stark.constraint_degree() + 1);
    let num_extension_powers = core::cmp::max(1, 50 / log2_ceil(pow_degree) - 1);

    // Get extension field challenges that will simulate the trace, ctl, and auxiliary polynomials.
    // Since sampling challenges for all polynomials might be heavy, we sample enough challenges {c_i}_i and use:
    // c_i, c_i^{pow_degree}, ..., c_i^{pow_degree * 50} as simulated values.
    let simulating_zetas = challenger
        .get_n_extension_challenges(total_num_dummy_extension_evals.div_ceil(num_extension_powers));

    // For each zeta in zetas, we compute the powers z^{(constraint_degree + 1)^i} for i = 0..num_extension_powers.
    let nb_dummy_per_zeta =
        core::cmp::min(num_extension_powers + 1, total_num_dummy_extension_evals);
    let dummy_extension_evals = simulating_zetas
        .iter()
        .flat_map(|&zeta| {
            successors(Some(zeta), move |prev| {
                Some(prev.exp_u64(pow_degree as u64))
            })
            .take(nb_dummy_per_zeta)
        })
        .collect::<Vec<_>>();

    // Simulate the opening set.
    let next_values_start = S::COLUMNS;
    let auxiliary_polys_start = S::COLUMNS * 2;
    let auxiliary_polys_next_start = auxiliary_polys_start + num_auxiliary_polys;

    let poly_evals = StarkOpeningSet {
        local_values: dummy_extension_evals[..next_values_start].to_vec(),
        next_values: dummy_extension_evals[next_values_start..auxiliary_polys_start].to_vec(),
        auxiliary_polys: if is_aux_polys {
            Some(dummy_extension_evals[auxiliary_polys_start..auxiliary_polys_next_start].to_vec())
        } else {
            None
        },
        auxiliary_polys_next: if is_aux_polys {
            Some(dummy_extension_evals[auxiliary_polys_next_start..].to_vec())
        } else {
            None
        },
        ctl_zs_first: None,
        quotient_polys: None,
    };

    // Get dummy ctl_vars.
    let ctl_vars = ctl_data.map(|data| {
        let mut start_index = 0;
        data.zs_columns
            .iter()
            .enumerate()
            .map(|(i, zs_columns)| {
                let num_ctl_helper_cols = num_ctl_polys[i];
                let helper_columns =
                    poly_evals.auxiliary_polys.as_ref().unwrap()[num_lookup_columns + start_index
                        ..num_lookup_columns + start_index + num_ctl_helper_cols]
                        .to_vec();

                let ctl_vars = CtlCheckVars::<F, F::Extension, F::Extension, D> {
                    helper_columns,
                    local_z: poly_evals.auxiliary_polys.as_ref().unwrap()
                        [num_lookup_columns + total_num_ctl_polys + i],
                    next_z: poly_evals.auxiliary_polys_next.as_ref().unwrap()
                        [num_lookup_columns + total_num_ctl_polys + i],
                    challenges: zs_columns.challenge,
                    columns: zs_columns.columns.clone(),
                    filter: zs_columns.filter.clone(),
                };

                start_index += num_ctl_helper_cols;

                ctl_vars
            })
            .collect::<Vec<_>>()
    });
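Every copy of this bookkeeping in the patch relies on one layout for the simulated `auxiliary_polys`: `num_lookup_columns` lookup helpers, then each CTL instance's helper columns back to back, then one `z` column per CTL instance, so `local_z` sits at `num_lookup_columns + total_num_ctl_polys + i`. A dependency-free sketch of the index arithmetic (all sizes are made up for illustration):

fn main() {
    // Assumed sizes, for illustration only.
    let num_lookup_columns = 2;
    let num_ctl_polys = [3usize, 1]; // helper columns per CTL instance
    let total_num_ctl_polys: usize = num_ctl_polys.iter().sum(); // 4

    // auxiliary_polys = [lookup helpers | ctl helpers | ctl z-polys]
    let aux_len = num_lookup_columns + total_num_ctl_polys + num_ctl_polys.len();
    let auxiliary_polys: Vec<usize> = (0..aux_len).collect();

    let mut start_index = 0;
    for (i, &num_ctl_helper_cols) in num_ctl_polys.iter().enumerate() {
        // Helper columns of instance i, exactly as sliced in the patch.
        let helpers = &auxiliary_polys[num_lookup_columns + start_index
            ..num_lookup_columns + start_index + num_ctl_helper_cols];
        // The z-polys live after *all* helper columns.
        let local_z = auxiliary_polys[num_lookup_columns + total_num_ctl_polys + i];
        start_index += num_ctl_helper_cols;

        match i {
            0 => {
                assert_eq!(helpers, &[2, 3, 4]);
                assert_eq!(local_z, 6);
            }
            1 => {
                assert_eq!(helpers, &[5]);
                assert_eq!(local_z, 7);
            }
            _ => unreachable!(),
        }
    }
}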
    let zeta_prime = challenger.get_extension_challenge::<D>();

    // Bind constraints.
    let constraints = compute_eval_vanishing_poly::<F, S, D>(
        stark,
        &poly_evals,
        ctl_vars.as_deref(),
        lookup_challenges.as_ref(),
        &lookups,
        public_inputs,
        alphas_prime.clone(),
        zeta_prime,
        degree_bits,
        num_lookup_columns,
    );

    challenger.observe_extension_elements(&constraints);

    let alphas = challenger.get_n_challenges(config.num_challenges);

    let quotient_polys = timed!(
        timing,
        "compute quotient polys",
@@ -314,7 +430,7 @@ where
    // To avoid leaking witness data, we want to ensure that our opening locations, `zeta` and
    // `g * zeta`, are not in our subgroup `H`. It suffices to check `zeta` only, since
    // `(g * zeta)^n = zeta^n`, where `n` is the order of `g`.
    let g = F::primitive_root_of_unity(degree_bits);

    ensure!(
        zeta.exp_power_of_2(degree_bits) != F::Extension::ONE,
        "Opening point is in the subgroup."
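Spelled out, the comment above is the following small argument, with $n = 2^{\text{degree\_bits}} = |H|$ and $g$ of order $n$ (an editorial restatement, not part of the diff):

$$\zeta \in H \iff \zeta^{n} = 1, \qquad (g\,\zeta)^{n} = g^{n}\,\zeta^{n} = \zeta^{n},$$

so the single check $\zeta^{n} \neq 1$, which is exactly `zeta.exp_power_of_2(degree_bits) != F::Extension::ONE`, rules both opening points $\zeta$ and $g\,\zeta$ out of the subgroup at once.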
@@ -11,7 +11,6 @@ use plonky2::field::extension::Extendable;
use plonky2::fri::witness_util::set_fri_proof_target;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::challenger::RecursiveChallenger;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::target::Target;
use plonky2::iop::witness::WitnessWrite;
use plonky2::plonk::circuit_builder::CircuitBuilder;
@@ -20,16 +19,13 @@ use plonky2::util::reducing::ReducingFactorTarget;
use plonky2::with_context;

use crate::config::StarkConfig;
use crate::constraint_consumer::RecursiveConstraintConsumer;
use crate::cross_table_lookup::CtlCheckVarsTarget;
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::lookup::LookupCheckVarsTarget;
use crate::proof::{
    StarkOpeningSetTarget, StarkProof, StarkProofChallengesTarget, StarkProofTarget,
    StarkProofWithPublicInputs, StarkProofWithPublicInputsTarget,
};
use crate::stark::Stark;
use crate::vanishing_poly::eval_vanishing_poly_circuit;
use crate::vanishing_poly::compute_eval_vanishing_poly_circuit;

/// Encodes the verification of a [`StarkProofWithPublicInputsTarget`]
/// for some statement in a circuit.
@@ -54,7 +50,16 @@ pub fn verify_stark_proof_circuit<
    let challenges = with_context!(
        builder,
        "compute challenges",
        proof_with_pis.get_challenges::<F, C>(builder, &mut challenger, None, false, inner_config)
        proof_with_pis.get_challenges::<F, C, S>(
            &stark,
            builder,
            &mut challenger,
            None,
            None,
            max_degree_bits_to_support,
            false,
            inner_config
        )
    );

    verify_stark_proof_with_challenges_circuit::<F, C, S, D>(
@@ -99,34 +104,18 @@ pub fn verify_stark_proof_with_challenges_circuit<
        .map(|v| v.iter().map(|ctl| ctl.helper_columns.len()).sum::<usize>())
        .unwrap_or_default();

    let StarkOpeningSetTarget {
        local_values,
        next_values,
        auxiliary_polys,
        auxiliary_polys_next,
        ctl_zs_first,
        quotient_polys,
    } = &proof.openings;

    let vars = S::EvaluationFrameTarget::from_values(
        local_values,
        next_values,
        &public_inputs
            .iter()
            .map(|&t| builder.convert_to_ext(t))
            .collect::<Vec<_>>(),
    );

    // degree_bits should be nonzero.
    let _ = builder.inverse(proof.degree_bits);

    let quotient_polys = &proof.openings.quotient_polys;
    let ctl_zs_first = &proof.openings.ctl_zs_first;

    let max_num_of_bits_in_degree = degree_bits + 1;
    let degree = builder.exp(two, proof.degree_bits, max_num_of_bits_in_degree);
    let degree_bits_vec = builder.split_le(degree, max_num_of_bits_in_degree);

    let zeta_pow_deg = builder.exp_extension_from_bits(challenges.stark_zeta, &degree_bits_vec);
    let z_h_zeta = builder.sub_extension(zeta_pow_deg, one);
    let degree_ext = builder.convert_to_ext(degree);

    // Calculate primitive_root_of_unity(degree_bits)
    let two_adicity = builder.constant(F::from_canonical_usize(F::TWO_ADICITY));
@@ -135,20 +124,6 @@ pub fn verify_stark_proof_with_challenges_circuit<
        builder.exp(two, two_adicity_sub_degree_bits, F::TWO_ADICITY);
    let base = builder.constant(F::POWER_OF_TWO_GENERATOR);
    let g = builder.exp(base, two_exp_two_adicity_sub_degree_bits, F::TWO_ADICITY);
    let g_ext = builder.convert_to_ext(g);

    let (l_0, l_last) =
        eval_l_0_and_l_last_circuit(builder, degree_ext, g_ext, challenges.stark_zeta, z_h_zeta);
    let last = builder.inverse_extension(g_ext);
    let z_last = builder.sub_extension(challenges.stark_zeta, last);

    let mut consumer = RecursiveConstraintConsumer::<F, D>::new(
        builder.zero_extension(),
        challenges.stark_alphas,
        z_last,
        l_0,
        l_last,
    );

    let num_lookup_columns = stark.num_lookup_helper_columns(inner_config);
    let lookup_challenges = stark.uses_lookups().then(|| {
@@ -162,26 +137,19 @@ pub fn verify_stark_proof_with_challenges_circuit<
            .collect::<Vec<_>>()
    });

    let lookup_vars = stark.uses_lookups().then(|| LookupCheckVarsTarget {
        local_values: auxiliary_polys.as_ref().unwrap()[..num_lookup_columns].to_vec(),
        next_values: auxiliary_polys_next.as_ref().unwrap()[..num_lookup_columns].to_vec(),
        challenges: lookup_challenges.unwrap(),
    });

    with_context!(
    let vanishing_polys_zeta = compute_eval_vanishing_poly_circuit(
        builder,
        "evaluate vanishing polynomial",
        eval_vanishing_poly_circuit::<F, S, D>(
            builder,
            stark,
            &vars,
            lookup_vars,
            ctl_vars,
            &mut consumer
        )
        stark,
        &proof.openings,
        ctl_vars,
        lookup_challenges.as_ref(),
        public_inputs,
        challenges.stark_alphas,
        challenges.stark_zeta,
        degree_bits,
        proof.degree_bits,
        num_lookup_columns,
    );
    let vanishing_polys_zeta = consumer.accumulators();

    // Check each polynomial identity, of the form `vanishing(x) = Z_H(x) quotient(x)`, at zeta.
    let mut scale = ReducingFactorTarget::new(zeta_pow_deg);
    if let Some(quotient_polys) = quotient_polys {
@@ -238,24 +206,6 @@ pub fn verify_stark_proof_with_challenges_circuit<
        }
    }

fn eval_l_0_and_l_last_circuit<F: RichField + Extendable<D>, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    n: ExtensionTarget<D>,
    g: ExtensionTarget<D>,
    x: ExtensionTarget<D>,
    z_x: ExtensionTarget<D>,
) -> (ExtensionTarget<D>, ExtensionTarget<D>) {
    let one = builder.one_extension();
    let l_0_deno = builder.mul_sub_extension(n, x, n);
    let l_last_deno = builder.mul_sub_extension(g, x, one);
    let l_last_deno = builder.mul_extension(n, l_last_deno);

    (
        builder.div_extension(z_x, l_0_deno),
        builder.div_extension(z_x, l_last_deno),
    )
}

/// Adds a new `StarkProofWithPublicInputsTarget` to this circuit.
pub fn add_virtual_stark_proof_with_pis<
    F: RichField + Extendable<D>,

@@ -1,17 +1,26 @@
#[cfg(not(feature = "std"))]
use alloc::vec::Vec;

use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::packed::PackedField;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::iop::target::Target;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::with_context;

use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cross_table_lookup::{
    eval_cross_table_lookup_checks, eval_cross_table_lookup_checks_circuit, CtlCheckVars,
    CtlCheckVarsTarget,
};
use crate::evaluation_frame::StarkEvaluationFrame;
use crate::lookup::{
    eval_ext_lookups_circuit, eval_packed_lookups_generic, Lookup, LookupCheckVars,
    LookupCheckVarsTarget,
};
use crate::proof::{StarkOpeningSet, StarkOpeningSetTarget};
use crate::stark::Stark;

/// Evaluates all constraint, permutation and cross-table lookup polynomials
@@ -83,3 +92,188 @@ pub(crate) fn eval_vanishing_poly_circuit<F, S, const D: usize>(
        );
    }
}

/// Evaluate the Lagrange polynomials `L_0` and `L_(n-1)` at a point `x`.
/// `L_0(x) = (x^n - 1)/(n * (x - 1))`
/// `L_(n-1)(x) = (x^n - 1)/(n * (g * x - 1))`, with `g` the first element of the subgroup.
pub(crate) fn eval_l_0_and_l_last<F: Field>(log_n: usize, x: F) -> (F, F) {
    let n = F::from_canonical_usize(1 << log_n);
    let g = F::primitive_root_of_unity(log_n);
    let z_x = x.exp_power_of_2(log_n) - F::ONE;
    let invs = F::batch_multiplicative_inverse(&[n * (x - F::ONE), n * (g * x - F::ONE)]);

    (z_x * invs[0], z_x * invs[1])
}
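The two formulas in the doc comment follow from the factorization $Z_H(x) = x^n - 1 = \prod_{i=0}^{n-1}(x - g^i)$ and $Z_H'(x) = n\,x^{n-1}$, so $Z_H'(g^j) = n\,g^{-j}$ (an editorial derivation, not part of the diff). For the node $g^j$,

$$L_j(x) = \frac{Z_H(x)}{Z_H'(g^j)\,(x - g^j)} = \frac{g^{j}\,(x^n - 1)}{n\,(x - g^j)}.$$

Taking $j = 0$ gives $L_0(x) = (x^n - 1)/(n\,(x - 1))$, and taking $j = n-1$ with $g^{n-1} = g^{-1}$ gives

$$L_{n-1}(x) = \frac{x^n - 1}{n\,g\,(x - g^{-1})} = \frac{x^n - 1}{n\,(g x - 1)},$$

which matches both returned values, computed above with a single batched inversion.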
/// Evaluates the constraints at a random extension point. It is used to bind the constraints.
pub(crate) fn compute_eval_vanishing_poly<F, S, const D: usize>(
    stark: &S,
    stark_opening_set: &StarkOpeningSet<F, D>,
    ctl_vars: Option<&[CtlCheckVars<F, F::Extension, F::Extension, D>]>,
    lookup_challenges: Option<&Vec<F>>,
    lookups: &[Lookup<F>],
    public_inputs: &[F],
    alphas: Vec<F>,
    zeta: F::Extension,
    degree_bits: usize,
    num_lookup_columns: usize,
) -> Vec<F::Extension>
where
    F: RichField + Extendable<D>,
    S: Stark<F, D>,
{
    let StarkOpeningSet {
        local_values,
        next_values,
        auxiliary_polys,
        auxiliary_polys_next,
        ctl_zs_first: _,
        quotient_polys: _,
    } = &stark_opening_set;

    let (l_0, l_last) = eval_l_0_and_l_last(degree_bits, zeta);
    let last = F::primitive_root_of_unity(degree_bits).inverse();
    let z_last = zeta - last.into();

    let mut consumer = ConstraintConsumer::<F::Extension>::new(
        alphas
            .iter()
            .map(|&alpha| F::Extension::from_basefield(alpha))
            .collect::<Vec<_>>(),
        z_last,
        l_0,
        l_last,
    );

    let vars = S::EvaluationFrame::from_values(
        local_values,
        next_values,
        &public_inputs
            .iter()
            .copied()
            .map(F::Extension::from_basefield)
            .collect::<Vec<_>>(),
    );

    let lookup_vars = lookup_challenges.map(|l_c| LookupCheckVars {
        local_values: auxiliary_polys.as_ref().unwrap()[..num_lookup_columns].to_vec(),
        next_values: auxiliary_polys_next.as_ref().unwrap()[..num_lookup_columns].to_vec(),
        challenges: l_c.to_vec(),
    });

    eval_vanishing_poly::<F, F::Extension, F::Extension, S, D, D>(
        stark,
        &vars,
        lookups,
        lookup_vars,
        ctl_vars,
        &mut consumer,
    );
    consumer.accumulators()
}
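`consumer.accumulators()` returns one combined value per alpha: the consumer folds every constraint evaluation into a random linear combination keyed by that alpha. A dependency-free sketch of the folding (the Horner-style recurrence below is a simplifying assumption about the consumer's internals, shown for illustration):

// Sketch: fold constraints c_0..c_{k-1} into ((c_0 * a + c_1) * a + ...) per alpha.
fn combine(constraints: &[u64], alpha: u64) -> u64 {
    constraints
        .iter()
        .fold(0u64, |acc, &c| acc.wrapping_mul(alpha).wrapping_add(c))
}

fn main() {
    let constraints = [3u64, 0, 7]; // toy constraint evaluations at zeta_prime
    let alphas = [11u64, 13];
    // One accumulator per alpha, mirroring `consumer.accumulators()`.
    let accumulators: Vec<u64> = alphas.iter().map(|&a| combine(&constraints, a)).collect();
    assert_eq!(accumulators.len(), alphas.len());
    assert_eq!(accumulators[0], (3 * 11 + 0) * 11 + 7);
}

With more than one alpha, a prover would have to cheat every combination simultaneously, which is what drives the soundness of batching the constraint checks.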
pub(crate) fn eval_l_0_and_l_last_circuit<F: RichField + Extendable<D>, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    n: ExtensionTarget<D>,
    g: ExtensionTarget<D>,
    x: ExtensionTarget<D>,
    z_x: ExtensionTarget<D>,
) -> (ExtensionTarget<D>, ExtensionTarget<D>) {
    let one = builder.one_extension();
    let l_0_deno = builder.mul_sub_extension(n, x, n);
    let l_last_deno = builder.mul_sub_extension(g, x, one);
    let l_last_deno = builder.mul_extension(n, l_last_deno);

    (
        builder.div_extension(z_x, l_0_deno),
        builder.div_extension(z_x, l_last_deno),
    )
}

/// Evaluates the constraints at a random extension point. It is used to bind the constraints.
pub(crate) fn compute_eval_vanishing_poly_circuit<F, S, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    stark: &S,
    stark_opening_set: &StarkOpeningSetTarget<D>,
    ctl_vars: Option<&[CtlCheckVarsTarget<F, D>]>,
    lookup_challenges: Option<&Vec<Target>>,
    public_inputs: &[Target],
    alphas: Vec<Target>,
    zeta: ExtensionTarget<D>,
    degree_bits: usize,
    degree_bits_target: Target,
    num_lookup_columns: usize,
) -> Vec<ExtensionTarget<D>>
where
    F: RichField + Extendable<D>,
    S: Stark<F, D>,
{
    let one = builder.one_extension();
    let two = builder.two();

    let StarkOpeningSetTarget {
        local_values,
        next_values,
        auxiliary_polys,
        auxiliary_polys_next,
        ctl_zs_first: _,
        quotient_polys: _,
    } = stark_opening_set;

    let max_num_of_bits_in_degree = degree_bits + 1;
    let degree = builder.exp(two, degree_bits_target, max_num_of_bits_in_degree);
    let degree_bits_vec = builder.split_le(degree, max_num_of_bits_in_degree);
    let zeta_pow_deg = builder.exp_extension_from_bits(zeta, &degree_bits_vec);
    let z_h_zeta = builder.sub_extension(zeta_pow_deg, one);
    let degree_ext = builder.convert_to_ext(degree);

    // Calculate primitive_root_of_unity(degree_bits)
    let two_adicity = builder.constant(F::from_canonical_usize(F::TWO_ADICITY));
    let two_adicity_sub_degree_bits = builder.sub(two_adicity, degree_bits_target);
    let two_exp_two_adicity_sub_degree_bits =
        builder.exp(two, two_adicity_sub_degree_bits, F::TWO_ADICITY);
    let base = builder.constant(F::POWER_OF_TWO_GENERATOR);
    let g = builder.exp(base, two_exp_two_adicity_sub_degree_bits, F::TWO_ADICITY);
    let g_ext = builder.convert_to_ext(g);

    let (l_0, l_last) = eval_l_0_and_l_last_circuit(builder, degree_ext, g_ext, zeta, z_h_zeta);
    let last = builder.inverse_extension(g_ext);
    let z_last = builder.sub_extension(zeta, last);

    let mut consumer = RecursiveConstraintConsumer::<F, D>::new(
        builder.zero_extension(),
        alphas,
        z_last,
        l_0,
        l_last,
    );

    let vars = S::EvaluationFrameTarget::from_values(
        local_values,
        next_values,
        &public_inputs
            .iter()
            .map(|&t| builder.convert_to_ext(t))
            .collect::<Vec<_>>(),
    );

    let lookup_vars = stark.uses_lookups().then(|| LookupCheckVarsTarget {
        local_values: auxiliary_polys.as_ref().unwrap()[..num_lookup_columns].to_vec(),
        next_values: auxiliary_polys_next.as_ref().unwrap()[..num_lookup_columns].to_vec(),
        challenges: lookup_challenges.unwrap().to_vec(),
    });

    with_context!(
        builder,
        "evaluate extra vanishing polynomial",
        eval_vanishing_poly_circuit::<F, S, D>(
            builder,
            stark,
            &vars,
            lookup_vars,
            ctl_vars,
            &mut consumer
        )
    );
    consumer.accumulators()
}
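The circuit above derives the subgroup generator as `g = POWER_OF_TWO_GENERATOR ^ (2^(TWO_ADICITY - degree_bits))`: starting from a fixed element of maximal two-power order, repeated squaring brings it down to order `2^degree_bits`. A dependency-free check of that identity over the toy prime field F_97, whose 2-adicity is 5 and where 28 generates the order-32 subgroup (both constants are illustrative, not plonky2's Goldilocks values):

const P: u64 = 97; // toy prime: 97 - 1 = 2^5 * 3, so TWO_ADICITY = 5
const TWO_ADICITY: u32 = 5;
const POWER_OF_TWO_GENERATOR: u64 = 28; // 28 = 5^3 mod 97 has order 2^5

fn exp(mut b: u64, mut e: u64) -> u64 {
    let mut acc = 1;
    while e > 0 {
        if e & 1 == 1 {
            acc = acc * b % P;
        }
        b = b * b % P;
        e >>= 1;
    }
    acc
}

// Mirrors the circuit: g = base^(2^(TWO_ADICITY - degree_bits)).
fn primitive_root_of_unity(degree_bits: u32) -> u64 {
    exp(POWER_OF_TWO_GENERATOR, 1 << (TWO_ADICITY - degree_bits))
}

fn main() {
    for degree_bits in 0..=TWO_ADICITY {
        let g = primitive_root_of_unity(degree_bits);
        // g generates a subgroup of order exactly 2^degree_bits:
        assert_eq!(exp(g, 1 << degree_bits), 1);
        if degree_bits > 0 {
            assert_ne!(exp(g, 1 << (degree_bits - 1)), 1);
        }
    }
}

In-circuit, the exponent `2^(TWO_ADICITY - degree_bits)` is itself computed from the `degree_bits_target`, which is why the code first exponentiates `two` and then the fixed base.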
@@ -24,7 +24,7 @@ use crate::evaluation_frame::StarkEvaluationFrame;
use crate::lookup::LookupCheckVars;
use crate::proof::{StarkOpeningSet, StarkProof, StarkProofChallenges, StarkProofWithPublicInputs};
use crate::stark::Stark;
use crate::vanishing_poly::eval_vanishing_poly;
use crate::vanishing_poly::{eval_l_0_and_l_last, eval_vanishing_poly};

/// Verifies a [`StarkProofWithPublicInputs`] against a STARK statement.
pub fn verify_stark_proof<
@@ -42,8 +42,10 @@ pub fn verify_stark_proof<
    let mut challenger = Challenger::<F, C::Hasher>::new();

    let challenges = proof_with_pis.get_challenges(
        &stark,
        &mut challenger,
        None,
        None,
        false,
        config,
        verifier_circuit_fri_params,
@@ -282,18 +284,6 @@ where
    Ok(())
}

/// Evaluate the Lagrange polynomials `L_0` and `L_(n-1)` at a point `x`.
/// `L_0(x) = (x^n - 1)/(n * (x - 1))`
/// `L_(n-1)(x) = (x^n - 1)/(n * (g * x - 1))`, with `g` the first element of the subgroup.
fn eval_l_0_and_l_last<F: Field>(log_n: usize, x: F) -> (F, F) {
    let n = F::from_canonical_usize(1 << log_n);
    let g = F::primitive_root_of_unity(log_n);
    let z_x = x.exp_power_of_2(log_n) - F::ONE;
    let invs = F::batch_multiplicative_inverse(&[n * (x - F::ONE), n * (g * x - F::ONE)]);

    (z_x * invs[0], z_x * invs[1])
}

/// Utility function to check that all lookups data wrapped in `Option`s are `Some` iff
/// the STARK uses a permutation argument.
fn check_lookup_options<F, C, S, const D: usize>(