Mirror of https://github.com/scroll-tech/scroll.git (synced 2026-01-09 22:18:00 -05:00)

[Refactor] Universal task (#1680)

Co-authored-by: georgehao <haohongfan@gmail.com>

23
crates/libzkp/Cargo.toml
Normal file
@@ -0,0 +1,23 @@
[package]
name = "libzkp"
version.workspace = true
edition.workspace = true

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
scroll-zkvm-types.workspace = true
scroll-zkvm-verifier-euclid.workspace = true

sbv-primitives.workspace = true
base64.workspace = true
serde.workspace = true
serde_derive.workspace = true
serde_json = { workspace = true, features = ["raw_value"] }
tracing.workspace = true
eyre.workspace = true

git-version = "0.3.5"
serde_stacker = "0.1"
regex = "1.11"
c-kzg = { version = "1.0", features = ["serde"] }

9
crates/libzkp/rustfmt.toml
Normal file
@@ -0,0 +1,9 @@
edition = "2021"

comment_width = 100
imports_granularity = "Crate"
max_width = 100
newline_style = "Unix"
# normalize_comments = true
reorder_imports = true
wrap_comments = true

121
crates/libzkp/src/lib.rs
Normal file
@@ -0,0 +1,121 @@
pub mod proofs;
pub mod tasks;
pub mod verifier;
pub use verifier::{TaskType, VerifierConfig};
mod utils;

use sbv_primitives::B256;
use scroll_zkvm_types::util::vec_as_base64;
use serde::{Deserialize, Serialize};
use serde_json::value::RawValue;
use std::path::Path;
use tasks::chunk_interpreter::{ChunkInterpreter, TryFromWithInterpreter};

/// Turn the coordinator's chunk task into a JSON string for a formal chunk proving
/// task (with full witnesses).
pub fn checkout_chunk_task(
    task_json: &str,
    interpreter: impl ChunkInterpreter,
) -> eyre::Result<String> {
    let chunk_task = serde_json::from_str::<tasks::ChunkTask>(task_json)?;
    let ret = serde_json::to_string(&tasks::ChunkProvingTask::try_from_with_interpret(
        chunk_task,
        interpreter,
    )?)?;
    Ok(ret)
}

/// Generate the required artifacts for proving tasks.
pub fn gen_universal_task(
    task_type: i32,
    task_json: &str,
    fork_name: &str,
    interpreter: Option<impl ChunkInterpreter>,
) -> eyre::Result<(B256, String, String)> {
    use proofs::*;
    use tasks::*;

    /// Wrapper for metadata.
    #[derive(Clone, Debug, Serialize, Deserialize)]
    #[serde(untagged)]
    enum AnyMetaData {
        Chunk(ChunkProofMetadata),
        Batch(BatchProofMetadata),
        Bundle(BundleProofMetadata),
    }

    let (pi_hash, metadata, u_task) = match task_type {
        x if x == TaskType::Chunk as i32 => {
            let task = serde_json::from_str::<ChunkProvingTask>(task_json)?;
            let (pi_hash, metadata, u_task) =
                gen_universal_chunk_task(task, fork_name.into(), interpreter)?;
            (pi_hash, AnyMetaData::Chunk(metadata), u_task)
        }
        x if x == TaskType::Batch as i32 => {
            let task = serde_json::from_str::<BatchProvingTask>(task_json)?;
            let (pi_hash, metadata, u_task) = gen_universal_batch_task(task, fork_name.into())?;
            (pi_hash, AnyMetaData::Batch(metadata), u_task)
        }
        x if x == TaskType::Bundle as i32 => {
            let task = serde_json::from_str::<BundleProvingTask>(task_json)?;
            let (pi_hash, metadata, u_task) = gen_universal_bundle_task(task, fork_name.into())?;
            (pi_hash, AnyMetaData::Bundle(metadata), u_task)
        }
        _ => return Err(eyre::eyre!("unrecognized task type {task_type}")),
    };

    Ok((
        pi_hash,
        serde_json::to_string(&metadata)?,
        serde_json::to_string(&u_task)?,
    ))
}
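
// Illustrative usage (a sketch, not part of this commit): a host-side caller
// hands the coordinator's task JSON to `gen_universal_task`, forwards the
// returned universal task to the prover, and keeps the metadata so the proof
// can be re-wrapped later. `coordinator_json`, `interp` and the fork string
// are hypothetical stand-ins.
//
// let (pi_hash, metadata_json, u_task_json) = gen_universal_task(
//     TaskType::Chunk as i32,
//     &coordinator_json,
//     "euclidv2",
//     Some(interp),
// )?;
// // ... the universal prover consumes `u_task_json` and yields `proof_json` ...
// // let wrapped = gen_wrapped_proof(&proof_json, &metadata_json, &vk)?;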

/// Helper to rearrange the proof returned by the universal prover into the
/// corresponding wrapped proof.
pub fn gen_wrapped_proof(proof_json: &str, metadata: &str, vk: &[u8]) -> eyre::Result<String> {
    #[derive(Serialize)]
    struct RearrangeWrappedProofJson<'a> {
        #[serde(borrow)]
        pub metadata: &'a RawValue,
        #[serde(borrow)]
        pub proof: &'a RawValue,
        #[serde(with = "vec_as_base64", default)]
        pub vk: Vec<u8>,
        pub git_version: String,
    }

    let re_arrange = RearrangeWrappedProofJson {
        metadata: serde_json::from_str(metadata)?,
        proof: serde_json::from_str(proof_json)?,
        vk: vk.to_vec(),
        git_version: utils::short_git_version(),
    };

    let ret = serde_json::to_string(&re_arrange)?;
    Ok(ret)
}

/// Initialize the verifier.
pub fn verifier_init(config: &str) -> eyre::Result<()> {
    let cfg: VerifierConfig = serde_json::from_str(config)?;
    verifier::init(cfg);
    Ok(())
}

/// Verify a proof.
pub fn verify_proof(proof: Vec<u8>, fork_name: &str, task_type: TaskType) -> eyre::Result<bool> {
    let verifier = verifier::get_verifier(fork_name)?;

    let ret = verifier.verify(task_type, proof)?;

    Ok(ret)
}

/// Dump the verifying keys.
pub fn dump_vk(fork_name: &str, file: &str) -> eyre::Result<()> {
    let verifier = verifier::get_verifier(fork_name)?;

    verifier.dump_vk(Path::new(file));

    Ok(())
}
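
// Illustrative end-to-end verifier flow (a sketch, not part of this commit):
// `verifier_init` is called once with a JSON `VerifierConfig`, after which
// proofs can be checked. The asset path and fork string are assumptions.
//
// verifier_init(
//     r#"{"high_version_circuit": {"fork_name": "euclidv2", "assets_path": "./assets"}}"#,
// )?;
// let ok = verify_proof(proof_bytes, "euclidv2", TaskType::Chunk)?;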

BIN
crates/libzkp/src/plonk_verifier_0.11.4.bin
Normal file
Binary file not shown.

344
crates/libzkp/src/proofs.rs
Normal file
@@ -0,0 +1,344 @@
use std::path::Path;

use crate::utils::short_git_version;
use eyre::Result;
use sbv_primitives::B256;
use scroll_zkvm_types::{
    batch::BatchInfo,
    bundle::BundleInfo,
    chunk::ChunkInfo,
    proof::{EvmProof, OpenVmEvmProof, ProofEnum, RootProof},
    public_inputs::{ForkName, MultiVersionPublicInputs},
    types_agg::{AggregationInput, ProgramCommitment},
    util::vec_as_base64,
};
use serde::{de::DeserializeOwned, Deserialize, Serialize};

/// A wrapper around the actual inner proof.
#[derive(Clone, Serialize, Deserialize)]
pub struct WrappedProof<Metadata> {
    /// Generic metadata carried by a proof.
    pub metadata: Metadata,
    /// The inner proof, either a [`RootProof`] or [`EvmProof`] depending on the
    /// [`crate::ProverType`].
    pub proof: ProofEnum,
    /// Represents the verifying key in serialized form. The purpose of including the verifying key
    /// along with the proof is to allow a verifier-only mode to identify the source of proof
    /// generation.
    ///
    /// For [`RootProof`] the verifying key is denoted by the digest of the VM's program.
    ///
    /// For [`EvmProof`] it's the raw bytes of the halo2 circuit's `VerifyingKey`.
    ///
    /// We encode the vk in base64 format during JSON serialization.
    #[serde(with = "vec_as_base64", default)]
    pub vk: Vec<u8>,
    /// Represents the git ref for `zkvm-prover` that was used to construct the proof.
    ///
    /// This is useful for debugging.
    pub git_version: String,
}
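
// Illustrative JSON shape of a serialized `WrappedProof` carrying an EVM proof
// (compare with `test_dummy_proof` below; the values here are made up):
//
// {
//   "metadata": { ... },
//   "proof": { "proof": "<base64>", "instances": "<base64>" },
//   "vk": "<base64>",
//   "git_version": "abc1234"
// }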

pub trait AsRootProof {
    fn as_root_proof(&self) -> &RootProof;
}

pub trait AsEvmProof {
    fn as_evm_proof(&self) -> &EvmProof;
}

pub trait IntoEvmProof {
    fn into_evm_proof(self) -> OpenVmEvmProof;
}

/// Alias for convenience.
pub type ChunkProof = WrappedProof<ChunkProofMetadata>;

/// Alias for convenience.
pub type BatchProof = WrappedProof<BatchProofMetadata>;

/// Alias for convenience.
pub type BundleProof = WrappedProof<BundleProofMetadata>;

impl AsRootProof for ChunkProof {
    fn as_root_proof(&self) -> &RootProof {
        self.proof
            .as_root_proof()
            .expect("chunk proof uses root proof")
    }
}

impl AsRootProof for BatchProof {
    fn as_root_proof(&self) -> &RootProof {
        self.proof
            .as_root_proof()
            .expect("batch proof uses root proof")
    }
}

impl AsEvmProof for BundleProof {
    fn as_evm_proof(&self) -> &EvmProof {
        self.proof
            .as_evm_proof()
            .expect("bundle proof uses evm proof")
    }
}

impl IntoEvmProof for BundleProof {
    fn into_evm_proof(self) -> OpenVmEvmProof {
        self.proof
            .as_evm_proof()
            .expect("bundle proof uses evm proof")
            .clone()
            .into()
    }
}

/// Trait to enable operations on metadata.
pub trait ProofMetadata: Serialize + DeserializeOwned + std::fmt::Debug {
    type PublicInputs: MultiVersionPublicInputs;

    fn pi_hash_info(&self) -> &Self::PublicInputs;

    fn new_proof<P: Into<ProofEnum>>(self, proof: P, vk: Option<&[u8]>) -> WrappedProof<Self> {
        WrappedProof {
            metadata: self,
            proof: proof.into(),
            vk: vk.map(Vec::from).unwrap_or_default(),
            git_version: short_git_version(),
        }
    }
}

pub trait PersistableProof: Sized {
    /// Read and deserialize the proof.
    fn from_json<P: AsRef<Path>>(path_proof: P) -> Result<Self>;
    /// Serialize the proof and dump it at the given path.
    fn dump<P: AsRef<Path>>(&self, path_proof: P) -> Result<()>;
}

/// Metadata attached to [`ChunkProof`].
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ChunkProofMetadata {
    /// The chunk information describing the list of blocks contained within the chunk.
    pub chunk_info: ChunkInfo,
}

impl ProofMetadata for ChunkProofMetadata {
    type PublicInputs = ChunkInfo;

    fn pi_hash_info(&self) -> &Self::PublicInputs {
        &self.chunk_info
    }
}

/// Metadata attached to [`BatchProof`].
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BatchProofMetadata {
    /// The batch information describing the list of chunks.
    pub batch_info: BatchInfo,
    /// The [`scroll_zkvm_types::batch::BatchHeader`]'s digest.
    pub batch_hash: B256,
}

impl ProofMetadata for BatchProofMetadata {
    type PublicInputs = BatchInfo;

    fn pi_hash_info(&self) -> &Self::PublicInputs {
        &self.batch_info
    }
}

/// Metadata attached to [`BundleProof`].
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct BundleProofMetadata {
    /// The bundle information describing the list of batches to be finalised on-chain.
    pub bundle_info: BundleInfo,
    /// The public-input digest for the bundle.
    pub bundle_pi_hash: B256,
}

impl ProofMetadata for BundleProofMetadata {
    type PublicInputs = BundleInfo;

    fn pi_hash_info(&self) -> &Self::PublicInputs {
        &self.bundle_info
    }
}

impl<Metadata> From<&WrappedProof<Metadata>> for AggregationInput {
    fn from(value: &WrappedProof<Metadata>) -> Self {
        Self {
            public_values: value.proof.public_values(),
            commitment: ProgramCommitment::deserialize(&value.vk),
        }
    }
}

impl<Metadata: ProofMetadata> WrappedProof<Metadata> {
    /// Sanity checks on the wrapped proof:
    ///
    /// - pi_hash computed in the host does in fact match pi_hash computed in the guest
    pub fn sanity_check(&self, fork_name: ForkName) {
        let proof_pi = self.proof.public_values();

        let expected_pi = self
            .metadata
            .pi_hash_info()
            .pi_hash_by_fork(fork_name)
            .0
            .as_ref()
            .iter()
            .map(|&v| v as u32)
            .collect::<Vec<_>>();

        assert_eq!(
            expected_pi, proof_pi,
            "pi mismatch: expected={expected_pi:?}, found={proof_pi:?}"
        );
    }
}

impl<Metadata: ProofMetadata> PersistableProof for WrappedProof<Metadata> {
    fn from_json<P: AsRef<Path>>(path_proof: P) -> Result<Self> {
        crate::utils::read_json_deep(path_proof)
    }

    fn dump<P: AsRef<Path>>(&self, path_proof: P) -> Result<()> {
        crate::utils::write_json(path_proof, &self)
    }
}

#[cfg(test)]
mod tests {
    use base64::{prelude::BASE64_STANDARD, Engine};
    use sbv_primitives::B256;
    use scroll_zkvm_types::{
        bundle::{BundleInfo, BundleInfoV1},
        proof::EvmProof,
        public_inputs::PublicInputs,
    };

    use super::*;

    #[test]
    fn test_roundtrip() -> eyre::Result<()> {
        macro_rules! assert_roundtrip {
            ($fd:expr, $proof:ident) => {
                let proof_str_expected =
                    std::fs::read_to_string(std::path::Path::new("./testdata").join($fd))?;
                let proof = serde_json::from_str::<$proof>(&proof_str_expected)?;
                let proof_str_got = serde_json::to_string(&proof)?;
                assert_eq!(proof_str_got, proof_str_expected);
            };
        }

        assert_roundtrip!("chunk-proof.json", ChunkProof);
        assert_roundtrip!("batch-proof.json", BatchProof);
        assert_roundtrip!("bundle-proof.json", BundleProof);

        Ok(())
    }

    #[test]
    fn test_dummy_proof() -> eyre::Result<()> {
        // 1. Metadata
        let metadata = {
            let bundle_info: BundleInfoV1 = BundleInfo {
                chain_id: 12345,
                num_batches: 12,
                prev_state_root: B256::repeat_byte(1),
                prev_batch_hash: B256::repeat_byte(2),
                post_state_root: B256::repeat_byte(3),
                batch_hash: B256::repeat_byte(4),
                withdraw_root: B256::repeat_byte(5),
                msg_queue_hash: B256::repeat_byte(6),
            }
            .into();
            let bundle_pi_hash = bundle_info.pi_hash();
            BundleProofMetadata {
                bundle_info: bundle_info.0,
                bundle_pi_hash,
            }
        };

        // 2. Proof
        let (proof, proof_base64) = {
            let proof = std::iter::empty()
                .chain(std::iter::repeat_n(1, 1))
                .chain(std::iter::repeat_n(2, 2))
                .chain(std::iter::repeat_n(3, 3))
                .chain(std::iter::repeat_n(4, 4))
                .chain(std::iter::repeat_n(5, 5))
                .chain(std::iter::repeat_n(6, 6))
                .chain(std::iter::repeat_n(7, 7))
                .chain(std::iter::repeat_n(8, 8))
                .chain(std::iter::repeat_n(9, 9))
                .collect::<Vec<u8>>();
            let proof_base64 = BASE64_STANDARD.encode(&proof);
            (proof, proof_base64)
        };

        // 3. Instances
        let (instances, instances_base64) = {
            // LE: [0x56, 0x34, 0x12, 0x00, 0x00, ..., 0x00]
            // LE: [0x32, 0x54, 0x76, 0x98, 0x00, ..., 0x00]
            let instances = std::iter::empty()
                .chain(std::iter::repeat_n(0x00, 29))
                .chain(std::iter::once(0x12))
                .chain(std::iter::once(0x34))
                .chain(std::iter::once(0x56))
                .chain(std::iter::repeat_n(0x00, 28))
                .chain(std::iter::once(0x98))
                .chain(std::iter::once(0x76))
                .chain(std::iter::once(0x54))
                .chain(std::iter::once(0x32))
                .collect::<Vec<u8>>();
            let instances_base64 = BASE64_STANDARD.encode(&instances);
            (instances, instances_base64)
        };

        // 4. VK
        let (vk, vk_base64) = {
            let vk = std::iter::empty()
                .chain(std::iter::repeat_n(1, 9))
                .chain(std::iter::repeat_n(2, 8))
                .chain(std::iter::repeat_n(3, 7))
                .chain(std::iter::repeat_n(4, 6))
                .chain(std::iter::repeat_n(5, 5))
                .chain(std::iter::repeat_n(6, 4))
                .chain(std::iter::repeat_n(7, 3))
                .chain(std::iter::repeat_n(8, 2))
                .chain(std::iter::repeat_n(9, 1))
                .collect::<Vec<u8>>();
            let vk_base64 = BASE64_STANDARD.encode(&vk);
            (vk, vk_base64)
        };

        let evm_proof = EvmProof { instances, proof };
        let bundle_proof = metadata.new_proof(evm_proof, Some(vk.as_slice()));
        let bundle_proof_json = serde_json::to_value(&bundle_proof)?;

        assert_eq!(
            bundle_proof_json.get("proof").unwrap(),
            &serde_json::json!({
                "proof": proof_base64,
                "instances": instances_base64,
            }),
        );
        assert_eq!(
            bundle_proof_json.get("vk").unwrap(),
            &serde_json::Value::String(vk_base64),
        );

        let bundle_proof_de = serde_json::from_value::<BundleProof>(bundle_proof_json)?;

        assert_eq!(
            bundle_proof_de.proof.as_evm_proof(),
            bundle_proof.proof.as_evm_proof()
        );
        assert_eq!(bundle_proof_de.vk, bundle_proof.vk);

        Ok(())
    }
}

76
crates/libzkp/src/tasks.rs
Normal file
@@ -0,0 +1,76 @@
pub mod batch;
pub mod bundle;
pub mod chunk;
pub mod chunk_interpreter;

pub use batch::BatchProvingTask;
pub use bundle::BundleProvingTask;
pub use chunk::{ChunkProvingTask, ChunkTask};
pub use chunk_interpreter::ChunkInterpreter;
pub use scroll_zkvm_types::task::ProvingTask;

use crate::proofs::{BatchProofMetadata, BundleProofMetadata, ChunkProofMetadata};
use chunk_interpreter::{DummyInterpreter, TryFromWithInterpreter};
use sbv_primitives::B256;
use scroll_zkvm_types::{
    chunk::ChunkInfo,
    public_inputs::{ForkName, MultiVersionPublicInputs},
};

/// Generate the required artifacts for chunk proving.
pub fn gen_universal_chunk_task(
    mut task: ChunkProvingTask,
    fork_name: ForkName,
    interpreter: Option<impl ChunkInterpreter>,
) -> eyre::Result<(B256, ChunkProofMetadata, ProvingTask)> {
    let chunk_info = if let Some(interpreter) = interpreter {
        ChunkInfo::try_from_with_interpret(&mut task, interpreter)
    } else {
        ChunkInfo::try_from_with_interpret(&mut task, DummyInterpreter {})
    }?;
    let proving_task = task.try_into()?;
    let expected_pi_hash = chunk_info.pi_hash_by_fork(fork_name);
    Ok((
        expected_pi_hash,
        ChunkProofMetadata { chunk_info },
        proving_task,
    ))
}

/// Generate the required artifacts for batch proving.
pub fn gen_universal_batch_task(
    task: BatchProvingTask,
    fork_name: ForkName,
) -> eyre::Result<(B256, BatchProofMetadata, ProvingTask)> {
    let batch_info = task.precheck_and_build_metadata()?;
    let proving_task = task.try_into()?;
    let expected_pi_hash = batch_info.pi_hash_by_fork(fork_name);

    Ok((
        expected_pi_hash,
        BatchProofMetadata {
            batch_info,
            batch_hash: expected_pi_hash,
        },
        proving_task,
    ))
}

/// Generate the required artifacts for bundle proving.
pub fn gen_universal_bundle_task(
    task: BundleProvingTask,
    fork_name: ForkName,
) -> eyre::Result<(B256, BundleProofMetadata, ProvingTask)> {
    let bundle_info = task.precheck_and_build_metadata()?;
    let proving_task = task.try_into()?;
    let expected_pi_hash = bundle_info.pi_hash_by_fork(fork_name);

    Ok((
        expected_pi_hash,
        BundleProofMetadata {
            bundle_info,
            bundle_pi_hash: expected_pi_hash,
        },
        proving_task,
    ))
}

253
crates/libzkp/src/tasks/batch.rs
Normal file
@@ -0,0 +1,253 @@
use crate::proofs::ChunkProof;
use c_kzg::Bytes48;
use eyre::Result;
use sbv_primitives::{B256, U256};
use scroll_zkvm_types::{
    batch::{
        BatchHeader, BatchHeaderV6, BatchHeaderV7, BatchInfo, BatchWitness, EnvelopeV6, EnvelopeV7,
        PointEvalWitness, ReferenceHeader, N_BLOB_BYTES,
    },
    public_inputs::ForkName,
    task::ProvingTask,
    utils::{to_rkyv_bytes, RancorError},
};

mod utils;
use utils::{base64, point_eval};

/// A batch header type covering multiple versions. Since a [`BatchHeaderV6`] cannot
/// be decoded as V7, we always get a correct deserialization.
/// Notice: the V6 header MUST be placed above V7, since an untagged enum tries to
/// decode each variant in order.
#[derive(Clone, serde::Deserialize, serde::Serialize)]
#[serde(untagged)]
pub enum BatchHeaderV {
    V6(BatchHeaderV6),
    V7(BatchHeaderV7),
}
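
// Note on the ordering above (a sketch of serde's behavior, not part of this
// commit): the `untagged` representation tries each variant top-to-bottom and
// picks the first one that deserializes successfully, so a header is attempted
// as V6 before V7. Per the doc comment, a V6 payload can never decode as V7,
// which keeps the round-trip unambiguous.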

impl From<BatchHeaderV> for ReferenceHeader {
    fn from(value: BatchHeaderV) -> Self {
        match value {
            BatchHeaderV::V6(h) => ReferenceHeader::V6(h),
            BatchHeaderV::V7(h) => ReferenceHeader::V7(h),
        }
    }
}

impl BatchHeaderV {
    pub fn batch_hash(&self) -> B256 {
        match self {
            BatchHeaderV::V6(h) => h.batch_hash(),
            BatchHeaderV::V7(h) => h.batch_hash(),
        }
    }

    pub fn must_v6_header(&self) -> &BatchHeaderV6 {
        match self {
            BatchHeaderV::V6(h) => h,
            BatchHeaderV::V7(_) => panic!("expected v6 header, found v7"),
        }
    }

    pub fn must_v7_header(&self) -> &BatchHeaderV7 {
        match self {
            BatchHeaderV::V7(h) => h,
            BatchHeaderV::V6(_) => panic!("expected v7 header, found v6"),
        }
    }
}

/// Defines a proving task for batch proof generation. The format is compatible
/// with both pre-EuclidV2 and EuclidV2.
#[derive(Clone, serde::Deserialize, serde::Serialize)]
pub struct BatchProvingTask {
    /// Chunk proofs for the contiguous list of chunks within the batch.
    pub chunk_proofs: Vec<ChunkProof>,
    /// The [`BatchHeaderV6`] or [`BatchHeaderV7`], as computed on-chain for this batch.
    pub batch_header: BatchHeaderV,
    /// The bytes encoding the batch data that will finally be published on-chain in the form of an
    /// EIP-4844 blob.
    #[serde(with = "base64")]
    pub blob_bytes: Vec<u8>,
    /// Challenge digest computed using the blob's bytes and versioned hash.
    pub challenge_digest: Option<U256>,
    /// KZG commitment for the blob.
    pub kzg_commitment: Option<Bytes48>,
    /// KZG proof.
    pub kzg_proof: Option<Bytes48>,
    /// Specified fork version, used for sanity checks against the batch header and
    /// chunk proofs.
    pub fork_name: String,
}

impl TryFrom<BatchProvingTask> for ProvingTask {
    type Error = eyre::Error;

    fn try_from(value: BatchProvingTask) -> Result<Self> {
        let witness = value.build_guest_input();

        Ok(ProvingTask {
            identifier: value.batch_header.batch_hash().to_string(),
            fork_name: value.fork_name,
            aggregated_proofs: value
                .chunk_proofs
                .into_iter()
                .map(|w_proof| w_proof.proof.into_root_proof().expect("expect root proof"))
                .collect(),
            serialized_witness: vec![to_rkyv_bytes::<RancorError>(&witness)?.into_vec()],
            vk: Vec::new(),
        })
    }
}

impl BatchProvingTask {
    fn build_guest_input(&self) -> BatchWitness {
        let fork_name = self.fork_name.to_lowercase().as_str().into();

        // Calculate the required point evaluation and compare it with the task input.
        let (kzg_commitment, kzg_proof, challenge_digest) = {
            let blob = point_eval::to_blob(&self.blob_bytes);
            let commitment = point_eval::blob_to_kzg_commitment(&blob);
            let versioned_hash = point_eval::get_versioned_hash(&commitment);
            let challenge_digest = match &self.batch_header {
                BatchHeaderV::V6(_) => {
                    assert_eq!(
                        fork_name,
                        ForkName::EuclidV1,
                        "hardfork mismatch for da-codec@v6 header: found={fork_name:?}, expected={:?}",
                        ForkName::EuclidV1,
                    );
                    EnvelopeV6::from(self.blob_bytes.as_slice()).challenge_digest(versioned_hash)
                }
                BatchHeaderV::V7(_) => {
                    assert_eq!(
                        fork_name,
                        ForkName::EuclidV2,
                        "hardfork mismatch for da-codec@v7 header: found={fork_name:?}, expected={:?}",
                        ForkName::EuclidV2,
                    );
                    let padded_blob_bytes = {
                        let mut padded_blob_bytes = self.blob_bytes.to_vec();
                        padded_blob_bytes.resize(N_BLOB_BYTES, 0);
                        padded_blob_bytes
                    };
                    EnvelopeV7::from(padded_blob_bytes.as_slice()).challenge_digest(versioned_hash)
                }
            };

            let (proof, _) = point_eval::get_kzg_proof(&blob, challenge_digest);

            (commitment.to_bytes(), proof.to_bytes(), challenge_digest)
        };

        if let Some(k) = self.kzg_commitment {
            assert_eq!(k, kzg_commitment);
        }

        if let Some(c) = self.challenge_digest {
            assert_eq!(c, U256::from_be_bytes(challenge_digest.0));
        }

        if let Some(p) = self.kzg_proof {
            assert_eq!(p, kzg_proof);
        }

        let point_eval_witness = PointEvalWitness {
            kzg_commitment: kzg_commitment.into_inner(),
            kzg_proof: kzg_proof.into_inner(),
        };

        let reference_header = self.batch_header.clone().into();

        BatchWitness {
            fork_name,
            chunk_proofs: self.chunk_proofs.iter().map(|proof| proof.into()).collect(),
            chunk_infos: self
                .chunk_proofs
                .iter()
                .map(|p| p.metadata.chunk_info.clone())
                .collect(),
            blob_bytes: self.blob_bytes.clone(),
            reference_header,
            point_eval_witness,
        }
    }

    pub fn precheck_and_build_metadata(&self) -> Result<BatchInfo> {
        let fork_name = ForkName::from(self.fork_name.as_str());
        let (parent_state_root, state_root, chain_id, withdraw_root) = (
            self.chunk_proofs
                .first()
                .expect("at least one chunk in batch")
                .metadata
                .chunk_info
                .prev_state_root,
            self.chunk_proofs
                .last()
                .expect("at least one chunk in batch")
                .metadata
                .chunk_info
                .post_state_root,
            self.chunk_proofs
                .last()
                .expect("at least one chunk in batch")
                .metadata
                .chunk_info
                .chain_id,
            self.chunk_proofs
                .last()
                .expect("at least one chunk in batch")
                .metadata
                .chunk_info
                .withdraw_root,
        );
        let (parent_batch_hash, prev_msg_queue_hash, post_msg_queue_hash) = match self.batch_header
        {
            BatchHeaderV::V6(h) => {
                assert_eq!(
                    fork_name,
                    ForkName::EuclidV1,
                    "hardfork mismatch for da-codec@v6 header: found={fork_name:?}, expected={:?}",
                    ForkName::EuclidV1,
                );
                (h.parent_batch_hash, Default::default(), Default::default())
            }
            BatchHeaderV::V7(h) => {
                assert_eq!(
                    fork_name,
                    ForkName::EuclidV2,
                    "hardfork mismatch for da-codec@v7 header: found={fork_name:?}, expected={:?}",
                    ForkName::EuclidV2,
                );
                (
                    h.parent_batch_hash,
                    self.chunk_proofs
                        .first()
                        .expect("at least one chunk in batch")
                        .metadata
                        .chunk_info
                        .prev_msg_queue_hash,
                    self.chunk_proofs
                        .last()
                        .expect("at least one chunk in batch")
                        .metadata
                        .chunk_info
                        .post_msg_queue_hash,
                )
            }
        };

        let batch_hash = self.batch_header.batch_hash();

        Ok(BatchInfo {
            parent_state_root,
            parent_batch_hash,
            state_root,
            batch_hash,
            chain_id,
            withdraw_root,
            prev_msg_queue_hash,
            post_msg_queue_hash,
        })
    }
}

77
crates/libzkp/src/tasks/batch/utils.rs
Normal file
@@ -0,0 +1,77 @@
pub mod base64 {
    use base64::prelude::*;
    use serde::{Deserialize, Deserializer, Serialize, Serializer};

    pub fn serialize<S: Serializer>(v: &Vec<u8>, s: S) -> Result<S::Ok, S::Error> {
        let base64 = BASE64_STANDARD.encode(v);
        String::serialize(&base64, s)
    }

    pub fn deserialize<'de, D: Deserializer<'de>>(d: D) -> Result<Vec<u8>, D::Error> {
        let base64 = String::deserialize(d)?;
        BASE64_STANDARD
            .decode(base64.as_bytes())
            .map_err(serde::de::Error::custom)
    }
}
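
// Usage sketch (mirrors `BatchProvingTask::blob_bytes` above): annotating a
// `Vec<u8>` field with `#[serde(with = "base64")]` serializes it as a base64
// string rather than a JSON array of numbers, e.g.
//
// #[serde(with = "base64")]
// pub blob_bytes: Vec<u8>,   // {"blob_bytes": "AAEC..."} instead of [0, 1, 2, ...]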

pub mod point_eval {
    use c_kzg;
    use sbv_primitives::{types::eips::eip4844::BLS_MODULUS, B256 as H256, U256};
    use scroll_zkvm_types::util::sha256_rv32;

    /// Given the blob-envelope, translate it to a fixed size EIP-4844 blob.
    ///
    /// For every 32-byte chunk in the blob, the most-significant byte is set to 0 while the other
    /// 31 bytes are copied from the provided blob-envelope.
    pub fn to_blob(envelope_bytes: &[u8]) -> c_kzg::Blob {
        let mut blob_bytes = [0u8; c_kzg::BYTES_PER_BLOB];

        assert!(
            envelope_bytes.len()
                <= c_kzg::FIELD_ELEMENTS_PER_BLOB * (c_kzg::BYTES_PER_FIELD_ELEMENT - 1),
            "too many bytes in blob envelope",
        );

        for (i, &byte) in envelope_bytes.iter().enumerate() {
            blob_bytes[(i / 31) * 32 + 1 + (i % 31)] = byte;
        }

        c_kzg::Blob::new(blob_bytes)
    }
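
    // Worked example of the index mapping above (illustrative): envelope bytes
    // are packed 31 per 32-byte field element, with byte 0 of each element left
    // as 0x00 so the element stays a canonical field value.
    //   envelope[0]  -> blob[1]    ((0 / 31) * 32 + 1 + 0)
    //   envelope[30] -> blob[31]   (last byte of the first field element)
    //   envelope[31] -> blob[33]   (skips blob[32], the next element's zero byte)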

    /// Get the KZG commitment from an EIP-4844 blob.
    pub fn blob_to_kzg_commitment(blob: &c_kzg::Blob) -> c_kzg::KzgCommitment {
        c_kzg::KzgCommitment::blob_to_kzg_commitment(blob, c_kzg::ethereum_kzg_settings())
            .expect("blob to kzg commitment should succeed")
    }

    /// The version for KZG as per EIP-4844.
    const VERSIONED_HASH_VERSION_KZG: u8 = 1;

    /// Get the EIP-4844 versioned hash from the KZG commitment.
    pub fn get_versioned_hash(commitment: &c_kzg::KzgCommitment) -> H256 {
        let mut hash: [u8; 32] = sha256_rv32(commitment.to_bytes().as_slice()).into();
        hash[0] = VERSIONED_HASH_VERSION_KZG;
        H256::new(hash)
    }

    /// Get the evaluation point x for the KZG proof from the challenge hash.
    pub fn get_x_from_challenge(challenge: H256) -> U256 {
        U256::from_be_bytes(challenge.0) % BLS_MODULUS
    }

    /// Generate KZG proof and evaluation given the blob (polynomial) and a random challenge.
    pub fn get_kzg_proof(blob: &c_kzg::Blob, challenge: H256) -> (c_kzg::KzgProof, U256) {
        let challenge = get_x_from_challenge(challenge);

        let (proof, y) = c_kzg::KzgProof::compute_kzg_proof(
            blob,
            &c_kzg::Bytes32::new(challenge.to_be_bytes()),
            c_kzg::ethereum_kzg_settings(),
        )
        .expect("kzg proof should succeed");

        (proof, U256::from_be_slice(y.as_slice()))
    }
}

125
crates/libzkp/src/tasks/bundle.rs
Normal file
@@ -0,0 +1,125 @@
use crate::proofs::BatchProof;
use eyre::Result;
use scroll_zkvm_types::{
    bundle::{BundleInfo, BundleWitness},
    task::ProvingTask,
    utils::{to_rkyv_bytes, RancorError},
};

/// Message indicating a sanity check failure.
const BUNDLE_SANITY_MSG: &str = "bundle must have at least one batch";

#[derive(Clone, serde::Deserialize, serde::Serialize)]
pub struct BundleProvingTask {
    pub batch_proofs: Vec<BatchProof>,
    /// For sanity checks.
    pub bundle_info: Option<BundleInfo>,
    /// Specified fork name.
    pub fork_name: String,
}

impl BundleProvingTask {
    fn identifier(&self) -> String {
        assert!(!self.batch_proofs.is_empty(), "{BUNDLE_SANITY_MSG}");

        let (first, last) = (
            self.batch_proofs
                .first()
                .expect(BUNDLE_SANITY_MSG)
                .metadata
                .batch_hash,
            self.batch_proofs
                .last()
                .expect(BUNDLE_SANITY_MSG)
                .metadata
                .batch_hash,
        );

        format!("{first}-{last}")
    }

    fn build_guest_input(&self) -> BundleWitness {
        BundleWitness {
            batch_proofs: self.batch_proofs.iter().map(|proof| proof.into()).collect(),
            batch_infos: self
                .batch_proofs
                .iter()
                .map(|wrapped_proof| wrapped_proof.metadata.batch_info.clone())
                .collect(),
        }
    }

    pub fn precheck_and_build_metadata(&self) -> Result<BundleInfo> {
        use eyre::eyre;
        let err_prefix = format!("metadata_with_prechecks for task_id={}", self.identifier());

        for w in self.batch_proofs.windows(2) {
            if w[1].metadata.batch_info.chain_id != w[0].metadata.batch_info.chain_id {
                return Err(eyre!("{err_prefix}: chain_id mismatch"));
            }

            if w[1].metadata.batch_info.parent_state_root != w[0].metadata.batch_info.state_root {
                return Err(eyre!("{err_prefix}: state_root not chained"));
            }

            if w[1].metadata.batch_info.parent_batch_hash != w[0].metadata.batch_info.batch_hash {
                return Err(eyre!("{err_prefix}: batch_hash not chained"));
            }
        }

        let (first_batch, last_batch) = (
            &self
                .batch_proofs
                .first()
                .expect("at least one batch in bundle")
                .metadata
                .batch_info,
            &self
                .batch_proofs
                .last()
                .expect("at least one batch in bundle")
                .metadata
                .batch_info,
        );

        let chain_id = first_batch.chain_id;
        let num_batches = u32::try_from(self.batch_proofs.len()).expect("num_batches: u32");
        let prev_state_root = first_batch.parent_state_root;
        let prev_batch_hash = first_batch.parent_batch_hash;
        let post_state_root = last_batch.state_root;
        let batch_hash = last_batch.batch_hash;
        let withdraw_root = last_batch.withdraw_root;
        let msg_queue_hash = last_batch.post_msg_queue_hash;

        Ok(BundleInfo {
            chain_id,
            msg_queue_hash,
            num_batches,
            prev_state_root,
            prev_batch_hash,
            post_state_root,
            batch_hash,
            withdraw_root,
        })
    }
}

impl TryFrom<BundleProvingTask> for ProvingTask {
    type Error = eyre::Error;

    fn try_from(value: BundleProvingTask) -> Result<Self> {
        let witness = value.build_guest_input();

        Ok(ProvingTask {
            identifier: value.identifier(),
            fork_name: value.fork_name,
            aggregated_proofs: value
                .batch_proofs
                .into_iter()
                .map(|w_proof| w_proof.proof.into_root_proof().expect("expect root proof"))
                .collect(),
            serialized_witness: vec![to_rkyv_bytes::<RancorError>(&witness)?.to_vec()],
            vk: Vec::new(),
        })
    }
}

185
crates/libzkp/src/tasks/chunk.rs
Normal file
@@ -0,0 +1,185 @@
use super::chunk_interpreter::*;
use eyre::Result;
use sbv_primitives::{types::BlockWitness, B256};
use scroll_zkvm_types::{
    chunk::{execute, ChunkInfo, ChunkWitness},
    task::ProvingTask,
    utils::{to_rkyv_bytes, RancorError},
};

/// The type aligned with the coordinator's definition.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct ChunkTask {
    /// Block hashes for a series of blocks.
    pub block_hashes: Vec<B256>,
    /// The on-chain L1 msg queue hash before applying L1 msg txs from the chunk.
    pub prev_msg_queue_hash: B256,
    /// Specified fork name.
    pub fork_name: String,
}

impl TryFromWithInterpreter<ChunkTask> for ChunkProvingTask {
    fn try_from_with_interpret(
        value: ChunkTask,
        interpreter: impl ChunkInterpreter,
    ) -> Result<Self> {
        let mut block_witnesses = Vec::new();
        for block_hash in value.block_hashes {
            let witness =
                interpreter.try_fetch_block_witness(block_hash, block_witnesses.last())?;
            block_witnesses.push(witness);
        }

        Ok(Self {
            block_witnesses,
            prev_msg_queue_hash: value.prev_msg_queue_hash,
            fork_name: value.fork_name,
        })
    }
}

/// Message indicating a sanity check failure.
const CHUNK_SANITY_MSG: &str = "chunk must have at least one block";

/// Proving task for the [`ChunkCircuit`][scroll_zkvm_chunk_circuit].
///
/// The identifier for a chunk proving task is:
/// - {first_block_number}-{last_block_number}
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
pub struct ChunkProvingTask {
    /// Witnesses for every block in the chunk.
    pub block_witnesses: Vec<BlockWitness>,
    /// The on-chain L1 msg queue hash before applying L1 msg txs from the chunk.
    pub prev_msg_queue_hash: B256,
    /// Specified fork name.
    pub fork_name: String,
}

#[derive(Clone, Debug)]
pub struct ChunkDetails {
    pub num_blocks: usize,
    pub num_txs: usize,
    pub total_gas_used: u64,
}

impl TryFrom<ChunkProvingTask> for ProvingTask {
    type Error = eyre::Error;

    fn try_from(value: ChunkProvingTask) -> Result<Self> {
        let witness = value.build_guest_input();

        Ok(ProvingTask {
            identifier: value.identifier(),
            fork_name: value.fork_name,
            aggregated_proofs: Vec::new(),
            serialized_witness: vec![to_rkyv_bytes::<RancorError>(&witness)?.to_vec()],
            vk: Vec::new(),
        })
    }
}

impl ChunkProvingTask {
    pub fn stats(&self) -> ChunkDetails {
        let num_blocks = self.block_witnesses.len();
        let num_txs = self
            .block_witnesses
            .iter()
            .map(|b| b.transaction.len())
            .sum::<usize>();
        let total_gas_used = self
            .block_witnesses
            .iter()
            .map(|b| b.header.gas_used)
            .sum::<u64>();

        ChunkDetails {
            num_blocks,
            num_txs,
            total_gas_used,
        }
    }

    fn identifier(&self) -> String {
        assert!(!self.block_witnesses.is_empty(), "{CHUNK_SANITY_MSG}");

        let (first, last) = (
            self.block_witnesses
                .first()
                .expect(CHUNK_SANITY_MSG)
                .header
                .number,
            self.block_witnesses
                .last()
                .expect(CHUNK_SANITY_MSG)
                .header
                .number,
        );

        format!("{first}-{last}")
    }

    fn build_guest_input(&self) -> ChunkWitness {
        ChunkWitness {
            blocks: self.block_witnesses.to_vec(),
            prev_msg_queue_hash: self.prev_msg_queue_hash,
            fork_name: self.fork_name.to_lowercase().as_str().into(),
        }
    }

    fn insert_state(&mut self, node: sbv_primitives::Bytes) {
        self.block_witnesses[0].states.push(node);
    }
}

const MAX_FETCH_NODES_ATTEMPTS: usize = 15;

impl TryFromWithInterpreter<&mut ChunkProvingTask> for ChunkInfo {
    fn try_from_with_interpret(
        value: &mut ChunkProvingTask,
        interpreter: impl ChunkInterpreter,
    ) -> eyre::Result<Self> {
        use eyre::eyre;

        let err_prefix = format!(
            "metadata_with_prechecks for task_id={:?}",
            value.identifier()
        );

        if value.block_witnesses.is_empty() {
            return Err(eyre!(
                "{err_prefix}: chunk should contain at least one block",
            ));
        }

        // Resume from missing-node errors and keep the execution process going.
        let pattern = r"SparseTrieError\(BlindedNode \{ path: Nibbles\((0x[0-9a-fA-F]+)\), hash: (0x[0-9a-fA-F]+) \}\)";
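        // Illustrative example of an error string this pattern matches (the
        // concrete values are made up):
        //   "SparseTrieError(BlindedNode { path: Nibbles(0x0102), hash: 0xabcd...ef01 })"
        // Capture group 2 is the missing node's hash, which is then fetched via
        // the interpreter below.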
        let err_parse_re = regex::Regex::new(pattern)?;
        let mut attempts = 0;
        loop {
            match execute(&value.build_guest_input()) {
                Ok(chunk_info) => return Ok(chunk_info),
                Err(e) => {
                    if let Some(caps) = err_parse_re.captures(&e) {
                        let hash = caps[2].to_string();
                        tracing::debug!("missing trie hash {hash}");

                        attempts += 1;
                        if attempts >= MAX_FETCH_NODES_ATTEMPTS {
                            return Err(eyre!(
                                "failed to fetch nodes after {MAX_FETCH_NODES_ATTEMPTS} attempts: {e}"
                            ));
                        }

                        let node_hash =
                            hash.parse::<sbv_primitives::B256>().expect("should be hex");
                        let node = interpreter.try_fetch_storage_node(node_hash)?;
                        tracing::warn!("missing node fetched: {node}");
                        value.insert_state(node);
                    } else {
                        return Err(eyre!("{err_prefix}: {e}"));
                    }
                }
            }
        }
    }
}

26
crates/libzkp/src/tasks/chunk_interpreter.rs
Normal file
@@ -0,0 +1,26 @@
use eyre::Result;
use sbv_primitives::{types::BlockWitness, Bytes, B256};

/// An interpreter which is critical in translating chunk data, since we need to
/// grab block witness and storage node data (in rare cases) from external
/// sources.
pub trait ChunkInterpreter {
    fn try_fetch_block_witness(
        &self,
        _block_hash: B256,
        _prev_witness: Option<&BlockWitness>,
    ) -> Result<BlockWitness> {
        Err(eyre::eyre!("not implemented"))
    }
    fn try_fetch_storage_node(&self, _node_hash: B256) -> Result<Bytes> {
        Err(eyre::eyre!("not implemented"))
    }
}

pub trait TryFromWithInterpreter<T>: Sized {
    fn try_from_with_interpret(value: T, interpreter: impl ChunkInterpreter) -> Result<Self>;
}

pub struct DummyInterpreter {}

impl ChunkInterpreter for DummyInterpreter {}
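
// Illustrative custom interpreter (hypothetical, not part of this commit): a
// real implementation would back both fetch methods with an execution-node RPC.
// `L2Client` and its two methods are assumed stand-ins.
//
// struct RpcInterpreter { client: L2Client }
//
// impl ChunkInterpreter for RpcInterpreter {
//     fn try_fetch_block_witness(
//         &self,
//         block_hash: B256,
//         prev_witness: Option<&BlockWitness>,
//     ) -> Result<BlockWitness> {
//         self.client.witness_by_hash(block_hash, prev_witness)
//     }
//     fn try_fetch_storage_node(&self, node_hash: B256) -> Result<Bytes> {
//         self.client.trie_node_by_hash(node_hash)
//     }
// }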

53
crates/libzkp/src/utils.rs
Normal file
@@ -0,0 +1,53 @@
use std::{
    panic::{catch_unwind, AssertUnwindSafe},
    path::Path,
};

use git_version::git_version;
use serde::{
    de::{Deserialize, DeserializeOwned},
    Serialize,
};

use eyre::Result;

const GIT_VERSION: &str = git_version!(args = ["--abbrev=7", "--always"]);

/// Shortened git commit ref from [`scroll_zkvm_prover`].
pub(crate) fn short_git_version() -> String {
    let commit_version = GIT_VERSION.split('-').next_back().unwrap();

    // Check whether the bare commit object is used as a fallback.
    if commit_version.len() < 8 {
        commit_version.to_string()
    } else {
        commit_version[1..8].to_string()
    }
}
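
// Worked example (illustrative values): a `git describe` output such as
// "v4.4.89-23-gabc1234" yields "gabc1234" as the last '-'-separated component
// (length 8), so the leading 'g' is stripped to give "abc1234". With the
// `--always` fallback the output is already a bare short hash like "abc1234"
// (length 7 < 8) and is returned unchanged.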

/// Wrapper to read JSON that might be deeply nested.
pub(crate) fn read_json_deep<P: AsRef<Path>, T: DeserializeOwned>(path: P) -> Result<T> {
    let fd = std::fs::File::open(path)?;
    let mut deserializer = serde_json::Deserializer::from_reader(fd);
    deserializer.disable_recursion_limit();
    let deserializer = serde_stacker::Deserializer::new(&mut deserializer);
    Ok(Deserialize::deserialize(deserializer)?)
}

/// Serialize the provided type to JSON format and write to the given path.
pub(crate) fn write_json<P: AsRef<Path>, T: Serialize>(path: P, value: &T) -> Result<()> {
    let mut writer = std::fs::File::create(path)?;
    Ok(serde_json::to_writer(&mut writer, value)?)
}

pub(crate) fn panic_catch<F: FnOnce() -> R, R>(f: F) -> Result<R, String> {
    catch_unwind(AssertUnwindSafe(f)).map_err(|err| {
        if let Some(s) = err.downcast_ref::<String>() {
            s.to_string()
        } else if let Some(s) = err.downcast_ref::<&str>() {
            s.to_string()
        } else {
            format!("unable to get panic info {err:?}")
        }
    })
}

78
crates/libzkp/src/verifier.rs
Normal file
@@ -0,0 +1,78 @@
#![allow(static_mut_refs)]

mod euclidv2;
use euclidv2::EuclidV2Verifier;
use eyre::Result;
use serde::{Deserialize, Serialize};
use std::{cell::OnceCell, path::Path, rc::Rc};

#[derive(Debug, Clone, Copy, PartialEq)]
pub enum TaskType {
    Chunk,
    Batch,
    Bundle,
}

impl std::fmt::Display for TaskType {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Chunk => write!(f, "chunk"),
            Self::Batch => write!(f, "batch"),
            Self::Bundle => write!(f, "bundle"),
        }
    }
}

#[derive(Debug, Serialize, Deserialize)]
pub struct VKDump {
    pub chunk_vk: String,
    pub batch_vk: String,
    pub bundle_vk: String,
}
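
// Shape of the serialized dump written by `ProofVerifier::dump_vk`
// (illustrative values):
//
// {
//   "chunk_vk":  "<base64>",
//   "batch_vk":  "<base64>",
//   "bundle_vk": "<base64>"
// }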

pub trait ProofVerifier {
    fn verify(&self, task_type: TaskType, proof: Vec<u8>) -> Result<bool>;
    fn dump_vk(&self, file: &Path);
}

#[derive(Debug, Serialize, Deserialize)]
pub struct CircuitConfig {
    pub fork_name: String,
    pub assets_path: String,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct VerifierConfig {
    pub high_version_circuit: CircuitConfig,
}

type HardForkName = String;

struct VerifierPair(HardForkName, Rc<Box<dyn ProofVerifier>>);
static mut VERIFIER_HIGH: OnceCell<VerifierPair> = OnceCell::new();
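
// Safety contract: `init` must be called exactly once, before any call to
// `get_verifier`, and from a single thread. `VERIFIER_HIGH` is a `static mut`,
// and the result of `set(..)` is consumed via `unwrap_unchecked`, so a second
// initialization would be undefined behavior.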
pub fn init(config: VerifierConfig) {
    let verifier = EuclidV2Verifier::new(&config.high_version_circuit.assets_path);
    unsafe {
        VERIFIER_HIGH
            .set(VerifierPair(
                config.high_version_circuit.fork_name,
                Rc::new(Box::new(verifier)),
            ))
            .unwrap_unchecked();
    }
}

pub fn get_verifier(fork_name: &str) -> Result<Rc<Box<dyn ProofVerifier>>> {
    unsafe {
        if let Some(verifier) = VERIFIER_HIGH.get() {
            if verifier.0 == fork_name {
                return Ok(verifier.1.clone());
            }
        }
    }
    Err(eyre::eyre!(
        "failed to get verifier, key not found, {}",
        fork_name
    ))
}

66
crates/libzkp/src/verifier/euclidv2.rs
Normal file
@@ -0,0 +1,66 @@
use super::{ProofVerifier, TaskType, VKDump};

use eyre::Result;

use crate::{
    proofs::{AsRootProof, BatchProof, BundleProof, ChunkProof, IntoEvmProof},
    utils::panic_catch,
};
use scroll_zkvm_verifier_euclid::verifier::{BatchVerifier, BundleVerifierEuclidV2, ChunkVerifier};
use std::{fs::File, path::Path};

pub struct EuclidV2Verifier {
    chunk_verifier: ChunkVerifier,
    batch_verifier: BatchVerifier,
    bundle_verifier: BundleVerifierEuclidV2,
}

impl EuclidV2Verifier {
    pub fn new(assets_dir: &str) -> Self {
        let verifier_bin = Path::new(assets_dir).join("verifier.bin");
        let config = Path::new(assets_dir).join("root-verifier-vm-config");
        let exe = Path::new(assets_dir).join("root-verifier-committed-exe");

        Self {
            chunk_verifier: ChunkVerifier::setup(&config, &exe, &verifier_bin)
                .expect("Setting up chunk verifier"),
            batch_verifier: BatchVerifier::setup(&config, &exe, &verifier_bin)
                .expect("Setting up batch verifier"),
            bundle_verifier: BundleVerifierEuclidV2::setup(&config, &exe, &verifier_bin)
                .expect("Setting up bundle verifier"),
        }
    }
}

impl ProofVerifier for EuclidV2Verifier {
    fn verify(&self, task_type: super::TaskType, proof: Vec<u8>) -> Result<bool> {
        panic_catch(|| match task_type {
            TaskType::Chunk => {
                let proof = serde_json::from_slice::<ChunkProof>(proof.as_slice()).unwrap();
                self.chunk_verifier.verify_proof(proof.as_root_proof())
            }
            TaskType::Batch => {
                let proof = serde_json::from_slice::<BatchProof>(proof.as_slice()).unwrap();
                self.batch_verifier.verify_proof(proof.as_root_proof())
            }
            TaskType::Bundle => {
                let proof = serde_json::from_slice::<BundleProof>(proof.as_slice()).unwrap();
                self.bundle_verifier
                    .verify_proof_evm(&proof.into_evm_proof())
            }
        })
        .map_err(|err_str: String| eyre::eyre!("{err_str}"))
    }

    fn dump_vk(&self, file: &Path) {
        use base64::{prelude::BASE64_STANDARD, Engine};
        let f = File::create(file).expect("Failed to open file to dump VK");

        let dump = VKDump {
            chunk_vk: BASE64_STANDARD.encode(self.chunk_verifier.get_app_vk()),
            batch_vk: BASE64_STANDARD.encode(self.batch_verifier.get_app_vk()),
            bundle_vk: BASE64_STANDARD.encode(self.bundle_verifier.get_app_vk()),
        };
        serde_json::to_writer(f, &dump).expect("Failed to dump VK");
    }
}