Mirror of https://github.com/scroll-tech/scroll.git (synced 2026-01-12 15:38:18 -05:00)

Compare commits: 2 commits, develop ... feat/upgra
| Author | SHA1 | Date |
|---|---|---|
| | d25bca8d09 | |
| | 27eaebd6e8 | |
prover/Cargo.lock (generated): 671 lines changed. File diff suppressed because it is too large.
@@ -29,8 +29,8 @@ ethers-core = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "
ethers-providers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "develop", default-features = false, features = ["loader_halo2", "loader_evm", "halo2-pse"] }
-prover_darwin = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.12.2", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
prover_darwin_v2 = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.13.1", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
+prover_euclid = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "feat/hybrid-snark-agg", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
base64 = "0.13.1"
reqwest = { version = "0.12.4", features = ["gzip"] }
reqwest-middleware = "0.3"
@@ -1 +0,0 @@
-nightly-2023-12-03
prover/rust-toolchain.toml (new file, 2 lines)
@@ -0,0 +1,2 @@
+[toolchain]
+channel = "nightly-2024-01-25"
@@ -1,4 +1,5 @@
#![feature(lazy_cell)]
+#![allow(internal_features)]
#![feature(core_intrinsics)]

mod config;
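Note: `core_intrinsics` stays enabled because the handlers below call `std::intrinsics::type_name::<Self>()`; on recent nightlies that feature is flagged as internal, which is presumably why `#![allow(internal_features)]` appears alongside the toolchain bump. A stable alternative, shown only as an illustration (not part of this change), would be `std::any::type_name`:

```rust
// Illustrative only: the stable std::any::type_name gives a comparable
// fully-qualified type name without the core_intrinsics feature.
struct EuclidHandler;

fn main() {
    let class_name = std::any::type_name::<EuclidHandler>();
    println!("calling get_params_map from {class_name}");
}
```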
@@ -29,7 +29,10 @@ impl<'a> Prover<'a> {
        let keystore_path = &config.keystore_path;
        let keystore_password = &config.keystore_password;

-        let geth_client = if config.prover_type == ProverType::Chunk {
+        let geth_client = if matches!(
+            config.prover_type,
+            ProverType::ChunkHalo2 | ProverType::ChunkSp1 | ProverType::ChunkAll
+        ) {
            Some(Rc::new(RefCell::new(
                GethClient::new(
                    &config.prover_name,
@@ -73,7 +76,10 @@ impl<'a> Prover<'a> {
            prover_height: None,
        };

-        if self.config.prover_type == ProverType::Chunk {
+        if matches!(
+            self.config.prover_type,
+            ProverType::ChunkHalo2 | ProverType::ChunkSp1 | ProverType::ChunkAll
+        ) {
            let latest_block_number = self.get_latest_block_number_value()?;
            if let Some(v) = latest_block_number {
                if v.as_u64() == 0 {
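Note: both checks in prover.rs are widened from a single equality test against `ProverType::Chunk` to a `matches!` over every chunk-capable variant, so `ChunkHalo2`, `ChunkSp1`, and `ChunkAll` provers all get a `GethClient` and the genesis-block sanity check. A minimal, self-contained sketch of the same pattern (the enum and helper here are local to the example, not the crate's own):

```rust
// Illustrative sketch of the matches!-based capability check used above.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ProverType {
    ChunkHalo2,
    ChunkSp1,
    ChunkAll,
    Batch,
}

// True for every prover type that proves chunks and therefore needs an
// L2 geth client; false for batch-only provers.
fn needs_geth_client(prover_type: ProverType) -> bool {
    matches!(
        prover_type,
        ProverType::ChunkHalo2 | ProverType::ChunkSp1 | ProverType::ChunkAll
    )
}

fn main() {
    assert!(needs_geth_client(ProverType::ChunkAll));
    assert!(!needs_geth_client(ProverType::Batch));
}
```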
@@ -8,7 +8,8 @@ pub type CommonHash = H256;
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum TaskType {
    Undefined,
-    Chunk,
+    ChunkHalo2,
+    ChunkSp1,
    Batch,
    Bundle,
}
@@ -16,9 +17,10 @@ pub enum TaskType {
impl TaskType {
    fn from_u8(v: u8) -> Self {
        match v {
-            1 => TaskType::Chunk,
+            1 => TaskType::ChunkHalo2,
            2 => TaskType::Batch,
            3 => TaskType::Bundle,
+            4 => TaskType::ChunkSp1,
            _ => TaskType::Undefined,
        }
    }
@@ -31,9 +33,10 @@ impl Serialize for TaskType {
    {
        match *self {
            TaskType::Undefined => serializer.serialize_u8(0),
-            TaskType::Chunk => serializer.serialize_u8(1),
+            TaskType::ChunkHalo2 => serializer.serialize_u8(1),
            TaskType::Batch => serializer.serialize_u8(2),
            TaskType::Bundle => serializer.serialize_u8(3),
+            TaskType::ChunkSp1 => serializer.serialize_u8(4),
        }
    }
}
@@ -56,15 +59,19 @@ impl Default for TaskType {

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ProverType {
-    Chunk,
+    ChunkHalo2,
+    ChunkSp1,
+    ChunkAll,
    Batch,
}

impl ProverType {
    fn from_u8(v: u8) -> Self {
        match v {
-            1 => ProverType::Chunk,
+            1 => ProverType::ChunkHalo2,
            2 => ProverType::Batch,
+            3 => ProverType::ChunkSp1,
+            4 => ProverType::ChunkAll,
            _ => {
                panic!("invalid prover_type")
            }
@@ -73,8 +80,10 @@ impl ProverType {

    pub fn to_u8(self) -> u8 {
        match self {
-            ProverType::Chunk => 1,
+            ProverType::ChunkHalo2 => 1,
            ProverType::Batch => 2,
+            ProverType::ChunkSp1 => 3,
+            ProverType::ChunkAll => 4,
        }
    }
}
@@ -85,8 +94,10 @@ impl Serialize for ProverType {
        S: Serializer,
    {
        match *self {
-            ProverType::Chunk => serializer.serialize_u8(1),
+            ProverType::ChunkHalo2 => serializer.serialize_u8(1),
            ProverType::Batch => serializer.serialize_u8(2),
+            ProverType::ChunkSp1 => serializer.serialize_u8(3),
+            ProverType::ChunkAll => serializer.serialize_u8(4),
        }
    }
}
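Note: the numeric wire encoding stays backward compatible: `ChunkHalo2` reuses value 1 (previously `Chunk`), `Batch` keeps 2, and the new `ChunkSp1`/`ChunkAll` take 3 and 4; `TaskType` follows the same idea, with `ChunkHalo2` keeping 1 and `ChunkSp1` taking 4. A standalone round-trip sketch (local types, not the crate's own `Serialize` impls):

```rust
// Standalone illustration of the u8 <-> ProverType mapping introduced above.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ProverType {
    ChunkHalo2, // wire value 1, formerly `Chunk`
    Batch,      // wire value 2
    ChunkSp1,   // wire value 3 (new)
    ChunkAll,   // wire value 4 (new)
}

impl ProverType {
    fn to_u8(self) -> u8 {
        match self {
            ProverType::ChunkHalo2 => 1,
            ProverType::Batch => 2,
            ProverType::ChunkSp1 => 3,
            ProverType::ChunkAll => 4,
        }
    }

    fn from_u8(v: u8) -> Self {
        match v {
            1 => ProverType::ChunkHalo2,
            2 => ProverType::Batch,
            3 => ProverType::ChunkSp1,
            4 => ProverType::ChunkAll,
            _ => panic!("invalid prover_type"),
        }
    }
}

fn main() {
    // A coordinator that still sends the old value 1 is now read as ChunkHalo2.
    assert_eq!(ProverType::from_u8(1), ProverType::ChunkHalo2);
    let all = [ProverType::ChunkHalo2, ProverType::Batch, ProverType::ChunkSp1, ProverType::ChunkAll];
    for t in all {
        assert_eq!(ProverType::from_u8(t.to_u8()), t);
    }
}
```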
@@ -26,7 +26,9 @@ pub fn log_init(log_file: Option<String>) {

pub fn get_task_types(prover_type: ProverType) -> Vec<TaskType> {
    match prover_type {
-        ProverType::Chunk => vec![TaskType::Chunk],
+        ProverType::ChunkHalo2 => vec![TaskType::ChunkHalo2],
+        ProverType::ChunkSp1 => vec![TaskType::ChunkSp1],
+        ProverType::ChunkAll => vec![TaskType::ChunkHalo2, TaskType::ChunkSp1],
        ProverType::Batch => vec![TaskType::Batch, TaskType::Bundle],
    }
}
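Note: `get_task_types` is the one place where `ChunkAll` fans out to two task types, letting a single prover process accept both halo2 and SP1 chunk tasks. A small self-contained sketch of how that mapping can drive task filtering (the `can_handle` helper is illustrative, not part of the diff):

```rust
// Standalone sketch: deciding whether a prover type can take a given task.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum TaskType { ChunkHalo2, ChunkSp1, Batch, Bundle }

#[derive(Debug, Clone, Copy)]
enum ProverType { ChunkHalo2, ChunkSp1, ChunkAll, Batch }

fn get_task_types(prover_type: ProverType) -> Vec<TaskType> {
    match prover_type {
        ProverType::ChunkHalo2 => vec![TaskType::ChunkHalo2],
        ProverType::ChunkSp1 => vec![TaskType::ChunkSp1],
        // ChunkAll subscribes to both chunk task flavours.
        ProverType::ChunkAll => vec![TaskType::ChunkHalo2, TaskType::ChunkSp1],
        ProverType::Batch => vec![TaskType::Batch, TaskType::Bundle],
    }
}

// Hypothetical helper built on top of the mapping above.
fn can_handle(prover_type: ProverType, task_type: TaskType) -> bool {
    get_task_types(prover_type).contains(&task_type)
}

fn main() {
    assert!(can_handle(ProverType::ChunkAll, TaskType::ChunkSp1));
    assert!(!can_handle(ProverType::ChunkHalo2, TaskType::Batch));
}
```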
@@ -1,6 +1,6 @@
mod common;
-mod darwin;
mod darwin_v2;
+mod euclid;

use super::geth_client::GethClient;
use crate::{
@@ -9,8 +9,8 @@ use crate::{
    utils::get_task_types,
};
use anyhow::{bail, Result};
-use darwin::DarwinHandler;
use darwin_v2::DarwinV2Handler;
+use euclid::EuclidHandler;
use std::{cell::RefCell, collections::HashMap, rc::Rc};

type HardForkName = String;
@@ -61,7 +61,7 @@ impl<'a> CircuitsHandlerProvider<'a> {
                    &config.low_version_circuit.hard_fork_name
                );
                AssetsDirEnvConfig::enable_first();
-                DarwinHandler::new(
+                DarwinV2Handler::new(
                    prover_type,
                    &config.low_version_circuit.params_path,
                    &config.low_version_circuit.assets_path,
@@ -84,7 +84,7 @@ impl<'a> CircuitsHandlerProvider<'a> {
                    &config.high_version_circuit.hard_fork_name
                );
                AssetsDirEnvConfig::enable_second();
-                DarwinV2Handler::new(
+                EuclidHandler::new(
                    prover_type,
                    &config.high_version_circuit.params_path,
                    &config.high_version_circuit.assets_path,
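Note: the provider's two circuit slots each move forward one fork: the low-version slot now builds a `DarwinV2Handler` and the high-version slot the new `EuclidHandler`, while the old `DarwinHandler` (whole file deleted below) disappears. A simplified sketch of that fork-name-to-constructor dispatch, with local stand-in types (the real provider also passes a prover type and the params/assets paths, and the fork names here are illustrative):

```rust
use std::collections::HashMap;

// Local stand-in for the crate's CircuitsHandler trait.
trait CircuitsHandler {
    fn name(&self) -> &'static str;
}

struct DarwinV2Handler;
struct EuclidHandler;

impl CircuitsHandler for DarwinV2Handler {
    fn name(&self) -> &'static str { "darwin_v2 (low-version circuit)" }
}
impl CircuitsHandler for EuclidHandler {
    fn name(&self) -> &'static str { "euclid (high-version circuit)" }
}

fn main() {
    // Hard-fork name -> handler constructor, mirroring CircuitsHandlerProvider.
    let mut builders: HashMap<&str, fn() -> Box<dyn CircuitsHandler>> = HashMap::new();
    builders.insert("darwinV2", || Box::new(DarwinV2Handler));
    builders.insert("euclid", || Box::new(EuclidHandler));

    let handler = builders["euclid"]();
    println!("selected handler: {}", handler.name());
}
```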
@@ -1,467 +0,0 @@
use super::{common::*, CircuitsHandler};
use crate::{
    geth_client::GethClient,
    types::{ProverType, TaskType},
};
use anyhow::{bail, Context, Ok, Result};
use once_cell::sync::Lazy;
use serde::Deserialize;

use crate::types::{CommonHash, Task};
use std::{cell::RefCell, cmp::Ordering, env, rc::Rc};

use prover_darwin::{
    aggregator::Prover as BatchProver,
    check_chunk_hashes,
    common::Prover as CommonProver,
    config::{AGG_DEGREES, ZKEVM_DEGREES},
    zkevm::Prover as ChunkProver,
    BatchProof, BatchProvingTask, BlockTrace, BundleProof, BundleProvingTask, ChunkInfo,
    ChunkProof, ChunkProvingTask,
};

// Only used for debugging.
static OUTPUT_DIR: Lazy<Option<String>> = Lazy::new(|| env::var("PROVER_OUTPUT_DIR").ok());

#[derive(Debug, Clone, Deserialize)]
pub struct BatchTaskDetail {
    pub chunk_infos: Vec<ChunkInfo>,
    #[serde(flatten)]
    pub batch_proving_task: BatchProvingTask,
}

type BundleTaskDetail = BundleProvingTask;

#[derive(Debug, Clone, Deserialize)]
pub struct ChunkTaskDetail {
    pub block_hashes: Vec<CommonHash>,
}

fn get_block_number(block_trace: &BlockTrace) -> Option<u64> {
    block_trace.header.number.map(|n| n.as_u64())
}

#[derive(Default)]
pub struct DarwinHandler {
    chunk_prover: Option<RefCell<ChunkProver<'static>>>,
    batch_prover: Option<RefCell<BatchProver<'static>>>,

    geth_client: Option<Rc<RefCell<GethClient>>>,
}

impl DarwinHandler {
    pub fn new_multi(
        prover_types: Vec<ProverType>,
        params_dir: &str,
        assets_dir: &str,
        geth_client: Option<Rc<RefCell<GethClient>>>,
    ) -> Result<Self> {
        let class_name = std::intrinsics::type_name::<Self>();
        let prover_types_set = prover_types
            .into_iter()
            .collect::<std::collections::HashSet<ProverType>>();
        let mut handler = Self {
            batch_prover: None,
            chunk_prover: None,
            geth_client,
        };
        let degrees: Vec<u32> = get_degrees(&prover_types_set, |prover_type| match prover_type {
            ProverType::Chunk => ZKEVM_DEGREES.clone(),
            ProverType::Batch => AGG_DEGREES.clone(),
        });
        let params_map = get_params_map_instance(|| {
            log::info!(
                "calling get_params_map from {}, prover_types: {:?}, degrees: {:?}",
                class_name,
                prover_types_set,
                degrees
            );
            CommonProver::load_params_map(params_dir, &degrees)
        });
        for prover_type in prover_types_set {
            match prover_type {
                ProverType::Chunk => {
                    handler.chunk_prover = Some(RefCell::new(ChunkProver::from_params_and_assets(
                        params_map, assets_dir,
                    )));
                }
                ProverType::Batch => {
                    handler.batch_prover = Some(RefCell::new(BatchProver::from_params_and_assets(
                        params_map, assets_dir,
                    )))
                }
            }
        }
        Ok(handler)
    }

    pub fn new(
        prover_type: ProverType,
        params_dir: &str,
        assets_dir: &str,
        geth_client: Option<Rc<RefCell<GethClient>>>,
    ) -> Result<Self> {
        Self::new_multi(vec![prover_type], params_dir, assets_dir, geth_client)
    }

    fn gen_chunk_proof_raw(&self, chunk_trace: Vec<BlockTrace>) -> Result<ChunkProof> {
        if let Some(prover) = self.chunk_prover.as_ref() {
            let chunk = ChunkProvingTask::from(chunk_trace);

            let chunk_proof =
                prover
                    .borrow_mut()
                    .gen_chunk_proof(chunk, None, None, self.get_output_dir())?;

            return Ok(chunk_proof);
        }
        unreachable!("please check errors in proof_type logic")
    }

    fn gen_chunk_proof(&self, task: &crate::types::Task) -> Result<String> {
        let chunk_trace = self.gen_chunk_traces(task)?;
        let chunk_proof = self.gen_chunk_proof_raw(chunk_trace)?;
        Ok(serde_json::to_string(&chunk_proof)?)
    }

    fn gen_batch_proof_raw(&self, batch_task_detail: BatchTaskDetail) -> Result<BatchProof> {
        if let Some(prover) = self.batch_prover.as_ref() {
            let chunk_hashes_proofs: Vec<(ChunkInfo, ChunkProof)> = batch_task_detail
                .chunk_infos
                .clone()
                .into_iter()
                .zip(batch_task_detail.batch_proving_task.chunk_proofs.clone())
                .collect();

            let chunk_proofs: Vec<ChunkProof> =
                chunk_hashes_proofs.iter().map(|t| t.1.clone()).collect();

            let is_valid = prover.borrow_mut().check_protocol_of_chunks(&chunk_proofs);

            if !is_valid {
                bail!("non-match chunk protocol")
            }
            check_chunk_hashes("", &chunk_hashes_proofs).context("failed to check chunk info")?;
            let batch_proof = prover.borrow_mut().gen_batch_proof(
                batch_task_detail.batch_proving_task,
                None,
                self.get_output_dir(),
            )?;

            return Ok(batch_proof);
        }
        unreachable!("please check errors in proof_type logic")
    }

    fn gen_batch_proof(&self, task: &crate::types::Task) -> Result<String> {
        log::info!("[circuit] gen_batch_proof for task {}", task.id);

        let batch_task_detail: BatchTaskDetail = serde_json::from_str(&task.task_data)?;
        let batch_proof = self.gen_batch_proof_raw(batch_task_detail)?;
        Ok(serde_json::to_string(&batch_proof)?)
    }

    fn gen_bundle_proof_raw(&self, bundle_task_detail: BundleTaskDetail) -> Result<BundleProof> {
        if let Some(prover) = self.batch_prover.as_ref() {
            let bundle_proof = prover.borrow_mut().gen_bundle_proof(
                bundle_task_detail,
                None,
                self.get_output_dir(),
            )?;

            return Ok(bundle_proof);
        }
        unreachable!("please check errors in proof_type logic")
    }

    fn gen_bundle_proof(&self, task: &crate::types::Task) -> Result<String> {
        log::info!("[circuit] gen_bundle_proof for task {}", task.id);
        let bundle_task_detail: BundleTaskDetail = serde_json::from_str(&task.task_data)?;
        let bundle_proof = self.gen_bundle_proof_raw(bundle_task_detail)?;
        Ok(serde_json::to_string(&bundle_proof)?)
    }

    fn get_output_dir(&self) -> Option<&str> {
        OUTPUT_DIR.as_deref()
    }

    fn gen_chunk_traces(&self, task: &Task) -> Result<Vec<BlockTrace>> {
        let chunk_task_detail: ChunkTaskDetail = serde_json::from_str(&task.task_data)?;
        self.get_sorted_traces_by_hashes(&chunk_task_detail.block_hashes)
    }

    fn get_sorted_traces_by_hashes(&self, block_hashes: &[CommonHash]) -> Result<Vec<BlockTrace>> {
        if block_hashes.is_empty() {
            log::error!("[prover] failed to get sorted traces: block_hashes are empty");
            bail!("block_hashes are empty")
        }

        let mut block_traces = Vec::new();
        for hash in block_hashes.iter() {
            let trace = self
                .geth_client
                .as_ref()
                .unwrap()
                .borrow_mut()
                .get_block_trace_by_hash(hash)?;
            block_traces.push(trace);
        }

        block_traces.sort_by(|a, b| {
            if get_block_number(a).is_none() {
                Ordering::Less
            } else if get_block_number(b).is_none() {
                Ordering::Greater
            } else {
                get_block_number(a)
                    .unwrap()
                    .cmp(&get_block_number(b).unwrap())
            }
        });

        let block_numbers: Vec<u64> = block_traces
            .iter()
            .map(|trace| get_block_number(trace).unwrap_or(0))
            .collect();
        let mut i = 0;
        while i < block_numbers.len() - 1 {
            if block_numbers[i] + 1 != block_numbers[i + 1] {
                log::error!(
                    "[prover] block numbers are not continuous, got {} and {}",
                    block_numbers[i],
                    block_numbers[i + 1]
                );
                bail!(
                    "block numbers are not continuous, got {} and {}",
                    block_numbers[i],
                    block_numbers[i + 1]
                )
            }
            i += 1;
        }

        Ok(block_traces)
    }
}

impl CircuitsHandler for DarwinHandler {
    fn get_vk(&self, task_type: TaskType) -> Option<Vec<u8>> {
        match task_type {
            TaskType::Chunk => self
                .chunk_prover
                .as_ref()
                .and_then(|prover| prover.borrow().get_vk()),
            TaskType::Batch => self
                .batch_prover
                .as_ref()
                .and_then(|prover| prover.borrow().get_batch_vk()),
            TaskType::Bundle => self
                .batch_prover
                .as_ref()
                .and_then(|prover| prover.borrow().get_bundle_vk()),
            _ => unreachable!(),
        }
    }

    fn get_proof_data(&self, task_type: TaskType, task: &crate::types::Task) -> Result<String> {
        match task_type {
            TaskType::Chunk => self.gen_chunk_proof(task),
            TaskType::Batch => self.gen_batch_proof(task),
            TaskType::Bundle => self.gen_bundle_proof(task),
            _ => unreachable!(),
        }
    }
}

// =================================== tests module ========================================

#[cfg(test)]
mod tests {
    use super::*;
    use crate::zk_circuits_handler::utils::encode_vk;
    use prover_darwin::utils::chunk_trace_to_witness_block;
    use std::{path::PathBuf, sync::LazyLock};

    #[ctor::ctor]
    fn init() {
        crate::utils::log_init(None);
        log::info!("logger initialized");
    }

    static DEFAULT_WORK_DIR: &str = "/assets";
    static WORK_DIR: LazyLock<String> = LazyLock::new(|| {
        std::env::var("DARWIN_TEST_DIR")
            .unwrap_or(String::from(DEFAULT_WORK_DIR))
            .trim_end_matches('/')
            .to_string()
    });
    static PARAMS_PATH: LazyLock<String> = LazyLock::new(|| format!("{}/test_params", *WORK_DIR));
    static ASSETS_PATH: LazyLock<String> = LazyLock::new(|| format!("{}/test_assets", *WORK_DIR));
    static PROOF_DUMP_PATH: LazyLock<String> =
        LazyLock::new(|| format!("{}/proof_data", *WORK_DIR));
    static BATCH_DIR_PATH: LazyLock<String> =
        LazyLock::new(|| format!("{}/traces/batch_24", *WORK_DIR));
    static BATCH_VK_PATH: LazyLock<String> =
        LazyLock::new(|| format!("{}/test_assets/vk_batch.vkey", *WORK_DIR));
    static CHUNK_VK_PATH: LazyLock<String> =
        LazyLock::new(|| format!("{}/test_assets/vk_chunk.vkey", *WORK_DIR));

    #[test]
    fn it_works() {
        let result = true;
        assert!(result);
    }

    #[test]
    fn test_circuits() -> Result<()> {
        let bi_handler = DarwinHandler::new_multi(
            vec![ProverType::Chunk, ProverType::Batch],
            &PARAMS_PATH,
            &ASSETS_PATH,
            None,
        )?;

        let chunk_handler = bi_handler;
        let chunk_vk = chunk_handler.get_vk(TaskType::Chunk).unwrap();

        check_vk(TaskType::Chunk, chunk_vk, "chunk vk must be available");
        let chunk_dir_paths = get_chunk_dir_paths()?;
        log::info!("chunk_dir_paths, {:?}", chunk_dir_paths);
        let mut chunk_infos = vec![];
        let mut chunk_proofs = vec![];
        for (id, chunk_path) in chunk_dir_paths.into_iter().enumerate() {
            let chunk_id = format!("chunk_proof{}", id + 1);
            log::info!("start to process {chunk_id}");
            let chunk_trace = read_chunk_trace(chunk_path)?;

            let chunk_info = traces_to_chunk_info(chunk_trace.clone())?;
            chunk_infos.push(chunk_info);

            log::info!("start to prove {chunk_id}");
            let chunk_proof = chunk_handler.gen_chunk_proof_raw(chunk_trace)?;
            let proof_data = serde_json::to_string(&chunk_proof)?;
            dump_proof(chunk_id, proof_data)?;
            chunk_proofs.push(chunk_proof);
        }

        let batch_handler = chunk_handler;
        let batch_vk = batch_handler.get_vk(TaskType::Batch).unwrap();
        check_vk(TaskType::Batch, batch_vk, "batch vk must be available");
        let batch_task_detail = make_batch_task_detail(chunk_infos, chunk_proofs);
        log::info!("start to prove batch");
        let batch_proof = batch_handler.gen_batch_proof_raw(batch_task_detail)?;
        let proof_data = serde_json::to_string(&batch_proof)?;
        dump_proof("batch_proof".to_string(), proof_data)?;

        Ok(())
    }

    fn make_batch_task_detail(_: Vec<ChunkInfo>, _: Vec<ChunkProof>) -> BatchTaskDetail {
        todo!();
        // BatchTaskDetail {
        //     chunk_infos,
        //     batch_proving_task: BatchProvingTask {
        //         parent_batch_hash: todo!(),
        //         parent_state_root: todo!(),
        //         batch_header: todo!(),
        //         chunk_proofs,
        //     },
        // }
    }

    fn check_vk(proof_type: TaskType, vk: Vec<u8>, info: &str) {
        log::info!("check_vk, {:?}", proof_type);
        let vk_from_file = read_vk(proof_type).unwrap();
        assert_eq!(vk_from_file, encode_vk(vk), "{info}")
    }

    fn read_vk(proof_type: TaskType) -> Result<String> {
        log::info!("read_vk, {:?}", proof_type);
        let vk_file = match proof_type {
            TaskType::Chunk => CHUNK_VK_PATH.clone(),
            TaskType::Batch => BATCH_VK_PATH.clone(),
            TaskType::Bundle => todo!(),
            TaskType::Undefined => unreachable!(),
        };

        let data = std::fs::read(vk_file)?;
        Ok(encode_vk(data))
    }

    fn read_chunk_trace(path: PathBuf) -> Result<Vec<BlockTrace>> {
        log::info!("read_chunk_trace, {:?}", path);
        let mut chunk_trace: Vec<BlockTrace> = vec![];

        fn read_block_trace(file: &PathBuf) -> Result<BlockTrace> {
            let f = std::fs::File::open(file)?;
            Ok(serde_json::from_reader(&f)?)
        }

        if path.is_dir() {
            let entries = std::fs::read_dir(&path)?;
            let mut files: Vec<String> = entries
                .into_iter()
                .filter_map(|e| {
                    if e.is_err() {
                        return None;
                    }
                    let entry = e.unwrap();
                    if entry.path().is_dir() {
                        return None;
                    }
                    if let Result::Ok(file_name) = entry.file_name().into_string() {
                        Some(file_name)
                    } else {
                        None
                    }
                })
                .collect();
            files.sort();

            log::info!("files in chunk {:?} is {:?}", path, files);
            for file in files {
                let block_trace = read_block_trace(&path.join(file))?;
                chunk_trace.push(block_trace);
            }
        } else {
            let block_trace = read_block_trace(&path)?;
            chunk_trace.push(block_trace);
        }
        Ok(chunk_trace)
    }

    fn get_chunk_dir_paths() -> Result<Vec<PathBuf>> {
        let batch_path = PathBuf::from(BATCH_DIR_PATH.clone());
        let entries = std::fs::read_dir(&batch_path)?;
        let mut files: Vec<String> = entries
            .filter_map(|e| {
                if e.is_err() {
                    return None;
                }
                let entry = e.unwrap();
                if entry.path().is_dir() {
                    if let Result::Ok(file_name) = entry.file_name().into_string() {
                        Some(file_name)
                    } else {
                        None
                    }
                } else {
                    None
                }
            })
            .collect();
        files.sort();
        log::info!("files in batch {:?} is {:?}", batch_path, files);
        Ok(files.into_iter().map(|f| batch_path.join(f)).collect())
    }

    fn traces_to_chunk_info(chunk_trace: Vec<BlockTrace>) -> Result<ChunkInfo> {
        let witness_block = chunk_trace_to_witness_block(chunk_trace)?;
        Ok(ChunkInfo::from_witness_block(&witness_block, false))
    }

    fn dump_proof(id: String, proof_data: String) -> Result<()> {
        let dump_path = PathBuf::from(PROOF_DUMP_PATH.clone());
        Ok(std::fs::write(dump_path.join(id), proof_data)?)
    }
}
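Note: the deleted `DarwinHandler` above and the new `EuclidHandler` below share the same trace-fetching discipline: resolve each block hash through the geth client, sort by block number, then reject any gap in the numbering. A standalone sketch of just that continuity check, using plain `u64`s instead of full block traces:

```rust
// Standalone sketch of the sort-then-check-continuity step in
// get_sorted_traces_by_hashes.
fn sorted_continuous(mut block_numbers: Vec<u64>) -> Result<Vec<u64>, String> {
    block_numbers.sort();
    for pair in block_numbers.windows(2) {
        if pair[0] + 1 != pair[1] {
            return Err(format!(
                "block numbers are not continuous, got {} and {}",
                pair[0], pair[1]
            ));
        }
    }
    Ok(block_numbers)
}

fn main() {
    assert!(sorted_continuous(vec![102, 100, 101]).is_ok());
    assert!(sorted_continuous(vec![100, 102]).is_err()); // gap at 101
}
```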
@@ -24,7 +24,7 @@ use prover_darwin_v2::{
static OUTPUT_DIR: Lazy<Option<String>> = Lazy::new(|| env::var("PROVER_OUTPUT_DIR").ok());

#[derive(Debug, Clone, Deserialize)]
-pub struct BatchTaskDetail {
+struct BatchTaskDetail {
    pub chunk_infos: Vec<ChunkInfo>,
    #[serde(flatten)]
    pub batch_proving_task: BatchProvingTask,
@@ -66,8 +66,9 @@ impl DarwinV2Handler {
            geth_client,
        };
        let degrees: Vec<u32> = get_degrees(&prover_types_set, |prover_type| match prover_type {
-            ProverType::Chunk => ZKEVM_DEGREES.clone(),
+            ProverType::ChunkHalo2 => ZKEVM_DEGREES.clone(),
            ProverType::Batch => AGG_DEGREES.clone(),
+            _ => unreachable!("Darwin supports prover types in [ChunkHalo2 and Batch] only"),
        });
        let params_map = get_params_map_instance(|| {
            log::info!(
@@ -80,7 +81,7 @@ impl DarwinV2Handler {
        });
        for prover_type in prover_types_set {
            match prover_type {
-                ProverType::Chunk => {
+                ProverType::ChunkHalo2 => {
                    handler.chunk_prover = Some(RefCell::new(ChunkProver::from_params_and_assets(
                        params_map, assets_dir,
                    )));
@@ -90,6 +91,7 @@ impl DarwinV2Handler {
                        params_map, assets_dir,
                    )))
                }
+                _ => unreachable!("Darwin supports prover types in [ChunkHalo2 and Batch] only"),
            }
        }
        Ok(handler)
@@ -247,7 +249,7 @@ impl DarwinV2Handler {
impl CircuitsHandler for DarwinV2Handler {
    fn get_vk(&self, task_type: TaskType) -> Option<Vec<u8>> {
        match task_type {
-            TaskType::Chunk => self
+            TaskType::ChunkHalo2 => self
                .chunk_prover
                .as_ref()
                .and_then(|prover| prover.borrow().get_vk()),
@@ -265,7 +267,7 @@ impl CircuitsHandler for DarwinV2Handler {

    fn get_proof_data(&self, task_type: TaskType, task: &crate::types::Task) -> Result<String> {
        match task_type {
-            TaskType::Chunk => self.gen_chunk_proof(task),
+            TaskType::ChunkHalo2 => self.gen_chunk_proof(task),
            TaskType::Batch => self.gen_batch_proof(task),
            TaskType::Bundle => self.gen_bundle_proof(task),
            _ => unreachable!(),
@@ -319,16 +321,16 @@ mod tests {
    #[test]
    fn test_circuits() -> Result<()> {
        let bi_handler = DarwinV2Handler::new_multi(
-            vec![ProverType::Chunk, ProverType::Batch],
+            vec![ProverType::ChunkHalo2, ProverType::Batch],
            &PARAMS_PATH,
            &ASSETS_PATH,
            None,
        )?;

        let chunk_handler = bi_handler;
-        let chunk_vk = chunk_handler.get_vk(TaskType::Chunk).unwrap();
+        let chunk_vk = chunk_handler.get_vk(TaskType::ChunkHalo2).unwrap();

-        check_vk(TaskType::Chunk, chunk_vk, "chunk vk must be available");
+        check_vk(TaskType::ChunkHalo2, chunk_vk, "chunk vk must be available");
        let chunk_dir_paths = get_chunk_dir_paths()?;
        log::info!("chunk_dir_paths, {:?}", chunk_dir_paths);
        let mut chunk_traces = vec![];
@@ -436,9 +438,10 @@ mod tests {
    fn read_vk(proof_type: TaskType) -> Result<String> {
        log::info!("read_vk, {:?}", proof_type);
        let vk_file = match proof_type {
-            TaskType::Chunk => CHUNK_VK_PATH.clone(),
+            TaskType::ChunkHalo2 => CHUNK_VK_PATH.clone(),
            TaskType::Batch => BATCH_VK_PATH.clone(),
            TaskType::Bundle => todo!(),
+            TaskType::ChunkSp1 => todo!(),
            TaskType::Undefined => unreachable!(),
        };
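Note: because `ProverType` now has more variants than the Darwin-era circuits support, the previously exhaustive matches in `DarwinV2Handler` gain a catch-all `unreachable!` arm. A tiny standalone sketch of the same pattern (names local to the example):

```rust
// Standalone sketch: a handler that supports only a subset of prover types
// needs a catch-all arm once the enum grows.
#[derive(Debug, Clone, Copy)]
enum ProverType { ChunkHalo2, ChunkSp1, ChunkAll, Batch }

fn degree_set(prover_type: ProverType) -> &'static str {
    match prover_type {
        ProverType::ChunkHalo2 => "ZKEVM_DEGREES",
        ProverType::Batch => "AGG_DEGREES",
        // SP1-related prover types are not served by the Darwin circuits.
        _ => unreachable!("Darwin supports prover types in [ChunkHalo2 and Batch] only"),
    }
}

fn main() {
    println!("{}", degree_set(ProverType::ChunkHalo2));
}
```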
prover/src/zk_circuits_handler/euclid.rs (new file, 311 lines)
@@ -0,0 +1,311 @@
use std::{cell::RefCell, cmp::Ordering, env, rc::Rc};

use anyhow::{bail, Context, Result};
use once_cell::sync::Lazy;
use prover_euclid::{
    aggregator::Prover as BatchProver,
    check_chunk_hashes,
    common::Prover as CommonProver,
    config::{AGG_DEGREES, ZKEVM_DEGREES},
    zkevm::Prover as Halo2Prover,
    BatchProof, BatchProvingTask, BlockTrace, BundleProof, BundleProvingTask, ChunkInfo,
    ChunkProof, ChunkProvingTask,
};
use serde::Deserialize;

use crate::{
    geth_client::GethClient,
    types::{CommonHash, ProverType, Task, TaskType},
};

use super::{common::*, darwin_v2::ChunkTaskDetail, CircuitsHandler};
// TODO: enable_post_sp1_prover
// use sp1_prover::Sp1Prover;

// Only used for debugging.
static OUTPUT_DIR: Lazy<Option<String>> = Lazy::new(|| env::var("PROVER_OUTPUT_DIR").ok());

#[derive(Debug, Clone, Deserialize)]
struct BatchTaskDetail {
    pub chunk_infos: Vec<ChunkInfo>,
    #[serde(flatten)]
    pub batch_proving_task: BatchProvingTask,
}

type BundleTaskDetail = BundleProvingTask;

struct ChunkProver {
    halo2_prover: Option<RefCell<Halo2Prover<'static>>>,
    // TODO: enable_post_sp1_prover
    // sp1_prover: Option<RefCell<Sp1Prover>>,
}

fn get_block_number(block_trace: &BlockTrace) -> Option<u64> {
    block_trace.header.number.map(|n| n.as_u64())
}

#[derive(Default)]
pub struct EuclidHandler {
    chunk_prover: Option<ChunkProver>,
    batch_prover: Option<RefCell<BatchProver<'static>>>,

    geth_client: Option<Rc<RefCell<GethClient>>>,
}

impl EuclidHandler {
    pub fn new_multi(
        prover_types: Vec<ProverType>,
        params_dir: &str,
        assets_dir: &str,
        geth_client: Option<Rc<RefCell<GethClient>>>,
    ) -> Result<Self> {
        let class_name = std::intrinsics::type_name::<Self>();
        let prover_types_set = prover_types
            .into_iter()
            .collect::<std::collections::HashSet<ProverType>>();
        let mut handler = Self {
            batch_prover: None,
            chunk_prover: None,
            geth_client,
        };
        let degrees: Vec<u32> = get_degrees(&prover_types_set, |prover_type| match prover_type {
            ProverType::ChunkHalo2 | ProverType::ChunkAll => ZKEVM_DEGREES.clone(),
            ProverType::ChunkSp1 => ZKEVM_DEGREES.clone(), // TODO(rohit)
            ProverType::Batch => AGG_DEGREES.clone(),
        });
        let params_map = get_params_map_instance(|| {
            log::info!(
                "calling get_params_map from {}, prover_types: {:?}, degrees: {:?}",
                class_name,
                prover_types_set,
                degrees
            );
            CommonProver::load_params_map(params_dir, &degrees)
        });
        for prover_type in prover_types_set {
            match prover_type {
                ProverType::ChunkHalo2 => {
                    handler.chunk_prover = Some(ChunkProver {
                        halo2_prover: Some(RefCell::new(Halo2Prover::from_params_and_assets(
                            params_map, assets_dir,
                        ))),
                        // TODO: enable_post_sp1_prover
                        // sp1_prover: None,
                    });
                }
                ProverType::ChunkSp1 => {
                    handler.chunk_prover = Some(ChunkProver {
                        halo2_prover: None,
                        // TODO: enable_post_sp1_prover
                        // sp1_prover: Some(RefCell::new(Sp1Prover::new())),
                    });
                }
                ProverType::ChunkAll => {
                    handler.chunk_prover = Some(ChunkProver {
                        halo2_prover: Some(RefCell::new(Halo2Prover::from_params_and_assets(
                            params_map, assets_dir,
                        ))),
                        // TODO: enable_post_sp1_prover
                        // sp1_prover: Some(RefCell::new(Sp1Prover::new())),
                    });
                }
                ProverType::Batch => {
                    handler.batch_prover = Some(RefCell::new(BatchProver::from_params_and_assets(
                        params_map, assets_dir,
                    )))
                }
            }
        }
        Ok(handler)
    }

    pub fn new(
        prover_type: ProverType,
        params_dir: &str,
        assets_dir: &str,
        geth_client: Option<Rc<RefCell<GethClient>>>,
    ) -> Result<Self> {
        Self::new_multi(vec![prover_type], params_dir, assets_dir, geth_client)
    }

    fn gen_halo2_chunk_proof_raw(&self, chunk_trace: Vec<BlockTrace>) -> Result<ChunkProof> {
        if let Some(prover) = self.chunk_prover.as_ref() {
            if let Some(halo2_prover) = prover.halo2_prover.as_ref() {
                let chunk = ChunkProvingTask::new(chunk_trace);

                let chunk_proof = halo2_prover.borrow_mut().gen_halo2_chunk_proof(
                    chunk,
                    None,
                    None,
                    self.get_output_dir(),
                )?;

                return Ok(chunk_proof);
            }
        }
        unreachable!("please check errors in proof_type logic")
    }

    fn gen_halo2_chunk_proof(&self, task: &crate::types::Task) -> Result<String> {
        let chunk_trace = self.gen_chunk_traces(task)?;
        let chunk_proof = self.gen_halo2_chunk_proof_raw(chunk_trace)?;
        Ok(serde_json::to_string(&chunk_proof)?)
    }

    fn gen_sp1_chunk_proof(&self, task: &crate::types::Task) -> Result<String> {
        let _ = self.gen_chunk_traces(task)?;
        todo!()
    }

    fn gen_batch_proof_raw(&self, batch_task_detail: BatchTaskDetail) -> Result<BatchProof> {
        if let Some(prover) = self.batch_prover.as_ref() {
            let chunk_hashes_proofs: Vec<(ChunkInfo, ChunkProof)> = batch_task_detail
                .chunk_infos
                .clone()
                .into_iter()
                .zip(batch_task_detail.batch_proving_task.chunk_proofs.clone())
                .collect();

            let chunk_proofs: Vec<ChunkProof> =
                chunk_hashes_proofs.iter().map(|t| t.1.clone()).collect();

            let is_valid = prover.borrow_mut().check_protocol_of_chunks(&chunk_proofs);

            if !is_valid {
                bail!("non-match chunk protocol")
            }
            check_chunk_hashes("", &chunk_hashes_proofs).context("failed to check chunk info")?;
            let batch_proof = prover.borrow_mut().gen_batch_proof(
                batch_task_detail.batch_proving_task,
                None,
                self.get_output_dir(),
            )?;

            return Ok(batch_proof);
        }
        unreachable!("please check errors in proof_type logic")
    }

    fn gen_batch_proof(&self, task: &crate::types::Task) -> Result<String> {
        log::info!("[circuit] gen_batch_proof for task {}", task.id);

        let batch_task_detail: BatchTaskDetail = serde_json::from_str(&task.task_data)?;
        let batch_proof = self.gen_batch_proof_raw(batch_task_detail)?;
        Ok(serde_json::to_string(&batch_proof)?)
    }

    fn gen_bundle_proof_raw(&self, bundle_task_detail: BundleTaskDetail) -> Result<BundleProof> {
        if let Some(prover) = self.batch_prover.as_ref() {
            let bundle_proof = prover.borrow_mut().gen_bundle_proof(
                bundle_task_detail,
                None,
                self.get_output_dir(),
            )?;

            return Ok(bundle_proof);
        }
        unreachable!("please check errors in proof_type logic")
    }

    fn gen_bundle_proof(&self, task: &crate::types::Task) -> Result<String> {
        log::info!("[circuit] gen_bundle_proof for task {}", task.id);
        let bundle_task_detail: BundleTaskDetail = serde_json::from_str(&task.task_data)?;
        let bundle_proof = self.gen_bundle_proof_raw(bundle_task_detail)?;
        Ok(serde_json::to_string(&bundle_proof)?)
    }

    fn get_output_dir(&self) -> Option<&str> {
        OUTPUT_DIR.as_deref()
    }

    fn gen_chunk_traces(&self, task: &Task) -> Result<Vec<BlockTrace>> {
        let chunk_task_detail: ChunkTaskDetail = serde_json::from_str(&task.task_data)?;
        self.get_sorted_traces_by_hashes(&chunk_task_detail.block_hashes)
    }

    fn get_sorted_traces_by_hashes(&self, block_hashes: &[CommonHash]) -> Result<Vec<BlockTrace>> {
        if block_hashes.is_empty() {
            log::error!("[prover] failed to get sorted traces: block_hashes are empty");
            bail!("block_hashes are empty")
        }

        let mut block_traces = Vec::new();
        for hash in block_hashes.iter() {
            let trace = self
                .geth_client
                .as_ref()
                .unwrap()
                .borrow_mut()
                .get_block_trace_by_hash(hash)?;
            block_traces.push(trace);
        }

        block_traces.sort_by(|a, b| {
            if get_block_number(a).is_none() {
                Ordering::Less
            } else if get_block_number(b).is_none() {
                Ordering::Greater
            } else {
                get_block_number(a)
                    .unwrap()
                    .cmp(&get_block_number(b).unwrap())
            }
        });

        let block_numbers: Vec<u64> = block_traces
            .iter()
            .map(|trace| get_block_number(trace).unwrap_or(0))
            .collect();
        let mut i = 0;
        while i < block_numbers.len() - 1 {
            if block_numbers[i] + 1 != block_numbers[i + 1] {
                log::error!(
                    "[prover] block numbers are not continuous, got {} and {}",
                    block_numbers[i],
                    block_numbers[i + 1]
                );
                bail!(
                    "block numbers are not continuous, got {} and {}",
                    block_numbers[i],
                    block_numbers[i + 1]
                )
            }
            i += 1;
        }

        Ok(block_traces)
    }
}

impl CircuitsHandler for EuclidHandler {
    fn get_vk(&self, task_type: TaskType) -> Option<Vec<u8>> {
        match task_type {
            TaskType::ChunkHalo2 => self.chunk_prover.as_ref().and_then(|prover| {
                prover
                    .halo2_prover
                    .as_ref()
                    .and_then(|p| p.borrow().get_vk())
            }),
            TaskType::ChunkSp1 => unimplemented!(),
            TaskType::Batch => self
                .batch_prover
                .as_ref()
                .and_then(|prover| prover.borrow().get_batch_vk()),
            TaskType::Bundle => self
                .batch_prover
                .as_ref()
                .and_then(|prover| prover.borrow().get_bundle_vk()),
            _ => unreachable!(),
        }
    }

    fn get_proof_data(&self, task_type: TaskType, task: &crate::types::Task) -> Result<String> {
        match task_type {
            TaskType::ChunkHalo2 => self.gen_halo2_chunk_proof(task),
            TaskType::ChunkSp1 => self.gen_sp1_chunk_proof(task),
            TaskType::Batch => self.gen_batch_proof(task),
            TaskType::Bundle => self.gen_bundle_proof(task),
            _ => unreachable!(),
        }
    }
}
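Note: `EuclidHandler` keeps its chunk provers inside a small `ChunkProver` struct with an optional halo2 member and a still-commented-out SP1 member, so `ChunkHalo2`, `ChunkSp1`, and `ChunkAll` differ only in which members get populated. A standalone sketch of that shape (the `Sp1Prover` here is a placeholder; in the diff the real one is still behind a TODO):

```rust
// Standalone sketch of EuclidHandler's optional-prover layout.
struct Halo2Prover; // stand-in for prover_euclid::zkevm::Prover
struct Sp1Prover;   // placeholder; the real SP1 prover is not wired in yet

#[derive(Default)]
struct ChunkProver {
    halo2_prover: Option<Halo2Prover>,
    sp1_prover: Option<Sp1Prover>,
}

#[derive(Clone, Copy)]
enum ProverType { ChunkHalo2, ChunkSp1, ChunkAll }

fn build_chunk_prover(prover_type: ProverType) -> ChunkProver {
    match prover_type {
        ProverType::ChunkHalo2 => ChunkProver { halo2_prover: Some(Halo2Prover), ..Default::default() },
        ProverType::ChunkSp1 => ChunkProver { sp1_prover: Some(Sp1Prover), ..Default::default() },
        ProverType::ChunkAll => ChunkProver {
            halo2_prover: Some(Halo2Prover),
            sp1_prover: Some(Sp1Prover),
        },
    }
}

fn main() {
    let p = build_chunk_prover(ProverType::ChunkAll);
    assert!(p.halo2_prover.is_some() && p.sp1_prover.is_some());
}
```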