Compare commits

...

13 Commits

Author          SHA1        Message                                                          Date
Rohit Narurkar  4a8eb78a2b  (libzkp): refactor batch header variants (#1755)                 2025-11-19 08:34:56 +09:00
Rohit Narurkar  f06483217d  chore: add inline-docs for avoiding confusion (#1754)            2025-11-18 15:58:47 +00:00
Ho              aac3929022  purge config template                                            2025-11-18 21:36:24 +09:00
Ho              853626676d  refactor for pi_hash_by_version                                  2025-11-18 20:06:08 +09:00
Ho              c6f8dc2d7e  update test stuff                                                2025-11-18 19:39:45 +09:00
Ho              36b44e40bc  fix feature settings                                             2025-11-18 19:39:31 +09:00
Ho              f81a7bce1e  bump version                                                     2025-11-18 16:48:05 +09:00
Ho              7e404d0676  linting                                                          2025-11-18 16:42:20 +09:00
Ho              78f6955d90  galileo features                                                 2025-11-18 16:31:14 +09:00
Ho              1877aaba25  better log and batch header parsing                              2025-11-18 12:08:24 +09:00
Ho              64ccae6155  Merge remote-tracking branch 'origin/develop' into feat/galileo  2025-11-18 11:08:03 +09:00
Ho              83ae36b31a  Galileo fork in batch task                                       2025-11-18 11:08:01 +09:00
Ho              75bd07e7c4  update dependencies                                              2025-11-17 22:04:53 +09:00
32 changed files with 1560 additions and 526 deletions

Cargo.lock (generated, 932 changed lines)

File diff suppressed because it is too large

View File

@@ -14,16 +14,16 @@ edition = "2021"
 homepage = "https://scroll.io"
 readme = "README.md"
 repository = "https://github.com/scroll-tech/scroll"
-version = "4.6.3"
+version = "4.7.1"
 [workspace.dependencies]
-scroll-zkvm-prover = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "360f364" }
-scroll-zkvm-verifier = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "360f364" }
-scroll-zkvm-types = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "360f364" }
+scroll-zkvm-prover = { git = "https://github.com/scroll-tech/zkvm-prover", tag = "v0.7.0-rc.4" }
+scroll-zkvm-verifier = { git = "https://github.com/scroll-tech/zkvm-prover", tag = "v0.7.0-rc.4" }
+scroll-zkvm-types = { git = "https://github.com/scroll-tech/zkvm-prover", tag = "v0.7.0-rc.4" }
-sbv-primitives = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "master", features = ["scroll", "rkyv"] }
-sbv-utils = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "master" }
-sbv-core = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "master", features = ["scroll"] }
+sbv-primitives = { git = "https://github.com/scroll-tech/stateless-block-verifier", tag = "scroll-v91", features = ["scroll", "rkyv"] }
+sbv-utils = { git = "https://github.com/scroll-tech/stateless-block-verifier", tag = "scroll-v91" }
+sbv-core = { git = "https://github.com/scroll-tech/stateless-block-verifier", tag = "scroll-v91", features = ["scroll"] }
 metrics = "0.23.0"
 metrics-util = "0.17"
@@ -31,14 +31,14 @@ metrics-tracing-context = "0.16.0"
 anyhow = "1.0"
 alloy = { version = "1", default-features = false }
-alloy-primitives = { version = "1.3", default-features = false, features = ["tiny-keccak"] }
+alloy-primitives = { version = "1.4.1", default-features = false, features = ["tiny-keccak"] }
 # also use this to trigger "serde" feature for primitives
 alloy-serde = { version = "1", default-features = false }
 serde = { version = "1", default-features = false, features = ["derive"] }
 serde_json = { version = "1.0" }
 serde_derive = "1.0"
-serde_with = "3.11.0"
+serde_with = "3"
 itertools = "0.14"
 tiny-keccak = "2.0"
 tracing = "0.1"
@@ -46,21 +46,20 @@ eyre = "0.6"
 once_cell = "1.20"
 base64 = "0.22"
-[patch.crates-io]
-revm = { git = "https://github.com/scroll-tech/revm" }
-revm-bytecode = { git = "https://github.com/scroll-tech/revm" }
-revm-context = { git = "https://github.com/scroll-tech/revm" }
-revm-context-interface = { git = "https://github.com/scroll-tech/revm" }
-revm-database = { git = "https://github.com/scroll-tech/revm" }
-revm-database-interface = { git = "https://github.com/scroll-tech/revm" }
-revm-handler = { git = "https://github.com/scroll-tech/revm" }
-revm-inspector = { git = "https://github.com/scroll-tech/revm" }
-revm-interpreter = { git = "https://github.com/scroll-tech/revm" }
-revm-precompile = { git = "https://github.com/scroll-tech/revm" }
-revm-primitives = { git = "https://github.com/scroll-tech/revm" }
-revm-state = { git = "https://github.com/scroll-tech/revm" }
-alloy-primitives = { git = "https://github.com/scroll-tech/alloy-core", branch = "feat/rkyv" }
+[patch.crates-io]
+revm = { git = "https://github.com/scroll-tech/revm", tag = "scroll-v91" }
+revm-bytecode = { git = "https://github.com/scroll-tech/revm", tag = "scroll-v91" }
+revm-context = { git = "https://github.com/scroll-tech/revm", tag = "scroll-v91" }
+revm-context-interface = { git = "https://github.com/scroll-tech/revm", tag = "scroll-v91" }
+revm-database = { git = "https://github.com/scroll-tech/revm", tag = "scroll-v91" }
+revm-database-interface = { git = "https://github.com/scroll-tech/revm", tag = "scroll-v91" }
+revm-handler = { git = "https://github.com/scroll-tech/revm", tag = "scroll-v91" }
+revm-inspector = { git = "https://github.com/scroll-tech/revm", tag = "scroll-v91" }
+revm-interpreter = { git = "https://github.com/scroll-tech/revm", tag = "scroll-v91" }
+revm-precompile = { git = "https://github.com/scroll-tech/revm", tag = "scroll-v91" }
+revm-primitives = { git = "https://github.com/scroll-tech/revm", tag = "scroll-v91" }
+revm-state = { git = "https://github.com/scroll-tech/revm", tag = "scroll-v91" }
 [profile.maxperf]
 inherits = "release"

View File

@@ -5,7 +5,7 @@ import (
 	"runtime/debug"
 )
-var tag = "v4.6.3"
+var tag = "v4.7.1"
 var commit = func() string {
 	if info, ok := debug.ReadBuildInfo(); ok {

View File

@@ -10,6 +10,7 @@
   "min_prover_version": "v4.4.45",
   "verifiers": [
     {
+      "features": "legacy_witness:openvm_13",
       "assets_path": "assets",
       "fork_name": "feynman"
     },

View File

@@ -69,12 +69,12 @@ type AssetConfig struct {
 	ForkName string `json:"fork_name"`
 	Vkfile string `json:"vk_file,omitempty"`
 	MinProverVersion string `json:"min_prover_version,omitempty"`
+	Features string `json:"features,omitempty"`
 }
 // VerifierConfig load zk verifier config.
 type VerifierConfig struct {
 	MinProverVersion string `json:"min_prover_version"`
-	Features string `json:"features,omitempty"`
 	Verifiers []AssetConfig `json:"verifiers"`
 }

View File

@@ -141,13 +141,6 @@ func DumpVk(forkName, filePath string) error {
 	return nil
 }
-// Set dynamic feature flags that control libzkp runtime behavior
-func SetDynamicFeature(feats string) {
-	cFeats := goToCString(feats)
-	defer freeCString(cFeats)
-	C.set_dynamic_feature(cFeats)
-}
 // UnivTaskCompatibilityFix calls the universal task compatibility fix function
 func UniversalTaskCompatibilityFix(taskJSON string) (string, error) {
 	cTaskJSON := goToCString(taskJSON)

View File

@@ -313,7 +313,7 @@ func (bp *BatchProverTask) getBatchTaskDetail(dbBatch *orm.Batch, chunkProofs []
 	switch dbBatchCodecVersion {
 	case encoding.CodecV3, encoding.CodecV4, encoding.CodecV6, encoding.CodecV7, encoding.CodecV8, encoding.CodecV9:
 	default:
-		return taskDetail, nil
+		return nil, fmt.Errorf("Unsupported codec version <%d>", dbBatchCodecVersion)
 	}
 	codec, err := encoding.CodecFromVersion(encoding.CodecVersion(dbBatch.CodecVersion))
@@ -325,6 +325,7 @@ func (bp *BatchProverTask) getBatchTaskDetail(dbBatch *orm.Batch, chunkProofs []
 	if decodeErr != nil {
 		return nil, fmt.Errorf("failed to decode batch header version %d: %w", dbBatch.CodecVersion, decodeErr)
 	}
+	log.Info("Decode batchheader bytes to canonical header", "version", batchHeader.Version())
 	taskDetail.BatchHeader = batchHeader
 	taskDetail.ChallengeDigest = common.HexToHash(dbBatch.ChallengeDigest)
@@ -335,7 +336,7 @@ func (bp *BatchProverTask) getBatchTaskDetail(dbBatch *orm.Batch, chunkProofs []
 		taskDetail.KzgProof = &message.Byte48{Big: hexutil.Big(*new(big.Int).SetBytes(dbBatch.BlobDataProof[112:160]))}
 		taskDetail.KzgCommitment = &message.Byte48{Big: hexutil.Big(*new(big.Int).SetBytes(dbBatch.BlobDataProof[64:112]))}
 	} else {
-		log.Debug("Apply validium mode for batch proving task")
+		log.Info("Apply validium mode for batch proving task")
 		codec := cutils.FromVersion(version)
 		batchHeader, decodeErr := codec.DABatchForTaskFromBytes(dbBatch.BatchHeader)
 		if decodeErr != nil {

View File

@@ -29,6 +29,7 @@ type rustCircuitConfig struct {
 	Version uint `json:"version"`
 	ForkName string `json:"fork_name"`
 	AssetsPath string `json:"assets_path"`
+	Features string `json:"features,omitempty"`
 }
 var validiumMode bool
@@ -47,6 +48,7 @@ func newRustCircuitConfig(cfg config.AssetConfig) *rustCircuitConfig {
 		Version: uint(ver),
 		AssetsPath: cfg.AssetsPath,
 		ForkName: cfg.ForkName,
+		Features: cfg.Features,
 	}
 }
@@ -82,9 +84,6 @@ func NewVerifier(cfg *config.VerifierConfig, useValidiumMode bool) (*Verifier, e
 		return nil, err
 	}
-	if cfg.Features != "" {
-		libzkp.SetDynamicFeature(cfg.Features)
-	}
 	libzkp.InitVerifier(string(configBytes))
 	v := &Verifier{

View File

@@ -9,7 +9,7 @@ scroll-zkvm-types = { workspace = true, features = ["scroll"] }
scroll-zkvm-verifier.workspace = true
alloy-primitives.workspace = true #depress the effect of "native-keccak"
sbv-primitives = {workspace = true, features = ["scroll-compress-ratio", "scroll"]}
sbv-primitives = {workspace = true, features = ["scroll-compress-info", "scroll"]}
sbv-core = { workspace = true, features = ["scroll"] }
base64.workspace = true
serde.workspace = true

View File

@@ -1,38 +1,64 @@
 pub mod proofs;
 pub mod tasks;
+pub use tasks::ProvintTaskExt;
 pub mod verifier;
+use verifier::HardForkName;
 pub use verifier::{TaskType, VerifierConfig};
 mod utils;
 use sbv_primitives::B256;
-use scroll_zkvm_types::utils::vec_as_base64;
+use scroll_zkvm_types::{utils::vec_as_base64, version::Version};
 use serde::{Deserialize, Serialize};
 use serde_json::value::RawValue;
-use std::path::Path;
+use std::{collections::HashMap, path::Path, sync::OnceLock};
 use tasks::chunk_interpreter::{ChunkInterpreter, TryFromWithInterpreter};
-/// global features: use legacy encoding for witness
-static mut LEGACY_WITNESS_ENCODING: bool = false;
-pub(crate) fn witness_use_legacy_mode() -> bool {
-    unsafe { LEGACY_WITNESS_ENCODING }
+pub(crate) fn witness_use_legacy_mode(fork_name: &str) -> eyre::Result<bool> {
+    ADDITIONAL_FEATURES
+        .get()
+        .and_then(|features| features.get(fork_name))
+        .map(|cfg| cfg.legacy_witness_encoding)
+        .ok_or_else(|| {
+            eyre::eyre!(
+                "can not found features setting for unrecognized fork {}",
+                fork_name
+            )
+        })
 }
-pub fn set_dynamic_feature(feats: &str) {
-    for feat_s in feats.split(':') {
-        match feat_s.trim().to_lowercase().as_str() {
-            "legacy_witness" => {
-                tracing::info!("set witness encoding for legacy mode");
-                unsafe {
-                    // the function is only called while initialize step
-                    LEGACY_WITNESS_ENCODING = true;
-                }
-            }
-            s => tracing::warn!("unrecognized dynamic feature: {s}"),
-        }
-    }
-}
+#[derive(Debug, Default, Clone)]
+struct FeatureOptions {
+    legacy_witness_encoding: bool,
+    for_openvm_13_prover: bool,
+}
+static ADDITIONAL_FEATURES: OnceLock<HashMap<HardForkName, FeatureOptions>> = OnceLock::new();
+impl FeatureOptions {
+    pub fn new(feats: &str) -> Self {
+        let mut ret: Self = Default::default();
+        for feat_s in feats.split(':') {
+            match feat_s.trim().to_lowercase().as_str() {
+                "legacy_witness" => {
+                    tracing::info!("set witness encoding for legacy mode");
+                    ret.legacy_witness_encoding = true;
+                }
+                "openvm_13" => {
+                    tracing::info!("set prover should use openvm 13");
+                    ret.for_openvm_13_prover = true;
+                }
+                s => tracing::warn!("unrecognized dynamic feature: {s}"),
+            }
+        }
+        ret
+    }
+}
+pub fn set_dynamic_feature(_: &str) {
+    tracing::error!("call deprecated method");
+}
 /// Turn the coordinator's chunk task into a json string for formal chunk proving
 /// task (with full witnesses)
 pub fn checkout_chunk_task(
@@ -112,35 +138,56 @@ pub fn gen_universal_task(
             let mut task = serde_json::from_str::<ChunkProvingTask>(task_json)?;
             // normailze fork name field in task
             task.fork_name = task.fork_name.to_lowercase();
+            let version = Version::from(task.version);
             // always respect the fork_name_str (which has been normalized) being passed
             // if the fork_name wrapped in task is not match, consider it a malformed task
             if fork_name_str != task.fork_name.as_str() {
                 eyre::bail!("fork name in chunk task not match the calling arg, expected {fork_name_str}, get {}", task.fork_name);
             }
+            if fork_name_str != version.fork.as_str() {
+                eyre::bail!(
+                    "given task version, expected fork={fork_name_str}, got={version_fork}",
+                    version_fork = version.fork.as_str()
+                );
+            }
             let (pi_hash, metadata, u_task) =
-                utils::panic_catch(move || gen_universal_chunk_task(task, fork_name_str.into()))
+                utils::panic_catch(move || gen_universal_chunk_task(task))
                     .map_err(|e| eyre::eyre!("caught panic in chunk task{e}"))??;
             (pi_hash, AnyMetaData::Chunk(metadata), u_task)
         }
         x if x == TaskType::Batch as i32 => {
            let mut task = serde_json::from_str::<BatchProvingTask>(task_json)?;
            task.fork_name = task.fork_name.to_lowercase();
+            let version = Version::from(task.version);
            if fork_name_str != task.fork_name.as_str() {
                eyre::bail!("fork name in batch task not match the calling arg, expected {fork_name_str}, get {}", task.fork_name);
            }
+            if fork_name_str != version.fork.as_str() {
+                eyre::bail!(
+                    "given task version, expected fork={fork_name_str}, got={version_fork}",
+                    version_fork = version.fork.as_str()
+                );
+            }
            let (pi_hash, metadata, u_task) =
-                utils::panic_catch(move || gen_universal_batch_task(task, fork_name_str.into()))
+                utils::panic_catch(move || gen_universal_batch_task(task))
                    .map_err(|e| eyre::eyre!("caught panic in chunk task{e}"))??;
            (pi_hash, AnyMetaData::Batch(metadata), u_task)
        }
        x if x == TaskType::Bundle as i32 => {
            let mut task = serde_json::from_str::<BundleProvingTask>(task_json)?;
            task.fork_name = task.fork_name.to_lowercase();
+            let version = Version::from(task.version);
            if fork_name_str != task.fork_name.as_str() {
                eyre::bail!("fork name in bundle task not match the calling arg, expected {fork_name_str}, get {}", task.fork_name);
            }
+            if fork_name_str != version.fork.as_str() {
+                eyre::bail!(
+                    "given task version, expected fork={fork_name_str}, got={version_fork}",
+                    version_fork = version.fork.as_str()
+                );
+            }
            let (pi_hash, metadata, u_task) =
-                utils::panic_catch(move || gen_universal_bundle_task(task, fork_name_str.into()))
+                utils::panic_catch(move || gen_universal_bundle_task(task))
                    .map_err(|e| eyre::eyre!("caught panic in chunk task{e}"))??;
            (pi_hash, AnyMetaData::Bundle(metadata), u_task)
        }
@@ -148,11 +195,26 @@
     };
     u_task.vk = Vec::from(expected_vk);
+    let fork_name = u_task.fork_name.clone();
+    let mut u_task_ext = ProvintTaskExt::new(u_task);
+    // set additional settings from global features
+    if let Some(cfg) = ADDITIONAL_FEATURES
+        .get()
+        .and_then(|features| features.get(&fork_name))
+    {
+        u_task_ext.use_openvm_13 = cfg.for_openvm_13_prover;
+    } else {
+        tracing::warn!(
+            "can not found features setting for unrecognized fork {}",
+            fork_name
+        );
+    }
     Ok((
         pi_hash,
         serde_json::to_string(&metadata)?,
-        serde_json::to_string(&u_task)?,
+        serde_json::to_string(&u_task_ext)?,
     ))
 }
@@ -183,7 +245,26 @@ pub fn gen_wrapped_proof(proof_json: &str, metadata: &str, vk: &[u8]) -> eyre::R
 /// init verifier
 pub fn verifier_init(config: &str) -> eyre::Result<()> {
     let cfg: VerifierConfig = serde_json::from_str(config)?;
+    ADDITIONAL_FEATURES
+        .set(HashMap::from_iter(cfg.circuits.iter().map(|config| {
+            tracing::info!(
+                "start setting features [{:?}] for fork {}",
+                config.features,
+                config.fork_name
+            );
+            (
+                config.fork_name.to_lowercase(),
+                config
+                    .features
+                    .as_ref()
+                    .map(|features| FeatureOptions::new(features.as_str()))
+                    .unwrap_or_default(),
+            )
+        })))
+        .map_err(|c| eyre::eyre!("Fail to init additional features: {c:?}"))?;
     verifier::init(cfg);
     Ok(())
 }
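Note on the feature string: the per-fork "features" value is a colon-separated list (e.g. "legacy_witness:openvm_13"), matched case-insensitively, with unknown entries merely logged. A minimal standalone sketch of that parsing loop, mirroring FeatureOptions::new from the diff above (std only; the real code emits tracing logs where this sketch has comments):

/// Stand-in for the FeatureOptions struct in the diff above.
#[derive(Debug, Default, Clone, PartialEq)]
struct FeatureOptions {
    legacy_witness_encoding: bool,
    for_openvm_13_prover: bool,
}

impl FeatureOptions {
    /// Parse a colon-separated feature string; unrecognized entries are ignored.
    fn new(feats: &str) -> Self {
        let mut ret = Self::default();
        for feat in feats.split(':') {
            match feat.trim().to_lowercase().as_str() {
                "legacy_witness" => ret.legacy_witness_encoding = true,
                "openvm_13" => ret.for_openvm_13_prover = true,
                _ => { /* the real code logs a warning and continues */ }
            }
        }
        ret
    }
}

fn main() {
    let opts = FeatureOptions::new("legacy_witness:openvm_13");
    assert!(opts.legacy_witness_encoding && opts.for_openvm_13_prover);
    // Unknown entries leave the defaults untouched.
    assert_eq!(FeatureOptions::new("no_such_feature"), FeatureOptions::default());
}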

View File

@@ -140,8 +140,6 @@ impl ProofMetadata for ChunkProofMetadata {
 pub struct BatchProofMetadata {
     /// The batch information describing the list of chunks.
     pub batch_info: BatchInfo,
-    /// The [`scroll_zkvm_types::batch::BatchHeader`]'s digest.
-    pub batch_hash: B256,
 }
 impl ProofMetadata for BatchProofMetadata {

View File

@@ -14,7 +14,7 @@ use crate::{
     utils::panic_catch,
 };
 use sbv_primitives::B256;
-use scroll_zkvm_types::public_inputs::{ForkName, MultiVersionPublicInputs, Version};
+use scroll_zkvm_types::public_inputs::{MultiVersionPublicInputs, Version};
 fn encode_task_to_witness<T: serde::Serialize>(task: &T) -> eyre::Result<Vec<u8>> {
     let config = bincode::config::standard();
@@ -35,17 +35,38 @@ fn check_aggregation_proofs<Metadata: MultiVersionPublicInputs>(
     Ok(())
 }
+#[derive(serde::Deserialize, serde::Serialize)]
+pub struct ProvintTaskExt {
+    #[serde(flatten)]
+    task: ProvingTask,
+    #[serde(default)]
+    pub use_openvm_13: bool,
+}
+impl From<ProvintTaskExt> for ProvingTask {
+    fn from(wrap_t: ProvintTaskExt) -> Self {
+        wrap_t.task
+    }
+}
+impl ProvintTaskExt {
+    pub fn new(task: ProvingTask) -> Self {
+        Self {
+            task,
+            use_openvm_13: false,
+        }
+    }
+}
 /// Generate required staff for chunk proving
 pub fn gen_universal_chunk_task(
     task: ChunkProvingTask,
-    fork_name: ForkName,
 ) -> eyre::Result<(B256, ChunkProofMetadata, ProvingTask)> {
     let chunk_total_gas = task.stats().total_gas_used;
-    let chunk_info = task.precheck_and_build_metadata()?;
+    let (chunk_info, pi_hash) = task.precheck_and_build_metadata()?;
     let proving_task = task.try_into()?;
-    let expected_pi_hash = chunk_info.pi_hash_by_fork(fork_name);
     Ok((
-        expected_pi_hash,
+        pi_hash,
         ChunkProofMetadata {
             chunk_info,
             chunk_total_gas,
@@ -57,18 +78,12 @@ pub fn gen_universal_chunk_task(
 /// Generate required staff for batch proving
 pub fn gen_universal_batch_task(
     task: BatchProvingTask,
-    fork_name: ForkName,
 ) -> eyre::Result<(B256, BatchProofMetadata, ProvingTask)> {
-    let batch_info = task.precheck_and_build_metadata()?;
+    let (batch_info, batch_pi_hash) = task.precheck_and_build_metadata()?;
     let proving_task = task.try_into()?;
-    let expected_pi_hash = batch_info.pi_hash_by_fork(fork_name);
     Ok((
-        expected_pi_hash,
-        BatchProofMetadata {
-            batch_info,
-            batch_hash: expected_pi_hash,
-        },
+        batch_pi_hash,
+        BatchProofMetadata { batch_info },
         proving_task,
     ))
 }
@@ -76,17 +91,14 @@ pub fn gen_universal_batch_task(
 /// Generate required staff for bundle proving
 pub fn gen_universal_bundle_task(
     task: BundleProvingTask,
-    fork_name: ForkName,
 ) -> eyre::Result<(B256, BundleProofMetadata, ProvingTask)> {
-    let bundle_info = task.precheck_and_build_metadata()?;
+    let (bundle_info, bundle_pi_hash) = task.precheck_and_build_metadata()?;
     let proving_task = task.try_into()?;
-    let expected_pi_hash = bundle_info.pi_hash_by_fork(fork_name);
     Ok((
-        expected_pi_hash,
+        bundle_pi_hash,
         BundleProofMetadata {
             bundle_info,
-            bundle_pi_hash: expected_pi_hash,
+            bundle_pi_hash,
         },
         proving_task,
     ))
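Note on the task envelope: ProvintTaskExt relies on #[serde(flatten)] plus #[serde(default)], so the inner task keeps its JSON layout, the new flag is just one extra top-level key, and an older payload without use_openvm_13 still deserializes. A minimal sketch of that behaviour with a stand-in task type (assumes serde with the derive feature and serde_json; the real inner type is ProvingTask):

use serde::{Deserialize, Serialize};

/// Stand-in for ProvingTask; only the JSON shape matters here.
#[derive(Serialize, Deserialize)]
struct Task {
    vk: Vec<u8>,
    fork_name: String,
}

#[derive(Serialize, Deserialize)]
struct TaskExt {
    #[serde(flatten)]
    task: Task,
    #[serde(default)]
    use_openvm_13: bool,
}

fn main() {
    // A legacy payload without the new flag still parses; the flag defaults to false.
    let legacy = r#"{"vk":[1,2],"fork_name":"feynman"}"#;
    let ext: TaskExt = serde_json::from_str(legacy).unwrap();
    assert!(!ext.use_openvm_13);

    // Serialization keeps the flattened fields at the top level and appends the flag.
    let json = serde_json::to_string(&ext).unwrap();
    assert!(json.contains("\"fork_name\":\"feynman\"") && json.contains("\"use_openvm_13\":false"));
}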

View File

@@ -3,15 +3,15 @@ use eyre::Result;
 use sbv_primitives::{B256, U256};
 use scroll_zkvm_types::{
     batch::{
-        build_point_eval_witness, BatchHeader, BatchHeaderV6, BatchHeaderV7, BatchHeaderV8,
-        BatchHeaderValidium, BatchInfo, BatchWitness, Envelope, EnvelopeV6, EnvelopeV7, EnvelopeV8,
-        LegacyBatchWitness, ReferenceHeader, N_BLOB_BYTES,
+        build_point_eval_witness, BatchHeader, BatchHeaderV6, BatchHeaderV7, BatchHeaderValidium,
+        BatchInfo, BatchWitness, Envelope, EnvelopeV6, EnvelopeV7, LegacyBatchWitness,
+        ReferenceHeader, N_BLOB_BYTES,
     },
     chunk::ChunkInfo,
-    public_inputs::{ForkName, Version},
+    public_inputs::{ForkName, MultiVersionPublicInputs, Version},
     task::ProvingTask,
     utils::{to_rkyv_bytes, RancorError},
-    version::{Domain, STFVersion},
+    version::{Codec, Domain, STFVersion},
 };
 use crate::proofs::ChunkProof;
@@ -26,23 +26,32 @@ pub struct BatchHeaderValidiumWithHash {
     batch_hash: B256,
 }
-/// Define variable batch header type, since BatchHeaderV6 can not
-/// be decoded as V7 we can always has correct deserialization
-/// Notice: V6 header MUST be put above V7 since untagged enum
-/// try to decode each defination in order
+/// Parse header types passed from golang side and adapt to the
+/// defination in zkvm-prover's types
+/// We distinguish the header type in golang side according to the codec
+/// version, i.e. v6 - v9 (current), and validium
+/// And adapt it to different header version used in zkvm-prover's witness
+/// defination, i.e. v6- v8 (current), and validium
 #[derive(Clone, serde::Deserialize, serde::Serialize)]
 #[serde(untagged)]
+#[allow(non_camel_case_types)]
 pub enum BatchHeaderV {
+    /// Header for validium mode.
     Validium(BatchHeaderValidiumWithHash),
+    /// Header for scroll's STF version v6.
     V6(BatchHeaderV6),
-    V7_8(BatchHeaderV7),
+    /// Header for scroll's STF versions v7, v8, v9.
+    ///
+    /// Since the codec essentially is unchanged for the above STF versions, we do not define new
+    /// variants, instead re-using the [`BatchHeaderV7`] variant.
+    V7_V8_V9(BatchHeaderV7),
 }
 impl core::fmt::Display for BatchHeaderV {
     fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
         match self {
             BatchHeaderV::V6(_) => write!(f, "V6"),
-            BatchHeaderV::V7_8(_) => write!(f, "V7_8"),
+            BatchHeaderV::V7_V8_V9(_) => write!(f, "V7_V8_V9"),
             BatchHeaderV::Validium(_) => write!(f, "Validium"),
         }
     }
@@ -52,7 +61,7 @@ impl BatchHeaderV {
     pub fn batch_hash(&self) -> B256 {
         match self {
             BatchHeaderV::V6(h) => h.batch_hash(),
-            BatchHeaderV::V7_8(h) => h.batch_hash(),
+            BatchHeaderV::V7_V8_V9(h) => h.batch_hash(),
             BatchHeaderV::Validium(h) => h.header.batch_hash(),
         }
     }
@@ -64,17 +73,10 @@ impl BatchHeaderV {
         }
     }
-    pub fn must_v7_header(&self) -> &BatchHeaderV7 {
+    pub fn must_v7_v8_v9_header(&self) -> &BatchHeaderV7 {
         match self {
-            BatchHeaderV::V7_8(h) => h,
-            _ => unreachable!("A header of {} is considered to be v7", self),
-        }
-    }
-    pub fn must_v8_header(&self) -> &BatchHeaderV8 {
-        match self {
-            BatchHeaderV::V7_8(h) => h,
-            _ => unreachable!("A header of {} is considered to be v8", self),
+            BatchHeaderV::V7_V8_V9(h) => h,
+            _ => unreachable!("A header of {} is considered to be in [v7, v8, v9]", self),
         }
     }
@@ -114,8 +116,8 @@ impl TryFrom<BatchProvingTask> for ProvingTask {
     type Error = eyre::Error;
     fn try_from(value: BatchProvingTask) -> Result<Self> {
-        let witness = value.build_guest_input();
-        let serialized_witness = if crate::witness_use_legacy_mode() {
+        let witness = value.build_guest_input(value.version.into());
+        let serialized_witness = if crate::witness_use_legacy_mode(&value.fork_name)? {
             let legacy_witness = LegacyBatchWitness::from(witness);
             to_rkyv_bytes::<RancorError>(&legacy_witness)?.into_vec()
         } else {
@@ -137,8 +139,30 @@
 }
 impl BatchProvingTask {
-    fn build_guest_input(&self) -> BatchWitness {
-        let version = Version::from(self.version);
+    fn build_guest_input(&self, version: Version) -> BatchWitness {
+        tracing::info!(
+            "Handling batch task for input, version byte {}, Version data: {:?}",
+            self.version,
+            version
+        );
+        // sanity check for if result of header type parsing match to version
+        match &self.batch_header {
+            BatchHeaderV::Validium(_) => assert!(
+                version.is_validium(),
+                "version {:?} is not match with parsed header, get validium header but version is not validium", version,
+            ),
+            BatchHeaderV::V6(_) => assert_eq!(version.fork, ForkName::EuclidV1,
+                "hardfork mismatch for da-codec@v6 header: found={:?}, expected={:?}",
+                version.fork,
+                ForkName::EuclidV1,
+            ),
+            BatchHeaderV::V7_V8_V9(_) => assert!(
+                matches!(version.fork, ForkName::EuclidV2 | ForkName::Feynman | ForkName::Galileo),
+                "hardfork mismatch for da-codec@v7/8/9 header: found={}, expected={:?}",
+                version.fork,
+                [ForkName::EuclidV2, ForkName::Feynman, ForkName::Galileo],
+            ),
+        }
         let point_eval_witness = if !version.is_validium() {
             // sanity check: calculate point eval needed and compare with task input
@@ -146,44 +170,21 @@
             let blob = point_eval::to_blob(&self.blob_bytes);
             let commitment = point_eval::blob_to_kzg_commitment(&blob);
             let versioned_hash = point_eval::get_versioned_hash(&commitment);
-            let challenge_digest = match &self.batch_header {
-                BatchHeaderV::V6(_) => {
-                    assert_eq!(
-                        version.fork,
-                        ForkName::EuclidV1,
-                        "hardfork mismatch for da-codec@v6 header: found={:?}, expected={:?}",
-                        version.fork,
-                        ForkName::EuclidV1,
-                    );
-                    EnvelopeV6::from_slice(self.blob_bytes.as_slice())
-                        .challenge_digest(versioned_hash)
-                }
-                BatchHeaderV::V7_8(_) => {
-                    let padded_blob_bytes = {
-                        let mut padded_blob_bytes = self.blob_bytes.to_vec();
-                        padded_blob_bytes.resize(N_BLOB_BYTES, 0);
-                        padded_blob_bytes
-                    };
-                    match version.fork {
-                        ForkName::EuclidV2 => {
-                            <EnvelopeV7 as Envelope>::from_slice(padded_blob_bytes.as_slice())
-                                .challenge_digest(versioned_hash)
-                        }
-                        ForkName::Feynman => {
-                            <EnvelopeV8 as Envelope>::from_slice(padded_blob_bytes.as_slice())
-                                .challenge_digest(versioned_hash)
-                        }
-                        fork_name => unreachable!(
-                            "hardfork mismatch for da-codec@v7 header: found={}, expected={:?}",
-                            fork_name,
-                            [ForkName::EuclidV2, ForkName::Feynman],
-                        ),
-                    }
-                }
-                BatchHeaderV::Validium(_) => unreachable!("version!=validium"),
+            let padded_blob_bytes = {
+                let mut padded_blob_bytes = self.blob_bytes.to_vec();
+                padded_blob_bytes.resize(N_BLOB_BYTES, 0);
+                padded_blob_bytes
+            };
+            let challenge_digest = match version.codec {
+                Codec::V6 => {
+                    // notice v6 do not use padded blob bytes
+                    <EnvelopeV6 as Envelope>::from_slice(self.blob_bytes.as_slice())
+                        .challenge_digest(versioned_hash)
+                }
+                Codec::V7 => <EnvelopeV7 as Envelope>::from_slice(padded_blob_bytes.as_slice())
+                    .challenge_digest(versioned_hash),
             };
             let (proof, _) = point_eval::get_kzg_proof(&blob, challenge_digest);
             (commitment.to_bytes(), proof.to_bytes(), challenge_digest)
@@ -231,11 +232,18 @@
             (Domain::Scroll, STFVersion::V6) => {
                 ReferenceHeader::V6(*self.batch_header.must_v6_header())
             }
-            (Domain::Scroll, STFVersion::V7) => {
-                ReferenceHeader::V7(*self.batch_header.must_v7_header())
-            }
-            (Domain::Scroll, STFVersion::V8) => {
-                ReferenceHeader::V8(*self.batch_header.must_v8_header())
+            // The da-codec for STF versions v7, v8, v9 is identical. In zkvm-prover we do not
+            // create additional variants to indicate the identical behaviour of codec. Instead we
+            // add a separate variant for the STF version.
+            //
+            // We handle the different STF versions here however build the same batch header since
+            // that type does not change. The batch header's version byte constructed in the
+            // coordinator actually defines the STF version (v7, v8 or v9) and we can derive the
+            // hard-fork (feynman or galileo) and the codec from the version byte.
+            //
+            // Refer [`scroll_zkvm_types::public_inputs::Version`].
+            (Domain::Scroll, STFVersion::V7 | STFVersion::V8 | STFVersion::V9) => {
+                ReferenceHeader::V7_V8_V9(*self.batch_header.must_v7_v8_v9_header())
             }
             (Domain::Validium, STFVersion::V1) => {
                 ReferenceHeader::Validium(*self.batch_header.must_validium_header())
@@ -273,18 +281,20 @@
         }
     }
-    pub fn precheck_and_build_metadata(&self) -> Result<BatchInfo> {
+    pub fn precheck_and_build_metadata(&self) -> Result<(BatchInfo, B256)> {
         // for every aggregation task, there are two steps needed to build the metadata:
         // 1. generate data for metadata from the witness
         // 2. validate every adjacent proof pair
-        let witness = self.build_guest_input();
+        let version = Version::from(self.version);
+        let witness = self.build_guest_input(version);
         let metadata = BatchInfo::from(&witness);
         super::check_aggregation_proofs(
             witness.chunk_infos.as_slice(),
             Version::from(self.version),
         )?;
-        Ok(metadata)
+        let pi_hash = metadata.pi_hash_by_version(version);
+        Ok((metadata, pi_hash))
     }
 }
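Note on variant order: the comment removed above spelled out why BatchHeaderV keeps V6 ahead of the v7-family variant: with #[serde(untagged)], serde tries each variant in declaration order and keeps the first that deserializes. A toy illustration of that order dependence (field names are simplified stand-ins, not the real header layout; assumes serde and serde_json):

use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct HeaderV6 {
    batch_index: u64,
    l1_message_popped: u64, // extra field that the v7-style layout lacks in this toy model
}

#[derive(Deserialize, Debug)]
struct HeaderV7 {
    batch_index: u64,
}

#[derive(Deserialize, Debug)]
#[serde(untagged)]
enum Header {
    // The stricter shape must come first, or the looser one would always win.
    V6(HeaderV6),
    V7(HeaderV7),
}

fn main() {
    let v6: Header = serde_json::from_str(r#"{"batch_index":1,"l1_message_popped":3}"#).unwrap();
    assert!(matches!(v6, Header::V6(_)));
    // This payload lacks l1_message_popped, so the V6 attempt fails and V7 matches.
    let v7: Header = serde_json::from_str(r#"{"batch_index":2}"#).unwrap();
    assert!(matches!(v7, Header::V7(_)));
}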

View File

@@ -1,7 +1,8 @@
 use eyre::Result;
+use sbv_primitives::B256;
 use scroll_zkvm_types::{
     bundle::{BundleInfo, BundleWitness, LegacyBundleWitness},
-    public_inputs::Version,
+    public_inputs::{MultiVersionPublicInputs, Version},
     task::ProvingTask,
     utils::{to_rkyv_bytes, RancorError},
 };
@@ -32,19 +33,20 @@ impl BundleProvingTask {
                 .first()
                 .expect(BUNDLE_SANITY_MSG)
                 .metadata
+                .batch_info
                 .batch_hash,
             self.batch_proofs
                 .last()
                 .expect(BUNDLE_SANITY_MSG)
                 .metadata
+                .batch_info
                 .batch_hash,
         );
         format!("{first}-{last}")
     }
-    fn build_guest_input(&self) -> BundleWitness {
-        let version = Version::from(self.version);
+    fn build_guest_input(&self, version: Version) -> BundleWitness {
         BundleWitness {
             version: version.as_version_byte(),
             batch_proofs: self.batch_proofs.iter().map(|proof| proof.into()).collect(),
@@ -57,18 +59,20 @@
         }
     }
-    pub fn precheck_and_build_metadata(&self) -> Result<BundleInfo> {
+    pub fn precheck_and_build_metadata(&self) -> Result<(BundleInfo, B256)> {
         // for every aggregation task, there are two steps needed to build the metadata:
         // 1. generate data for metadata from the witness
         // 2. validate every adjacent proof pair
-        let witness = self.build_guest_input();
+        let version = Version::from(self.version);
+        let witness = self.build_guest_input(version);
         let metadata = BundleInfo::from(&witness);
         super::check_aggregation_proofs(
             witness.batch_infos.as_slice(),
             Version::from(self.version),
         )?;
-        Ok(metadata)
+        let pi_hash = metadata.pi_hash_by_version(version);
+        Ok((metadata, pi_hash))
     }
 }
@@ -76,8 +80,8 @@ impl TryFrom<BundleProvingTask> for ProvingTask {
     type Error = eyre::Error;
     fn try_from(value: BundleProvingTask) -> Result<Self> {
-        let witness = value.build_guest_input();
-        let serialized_witness = if crate::witness_use_legacy_mode() {
+        let witness = value.build_guest_input(value.version.into());
+        let serialized_witness = if crate::witness_use_legacy_mode(&value.fork_name)? {
             let legacy = LegacyBundleWitness::from(witness);
             to_rkyv_bytes::<RancorError>(&legacy)?.into_vec()
         } else {
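The same refactor repeats across chunk, batch and bundle tasks: the ForkName argument disappears, the Version is derived once from the task's own version byte, and precheck_and_build_metadata hands back the metadata together with a pi_hash computed through pi_hash_by_version. A schematic sketch of the shape of that change (every type and the hash body here are placeholders, not the zkvm-prover API):

#[derive(Clone, Copy)]
struct Version(u8); // placeholder; the real Version also carries fork and codec

struct Info; // placeholder for ChunkInfo / BatchInfo / BundleInfo

impl Info {
    // placeholder for MultiVersionPublicInputs::pi_hash_by_version
    fn pi_hash_by_version(&self, version: Version) -> [u8; 32] {
        let mut h = [0u8; 32];
        h[0] = version.0;
        h
    }
}

// The hash is now derived next to the metadata from the task's version byte,
// instead of being recomputed by the caller from a separately passed fork name.
fn precheck_and_build_metadata(version_byte: u8) -> (Info, [u8; 32]) {
    let version = Version(version_byte);
    let metadata = Info;
    let pi_hash = metadata.pi_hash_by_version(version);
    (metadata, pi_hash)
}

fn main() {
    let (_info, pi_hash) = precheck_and_build_metadata(9);
    assert_eq!(pi_hash[0], 9);
}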

View File

@@ -3,9 +3,9 @@ use sbv_core::BlockWitness;
 use sbv_primitives::{types::consensus::BlockHeader, B256};
 use scroll_zkvm_types::{
     chunk::{execute, ChunkInfo, ChunkWitness, LegacyChunkWitness, ValidiumInputs},
+    public_inputs::{MultiVersionPublicInputs, Version},
     task::ProvingTask,
     utils::{to_rkyv_bytes, RancorError},
-    version::Version,
 };
 use super::chunk_interpreter::*;
@@ -98,8 +98,8 @@ impl TryFrom<ChunkProvingTask> for ProvingTask {
     type Error = eyre::Error;
     fn try_from(value: ChunkProvingTask) -> Result<Self> {
-        let witness = value.build_guest_input();
-        let serialized_witness = if crate::witness_use_legacy_mode() {
+        let witness = value.build_guest_input(value.version.into());
+        let serialized_witness = if crate::witness_use_legacy_mode(&value.fork_name)? {
             let legacy_witness = LegacyChunkWitness::from(witness);
             to_rkyv_bytes::<RancorError>(&legacy_witness)?.into_vec()
         } else {
@@ -156,9 +156,7 @@
         format!("{first}-{last}")
     }
-    fn build_guest_input(&self) -> ChunkWitness {
-        let version = Version::from(self.version);
+    fn build_guest_input(&self, version: Version) -> ChunkWitness {
         if version.is_validium() {
             assert!(self.validium_inputs.is_some());
             ChunkWitness::new(
@@ -182,11 +180,13 @@
         self.block_witnesses[0].states.push(node);
     }
-    pub fn precheck_and_build_metadata(&self) -> Result<ChunkInfo> {
-        let witness = self.build_guest_input();
+    pub fn precheck_and_build_metadata(&self) -> Result<(ChunkInfo, B256)> {
+        let version = Version::from(self.version);
+        let witness = self.build_guest_input(version);
         let ret = ChunkInfo::try_from(witness).map_err(|e| eyre::eyre!("{e}"))?;
         assert_eq!(ret.post_msg_queue_hash, self.post_msg_queue_hash);
-        Ok(ret)
+        let pi_hash = ret.pi_hash_by_version(version);
+        Ok((ret, pi_hash))
     }
     /// this method check the validate of current task (there may be missing storage node)
@@ -214,7 +214,7 @@
         let err_parse_re = regex::Regex::new(pattern)?;
         let mut attempts = 0;
         loop {
-            let witness = self.build_guest_input();
+            let witness = self.build_guest_input(Version::euclid_v2());
             match execute(witness) {
                 Ok(_) => return Ok(()),

View File

@@ -44,6 +44,8 @@ pub struct CircuitConfig {
     pub version: u8,
     pub fork_name: String,
     pub assets_path: String,
+    #[serde(default)]
+    pub features: Option<String>,
 }
 #[derive(Debug, Serialize, Deserialize)]
@@ -51,7 +53,7 @@ pub struct VerifierConfig {
     pub circuits: Vec<CircuitConfig>,
 }
-type HardForkName = String;
+pub(crate) type HardForkName = String;
 type VerifierType = Arc<Mutex<dyn ProofVerifier + Send>>;
 static VERIFIERS: OnceLock<HashMap<HardForkName, VerifierType>> = OnceLock::new();
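On the Rust side the optional per-fork feature string rides along in CircuitConfig and is keyed by lowercased fork name, which is how verifier_init fills ADDITIONAL_FEATURES. A minimal sketch of that config flow (struct and field names follow the diff, other fields are omitted, and the real code converts each string into FeatureOptions instead of keeping it raw; assumes serde and serde_json):

use serde::Deserialize;
use std::collections::HashMap;

#[derive(Debug, Deserialize)]
struct CircuitConfig {
    fork_name: String,
    #[serde(default)]
    features: Option<String>,
}

#[derive(Debug, Deserialize)]
struct VerifierConfig {
    circuits: Vec<CircuitConfig>,
}

fn main() {
    let raw = r#"{"circuits":[
        {"fork_name":"Feynman","features":"legacy_witness:openvm_13"},
        {"fork_name":"galileo"}
    ]}"#;
    let cfg: VerifierConfig = serde_json::from_str(raw).unwrap();

    // Key by lowercased fork name, as verifier_init does for ADDITIONAL_FEATURES.
    let features: HashMap<String, Option<String>> = cfg
        .circuits
        .into_iter()
        .map(|c| (c.fork_name.to_lowercase(), c.features))
        .collect();

    assert_eq!(features["feynman"].as_deref(), Some("legacy_witness:openvm_13"));
    assert!(features["galileo"].is_none());
}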

View File

@@ -8,6 +8,7 @@ edition.workspace = true
 [dependencies]
 scroll-zkvm-types.workspace = true
 scroll-zkvm-prover.workspace = true
+libzkp = { path = "../libzkp"}
 scroll-proving-sdk = { git = "https://github.com/scroll-tech/scroll-proving-sdk.git", rev = "05648db" }
 serde.workspace = true
 serde_json.workspace = true

View File

@@ -272,7 +272,7 @@ impl LocalProver {
         let duration = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();
         let created_at = duration.as_secs() as f64 + duration.subsec_nanos() as f64 * 1e-9;
-        let prover_task = UniversalHandler::get_task_from_input(&req.input)?;
+        let (prover_task, task_cfg) = UniversalHandler::get_task_from_input(&req.input)?;
         let vk = hex::encode(&prover_task.vk);
         let handler = if let Some(handler) = self.handlers.get(&vk) {
             handler.clone()
@@ -298,10 +298,8 @@ impl LocalProver {
                 .location_data
                 .get_asset(&vk, &url_base, &base_config.workspace_path)
                 .await?;
-            let circuits_handler = Arc::new(Mutex::new(UniversalHandler::new(
-                &asset_path,
-                req.proof_type,
-            )?));
+            let circuits_handler =
+                Arc::new(Mutex::new(UniversalHandler::new(&asset_path, &task_cfg)?));
             self.handlers.insert(vk, circuits_handler.clone());
             circuits_handler
         };

View File

@@ -3,7 +3,7 @@ use std::path::Path;
 use super::CircuitsHandler;
 use async_trait::async_trait;
 use eyre::Result;
-use scroll_proving_sdk::prover::ProofType;
+use libzkp::ProvintTaskExt;
 use scroll_zkvm_prover::{Prover, ProverConfig};
 use scroll_zkvm_types::ProvingTask;
 use tokio::sync::Mutex;
@@ -11,12 +11,18 @@ pub struct UniversalHandler {
     prover: Prover,
 }
+// additional config dispatched with proving task
+#[derive(Debug, Default)]
+pub(crate) struct TaskConfig {
+    pub is_openvm_v13: bool,
+}
 /// Safe for current usage as `CircuitsHandler` trait (protected inside of Mutex and NEVER extract
 /// the instance out by `into_inner`)
 unsafe impl Send for UniversalHandler {}
 impl UniversalHandler {
-    pub fn new(workspace_path: impl AsRef<Path>, _proof_type: ProofType) -> Result<Self> {
+    pub fn new(workspace_path: impl AsRef<Path>, cfg: &TaskConfig) -> Result<Self> {
         let path_app_exe = workspace_path.as_ref().join("app.vmexe");
         let path_app_config = workspace_path.as_ref().join("openvm.toml");
         let segment_len = Some((1 << 22) - 100);
@@ -24,6 +30,7 @@ impl UniversalHandler {
             path_app_config,
             path_app_exe,
             segment_len,
+            is_openvm_v13: cfg.is_openvm_v13,
         };
         let prover = Prover::setup(config, None)?;
@@ -36,8 +43,13 @@ impl UniversalHandler {
         &mut self.prover
     }
-    pub fn get_task_from_input(input: &str) -> Result<ProvingTask> {
-        Ok(serde_json::from_str(input)?)
+    pub fn get_task_from_input(input: &str) -> Result<(ProvingTask, TaskConfig)> {
+        let task_ext: ProvintTaskExt = serde_json::from_str(input)?;
+        let cfg = TaskConfig {
+            is_openvm_v13: task_ext.use_openvm_13,
+        };
+        Ok((task_ext.into(), cfg))
     }
 }

View File

@@ -9,7 +9,7 @@ ifndef END_BLOCK
 $(error END_BLOCK is not set. Define it in .make.env or pass END_BLOCK=<end_block>)
 endif
-all: setup_db test_tool import_data
+all: setup_db test_tool prepare_db import_data
 clean:
 	docker compose down
@@ -42,6 +42,9 @@ setup_db: clean
 	fi; \
 	done
 	${GOOSE_CMD} up
+prepare_db:
+	GOOSE_MIGRATION_DIR=conf ${GOOSE_CMD} down
+	GOOSE_MIGRATION_DIR=conf ${GOOSE_CMD} up-to 100
 test_tool:
@@ -52,9 +55,17 @@ build/bin/e2e_tool: test_tool
 import_data_euclid: build/bin/e2e_tool check_vars
 	build/bin/e2e_tool --config conf/config.json --codec 7 ${BEGIN_BLOCK} ${END_BLOCK}
-import_data: build/bin/e2e_tool check_vars
+import_data_feynman: build/bin/e2e_tool check_vars
 	build/bin/e2e_tool --config conf/config.json --codec 8 ${BEGIN_BLOCK} ${END_BLOCK}
+import_data_galileo: build/bin/e2e_tool check_vars
+	build/bin/e2e_tool --config conf/config.json --codec 9 ${BEGIN_BLOCK} ${END_BLOCK}
+import_data: build/bin/e2e_tool check_vars
+	build/bin/e2e_tool --config conf/config.json --codec ${CODEC_VERSION} ${BEGIN_BLOCK} ${END_BLOCK}
+reimport_data: prepare_db import_data
 coordinator_setup:
 	$(MAKE) -C ../../coordinator localsetup
 	cp -f conf/genesis.json ../../coordinator/build/bin/conf

View File

@@ -1,2 +1,3 @@
 BEGIN_BLOCK?=35
-END_BLOCK?=49
+END_BLOCK?=49
+CODEC_VERSION?=8

File diff suppressed because one or more lines are too long

View File

@@ -25,7 +25,7 @@ SELECT 'INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
 quote_literal(transactions) ||
 ');'
 FROM l2_block
-WHERE number >= 1 and number <= 49
+WHERE number >= 20278000 and number <= 20278050
 ORDER BY number ASC;
 -- Write footer
-- Write footer

View File

@@ -1,2 +1,3 @@
 BEGIN_BLOCK?=10973711
 END_BLOCK?=10973721
+CODEC_VERSION?=8

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,3 @@
+BEGIN_BLOCK?=20278022
+END_BLOCK?=20278025
+CODEC_VERSION?=9

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,9 @@
+{
+    "db_config": {
+        "driver_name": "postgres",
+        "dsn": "postgres://dev:dev@localhost:5432/scroll?sslmode=disable",
+        "maxOpenNum": 5,
+        "maxIdleNum": 1
+    },
+    "validium_mode": false
+}

File diff suppressed because one or more lines are too long

View File

@@ -8,11 +8,7 @@
     "retry_wait_time_sec": 10,
     "connection_timeout_sec": 1800
   },
-  "l2geth": {
-    "endpoint": "<the url of rpc endpoint>"
-  },
   "prover": {
-    "circuit_type": 2,
     "supported_proof_types": [
       1,
       2,
@@ -28,6 +24,11 @@
       "hard_fork_name": "feynman",
       "base_url": "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/feynman/",
       "workspace_path": ".work/feynman"
-    }
+    },
+    "galileo": {
+      "hard_fork_name": "galileo",
+      "base_url": "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/galileo/",
+      "workspace_path": ".work/galileo"
+    }
   }
 }