[Feat] Galileo forking (#1753)

Co-authored-by: Rohit Narurkar <rohit.narurkar@proton.me>
Author: Ho
Date: 2025-11-24 18:37:04 +09:00
Committed by: GitHub
parent 235ba874c6
commit 9dceae1ca2
39 changed files with 1746 additions and 597 deletions

Cargo.lock (generated): 932 changed lines; diff suppressed because it is too large.

@@ -14,16 +14,16 @@ edition = "2021"
homepage = "https://scroll.io" homepage = "https://scroll.io"
readme = "README.md" readme = "README.md"
repository = "https://github.com/scroll-tech/scroll" repository = "https://github.com/scroll-tech/scroll"
version = "4.6.3" version = "4.7.1"
[workspace.dependencies] [workspace.dependencies]
scroll-zkvm-prover = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "360f364" } scroll-zkvm-prover = { git = "https://github.com/scroll-tech/zkvm-prover", tag = "v0.7.0" }
scroll-zkvm-verifier = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "360f364" } scroll-zkvm-verifier = { git = "https://github.com/scroll-tech/zkvm-prover", tag = "v0.7.0" }
scroll-zkvm-types = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "360f364" } scroll-zkvm-types = { git = "https://github.com/scroll-tech/zkvm-prover", tag = "v0.7.0" }
sbv-primitives = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "master", features = ["scroll", "rkyv"] } sbv-primitives = { git = "https://github.com/scroll-tech/stateless-block-verifier", tag = "scroll-v91", features = ["scroll", "rkyv"] }
sbv-utils = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "master" } sbv-utils = { git = "https://github.com/scroll-tech/stateless-block-verifier", tag = "scroll-v91" }
sbv-core = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "master", features = ["scroll"] } sbv-core = { git = "https://github.com/scroll-tech/stateless-block-verifier", tag = "scroll-v91", features = ["scroll"] }
metrics = "0.23.0" metrics = "0.23.0"
metrics-util = "0.17" metrics-util = "0.17"
@@ -31,14 +31,14 @@ metrics-tracing-context = "0.16.0"
anyhow = "1.0" anyhow = "1.0"
alloy = { version = "1", default-features = false } alloy = { version = "1", default-features = false }
alloy-primitives = { version = "1.3", default-features = false, features = ["tiny-keccak"] } alloy-primitives = { version = "1.4.1", default-features = false, features = ["tiny-keccak"] }
# also use this to trigger "serde" feature for primitives # also use this to trigger "serde" feature for primitives
alloy-serde = { version = "1", default-features = false } alloy-serde = { version = "1", default-features = false }
serde = { version = "1", default-features = false, features = ["derive"] } serde = { version = "1", default-features = false, features = ["derive"] }
serde_json = { version = "1.0" } serde_json = { version = "1.0" }
serde_derive = "1.0" serde_derive = "1.0"
serde_with = "3.11.0" serde_with = "3"
itertools = "0.14" itertools = "0.14"
tiny-keccak = "2.0" tiny-keccak = "2.0"
tracing = "0.1" tracing = "0.1"
@@ -46,21 +46,20 @@ eyre = "0.6"
once_cell = "1.20" once_cell = "1.20"
base64 = "0.22" base64 = "0.22"
[patch.crates-io]
revm = { git = "https://github.com/scroll-tech/revm" }
revm-bytecode = { git = "https://github.com/scroll-tech/revm" }
revm-context = { git = "https://github.com/scroll-tech/revm" }
revm-context-interface = { git = "https://github.com/scroll-tech/revm" }
revm-database = { git = "https://github.com/scroll-tech/revm" }
revm-database-interface = { git = "https://github.com/scroll-tech/revm" }
revm-handler = { git = "https://github.com/scroll-tech/revm" }
revm-inspector = { git = "https://github.com/scroll-tech/revm" }
revm-interpreter = { git = "https://github.com/scroll-tech/revm" }
revm-precompile = { git = "https://github.com/scroll-tech/revm" }
revm-primitives = { git = "https://github.com/scroll-tech/revm" }
revm-state = { git = "https://github.com/scroll-tech/revm" }
alloy-primitives = { git = "https://github.com/scroll-tech/alloy-core", branch = "feat/rkyv" } [patch.crates-io]
revm = { git = "https://github.com/scroll-tech/revm", tag = "scroll-v91" }
revm-bytecode = { git = "https://github.com/scroll-tech/revm", tag = "scroll-v91" }
revm-context = { git = "https://github.com/scroll-tech/revm", tag = "scroll-v91" }
revm-context-interface = { git = "https://github.com/scroll-tech/revm", tag = "scroll-v91" }
revm-database = { git = "https://github.com/scroll-tech/revm", tag = "scroll-v91" }
revm-database-interface = { git = "https://github.com/scroll-tech/revm", tag = "scroll-v91" }
revm-handler = { git = "https://github.com/scroll-tech/revm", tag = "scroll-v91" }
revm-inspector = { git = "https://github.com/scroll-tech/revm", tag = "scroll-v91" }
revm-interpreter = { git = "https://github.com/scroll-tech/revm", tag = "scroll-v91" }
revm-precompile = { git = "https://github.com/scroll-tech/revm", tag = "scroll-v91" }
revm-primitives = { git = "https://github.com/scroll-tech/revm", tag = "scroll-v91" }
revm-state = { git = "https://github.com/scroll-tech/revm", tag = "scroll-v91" }
[profile.maxperf] [profile.maxperf]
inherits = "release" inherits = "release"


@@ -5,7 +5,7 @@ import (
"runtime/debug" "runtime/debug"
) )
var tag = "v4.6.3" var tag = "v4.7.1"
var commit = func() string { var commit = func() string {
if info, ok := debug.ReadBuildInfo(); ok { if info, ok := debug.ReadBuildInfo(); ok {


@@ -37,7 +37,12 @@ coordinator_tool:
 localsetup: coordinator_api ## Local setup: build coordinator_api, copy config, and setup releases
 	mkdir -p build/bin/conf
 	@echo "Copying configuration files..."
-	cp -fL $(CURDIR)/conf/config.json $(CURDIR)/build/bin/conf/config.template.json
+	@if [ -f "$(PWD)/conf/config.template.json" ]; then \
+		SRC="$(PWD)/conf/config.template.json"; \
+	else \
+		SRC="$(CURDIR)/conf/config.json"; \
+	fi; \
+	cp -fL "$$SRC" "$(CURDIR)/build/bin/conf/config.template.json"
 	@echo "Setting up releases..."
 	cd $(CURDIR)/build && bash setup_releases.sh


@@ -6,6 +6,9 @@ if [ -z "${SCROLL_ZKVM_VERSION}" ]; then
     exit 1
 fi
+# default fork name from env or "galileo"
+SCROLL_FORK_NAME="${SCROLL_FORK_NAME:-galileo}"
 # set ASSET_DIR by reading from config.json
 CONFIG_FILE="bin/conf/config.template.json"
 if [ ! -f "$CONFIG_FILE" ]; then
@@ -28,7 +31,13 @@ for ((i=0; i<$VERIFIER_COUNT; i++)); do
     # extract assets_path for current verifier
     ASSETS_PATH=$(jq -r ".prover_manager.verifier.verifiers[$i].assets_path" "$CONFIG_FILE")
     FORK_NAME=$(jq -r ".prover_manager.verifier.verifiers[$i].fork_name" "$CONFIG_FILE")
+    # skip if this verifier's fork doesn't match the target fork
+    if [ "$FORK_NAME" != "$SCROLL_FORK_NAME" ]; then
+        echo "Expect $SCROLL_FORK_NAME, skip current fork ($FORK_NAME)"
+        continue
+    fi
     if [ "$ASSETS_PATH" = "null" ]; then
         echo "Warning: Could not find assets_path for verifier $i, skipping..."
         continue


@@ -10,7 +10,8 @@
"min_prover_version": "v4.4.45", "min_prover_version": "v4.4.45",
"verifiers": [ "verifiers": [
{ {
"assets_path": "assets", "features": "legacy_witness:openvm_13",
"assets_path": "assets_feynman",
"fork_name": "feynman" "fork_name": "feynman"
}, },
{ {


@@ -69,12 +69,12 @@ type AssetConfig struct {
 	ForkName string `json:"fork_name"`
 	Vkfile string `json:"vk_file,omitempty"`
 	MinProverVersion string `json:"min_prover_version,omitempty"`
+	Features string `json:"features,omitempty"`
 }
 // VerifierConfig load zk verifier config.
 type VerifierConfig struct {
 	MinProverVersion string `json:"min_prover_version"`
-	Features string `json:"features,omitempty"`
 	Verifiers []AssetConfig `json:"verifiers"`
 }


@@ -141,13 +141,6 @@ func DumpVk(forkName, filePath string) error {
 	return nil
 }
-// Set dynamic feature flags that control libzkp runtime behavior
-func SetDynamicFeature(feats string) {
-	cFeats := goToCString(feats)
-	defer freeCString(cFeats)
-	C.set_dynamic_feature(cFeats)
-}
 // UnivTaskCompatibilityFix calls the universal task compatibility fix function
 func UniversalTaskCompatibilityFix(taskJSON string) (string, error) {
 	cTaskJSON := goToCString(taskJSON)


@@ -56,8 +56,6 @@ char* gen_wrapped_proof(char* proof_json, char* metadata, char* vk, size_t vk_le
 // Release memory allocated for a string returned by gen_wrapped_proof
 void release_string(char* string_ptr);
-void set_dynamic_feature(const char* feats);
 // Universal task compatibility fix function
 char* univ_task_compatibility_fix(char* task_json);


@@ -311,9 +311,12 @@ func (bp *BatchProverTask) getBatchTaskDetail(dbBatch *orm.Batch, chunkProofs []
 	if !bp.validiumMode() {
 		dbBatchCodecVersion := encoding.CodecVersion(dbBatch.CodecVersion)
 		switch dbBatchCodecVersion {
+		case 0:
+			log.Warn("the codec version is 0, if it is not under integration test we have encountered an error here")
+			return taskDetail, nil
 		case encoding.CodecV3, encoding.CodecV4, encoding.CodecV6, encoding.CodecV7, encoding.CodecV8, encoding.CodecV9:
 		default:
-			return taskDetail, nil
+			return nil, fmt.Errorf("Unsupported codec version <%d>", dbBatchCodecVersion)
 		}
 		codec, err := encoding.CodecFromVersion(encoding.CodecVersion(dbBatch.CodecVersion))
@@ -335,7 +338,7 @@ func (bp *BatchProverTask) getBatchTaskDetail(dbBatch *orm.Batch, chunkProofs []
 		taskDetail.KzgProof = &message.Byte48{Big: hexutil.Big(*new(big.Int).SetBytes(dbBatch.BlobDataProof[112:160]))}
 		taskDetail.KzgCommitment = &message.Byte48{Big: hexutil.Big(*new(big.Int).SetBytes(dbBatch.BlobDataProof[64:112]))}
 	} else {
-		log.Debug("Apply validium mode for batch proving task")
+		log.Info("Apply validium mode for batch proving task")
 		codec := cutils.FromVersion(version)
 		batchHeader, decodeErr := codec.DABatchForTaskFromBytes(dbBatch.BatchHeader)
 		if decodeErr != nil {
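
A note on the stricter codec guard above: a silent early return becomes an explicit error, while codec version 0 is tolerated only as an integration-test sentinel. The coordinator code itself is Go; the following is an illustrative re-sketch of the same rule in Rust, and check_codec_version is a hypothetical helper, not project code.

// Hypothetical re-sketch of the coordinator's codec-version guard:
// 0 is a tolerated integration-test sentinel, v3/v4/v6-v9 are provable,
// anything else is a hard error instead of a silent early return.
fn check_codec_version(v: u64) -> Result<bool, String> {
    match v {
        0 => {
            eprintln!("codec version 0: expected only under integration test");
            Ok(false) // skip building the task detail, but not an error
        }
        3 | 4 | 6 | 7 | 8 | 9 => Ok(true),
        other => Err(format!("unsupported codec version <{other}>")),
    }
}

fn main() {
    assert_eq!(check_codec_version(9), Ok(true));
    assert!(check_codec_version(5).is_err());
}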


@@ -29,6 +29,7 @@ type rustCircuitConfig struct {
 	Version uint `json:"version"`
 	ForkName string `json:"fork_name"`
 	AssetsPath string `json:"assets_path"`
+	Features string `json:"features,omitempty"`
 }
 var validiumMode bool
@@ -47,6 +48,7 @@ func newRustCircuitConfig(cfg config.AssetConfig) *rustCircuitConfig {
 		Version: uint(ver),
 		AssetsPath: cfg.AssetsPath,
 		ForkName: cfg.ForkName,
+		Features: cfg.Features,
 	}
 }
@@ -82,9 +84,6 @@ func NewVerifier(cfg *config.VerifierConfig, useValidiumMode bool) (*Verifier, e
 		return nil, err
 	}
-	if cfg.Features != "" {
-		libzkp.SetDynamicFeature(cfg.Features)
-	}
 	libzkp.InitVerifier(string(configBytes))
 	v := &Verifier{


@@ -9,7 +9,7 @@ scroll-zkvm-types = { workspace = true, features = ["scroll"] }
 scroll-zkvm-verifier.workspace = true
 alloy-primitives.workspace = true #depress the effect of "native-keccak"
-sbv-primitives = {workspace = true, features = ["scroll-compress-ratio", "scroll"]}
+sbv-primitives = {workspace = true, features = ["scroll-compress-info", "scroll"]}
 sbv-core = { workspace = true, features = ["scroll"] }
 base64.workspace = true
 serde.workspace = true


@@ -1,35 +1,57 @@
 pub mod proofs;
 pub mod tasks;
+pub use tasks::ProvingTaskExt;
 pub mod verifier;
+use verifier::HardForkName;
 pub use verifier::{TaskType, VerifierConfig};
 mod utils;
 use sbv_primitives::B256;
-use scroll_zkvm_types::utils::vec_as_base64;
+use scroll_zkvm_types::{utils::vec_as_base64, version::Version};
 use serde::{Deserialize, Serialize};
 use serde_json::value::RawValue;
-use std::path::Path;
+use std::{collections::HashMap, path::Path, sync::OnceLock};
 use tasks::chunk_interpreter::{ChunkInterpreter, TryFromWithInterpreter};
-/// global features: use legacy encoding for witness
-static mut LEGACY_WITNESS_ENCODING: bool = false;
-
-pub(crate) fn witness_use_legacy_mode() -> bool {
-    unsafe { LEGACY_WITNESS_ENCODING }
-}
-
-pub fn set_dynamic_feature(feats: &str) {
-    for feat_s in feats.split(':') {
-        match feat_s.trim().to_lowercase().as_str() {
-            "legacy_witness" => {
-                tracing::info!("set witness encoding for legacy mode");
-                unsafe {
-                    // the function is only called while initialize step
-                    LEGACY_WITNESS_ENCODING = true;
-                }
-            }
-            s => tracing::warn!("unrecognized dynamic feature: {s}"),
-        }
-    }
-}
+pub(crate) fn witness_use_legacy_mode(fork_name: &str) -> eyre::Result<bool> {
+    ADDITIONAL_FEATURES
+        .get()
+        .and_then(|features| features.get(fork_name))
+        .map(|cfg| cfg.legacy_witness_encoding)
+        .ok_or_else(|| {
+            eyre::eyre!(
+                "can not find features setting for unrecognized fork {}",
+                fork_name
+            )
+        })
+}
+
+#[derive(Debug, Default, Clone)]
+struct FeatureOptions {
+    legacy_witness_encoding: bool,
+    for_openvm_13_prover: bool,
+}
+
+static ADDITIONAL_FEATURES: OnceLock<HashMap<HardForkName, FeatureOptions>> = OnceLock::new();
+
+impl FeatureOptions {
+    pub fn new(feats: &str) -> Self {
+        let mut ret: Self = Default::default();
+        for feat_s in feats.split(':') {
+            match feat_s.trim().to_lowercase().as_str() {
+                "legacy_witness" => {
+                    tracing::info!("set witness encoding for legacy mode");
+                    ret.legacy_witness_encoding = true;
+                }
+                "openvm_13" => {
+                    tracing::info!("set prover should use openvm 13");
+                    ret.for_openvm_13_prover = true;
+                }
+                s => tracing::warn!("unrecognized dynamic feature: {s}"),
+            }
+        }
+        ret
+    }
+}
@@ -112,35 +134,56 @@ pub fn gen_universal_task(
             let mut task = serde_json::from_str::<ChunkProvingTask>(task_json)?;
             // normailze fork name field in task
             task.fork_name = task.fork_name.to_lowercase();
+            let version = Version::from(task.version);
             // always respect the fork_name_str (which has been normalized) being passed
             // if the fork_name wrapped in task is not match, consider it a malformed task
             if fork_name_str != task.fork_name.as_str() {
                 eyre::bail!("fork name in chunk task not match the calling arg, expected {fork_name_str}, get {}", task.fork_name);
             }
+            if fork_name_str != version.fork.as_str() {
+                eyre::bail!(
+                    "given task version, expected fork={fork_name_str}, got={version_fork}",
+                    version_fork = version.fork.as_str()
+                );
+            }
             let (pi_hash, metadata, u_task) =
-                utils::panic_catch(move || gen_universal_chunk_task(task, fork_name_str.into()))
+                utils::panic_catch(move || gen_universal_chunk_task(task))
                     .map_err(|e| eyre::eyre!("caught panic in chunk task{e}"))??;
             (pi_hash, AnyMetaData::Chunk(metadata), u_task)
         }
         x if x == TaskType::Batch as i32 => {
             let mut task = serde_json::from_str::<BatchProvingTask>(task_json)?;
             task.fork_name = task.fork_name.to_lowercase();
+            let version = Version::from(task.version);
             if fork_name_str != task.fork_name.as_str() {
                 eyre::bail!("fork name in batch task not match the calling arg, expected {fork_name_str}, get {}", task.fork_name);
             }
+            if fork_name_str != version.fork.as_str() {
+                eyre::bail!(
+                    "given task version, expected fork={fork_name_str}, got={version_fork}",
+                    version_fork = version.fork.as_str()
+                );
+            }
             let (pi_hash, metadata, u_task) =
-                utils::panic_catch(move || gen_universal_batch_task(task, fork_name_str.into()))
+                utils::panic_catch(move || gen_universal_batch_task(task))
                     .map_err(|e| eyre::eyre!("caught panic in chunk task{e}"))??;
             (pi_hash, AnyMetaData::Batch(metadata), u_task)
         }
         x if x == TaskType::Bundle as i32 => {
             let mut task = serde_json::from_str::<BundleProvingTask>(task_json)?;
             task.fork_name = task.fork_name.to_lowercase();
+            let version = Version::from(task.version);
             if fork_name_str != task.fork_name.as_str() {
                 eyre::bail!("fork name in bundle task not match the calling arg, expected {fork_name_str}, get {}", task.fork_name);
             }
+            if fork_name_str != version.fork.as_str() {
+                eyre::bail!(
+                    "given task version, expected fork={fork_name_str}, got={version_fork}",
+                    version_fork = version.fork.as_str()
+                );
+            }
             let (pi_hash, metadata, u_task) =
-                utils::panic_catch(move || gen_universal_bundle_task(task, fork_name_str.into()))
+                utils::panic_catch(move || gen_universal_bundle_task(task))
                     .map_err(|e| eyre::eyre!("caught panic in chunk task{e}"))??;
             (pi_hash, AnyMetaData::Bundle(metadata), u_task)
         }
@@ -148,11 +191,26 @@ pub fn gen_universal_task(
     };
     u_task.vk = Vec::from(expected_vk);
+    let fork_name = u_task.fork_name.clone();
+    let mut u_task_ext = ProvingTaskExt::new(u_task);
+    // set additional settings from global features
+    if let Some(cfg) = ADDITIONAL_FEATURES
+        .get()
+        .and_then(|features| features.get(&fork_name))
+    {
+        u_task_ext.use_openvm_13 = cfg.for_openvm_13_prover;
+    } else {
+        tracing::warn!(
+            "can not found features setting for unrecognized fork {}",
+            fork_name
+        );
+    }
     Ok((
         pi_hash,
         serde_json::to_string(&metadata)?,
-        serde_json::to_string(&u_task)?,
+        serde_json::to_string(&u_task_ext)?,
     ))
 }
@@ -183,7 +241,26 @@ pub fn gen_wrapped_proof(proof_json: &str, metadata: &str, vk: &[u8]) -> eyre::R
 /// init verifier
 pub fn verifier_init(config: &str) -> eyre::Result<()> {
     let cfg: VerifierConfig = serde_json::from_str(config)?;
+    ADDITIONAL_FEATURES
+        .set(HashMap::from_iter(cfg.circuits.iter().map(|config| {
+            tracing::info!(
+                "start setting features [{:?}] for fork {}",
+                config.features,
+                config.fork_name
+            );
+            (
+                config.fork_name.to_lowercase(),
+                config
+                    .features
+                    .as_ref()
+                    .map(|features| FeatureOptions::new(features.as_str()))
+                    .unwrap_or_default(),
+            )
+        })))
+        .map_err(|c| eyre::eyre!("Fail to init additional features: {c:?}"))?;
     verifier::init(cfg);
     Ok(())
 }
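
For reference, the feature string wired through the config above ("legacy_witness:openvm_13") is split on ':' and each token matched case-insensitively. A minimal standalone sketch of that parsing, with Flags as an illustrative stand-in for the crate's private FeatureOptions:

// Minimal sketch of the colon-separated feature-string parsing used above.
#[derive(Debug, Default, PartialEq)]
struct Flags {
    legacy_witness_encoding: bool,
    for_openvm_13_prover: bool,
}

fn parse(feats: &str) -> Flags {
    let mut ret = Flags::default();
    for tok in feats.split(':') {
        match tok.trim().to_lowercase().as_str() {
            "legacy_witness" => ret.legacy_witness_encoding = true,
            "openvm_13" => ret.for_openvm_13_prover = true,
            _ => eprintln!("unrecognized dynamic feature: {tok}"),
        }
    }
    ret
}

fn main() {
    let flags = parse("legacy_witness:openvm_13");
    assert!(flags.legacy_witness_encoding && flags.for_openvm_13_prover);
}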


@@ -140,8 +140,6 @@ impl ProofMetadata for ChunkProofMetadata {
 pub struct BatchProofMetadata {
     /// The batch information describing the list of chunks.
     pub batch_info: BatchInfo,
-    /// The [`scroll_zkvm_types::batch::BatchHeader`]'s digest.
-    pub batch_hash: B256,
 }
 impl ProofMetadata for BatchProofMetadata {
@@ -217,7 +215,7 @@ impl<Metadata: ProofMetadata> PersistableProof for WrappedProof<Metadata> {
 mod tests {
     use base64::{prelude::BASE64_STANDARD, Engine};
     use sbv_primitives::B256;
-    use scroll_zkvm_types::{bundle::BundleInfo, proof::EvmProof, public_inputs::ForkName};
+    use scroll_zkvm_types::{bundle::BundleInfo, proof::EvmProof};
     use super::*;
@@ -255,7 +253,7 @@ mod tests {
             msg_queue_hash: B256::repeat_byte(6),
             encryption_key: None,
         };
-        let bundle_pi_hash = bundle_info.pi_hash(ForkName::EuclidV1);
+        let bundle_pi_hash = bundle_info.pi_hash_euclidv1();
         BundleProofMetadata {
             bundle_info,
             bundle_pi_hash,


@@ -14,7 +14,7 @@ use crate::{
     utils::panic_catch,
 };
 use sbv_primitives::B256;
-use scroll_zkvm_types::public_inputs::{ForkName, MultiVersionPublicInputs, Version};
+use scroll_zkvm_types::public_inputs::{MultiVersionPublicInputs, Version};
 fn encode_task_to_witness<T: serde::Serialize>(task: &T) -> eyre::Result<Vec<u8>> {
     let config = bincode::config::standard();
@@ -35,17 +35,37 @@ fn check_aggregation_proofs<Metadata: MultiVersionPublicInputs>(
     Ok(())
 }
+#[derive(serde::Deserialize, serde::Serialize)]
+pub struct ProvingTaskExt {
+    #[serde(flatten)]
+    task: ProvingTask,
+    #[serde(default)]
+    pub use_openvm_13: bool,
+}
+
+impl From<ProvingTaskExt> for ProvingTask {
+    fn from(wrap_t: ProvingTaskExt) -> Self {
+        wrap_t.task
+    }
+}
+
+impl ProvingTaskExt {
+    pub fn new(task: ProvingTask) -> Self {
+        Self {
+            task,
+            use_openvm_13: false,
+        }
+    }
+}
 /// Generate required staff for chunk proving
 pub fn gen_universal_chunk_task(
     task: ChunkProvingTask,
-    fork_name: ForkName,
 ) -> eyre::Result<(B256, ChunkProofMetadata, ProvingTask)> {
     let chunk_total_gas = task.stats().total_gas_used;
-    let chunk_info = task.precheck_and_build_metadata()?;
-    let proving_task = task.try_into()?;
-    let expected_pi_hash = chunk_info.pi_hash_by_fork(fork_name);
+    let (proving_task, chunk_info, chunk_pi_hash) = task.into_proving_task_with_precheck()?;
     Ok((
-        expected_pi_hash,
+        chunk_pi_hash,
         ChunkProofMetadata {
             chunk_info,
             chunk_total_gas,
@@ -57,18 +77,11 @@ pub fn gen_universal_chunk_task(
 /// Generate required staff for batch proving
 pub fn gen_universal_batch_task(
     task: BatchProvingTask,
-    fork_name: ForkName,
 ) -> eyre::Result<(B256, BatchProofMetadata, ProvingTask)> {
-    let batch_info = task.precheck_and_build_metadata()?;
-    let proving_task = task.try_into()?;
-    let expected_pi_hash = batch_info.pi_hash_by_fork(fork_name);
+    let (proving_task, batch_info, batch_pi_hash) = task.into_proving_task_with_precheck()?;
     Ok((
-        expected_pi_hash,
-        BatchProofMetadata {
-            batch_info,
-            batch_hash: expected_pi_hash,
-        },
+        batch_pi_hash,
+        BatchProofMetadata { batch_info },
         proving_task,
     ))
 }
@@ -76,17 +89,13 @@ pub fn gen_universal_batch_task(
 /// Generate required staff for bundle proving
 pub fn gen_universal_bundle_task(
     task: BundleProvingTask,
-    fork_name: ForkName,
 ) -> eyre::Result<(B256, BundleProofMetadata, ProvingTask)> {
-    let bundle_info = task.precheck_and_build_metadata()?;
-    let proving_task = task.try_into()?;
-    let expected_pi_hash = bundle_info.pi_hash_by_fork(fork_name);
+    let (proving_task, bundle_info, bundle_pi_hash) = task.into_proving_task_with_precheck()?;
     Ok((
-        expected_pi_hash,
+        bundle_pi_hash,
         BundleProofMetadata {
             bundle_info,
-            bundle_pi_hash: expected_pi_hash,
+            bundle_pi_hash,
         },
         proving_task,
     ))
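
The new ProvingTaskExt introduced in this file rides on two serde attributes: #[serde(flatten)] keeps the JSON object flat (no nested task key), and #[serde(default)] lets payloads that predate the use_openvm_13 flag still deserialize. A self-contained sketch under those assumptions, with Task standing in for scroll_zkvm_types::ProvingTask:

use serde::{Deserialize, Serialize};

// Illustrative stand-in for the real ProvingTask.
#[derive(Serialize, Deserialize, Debug)]
struct Task {
    identifier: String,
    fork_name: String,
}

#[derive(Serialize, Deserialize, Debug)]
struct TaskExt {
    #[serde(flatten)]
    task: Task,
    #[serde(default)]
    use_openvm_13: bool,
}

fn main() {
    // An old-style payload without the new flag defaults to false.
    let old = r#"{"identifier":"0xabc","fork_name":"galileo"}"#;
    let t: TaskExt = serde_json::from_str(old).unwrap();
    assert!(!t.use_openvm_13);

    // Serializing keeps a flat JSON object, not a nested "task" field.
    let s = serde_json::to_string(&t).unwrap();
    assert!(s.contains("\"fork_name\":\"galileo\""));
}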


@@ -3,15 +3,15 @@ use eyre::Result;
 use sbv_primitives::{B256, U256};
 use scroll_zkvm_types::{
     batch::{
-        build_point_eval_witness, BatchHeader, BatchHeaderV6, BatchHeaderV7, BatchHeaderV8,
-        BatchHeaderValidium, BatchInfo, BatchWitness, Envelope, EnvelopeV6, EnvelopeV7, EnvelopeV8,
-        LegacyBatchWitness, ReferenceHeader, N_BLOB_BYTES,
+        build_point_eval_witness, BatchHeader, BatchHeaderV6, BatchHeaderV7, BatchHeaderValidium,
+        BatchInfo, BatchWitness, Envelope, EnvelopeV6, EnvelopeV7, LegacyBatchWitness,
+        ReferenceHeader, N_BLOB_BYTES,
     },
     chunk::ChunkInfo,
-    public_inputs::{ForkName, Version},
+    public_inputs::{ForkName, MultiVersionPublicInputs, Version},
     task::ProvingTask,
     utils::{to_rkyv_bytes, RancorError},
-    version::{Domain, STFVersion},
+    version::{Codec, Domain, STFVersion},
 };
 use crate::proofs::ChunkProof;
@@ -26,23 +26,32 @@ pub struct BatchHeaderValidiumWithHash {
     batch_hash: B256,
 }
-/// Define variable batch header type, since BatchHeaderV6 can not
-/// be decoded as V7 we can always has correct deserialization
-/// Notice: V6 header MUST be put above V7 since untagged enum
-/// try to decode each defination in order
+/// Parse header types passed from golang side and adapt to the
+/// definition in zkvm-prover's types
+/// We distinguish the header type in golang side according to the codec
+/// version, i.e. v7 - v9 (current), and validium
+/// And adapt it to the corresponding header version used in zkvm-prover's witness
+/// definition, i.e. v7- v8 (current), and validium
 #[derive(Clone, serde::Deserialize, serde::Serialize)]
 #[serde(untagged)]
+#[allow(non_camel_case_types)]
 pub enum BatchHeaderV {
+    /// Header for validium mode.
     Validium(BatchHeaderValidiumWithHash),
+    /// Header for scroll's STF version v6.
     V6(BatchHeaderV6),
-    V7_8(BatchHeaderV7),
+    /// Header for scroll's STF versions v7, v8, v9.
+    ///
+    /// Since the codec essentially is unchanged for the above STF versions, we do not define new
+    /// variants, instead re-using the [`BatchHeaderV7`] variant.
+    V7_V8_V9(BatchHeaderV7),
 }
 impl core::fmt::Display for BatchHeaderV {
     fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
         match self {
             BatchHeaderV::V6(_) => write!(f, "V6"),
-            BatchHeaderV::V7_8(_) => write!(f, "V7_8"),
+            BatchHeaderV::V7_V8_V9(_) => write!(f, "V7_V8_V9"),
             BatchHeaderV::Validium(_) => write!(f, "Validium"),
         }
     }
@@ -52,7 +61,7 @@ impl BatchHeaderV {
     pub fn batch_hash(&self) -> B256 {
         match self {
             BatchHeaderV::V6(h) => h.batch_hash(),
-            BatchHeaderV::V7_8(h) => h.batch_hash(),
+            BatchHeaderV::V7_V8_V9(h) => h.batch_hash(),
             BatchHeaderV::Validium(h) => h.header.batch_hash(),
         }
     }
@@ -64,17 +73,10 @@ impl BatchHeaderV {
         }
     }
-    pub fn must_v7_header(&self) -> &BatchHeaderV7 {
+    pub fn must_v7_v8_v9_header(&self) -> &BatchHeaderV7 {
         match self {
-            BatchHeaderV::V7_8(h) => h,
-            _ => unreachable!("A header of {} is considered to be v7", self),
-        }
-    }
-
-    pub fn must_v8_header(&self) -> &BatchHeaderV8 {
-        match self {
-            BatchHeaderV::V7_8(h) => h,
-            _ => unreachable!("A header of {} is considered to be v8", self),
+            BatchHeaderV::V7_V8_V9(h) => h,
+            _ => unreachable!("A header of {} is considered to be in [v7, v8, v9]", self),
         }
     }
@@ -110,35 +112,55 @@ pub struct BatchProvingTask {
     pub fork_name: String,
 }
-impl TryFrom<BatchProvingTask> for ProvingTask {
-    type Error = eyre::Error;
-
-    fn try_from(value: BatchProvingTask) -> Result<Self> {
-        let witness = value.build_guest_input();
-        let serialized_witness = if crate::witness_use_legacy_mode() {
+impl BatchProvingTask {
+    pub fn into_proving_task_with_precheck(self) -> Result<(ProvingTask, BatchInfo, B256)> {
+        let (witness, metadata, batch_pi_hash) = self.precheck()?;
+        let serialized_witness = if crate::witness_use_legacy_mode(&self.fork_name)? {
             let legacy_witness = LegacyBatchWitness::from(witness);
             to_rkyv_bytes::<RancorError>(&legacy_witness)?.into_vec()
         } else {
             super::encode_task_to_witness(&witness)?
         };
-        Ok(ProvingTask {
-            identifier: value.batch_header.batch_hash().to_string(),
-            fork_name: value.fork_name,
-            aggregated_proofs: value
+        let proving_task = ProvingTask {
+            identifier: self.batch_header.batch_hash().to_string(),
+            fork_name: self.fork_name,
+            aggregated_proofs: self
                 .chunk_proofs
                 .into_iter()
                 .map(|w_proof| w_proof.proof.into_stark_proof().expect("expect root proof"))
                 .collect(),
             serialized_witness: vec![serialized_witness],
             vk: Vec::new(),
-        })
-    }
-}
-
-impl BatchProvingTask {
-    fn build_guest_input(&self) -> BatchWitness {
-        let version = Version::from(self.version);
+        };
+
+        Ok((proving_task, metadata, batch_pi_hash))
+    }
+
+    fn build_guest_input(&self, version: Version) -> BatchWitness {
+        tracing::info!(
+            "Handling batch task for input, version byte {}, Version data: {:?}",
+            self.version,
+            version
+        );
+        // sanity check for if result of header type parsing match to version
+        match &self.batch_header {
+            BatchHeaderV::Validium(_) => assert!(
+                version.is_validium(),
+                "version {:?} is not match with parsed header, get validium header but version is not validium", version,
+            ),
+            BatchHeaderV::V6(_) => assert_eq!(version.fork, ForkName::EuclidV1,
+                "hardfork mismatch for da-codec@v6 header: found={:?}, expected={:?}",
+                version.fork,
+                ForkName::EuclidV1,
+            ),
+            BatchHeaderV::V7_V8_V9(_) => assert!(
+                matches!(version.fork, ForkName::EuclidV2 | ForkName::Feynman | ForkName::Galileo),
+                "hardfork mismatch for da-codec@v7/8/9 header: found={}, expected={:?}",
+                version.fork,
+                [ForkName::EuclidV2, ForkName::Feynman, ForkName::Galileo],
+            ),
+        }
         let point_eval_witness = if !version.is_validium() {
             // sanity check: calculate point eval needed and compare with task input
@@ -146,44 +168,21 @@ impl BatchProvingTask {
             let blob = point_eval::to_blob(&self.blob_bytes);
             let commitment = point_eval::blob_to_kzg_commitment(&blob);
             let versioned_hash = point_eval::get_versioned_hash(&commitment);
-            let challenge_digest = match &self.batch_header {
-                BatchHeaderV::V6(_) => {
-                    assert_eq!(
-                        version.fork,
-                        ForkName::EuclidV1,
-                        "hardfork mismatch for da-codec@v6 header: found={:?}, expected={:?}",
-                        version.fork,
-                        ForkName::EuclidV1,
-                    );
-                    EnvelopeV6::from_slice(self.blob_bytes.as_slice())
-                        .challenge_digest(versioned_hash)
-                }
-                BatchHeaderV::V7_8(_) => {
-                    let padded_blob_bytes = {
-                        let mut padded_blob_bytes = self.blob_bytes.to_vec();
-                        padded_blob_bytes.resize(N_BLOB_BYTES, 0);
-                        padded_blob_bytes
-                    };
-                    match version.fork {
-                        ForkName::EuclidV2 => {
-                            <EnvelopeV7 as Envelope>::from_slice(padded_blob_bytes.as_slice())
-                                .challenge_digest(versioned_hash)
-                        }
-                        ForkName::Feynman => {
-                            <EnvelopeV8 as Envelope>::from_slice(padded_blob_bytes.as_slice())
-                                .challenge_digest(versioned_hash)
-                        }
-                        fork_name => unreachable!(
-                            "hardfork mismatch for da-codec@v7 header: found={}, expected={:?}",
-                            fork_name,
-                            [ForkName::EuclidV2, ForkName::Feynman],
-                        ),
-                    }
-                }
-                BatchHeaderV::Validium(_) => unreachable!("version!=validium"),
-            };
+
+            let padded_blob_bytes = {
+                let mut padded_blob_bytes = self.blob_bytes.to_vec();
+                padded_blob_bytes.resize(N_BLOB_BYTES, 0);
+                padded_blob_bytes
+            };
+            let challenge_digest = match version.codec {
+                Codec::V6 => {
+                    // notice v6 do not use padded blob bytes
+                    <EnvelopeV6 as Envelope>::from_slice(self.blob_bytes.as_slice())
+                        .challenge_digest(versioned_hash)
+                }
+                Codec::V7 => <EnvelopeV7 as Envelope>::from_slice(padded_blob_bytes.as_slice())
+                    .challenge_digest(versioned_hash),
+            };
             let (proof, _) = point_eval::get_kzg_proof(&blob, challenge_digest);
             (commitment.to_bytes(), proof.to_bytes(), challenge_digest)
@@ -231,11 +230,18 @@ impl BatchProvingTask {
             (Domain::Scroll, STFVersion::V6) => {
                 ReferenceHeader::V6(*self.batch_header.must_v6_header())
             }
-            (Domain::Scroll, STFVersion::V7) => {
-                ReferenceHeader::V7(*self.batch_header.must_v7_header())
-            }
-            (Domain::Scroll, STFVersion::V8) => {
-                ReferenceHeader::V8(*self.batch_header.must_v8_header())
+            // The da-codec for STF versions v7, v8, v9 is identical. In zkvm-prover we do not
+            // create additional variants to indicate the identical behaviour of codec. Instead we
+            // add a separate variant for the STF version.
+            //
+            // We handle the different STF versions here however build the same batch header since
+            // that type does not change. The batch header's version byte constructed in the
+            // coordinator actually defines the STF version (v7, v8 or v9) and we can derive the
+            // hard-fork (feynman or galileo) and the codec from the version byte.
+            //
+            // Refer [`scroll_zkvm_types::public_inputs::Version`].
+            (Domain::Scroll, STFVersion::V7 | STFVersion::V8 | STFVersion::V9) => {
+                ReferenceHeader::V7_V8_V9(*self.batch_header.must_v7_v8_v9_header())
             }
             (Domain::Validium, STFVersion::V1) => {
                 ReferenceHeader::Validium(*self.batch_header.must_validium_header())
@@ -273,18 +279,20 @@ impl BatchProvingTask {
         }
     }
-    pub fn precheck_and_build_metadata(&self) -> Result<BatchInfo> {
+    pub fn precheck(&self) -> Result<(BatchWitness, BatchInfo, B256)> {
         // for every aggregation task, there are two steps needed to build the metadata:
         // 1. generate data for metadata from the witness
         // 2. validate every adjacent proof pair
-        let witness = self.build_guest_input();
+        let version = Version::from(self.version);
+        let witness = self.build_guest_input(version);
         let metadata = BatchInfo::from(&witness);
         super::check_aggregation_proofs(
             witness.chunk_infos.as_slice(),
             Version::from(self.version),
         )?;
-        Ok(metadata)
+        let pi_hash = metadata.pi_hash_by_version(version);
+        Ok((witness, metadata, pi_hash))
     }
 }
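
The old doc comment on BatchHeaderV warned that variant order matters for #[serde(untagged)]: serde tries variants top to bottom and keeps the first that fits, which is why Validium and V6 sit above V7_V8_V9. A standalone sketch of that pitfall, with Narrow and Wide as illustrative stand-ins rather than the crate's header types:

use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct Narrow {
    batch_index: u64,
}

#[derive(Deserialize, Debug)]
struct Wide {
    batch_index: u64,
    blob_versioned_hash: String,
}

#[derive(Deserialize, Debug)]
#[serde(untagged)]
enum Header {
    // The wider shape must come first; otherwise every payload would match
    // Narrow (serde ignores unknown fields by default) and the extra fields
    // would be silently dropped.
    Wide(Wide),
    Narrow(Narrow),
}

fn main() {
    let json = r#"{"batch_index": 7, "blob_versioned_hash": "0x01"}"#;
    let h: Header = serde_json::from_str(json).unwrap();
    println!("{h:?}"); // parses as Header::Wide
}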


@@ -1,7 +1,8 @@
 use eyre::Result;
+use sbv_primitives::B256;
 use scroll_zkvm_types::{
     bundle::{BundleInfo, BundleWitness, LegacyBundleWitness},
-    public_inputs::Version,
+    public_inputs::{MultiVersionPublicInputs, Version},
     task::ProvingTask,
     utils::{to_rkyv_bytes, RancorError},
 };
@@ -24,6 +25,30 @@ pub struct BundleProvingTask {
 }
 impl BundleProvingTask {
+    pub fn into_proving_task_with_precheck(self) -> Result<(ProvingTask, BundleInfo, B256)> {
+        let (witness, bundle_info, bundle_pi_hash) = self.precheck()?;
+        let serialized_witness = if crate::witness_use_legacy_mode(&self.fork_name)? {
+            let legacy = LegacyBundleWitness::from(witness);
+            to_rkyv_bytes::<RancorError>(&legacy)?.into_vec()
+        } else {
+            super::encode_task_to_witness(&witness)?
+        };
+        let proving_task = ProvingTask {
+            identifier: self.identifier(),
+            fork_name: self.fork_name,
+            aggregated_proofs: self
+                .batch_proofs
+                .into_iter()
+                .map(|w_proof| w_proof.proof.into_stark_proof().expect("expect root proof"))
+                .collect(),
+            serialized_witness: vec![serialized_witness],
+            vk: Vec::new(),
+        };
+
+        Ok((proving_task, bundle_info, bundle_pi_hash))
+    }
+
     fn identifier(&self) -> String {
         assert!(!self.batch_proofs.is_empty(), "{BUNDLE_SANITY_MSG}",);
@@ -32,19 +57,20 @@ impl BundleProvingTask {
             .first()
             .expect(BUNDLE_SANITY_MSG)
             .metadata
+            .batch_info
             .batch_hash,
         self.batch_proofs
             .last()
             .expect(BUNDLE_SANITY_MSG)
             .metadata
+            .batch_info
             .batch_hash,
         );
         format!("{first}-{last}")
     }
-    fn build_guest_input(&self) -> BundleWitness {
-        let version = Version::from(self.version);
+    fn build_guest_input(&self, version: Version) -> BundleWitness {
         BundleWitness {
             version: version.as_version_byte(),
             batch_proofs: self.batch_proofs.iter().map(|proof| proof.into()).collect(),
@@ -57,43 +83,19 @@ impl BundleProvingTask {
         }
     }
-    pub fn precheck_and_build_metadata(&self) -> Result<BundleInfo> {
+    fn precheck(&self) -> Result<(BundleWitness, BundleInfo, B256)> {
         // for every aggregation task, there are two steps needed to build the metadata:
         // 1. generate data for metadata from the witness
         // 2. validate every adjacent proof pair
-        let witness = self.build_guest_input();
+        let version = Version::from(self.version);
+        let witness = self.build_guest_input(version);
         let metadata = BundleInfo::from(&witness);
         super::check_aggregation_proofs(
             witness.batch_infos.as_slice(),
             Version::from(self.version),
         )?;
-        Ok(metadata)
-    }
-}
-
-impl TryFrom<BundleProvingTask> for ProvingTask {
-    type Error = eyre::Error;
-
-    fn try_from(value: BundleProvingTask) -> Result<Self> {
-        let witness = value.build_guest_input();
-        let serialized_witness = if crate::witness_use_legacy_mode() {
-            let legacy = LegacyBundleWitness::from(witness);
-            to_rkyv_bytes::<RancorError>(&legacy)?.into_vec()
-        } else {
-            super::encode_task_to_witness(&witness)?
-        };
-        Ok(ProvingTask {
-            identifier: value.identifier(),
-            fork_name: value.fork_name,
-            aggregated_proofs: value
-                .batch_proofs
-                .into_iter()
-                .map(|w_proof| w_proof.proof.into_stark_proof().expect("expect root proof"))
-                .collect(),
-            serialized_witness: vec![serialized_witness],
-            vk: Vec::new(),
-        })
+        let pi_hash = metadata.pi_hash_by_version(version);
+        Ok((witness, metadata, pi_hash))
     }
 }


@@ -3,9 +3,9 @@ use sbv_core::BlockWitness;
 use sbv_primitives::{types::consensus::BlockHeader, B256};
 use scroll_zkvm_types::{
     chunk::{execute, ChunkInfo, ChunkWitness, LegacyChunkWitness, ValidiumInputs},
+    public_inputs::{MultiVersionPublicInputs, Version},
     task::ProvingTask,
     utils::{to_rkyv_bytes, RancorError},
-    version::Version,
 };
 use super::chunk_interpreter::*;
@@ -94,28 +94,6 @@ pub struct ChunkDetails {
     pub total_gas_used: u64,
 }
-impl TryFrom<ChunkProvingTask> for ProvingTask {
-    type Error = eyre::Error;
-
-    fn try_from(value: ChunkProvingTask) -> Result<Self> {
-        let witness = value.build_guest_input();
-        let serialized_witness = if crate::witness_use_legacy_mode() {
-            let legacy_witness = LegacyChunkWitness::from(witness);
-            to_rkyv_bytes::<RancorError>(&legacy_witness)?.into_vec()
-        } else {
-            super::encode_task_to_witness(&witness)?
-        };
-        Ok(ProvingTask {
-            identifier: value.identifier(),
-            fork_name: value.fork_name,
-            aggregated_proofs: Vec::new(),
-            serialized_witness: vec![serialized_witness],
-            vk: Vec::new(),
-        })
-    }
-}
 impl ChunkProvingTask {
     pub fn stats(&self) -> ChunkDetails {
         let num_blocks = self.block_witnesses.len();
@@ -137,6 +115,26 @@ impl ChunkProvingTask {
         }
     }
+    pub fn into_proving_task_with_precheck(self) -> Result<(ProvingTask, ChunkInfo, B256)> {
+        let (witness, chunk_info, chunk_pi_hash) = self.precheck()?;
+        let serialized_witness = if crate::witness_use_legacy_mode(&self.fork_name)? {
+            let legacy_witness = LegacyChunkWitness::from(witness);
+            to_rkyv_bytes::<RancorError>(&legacy_witness)?.into_vec()
+        } else {
+            super::encode_task_to_witness(&witness)?
+        };
+        let proving_task = ProvingTask {
+            identifier: self.identifier(),
+            fork_name: self.fork_name,
+            aggregated_proofs: Vec::new(),
+            serialized_witness: vec![serialized_witness],
+            vk: Vec::new(),
+        };
+
+        Ok((proving_task, chunk_info, chunk_pi_hash))
+    }
+
     fn identifier(&self) -> String {
         assert!(!self.block_witnesses.is_empty(), "{CHUNK_SANITY_MSG}",);
@@ -156,9 +154,7 @@ impl ChunkProvingTask {
         format!("{first}-{last}")
     }
-    fn build_guest_input(&self) -> ChunkWitness {
-        let version = Version::from(self.version);
+    fn build_guest_input(&self, version: Version) -> ChunkWitness {
         if version.is_validium() {
             assert!(self.validium_inputs.is_some());
             ChunkWitness::new(
@@ -182,11 +178,13 @@ impl ChunkProvingTask {
             self.block_witnesses[0].states.push(node);
         }
-    pub fn precheck_and_build_metadata(&self) -> Result<ChunkInfo> {
-        let witness = self.build_guest_input();
-        let ret = ChunkInfo::try_from(witness).map_err(|e| eyre::eyre!("{e}"))?;
-        assert_eq!(ret.post_msg_queue_hash, self.post_msg_queue_hash);
-        Ok(ret)
+    fn precheck(&self) -> Result<(ChunkWitness, ChunkInfo, B256)> {
+        let version = Version::from(self.version);
+        let witness = self.build_guest_input(version);
+        let chunk_info = ChunkInfo::try_from(witness.clone()).map_err(|e| eyre::eyre!("{e}"))?;
+        assert_eq!(chunk_info.post_msg_queue_hash, self.post_msg_queue_hash);
+        let chunk_pi_hash = chunk_info.pi_hash_by_version(version);
+        Ok((witness, chunk_info, chunk_pi_hash))
     }
/// this method check the validate of current task (there may be missing storage node) /// this method check the validate of current task (there may be missing storage node)
@@ -214,7 +212,7 @@ impl ChunkProvingTask {
         let err_parse_re = regex::Regex::new(pattern)?;
         let mut attempts = 0;
         loop {
-            let witness = self.build_guest_input();
+            let witness = self.build_guest_input(Version::euclid_v2());
             match execute(witness) {
                 Ok(_) => return Ok(()),
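
Universal task generation now checks the caller's fork name against both the task's fork_name field and the fork derived from the task's version byte (see gen_universal_task earlier, and the Version::from(self.version) call in precheck above). A sketch of that dual check; the byte-to-fork mapping below is an assumption for illustration, the real mapping lives in scroll_zkvm_types' Version:

// Hypothetical sketch of the dual fork/version consistency check.
fn check_fork(calling_fork: &str, task_fork: &str, version_byte: u8) -> Result<(), String> {
    let task_fork = task_fork.to_lowercase();
    if calling_fork != task_fork {
        return Err(format!(
            "fork name in task not match the calling arg, expected {calling_fork}, get {task_fork}"
        ));
    }
    // illustrative mapping of the STF version byte to a hard-fork name
    let version_fork = match version_byte {
        6 => "euclidv1",
        7 => "euclidv2",
        8 => "feynman",
        9 => "galileo",
        _ => return Err(format!("unknown version byte {version_byte}")),
    };
    if calling_fork != version_fork {
        return Err(format!(
            "given task version, expected fork={calling_fork}, got={version_fork}"
        ));
    }
    Ok(())
}

fn main() {
    assert!(check_fork("galileo", "Galileo", 9).is_ok());
    assert!(check_fork("galileo", "galileo", 8).is_err());
}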


@@ -44,6 +44,8 @@ pub struct CircuitConfig {
     pub version: u8,
     pub fork_name: String,
     pub assets_path: String,
+    #[serde(default)]
+    pub features: Option<String>,
 }
 #[derive(Debug, Serialize, Deserialize)]
@@ -51,7 +53,7 @@ pub struct VerifierConfig {
     pub circuits: Vec<CircuitConfig>,
 }
-type HardForkName = String;
+pub(crate) type HardForkName = String;
 type VerifierType = Arc<Mutex<dyn ProofVerifier + Send>>;
 static VERIFIERS: OnceLock<HashMap<HardForkName, VerifierType>> = OnceLock::new();
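
Since features is declared Option<String> with #[serde(default)], per-fork config entries may simply omit the key. A small sketch of how the two entry styles from the sample configs deserialize, with the struct trimmed to the relevant fields:

use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
struct CircuitConfig {
    version: u8,
    fork_name: String,
    assets_path: String,
    #[serde(default)]
    features: Option<String>,
}

fn main() {
    let json = r#"[
        {"version": 8, "fork_name": "feynman", "assets_path": "assets_feynman",
         "features": "legacy_witness:openvm_13"},
        {"version": 9, "fork_name": "galileo", "assets_path": "assets"}
    ]"#;
    let circuits: Vec<CircuitConfig> = serde_json::from_str(json).unwrap();
    assert_eq!(circuits[0].features.as_deref(), Some("legacy_witness:openvm_13"));
    assert!(circuits[1].features.is_none()); // omitted key defaults to None
}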


@@ -275,10 +275,3 @@ pub unsafe extern "C" fn release_string(ptr: *mut c_char) {
         let _ = CString::from_raw(ptr);
     }
 }
-/// # Safety
-#[no_mangle]
-pub unsafe extern "C" fn set_dynamic_feature(feats: *const c_char) {
-    let feats_str = c_char_to_str(feats);
-    libzkp::set_dynamic_feature(feats_str);
-}


@@ -8,6 +8,7 @@ edition.workspace = true
 [dependencies]
 scroll-zkvm-types.workspace = true
 scroll-zkvm-prover.workspace = true
+libzkp = { path = "../libzkp"}
 scroll-proving-sdk = { git = "https://github.com/scroll-tech/scroll-proving-sdk.git", rev = "05648db" }
 serde.workspace = true
 serde_json.workspace = true


@@ -12,6 +12,7 @@ use scroll_proving_sdk::{
         ProvingService,
     },
 };
+use scroll_zkvm_types::ProvingTask;
 use serde::{Deserialize, Serialize};
 use std::{
     collections::HashMap,
@@ -273,6 +274,8 @@ impl LocalProver {
         let created_at = duration.as_secs() as f64 + duration.subsec_nanos() as f64 * 1e-9;
         let prover_task = UniversalHandler::get_task_from_input(&req.input)?;
+        let is_openvm_13 = prover_task.use_openvm_13;
+        let prover_task: ProvingTask = prover_task.into();
         let vk = hex::encode(&prover_task.vk);
         let handler = if let Some(handler) = self.handlers.get(&vk) {
             handler.clone()
@@ -300,7 +303,7 @@ impl LocalProver {
                 .await?;
             let circuits_handler = Arc::new(Mutex::new(UniversalHandler::new(
                 &asset_path,
-                req.proof_type,
+                is_openvm_13,
             )?));
             self.handlers.insert(vk, circuits_handler.clone());
             circuits_handler


@@ -3,7 +3,7 @@ use std::path::Path;
 use super::CircuitsHandler;
 use async_trait::async_trait;
 use eyre::Result;
-use scroll_proving_sdk::prover::ProofType;
+use libzkp::ProvingTaskExt;
 use scroll_zkvm_prover::{Prover, ProverConfig};
 use scroll_zkvm_types::ProvingTask;
 use tokio::sync::Mutex;
@@ -16,14 +16,15 @@ pub struct UniversalHandler {
 unsafe impl Send for UniversalHandler {}
 impl UniversalHandler {
-    pub fn new(workspace_path: impl AsRef<Path>, _proof_type: ProofType) -> Result<Self> {
+    pub fn new(workspace_path: impl AsRef<Path>, is_openvm_v13: bool) -> Result<Self> {
         let path_app_exe = workspace_path.as_ref().join("app.vmexe");
         let path_app_config = workspace_path.as_ref().join("openvm.toml");
-        let segment_len = Some((1 << 22) - 100);
+        let segment_len = Some((1 << 21) - 100);
         let config = ProverConfig {
             path_app_config,
             path_app_exe,
             segment_len,
+            is_openvm_v13,
         };
         let prover = Prover::setup(config, None)?;
@@ -36,7 +37,7 @@ impl UniversalHandler {
         &mut self.prover
     }
-    pub fn get_task_from_input(input: &str) -> Result<ProvingTask> {
+    pub fn get_task_from_input(input: &str) -> Result<ProvingTaskExt> {
         Ok(serde_json::from_str(input)?)
     }
 }
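
One behavioural note on the handler above: the segment length shrinks from (1 << 22) - 100 to (1 << 21) - 100. A trivial check of the arithmetic; reading the constant as a power of two minus a small headroom is an assumption about its intent:

fn main() {
    let old_segment_len: usize = (1 << 22) - 100; // 4_194_204 cycles
    let new_segment_len: usize = (1 << 21) - 100; // 2_097_052 cycles
    assert_eq!(old_segment_len, 4_194_204);
    assert_eq!(new_segment_len, 2_097_052);
}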


@@ -44,6 +44,12 @@ setup_db: clean
 	${GOOSE_CMD} up
 	GOOSE_MIGRATION_DIR=conf ${GOOSE_CMD} up-to 100
+reset_db:
+	GOOSE_MIGRATION_DIR=conf ${GOOSE_CMD} down
+	${GOOSE_CMD} down-to 0
+	${GOOSE_CMD} up
+	GOOSE_MIGRATION_DIR=conf ${GOOSE_CMD} up-to 100
 test_tool:
 	go build -o $(PWD)/build/bin/e2e_tool ../../rollup/tests/integration_tool
@@ -52,9 +58,17 @@ build/bin/e2e_tool: test_tool
 import_data_euclid: build/bin/e2e_tool check_vars
 	build/bin/e2e_tool --config conf/config.json --codec 7 ${BEGIN_BLOCK} ${END_BLOCK}
-import_data: build/bin/e2e_tool check_vars
+import_data_feynman: build/bin/e2e_tool check_vars
 	build/bin/e2e_tool --config conf/config.json --codec 8 ${BEGIN_BLOCK} ${END_BLOCK}
+import_data_galileo: build/bin/e2e_tool check_vars
+	build/bin/e2e_tool --config conf/config.json --codec 9 ${BEGIN_BLOCK} ${END_BLOCK}
+import_data: build/bin/e2e_tool check_vars
+	build/bin/e2e_tool --config conf/config.json --codec ${CODEC_VERSION} ${BEGIN_BLOCK} ${END_BLOCK}
+reimport_data: reset_db import_data
 coordinator_setup:
-	$(MAKE) -C ../../coordinator localsetup SCROLL_FORK_NAME=${SCROLL_FORK_NAME}
+	$(MAKE) -C ../../coordinator localsetup
 	cp -f conf/genesis.json ../../coordinator/build/bin/conf


@@ -1,2 +1,4 @@
 BEGIN_BLOCK?=35
 END_BLOCK?=49
+CODEC_VERSION?=8
+SCROLL_FORK_NAME=feynman


@@ -0,0 +1,40 @@
{
"prover_manager": {
"provers_per_session": 1,
"session_attempts": 5,
"external_prover_threshold": 32,
"bundle_collection_time_sec": 180,
"batch_collection_time_sec": 180,
"chunk_collection_time_sec": 180,
"verifier": {
"min_prover_version": "v4.4.45",
"verifiers": [
{
"assets_path": "assets",
"fork_name": "feynman"
}
]
}
},
"db": {
"driver_name": "postgres",
"dsn": "postgres://dev:dev@localhost/scroll?sslmode=disable",
"maxOpenNum": 200,
"maxIdleNum": 20
},
"l2": {
"validium_mode": true,
"chain_id": 5343513301,
"l2geth": {
"endpoint": "http://cloak-xen-sequencer.sepolia.scroll.tech:8545/"
}
},
"auth": {
"secret": "prover secret key",
"challenge_expire_duration_sec": 3600,
"login_expire_duration_sec": 3600
},
"sequencer": {
"decryption_key": "<decryption key>"
}
}

File diff suppressed because one or more lines are too long


@@ -25,7 +25,7 @@ SELECT 'INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
     quote_literal(transactions) ||
     ');'
 FROM l2_block
-WHERE number >= 1 and number <= 49
+WHERE number >= 20278000 and number <= 20278050
 ORDER BY number ASC;
 -- Write footer


@@ -1,2 +1,4 @@
 BEGIN_BLOCK?=10973711
 END_BLOCK?=10973721
+CODEC_VERSION?=8
+SCROLL_FORK_NAME=feynman


@@ -0,0 +1,41 @@
{
"prover_manager": {
"provers_per_session": 1,
"session_attempts": 5,
"external_prover_threshold": 32,
"bundle_collection_time_sec": 180,
"batch_collection_time_sec": 180,
"chunk_collection_time_sec": 180,
"verifier": {
"min_prover_version": "v4.4.33",
"verifiers": [
{
"features": "legacy_witness:openvm_13",
"assets_path": "assets_feynman",
"fork_name": "feynman"
}
]
}
},
"db": {
"driver_name": "postgres",
"dsn": "postgres://dev:dev@localhost/scroll?sslmode=disable",
"maxOpenNum": 200,
"maxIdleNum": 20
},
"l2": {
"validium_mode": false,
"chain_id": 534351,
"l2geth": {
"endpoint": "<serach a public rpc endpoint like alchemy>"
}
},
"auth": {
"secret": "prover secret key",
"challenge_expire_duration_sec": 3600,
"login_expire_duration_sec": 3600
},
"sequencer": {
"decryption_key": "not need"
}
}

File diff suppressed because one or more lines are too long


@@ -0,0 +1,4 @@
BEGIN_BLOCK?=20278022
END_BLOCK?=20278025
CODEC_VERSION?=9
SCROLL_FORK_NAME=galileo

File diff suppressed because one or more lines are too long


@@ -0,0 +1,9 @@
{
"db_config": {
"driver_name": "postgres",
"dsn": "postgres://dev:dev@localhost:5432/scroll?sslmode=disable",
"maxOpenNum": 5,
"maxIdleNum": 1
},
"validium_mode": false
}


@@ -0,0 +1,40 @@
{
"prover_manager": {
"provers_per_session": 1,
"session_attempts": 5,
"external_prover_threshold": 32,
"bundle_collection_time_sec": 180,
"batch_collection_time_sec": 180,
"chunk_collection_time_sec": 180,
"verifier": {
"min_prover_version": "v4.4.33",
"verifiers": [
{
"assets_path": "assets",
"fork_name": "galileo"
}
]
}
},
"db": {
"driver_name": "postgres",
"dsn": "postgres://dev:dev@localhost/scroll?sslmode=disable",
"maxOpenNum": 200,
"maxIdleNum": 20
},
"l2": {
"validium_mode": false,
"chain_id": 534351,
"l2geth": {
"endpoint": "<serach a public rpc endpoint like alchemy>"
}
},
"auth": {
"secret": "prover secret key",
"challenge_expire_duration_sec": 3600,
"login_expire_duration_sec": 3600
},
"sequencer": {
"decryption_key": "not need"
}
}

File diff suppressed because one or more lines are too long


@@ -8,11 +8,7 @@
"retry_wait_time_sec": 10, "retry_wait_time_sec": 10,
"connection_timeout_sec": 1800 "connection_timeout_sec": 1800
}, },
"l2geth": {
"endpoint": "<the url of rpc endpoint>"
},
"prover": { "prover": {
"circuit_type": 2,
"supported_proof_types": [ "supported_proof_types": [
1, 1,
2, 2,
@@ -28,6 +24,11 @@
"hard_fork_name": "feynman", "hard_fork_name": "feynman",
"base_url": "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/feynman/", "base_url": "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/feynman/",
"workspace_path": ".work/feynman" "workspace_path": ".work/feynman"
} },
"galileo": {
"hard_fork_name": "galileo",
"base_url": "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/galileo/",
"workspace_path": ".work/galileo"
}
} }
} }