mirror of
https://github.com/scroll-tech/scroll.git
synced 2026-01-11 23:18:07 -05:00
Compare commits
26 Commits
develop
...
refactor/z
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1464d1091d | ||
|
|
14f355b528 | ||
|
|
4b91057a05 | ||
|
|
904b97b70d | ||
|
|
a8a0a816d1 | ||
|
|
9c5c29e465 | ||
|
|
1135291329 | ||
|
|
9b10ad8f14 | ||
|
|
d0869d44d6 | ||
|
|
bcd48b846f | ||
|
|
591a833146 | ||
|
|
d4f1e7d981 | ||
|
|
9801992724 | ||
|
|
bff0e6f398 | ||
|
|
85f3cc35e0 | ||
|
|
15c872d5a5 | ||
|
|
c0f773c014 | ||
|
|
a300fa284b | ||
|
|
932be72b88 | ||
|
|
0c4c410d2c | ||
|
|
0360cd2c6f | ||
|
|
a6d49b24d8 | ||
|
|
8ed77bd780 | ||
|
|
66b1095e25 | ||
|
|
69ff09a7e6 | ||
|
|
318c46ebc9 |
16
.dockerignore
Normal file
16
.dockerignore
Normal file
@@ -0,0 +1,16 @@
|
||||
.github
|
||||
|
||||
.gitignore
|
||||
|
||||
.dockerignore
|
||||
|
||||
Dockerfile
|
||||
Dockerfile.backup
|
||||
|
||||
.output
|
||||
|
||||
docs
|
||||
|
||||
openvm-clippy
|
||||
|
||||
target
|
||||
2655
zkvm-prover/Cargo.lock → Cargo.lock
generated
2655
zkvm-prover/Cargo.lock → Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
93
Cargo.toml
Normal file
93
Cargo.toml
Normal file
@@ -0,0 +1,93 @@
|
||||
[workspace]
|
||||
members = [
|
||||
"common/types-rs",
|
||||
"common/types-rs/base",
|
||||
"common/types-rs/aggregation",
|
||||
"common/types-rs/chunk",
|
||||
"common/types-rs/batch",
|
||||
"common/types-rs/bundle",
|
||||
"common/libzkp/impl",
|
||||
"zkvm-prover/prover",
|
||||
"zkvm-prover/verifier",
|
||||
"zkvm-prover/integration",
|
||||
"zkvm-prover/bin",
|
||||
]
|
||||
exclude = [
|
||||
"prover"
|
||||
]
|
||||
|
||||
resolver = "2"
|
||||
|
||||
[workspace.package]
|
||||
authors = ["Scroll developers"]
|
||||
edition = "2021"
|
||||
homepage = "https://scroll.io"
|
||||
readme = "README.md"
|
||||
repository = "https://github.com/scroll-tech/scroll"
|
||||
version = "4.5.8"
|
||||
|
||||
[workspace.dependencies]
|
||||
scroll-zkvm-prover-euclid = { git = "https://github.com/scroll-tech/zkvm-prover", package = "scroll-zkvm-prover"}
|
||||
|
||||
openvm = { git = "https://github.com/openvm-org/openvm.git", rev = "a0ae88f", default-features = false }
|
||||
openvm-build = { git = "https://github.com/openvm-org/openvm.git", rev = "a0ae88f", default-features = false }
|
||||
openvm-transpiler = { git = "https://github.com/openvm-org/openvm.git", rev = "a0ae88f", default-features = false }
|
||||
openvm-custom-insn = { git = "https://github.com/openvm-org/openvm.git", rev = "a0ae88f", default-features = false }
|
||||
openvm-rv32im-guest = { git = "https://github.com/openvm-org/openvm.git", rev = "a0ae88f", default-features = false }
|
||||
openvm-circuit = { git = "https://github.com/openvm-org/openvm.git", rev = "a0ae88f", default-features = false }
|
||||
openvm-native-circuit = { git = "https://github.com/openvm-org/openvm.git", rev = "a0ae88f", default-features = false }
|
||||
openvm-native-compiler = { git = "https://github.com/openvm-org/openvm.git", rev = "a0ae88f", default-features = false }
|
||||
openvm-native-recursion = { git = "https://github.com/openvm-org/openvm.git", rev = "a0ae88f", default-features = false }
|
||||
openvm-native-transpiler = { git = "https://github.com/openvm-org/openvm.git", rev = "a0ae88f", default-features = false }
|
||||
openvm-continuations = { git = "https://github.com/openvm-org/openvm.git", rev = "a0ae88f", default-features = false }
|
||||
openvm-sdk = { git = "https://github.com/openvm-org/openvm.git", rev = "a0ae88f", default-features = false, features = ["parallel", "bench-metrics", "evm-prove"] }
|
||||
openvm-stark-sdk = { git = "https://github.com/openvm-org/stark-backend.git", tag = "v1.0.1" }
|
||||
|
||||
sbv-core = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "zkvm/euclid-upgrade", features = ["scroll"] }
|
||||
sbv-primitives = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "zkvm/euclid-upgrade", features = ["scroll"] }
|
||||
sbv-kv = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "zkvm/euclid-upgrade" }
|
||||
sbv-trie = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "zkvm/euclid-upgrade" }
|
||||
sbv-utils = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "zkvm/euclid-upgrade" }
|
||||
|
||||
metrics = "0.23.0"
|
||||
metrics-util = "0.17"
|
||||
metrics-tracing-context = "0.16.0"
|
||||
|
||||
alloy = { version = "0.11", default-features = false }
|
||||
alloy-primitives = { version = "0.8", default-features = false }
|
||||
# also use this to trigger "serde" feature for primitives
|
||||
alloy-serde = { version = "0.8", default-features = false }
|
||||
|
||||
rkyv = "0.8"
|
||||
serde = { version = "1", default-features = false, features = ["derive"] }
|
||||
serde_json = { version = "1.0" }
|
||||
serde_with = "3.11.0"
|
||||
itertools = "0.14"
|
||||
tiny-keccak = "2.0"
|
||||
tracing = "0.1"
|
||||
eyre = "0.6"
|
||||
bincode_v1 = { version = "1.3", package = "bincode"}
|
||||
snark-verifier-sdk = { version = "0.2.0", default-features = false, features = [
|
||||
"loader_halo2",
|
||||
"halo2-axiom",
|
||||
"display",
|
||||
] }
|
||||
once_cell = "1.20"
|
||||
base64 = "0.22"
|
||||
|
||||
#TODO: upgrade
|
||||
vm-zstd = { git = "https://github.com/scroll-tech/rust-zstd-decompressor.git", tag = "v0.1.1" }
|
||||
|
||||
scroll-zkvm-circuit-input-types = { path = "common/types-rs"}
|
||||
scroll-zkvm-verifier = { path = "zkvm-prover/verifier"}
|
||||
scroll-zkvm-prover = { path = "zkvm-prover/prover"}
|
||||
|
||||
[patch.crates-io]
|
||||
alloy-primitives = { git = "https://github.com/scroll-tech/alloy-core", branch = "v0.8.18-euclid-upgrade" }
|
||||
ruint = { git = "https://github.com/scroll-tech/uint.git", branch = "v1.12.3" }
|
||||
tiny-keccak = { git = "https://github.com/scroll-tech/tiny-keccak", branch = "scroll-patch-v2.0.2-euclid-upgrade" }
|
||||
|
||||
[profile.maxperf]
|
||||
inherits = "release"
|
||||
lto = "fat"
|
||||
codegen-units = 1
|
||||
6
build/.cargo/config.toml
Normal file
6
build/.cargo/config.toml
Normal file
@@ -0,0 +1,6 @@
|
||||
[patch."https://github.com/scroll-tech/scroll.git"]
|
||||
scroll-zkvm-circuit-input-types-base = { path = "../common/types-rs/base"}
|
||||
scroll-zkvm-circuit-input-types-aggregation = { path = "../common/types-rs/aggregation"}
|
||||
scroll-zkvm-circuit-input-types-chunk = { path = "../common/types-rs/chunk"}
|
||||
scroll-zkvm-circuit-input-types-batch = { path = "../common/types-rs/batch"}
|
||||
scroll-zkvm-circuit-input-types-bundle = { path = "../common/types-rs/bundle"}
|
||||
@@ -7,25 +7,19 @@ edition = "2021"
|
||||
[lib]
|
||||
crate-type = ["cdylib"]
|
||||
|
||||
[patch.crates-io]
|
||||
# patched add rkyv support & MSRV 1.77
|
||||
alloy-primitives = { git = "https://github.com/scroll-tech/alloy-core", branch = "v0.8.21" }
|
||||
ruint = { git = "https://github.com/scroll-tech/uint.git", branch = "v1.12.3" }
|
||||
tiny-keccak = { git = "https://github.com/scroll-tech/tiny-keccak", branch = "scroll-patch-v2.0.2-openvm-v1.0.0-rc.1" }
|
||||
|
||||
[dependencies]
|
||||
euclid_prover = { git = "https://github.com/scroll-tech/zkvm-prover.git", tag = "v0.3.0", package = "scroll-zkvm-prover" }
|
||||
euclid_verifier = { git = "https://github.com/scroll-tech/zkvm-prover.git", tag = "v0.3.0", package = "scroll-zkvm-verifier" }
|
||||
scroll-zkvm-prover.workspace = true
|
||||
scroll-zkvm-verifier.workspace = true
|
||||
|
||||
base64 = "0.13.0"
|
||||
env_logger = "0.9.0"
|
||||
env_logger = "0.11.0"
|
||||
libc = "0.2"
|
||||
log = "0.4"
|
||||
once_cell = "1.19"
|
||||
serde = "1.0"
|
||||
base64.workspace = true
|
||||
once_cell.workspace = true
|
||||
serde.workspace = true
|
||||
serde_derive = "1.0"
|
||||
serde_json = "1.0.66"
|
||||
anyhow = "1.0.86"
|
||||
serde_json.workspace = true
|
||||
anyhow = "1"
|
||||
|
||||
[profile.test]
|
||||
opt-level = 3
|
||||
|
||||
@@ -3,8 +3,8 @@ use super::{ProofVerifier, TaskType, VKDump};
|
||||
use anyhow::Result;
|
||||
|
||||
use crate::utils::panic_catch;
|
||||
use euclid_prover::{BatchProof, BundleProof, ChunkProof};
|
||||
use euclid_verifier::verifier::{BatchVerifier, BundleVerifierEuclidV1, ChunkVerifier};
|
||||
use scroll_zkvm_prover::{BatchProof, BundleProof, ChunkProof};
|
||||
use scroll_zkvm_verifier::verifier::{BatchVerifier, BundleVerifierEuclidV1, ChunkVerifier};
|
||||
use std::{fs::File, path::Path};
|
||||
|
||||
pub struct EuclidVerifier {
|
||||
@@ -53,12 +53,13 @@ impl ProofVerifier for EuclidVerifier {
|
||||
}
|
||||
|
||||
fn dump_vk(&self, file: &Path) {
|
||||
use base64::{prelude::BASE64_STANDARD, Engine};
|
||||
let f = File::create(file).expect("Failed to open file to dump VK");
|
||||
|
||||
let dump = VKDump {
|
||||
chunk_vk: base64::encode(self.chunk_verifier.get_app_vk()),
|
||||
batch_vk: base64::encode(self.batch_verifier.get_app_vk()),
|
||||
bundle_vk: base64::encode(self.bundle_verifier.get_app_vk()),
|
||||
chunk_vk: BASE64_STANDARD.encode(self.chunk_verifier.get_app_vk()),
|
||||
batch_vk: BASE64_STANDARD.encode(self.batch_verifier.get_app_vk()),
|
||||
bundle_vk: BASE64_STANDARD.encode(self.bundle_verifier.get_app_vk()),
|
||||
};
|
||||
serde_json::to_writer(f, &dump).expect("Failed to dump VK");
|
||||
}
|
||||
|
||||
@@ -3,8 +3,8 @@ use super::{ProofVerifier, TaskType, VKDump};
|
||||
use anyhow::Result;
|
||||
|
||||
use crate::utils::panic_catch;
|
||||
use euclid_prover::{BatchProof, BundleProof, ChunkProof};
|
||||
use euclid_verifier::verifier::{BatchVerifier, BundleVerifierEuclidV2, ChunkVerifier};
|
||||
use scroll_zkvm_prover::{BatchProof, BundleProof, ChunkProof};
|
||||
use scroll_zkvm_verifier::verifier::{BatchVerifier, BundleVerifierEuclidV2, ChunkVerifier};
|
||||
use std::{fs::File, path::Path};
|
||||
|
||||
pub struct EuclidV2Verifier {
|
||||
@@ -53,12 +53,13 @@ impl ProofVerifier for EuclidV2Verifier {
|
||||
}
|
||||
|
||||
fn dump_vk(&self, file: &Path) {
|
||||
use base64::{prelude::BASE64_STANDARD, Engine};
|
||||
let f = File::create(file).expect("Failed to open file to dump VK");
|
||||
|
||||
let dump = VKDump {
|
||||
chunk_vk: base64::encode(self.chunk_verifier.get_app_vk()),
|
||||
batch_vk: base64::encode(self.batch_verifier.get_app_vk()),
|
||||
bundle_vk: base64::encode(self.bundle_verifier.get_app_vk()),
|
||||
chunk_vk: BASE64_STANDARD.encode(self.chunk_verifier.get_app_vk()),
|
||||
batch_vk: BASE64_STANDARD.encode(self.batch_verifier.get_app_vk()),
|
||||
bundle_vk: BASE64_STANDARD.encode(self.bundle_verifier.get_app_vk()),
|
||||
};
|
||||
serde_json::to_writer(f, &dump).expect("Failed to dump VK");
|
||||
}
|
||||
|
||||
17
common/types-rs/Cargo.toml
Normal file
17
common/types-rs/Cargo.toml
Normal file
@@ -0,0 +1,17 @@
|
||||
[package]
|
||||
name = "scroll-zkvm-circuit-input-types"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
homepage.workspace = true
|
||||
readme.workspace = true
|
||||
repository.workspace = true
|
||||
version = "0.2.0"
|
||||
|
||||
[dependencies]
|
||||
types-base = { path = "base", package = "scroll-zkvm-circuit-input-types-base"}
|
||||
types-agg = { path = "aggregation", package = "scroll-zkvm-circuit-input-types-aggregation"}
|
||||
types-chunk = { path = "chunk", package = "scroll-zkvm-circuit-input-types-chunk"}
|
||||
types-batch = { path = "batch", package = "scroll-zkvm-circuit-input-types-batch"}
|
||||
types-bundle = { path = "bundle", package = "scroll-zkvm-circuit-input-types-bundle"}
|
||||
|
||||
|
||||
24
common/types-rs/README.md
Normal file
24
common/types-rs/README.md
Normal file
@@ -0,0 +1,24 @@
|
||||
# Input Types for circuits
|
||||
|
||||
A series of separated crates for the input types accepted by circuits as input.
|
||||
|
||||
This crate help decoupling circuits with other crates and keep their dependencies neat and controllable. Avoiding to involve crates which is not compatible with the tootlchain of openvm from indirect dependency.
|
||||
|
||||
### Code structure
|
||||
```
|
||||
types-rs
|
||||
│
|
||||
├── base
|
||||
│
|
||||
├── circuit
|
||||
│
|
||||
├── aggregation
|
||||
│
|
||||
<following are layer-oriented crates>
|
||||
│
|
||||
├── chunk
|
||||
│
|
||||
├── batch
|
||||
│
|
||||
└── bundle
|
||||
```
|
||||
14
common/types-rs/aggregation/Cargo.toml
Normal file
14
common/types-rs/aggregation/Cargo.toml
Normal file
@@ -0,0 +1,14 @@
|
||||
[package]
|
||||
name = "scroll-zkvm-circuit-input-types-aggregation"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
homepage.workspace = true
|
||||
readme.workspace = true
|
||||
repository.workspace = true
|
||||
version = "0.2.0"
|
||||
|
||||
[dependencies]
|
||||
alloy-primitives = { workspace = true, default-features = false, features = ["std", "map-hashbrown", "map-fxhash", "rkyv"] }
|
||||
rkyv.workspace = true
|
||||
serde.workspace = true
|
||||
|
||||
81
common/types-rs/aggregation/src/lib.rs
Normal file
81
common/types-rs/aggregation/src/lib.rs
Normal file
@@ -0,0 +1,81 @@
|
||||
/// Represents an openvm program commitments and public values.
|
||||
#[derive(
|
||||
Clone,
|
||||
Debug,
|
||||
rkyv::Archive,
|
||||
rkyv::Deserialize,
|
||||
rkyv::Serialize,
|
||||
serde::Deserialize,
|
||||
serde::Serialize,
|
||||
)]
|
||||
#[rkyv(derive(Debug))]
|
||||
pub struct AggregationInput {
|
||||
/// Public values.
|
||||
pub public_values: Vec<u32>,
|
||||
/// Represent the commitment needed to verify a root proof
|
||||
pub commitment: ProgramCommitment,
|
||||
}
|
||||
|
||||
/// Represent the commitment needed to verify a [`RootProof`].
|
||||
#[derive(
|
||||
Clone,
|
||||
Debug,
|
||||
Default,
|
||||
rkyv::Archive,
|
||||
rkyv::Deserialize,
|
||||
rkyv::Serialize,
|
||||
serde::Deserialize,
|
||||
serde::Serialize,
|
||||
)]
|
||||
#[rkyv(derive(Debug))]
|
||||
pub struct ProgramCommitment {
|
||||
/// The commitment to the child program exe.
|
||||
pub exe: [u32; 8],
|
||||
/// The commitment to the child program leaf.
|
||||
pub leaf: [u32; 8],
|
||||
}
|
||||
|
||||
impl ProgramCommitment {
|
||||
pub fn deserialize(commitment_bytes: &[u8]) -> Self {
|
||||
// TODO: temporary skip deserialize if no vk is provided
|
||||
if commitment_bytes.is_empty() {
|
||||
return Default::default();
|
||||
}
|
||||
|
||||
let archived_data =
|
||||
rkyv::access::<ArchivedProgramCommitment, rkyv::rancor::BoxedError>(commitment_bytes)
|
||||
.unwrap();
|
||||
|
||||
Self {
|
||||
exe: archived_data.exe.map(|u32_le| u32_le.to_native()),
|
||||
leaf: archived_data.leaf.map(|u32_le| u32_le.to_native()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn serialize(&self) -> Vec<u8> {
|
||||
rkyv::to_bytes::<rkyv::rancor::BoxedError>(self)
|
||||
.map(|v| v.to_vec())
|
||||
.unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&ArchivedProgramCommitment> for ProgramCommitment {
|
||||
fn from(archived: &ArchivedProgramCommitment) -> Self {
|
||||
Self {
|
||||
exe: archived.exe.map(|u32_le| u32_le.to_native()),
|
||||
leaf: archived.leaf.map(|u32_le| u32_le.to_native()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Number of public-input values, i.e. [u32; N].
|
||||
///
|
||||
/// Note that the actual value for each u32 is a byte.
|
||||
pub const NUM_PUBLIC_VALUES: usize = 32;
|
||||
|
||||
/// Witness for an [`AggregationCircuit`][AggCircuit] that also carries proofs that are being
|
||||
/// aggregated.
|
||||
pub trait ProofCarryingWitness {
|
||||
/// Get the root proofs from the witness.
|
||||
fn get_proofs(&self) -> Vec<AggregationInput>;
|
||||
}
|
||||
21
common/types-rs/base/Cargo.toml
Normal file
21
common/types-rs/base/Cargo.toml
Normal file
@@ -0,0 +1,21 @@
|
||||
[package]
|
||||
name = "scroll-zkvm-circuit-input-types-base"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
homepage.workspace = true
|
||||
readme.workspace = true
|
||||
repository.workspace = true
|
||||
version = "0.2.0"
|
||||
|
||||
[dependencies]
|
||||
alloy-primitives = { workspace = true, default-features = false, features = ["std", "map-hashbrown", "map-fxhash", "rkyv"] }
|
||||
alloy-serde.workspace = true
|
||||
rkyv.workspace = true
|
||||
serde.workspace = true
|
||||
itertools.workspace = true
|
||||
tiny-keccak = { workspace = true }
|
||||
sha3 = "0.10.8"
|
||||
sha2 = "0.10.8"
|
||||
|
||||
[features]
|
||||
default = []
|
||||
2
common/types-rs/base/src/lib.rs
Normal file
2
common/types-rs/base/src/lib.rs
Normal file
@@ -0,0 +1,2 @@
|
||||
pub mod public_inputs;
|
||||
pub mod utils;
|
||||
81
common/types-rs/base/src/public_inputs.rs
Normal file
81
common/types-rs/base/src/public_inputs.rs
Normal file
@@ -0,0 +1,81 @@
|
||||
use alloy_primitives::B256;
|
||||
pub mod batch;
|
||||
pub mod bundle;
|
||||
pub mod chunk;
|
||||
|
||||
/// Defines behaviour to be implemented by types representing the public-input values of a circuit.
|
||||
pub trait PublicInputs {
|
||||
/// Keccak-256 digest of the public inputs. The public-input hash are revealed as public values
|
||||
/// via [`openvm::io::reveal`].
|
||||
fn pi_hash(&self) -> B256;
|
||||
|
||||
/// Validation logic between public inputs of two contiguous instances.
|
||||
fn validate(&self, prev_pi: &Self);
|
||||
}
|
||||
|
||||
#[derive(
|
||||
Default,
|
||||
Debug,
|
||||
Copy,
|
||||
Clone,
|
||||
PartialEq,
|
||||
Eq,
|
||||
rkyv::Archive,
|
||||
rkyv::Deserialize,
|
||||
rkyv::Serialize,
|
||||
serde::Deserialize,
|
||||
serde::Serialize,
|
||||
)]
|
||||
#[rkyv(derive(Debug))]
|
||||
pub enum ForkName {
|
||||
#[default]
|
||||
EuclidV1,
|
||||
EuclidV2,
|
||||
}
|
||||
|
||||
impl From<&ArchivedForkName> for ForkName {
|
||||
fn from(archived: &ArchivedForkName) -> Self {
|
||||
match archived {
|
||||
ArchivedForkName::EuclidV1 => ForkName::EuclidV1,
|
||||
ArchivedForkName::EuclidV2 => ForkName::EuclidV2,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Option<&str>> for ForkName {
|
||||
fn from(value: Option<&str>) -> Self {
|
||||
match value {
|
||||
None => Default::default(),
|
||||
Some("euclidv1") => ForkName::EuclidV1,
|
||||
Some("euclidv2") => ForkName::EuclidV2,
|
||||
Some(s) => unreachable!("hardfork not accepted: {s}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&str> for ForkName {
|
||||
fn from(value: &str) -> Self {
|
||||
match value {
|
||||
"euclidv1" => ForkName::EuclidV1,
|
||||
"euclidv2" => ForkName::EuclidV2,
|
||||
s => unreachable!("hardfork not accepted: {s}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// helper trait to extend PublicInputs
|
||||
pub trait MultiVersionPublicInputs {
|
||||
fn pi_hash_by_fork(&self, fork_name: ForkName) -> B256;
|
||||
fn validate(&self, prev_pi: &Self, fork_name: ForkName);
|
||||
}
|
||||
|
||||
impl<T: MultiVersionPublicInputs> PublicInputs for (T, ForkName) {
|
||||
fn pi_hash(&self) -> B256 {
|
||||
self.0.pi_hash_by_fork(self.1)
|
||||
}
|
||||
|
||||
fn validate(&self, prev_pi: &Self) {
|
||||
assert_eq!(self.1, prev_pi.1);
|
||||
self.0.validate(&prev_pi.0, self.1)
|
||||
}
|
||||
}
|
||||
144
common/types-rs/base/src/public_inputs/batch.rs
Normal file
144
common/types-rs/base/src/public_inputs/batch.rs
Normal file
@@ -0,0 +1,144 @@
|
||||
use alloy_primitives::B256;
|
||||
|
||||
use crate::{
|
||||
public_inputs::{ForkName, MultiVersionPublicInputs},
|
||||
utils::keccak256,
|
||||
};
|
||||
|
||||
/// Represents public-input values for a batch.
|
||||
#[derive(
|
||||
Clone,
|
||||
Debug,
|
||||
rkyv::Archive,
|
||||
rkyv::Deserialize,
|
||||
rkyv::Serialize,
|
||||
serde::Deserialize,
|
||||
serde::Serialize,
|
||||
)]
|
||||
#[rkyv(derive(Debug))]
|
||||
pub struct BatchInfo {
|
||||
/// The state root before applying the batch.
|
||||
#[rkyv()]
|
||||
pub parent_state_root: B256,
|
||||
/// The batch hash of the parent batch.
|
||||
#[rkyv()]
|
||||
pub parent_batch_hash: B256,
|
||||
/// The state root after applying txs in the batch.
|
||||
#[rkyv()]
|
||||
pub state_root: B256,
|
||||
/// The batch header hash of the batch.
|
||||
#[rkyv()]
|
||||
pub batch_hash: B256,
|
||||
/// The EIP-155 chain ID of all txs in the batch.
|
||||
#[rkyv()]
|
||||
pub chain_id: u64,
|
||||
/// The withdraw root of the last block in the last chunk in the batch.
|
||||
#[rkyv()]
|
||||
pub withdraw_root: B256,
|
||||
/// The L1 msg queue hash at the end of the previous batch.
|
||||
#[rkyv()]
|
||||
pub prev_msg_queue_hash: B256,
|
||||
/// The L1 msg queue hash at the end of the current batch.
|
||||
#[rkyv()]
|
||||
pub post_msg_queue_hash: B256,
|
||||
}
|
||||
|
||||
impl From<&ArchivedBatchInfo> for BatchInfo {
|
||||
fn from(archived: &ArchivedBatchInfo) -> Self {
|
||||
Self {
|
||||
parent_state_root: archived.parent_state_root.into(),
|
||||
parent_batch_hash: archived.parent_batch_hash.into(),
|
||||
state_root: archived.state_root.into(),
|
||||
batch_hash: archived.batch_hash.into(),
|
||||
chain_id: archived.chain_id.into(),
|
||||
withdraw_root: archived.withdraw_root.into(),
|
||||
prev_msg_queue_hash: archived.prev_msg_queue_hash.into(),
|
||||
post_msg_queue_hash: archived.post_msg_queue_hash.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl BatchInfo {
|
||||
/// Public input hash for a batch (euclidv1 or da-codec@v6) is defined as
|
||||
///
|
||||
/// keccak(
|
||||
/// parent state root ||
|
||||
/// parent batch hash ||
|
||||
/// state root ||
|
||||
/// batch hash ||
|
||||
/// chain id ||
|
||||
/// withdraw root ||
|
||||
/// )
|
||||
fn pi_hash_euclidv1(&self) -> B256 {
|
||||
keccak256(
|
||||
std::iter::empty()
|
||||
.chain(self.parent_state_root.as_slice())
|
||||
.chain(self.parent_batch_hash.as_slice())
|
||||
.chain(self.state_root.as_slice())
|
||||
.chain(self.batch_hash.as_slice())
|
||||
.chain(self.chain_id.to_be_bytes().as_slice())
|
||||
.chain(self.withdraw_root.as_slice())
|
||||
.cloned()
|
||||
.collect::<Vec<u8>>(),
|
||||
)
|
||||
}
|
||||
|
||||
/// Public input hash for a batch (euclidv2 or da-codec@v7) is defined as
|
||||
///
|
||||
/// keccak(
|
||||
/// parent state root ||
|
||||
/// parent batch hash ||
|
||||
/// state root ||
|
||||
/// batch hash ||
|
||||
/// chain id ||
|
||||
/// withdraw root ||
|
||||
/// prev msg queue hash ||
|
||||
/// post msg queue hash
|
||||
/// )
|
||||
fn pi_hash_euclidv2(&self) -> B256 {
|
||||
keccak256(
|
||||
std::iter::empty()
|
||||
.chain(self.parent_state_root.as_slice())
|
||||
.chain(self.parent_batch_hash.as_slice())
|
||||
.chain(self.state_root.as_slice())
|
||||
.chain(self.batch_hash.as_slice())
|
||||
.chain(self.chain_id.to_be_bytes().as_slice())
|
||||
.chain(self.withdraw_root.as_slice())
|
||||
.chain(self.prev_msg_queue_hash.as_slice())
|
||||
.chain(self.post_msg_queue_hash.as_slice())
|
||||
.cloned()
|
||||
.collect::<Vec<u8>>(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
pub type VersionedBatchInfo = (BatchInfo, ForkName);
|
||||
|
||||
impl MultiVersionPublicInputs for BatchInfo {
|
||||
fn pi_hash_by_fork(&self, fork_name: ForkName) -> B256 {
|
||||
match fork_name {
|
||||
ForkName::EuclidV1 => self.pi_hash_euclidv1(),
|
||||
ForkName::EuclidV2 => self.pi_hash_euclidv2(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Validate public inputs between 2 contiguous batches.
|
||||
///
|
||||
/// - chain id MUST match
|
||||
/// - state roots MUST be chained
|
||||
/// - batch hashes MUST be chained
|
||||
/// - L1 msg queue hashes MUST be chained
|
||||
fn validate(&self, prev_pi: &Self, fork_name: ForkName) {
|
||||
assert_eq!(self.chain_id, prev_pi.chain_id);
|
||||
assert_eq!(self.parent_state_root, prev_pi.state_root);
|
||||
assert_eq!(self.parent_batch_hash, prev_pi.batch_hash);
|
||||
assert_eq!(self.prev_msg_queue_hash, prev_pi.post_msg_queue_hash);
|
||||
|
||||
if fork_name == ForkName::EuclidV1 {
|
||||
assert_eq!(self.prev_msg_queue_hash, B256::ZERO);
|
||||
assert_eq!(prev_pi.prev_msg_queue_hash, B256::ZERO);
|
||||
assert_eq!(self.post_msg_queue_hash, B256::ZERO);
|
||||
assert_eq!(prev_pi.post_msg_queue_hash, B256::ZERO);
|
||||
}
|
||||
}
|
||||
}
|
||||
149
common/types-rs/base/src/public_inputs/bundle.rs
Normal file
149
common/types-rs/base/src/public_inputs/bundle.rs
Normal file
@@ -0,0 +1,149 @@
|
||||
use alloy_primitives::B256;
|
||||
|
||||
use crate::{
|
||||
public_inputs::{ForkName, MultiVersionPublicInputs, PublicInputs},
|
||||
utils::keccak256,
|
||||
};
|
||||
|
||||
/// Represents fields required to compute the public-inputs digest of a bundle.
|
||||
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
|
||||
pub struct BundleInfo {
|
||||
/// The EIP-155 chain ID of all txs in the bundle.
|
||||
pub chain_id: u64,
|
||||
/// The L1 msg queue hash at the end of the last batch in the bundle.
|
||||
/// Not a phase 1 field so we make it omitable
|
||||
#[serde(default)]
|
||||
pub msg_queue_hash: B256,
|
||||
/// The number of batches bundled together in the bundle.
|
||||
pub num_batches: u32,
|
||||
/// The last finalized on-chain state root.
|
||||
pub prev_state_root: B256,
|
||||
/// The last finalized on-chain batch hash.
|
||||
pub prev_batch_hash: B256,
|
||||
/// The state root after applying every batch in the bundle.
|
||||
///
|
||||
/// Upon verification of the EVM-verifiable bundle proof, this state root will be finalized
|
||||
/// on-chain.
|
||||
pub post_state_root: B256,
|
||||
/// The batch hash of the last batch in the bundle.
|
||||
///
|
||||
/// Upon verification of the EVM-verifiable bundle proof, this batch hash will be finalized
|
||||
/// on-chain.
|
||||
pub batch_hash: B256,
|
||||
/// The withdrawals root at the last block in the last chunk in the last batch in the bundle.
|
||||
pub withdraw_root: B256,
|
||||
}
|
||||
|
||||
impl BundleInfo {
|
||||
/// Public input hash for a bundle (euclidv1 or da-codec@v6) is defined as
|
||||
///
|
||||
/// keccak(
|
||||
/// chain id ||
|
||||
/// num batches ||
|
||||
/// prev state root ||
|
||||
/// prev batch hash ||
|
||||
/// post state root ||
|
||||
/// batch hash ||
|
||||
/// withdraw root
|
||||
/// )
|
||||
pub fn pi_hash_euclidv1(&self) -> B256 {
|
||||
keccak256(
|
||||
std::iter::empty()
|
||||
.chain(self.chain_id.to_be_bytes().as_slice())
|
||||
.chain(self.num_batches.to_be_bytes().as_slice())
|
||||
.chain(self.prev_state_root.as_slice())
|
||||
.chain(self.prev_batch_hash.as_slice())
|
||||
.chain(self.post_state_root.as_slice())
|
||||
.chain(self.batch_hash.as_slice())
|
||||
.chain(self.withdraw_root.as_slice())
|
||||
.cloned()
|
||||
.collect::<Vec<u8>>(),
|
||||
)
|
||||
}
|
||||
|
||||
/// Public input hash for a bundle (euclidv2 or da-codec@v7) is defined as
|
||||
///
|
||||
/// keccak(
|
||||
/// chain id ||
|
||||
/// msg_queue_hash ||
|
||||
/// num batches ||
|
||||
/// prev state root ||
|
||||
/// prev batch hash ||
|
||||
/// post state root ||
|
||||
/// batch hash ||
|
||||
/// withdraw root
|
||||
/// )
|
||||
pub fn pi_hash_euclidv2(&self) -> B256 {
|
||||
keccak256(
|
||||
std::iter::empty()
|
||||
.chain(self.chain_id.to_be_bytes().as_slice())
|
||||
.chain(self.msg_queue_hash.as_slice())
|
||||
.chain(self.num_batches.to_be_bytes().as_slice())
|
||||
.chain(self.prev_state_root.as_slice())
|
||||
.chain(self.prev_batch_hash.as_slice())
|
||||
.chain(self.post_state_root.as_slice())
|
||||
.chain(self.batch_hash.as_slice())
|
||||
.chain(self.withdraw_root.as_slice())
|
||||
.cloned()
|
||||
.collect::<Vec<u8>>(),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn pi_hash(&self, fork_name: ForkName) -> B256 {
|
||||
match fork_name {
|
||||
ForkName::EuclidV1 => self.pi_hash_euclidv1(),
|
||||
ForkName::EuclidV2 => self.pi_hash_euclidv2(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MultiVersionPublicInputs for BundleInfo {
|
||||
fn pi_hash_by_fork(&self, fork_name: ForkName) -> B256 {
|
||||
match fork_name {
|
||||
ForkName::EuclidV1 => self.pi_hash_euclidv1(),
|
||||
ForkName::EuclidV2 => self.pi_hash_euclidv2(),
|
||||
}
|
||||
}
|
||||
|
||||
fn validate(&self, _prev_pi: &Self, _fork_name: ForkName) {
|
||||
unreachable!("bundle is the last layer and is not aggregated by any other circuit");
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct BundleInfoV1(pub BundleInfo);
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct BundleInfoV2(pub BundleInfo);
|
||||
|
||||
impl From<BundleInfo> for BundleInfoV1 {
|
||||
fn from(value: BundleInfo) -> Self {
|
||||
Self(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<BundleInfo> for BundleInfoV2 {
|
||||
fn from(value: BundleInfo) -> Self {
|
||||
Self(value)
|
||||
}
|
||||
}
|
||||
|
||||
impl PublicInputs for BundleInfoV1 {
|
||||
fn pi_hash(&self) -> B256 {
|
||||
self.0.pi_hash_euclidv1()
|
||||
}
|
||||
|
||||
fn validate(&self, _prev_pi: &Self) {
|
||||
unreachable!("bundle is the last layer and is not aggregated by any other circuit");
|
||||
}
|
||||
}
|
||||
|
||||
impl PublicInputs for BundleInfoV2 {
|
||||
fn pi_hash(&self) -> B256 {
|
||||
self.0.pi_hash_euclidv2()
|
||||
}
|
||||
|
||||
fn validate(&self, _prev_pi: &Self) {
|
||||
unreachable!("bundle is the last layer and is not aggregated by any other circuit");
|
||||
}
|
||||
}
|
||||
248
common/types-rs/base/src/public_inputs/chunk.rs
Normal file
248
common/types-rs/base/src/public_inputs/chunk.rs
Normal file
@@ -0,0 +1,248 @@
|
||||
use alloy_primitives::{B256, U256};
|
||||
|
||||
use crate::{
|
||||
public_inputs::{ForkName, MultiVersionPublicInputs},
|
||||
utils::keccak256,
|
||||
};
|
||||
|
||||
/// Number of bytes used to serialise [`BlockContextV2`].
|
||||
pub const SIZE_BLOCK_CTX: usize = 52;
|
||||
|
||||
/// Represents the version 2 of block context.
|
||||
///
|
||||
/// The difference between v2 and v1 is that the block number field has been removed since v2.
|
||||
#[derive(
|
||||
Debug,
|
||||
Clone,
|
||||
PartialEq,
|
||||
rkyv::Archive,
|
||||
rkyv::Deserialize,
|
||||
rkyv::Serialize,
|
||||
serde::Deserialize,
|
||||
serde::Serialize,
|
||||
)]
|
||||
#[rkyv(derive(Debug))]
|
||||
pub struct BlockContextV2 {
|
||||
/// The timestamp of the block.
|
||||
pub timestamp: u64,
|
||||
/// The base fee of the block.
|
||||
pub base_fee: U256,
|
||||
/// The gas limit of the block.
|
||||
pub gas_limit: u64,
|
||||
/// The number of transactions in the block, including both L1 msg txs as well as L2 txs.
|
||||
pub num_txs: u16,
|
||||
/// The number of L1 msg txs in the block.
|
||||
pub num_l1_msgs: u16,
|
||||
}
|
||||
|
||||
impl From<&ArchivedBlockContextV2> for BlockContextV2 {
|
||||
fn from(archived: &ArchivedBlockContextV2) -> Self {
|
||||
Self {
|
||||
timestamp: archived.timestamp.into(),
|
||||
base_fee: archived.base_fee.into(),
|
||||
gas_limit: archived.gas_limit.into(),
|
||||
num_txs: archived.num_txs.into(),
|
||||
num_l1_msgs: archived.num_l1_msgs.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&[u8]> for BlockContextV2 {
|
||||
fn from(bytes: &[u8]) -> Self {
|
||||
assert_eq!(bytes.len(), SIZE_BLOCK_CTX);
|
||||
|
||||
let timestamp = u64::from_be_bytes(bytes[0..8].try_into().expect("should not fail"));
|
||||
let base_fee = U256::from_be_slice(&bytes[8..40]);
|
||||
let gas_limit = u64::from_be_bytes(bytes[40..48].try_into().expect("should not fail"));
|
||||
let num_txs = u16::from_be_bytes(bytes[48..50].try_into().expect("should not fail"));
|
||||
let num_l1_msgs = u16::from_be_bytes(bytes[50..52].try_into().expect("should not fail"));
|
||||
|
||||
Self {
|
||||
timestamp,
|
||||
base_fee,
|
||||
gas_limit,
|
||||
num_txs,
|
||||
num_l1_msgs,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl BlockContextV2 {
|
||||
/// Serialize the block context in packed form.
|
||||
pub fn to_bytes(&self) -> Vec<u8> {
|
||||
std::iter::empty()
|
||||
.chain(self.timestamp.to_be_bytes())
|
||||
.chain(self.base_fee.to_be_bytes::<32>())
|
||||
.chain(self.gas_limit.to_be_bytes())
|
||||
.chain(self.num_txs.to_be_bytes())
|
||||
.chain(self.num_l1_msgs.to_be_bytes())
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents header-like information for the chunk.
///
/// These fields form the chunk circuit's public inputs; see
/// `pi_hash_euclidv1` / `pi_hash_euclidv2` for how they are committed to.
#[derive(
    Debug,
    Clone,
    rkyv::Archive,
    rkyv::Deserialize,
    rkyv::Serialize,
    serde::Deserialize,
    serde::Serialize,
)]
#[rkyv(derive(Debug))]
pub struct ChunkInfo {
    /// The EIP-155 chain ID for all txs in the chunk.
    #[rkyv()]
    pub chain_id: u64,
    /// The state root before applying the chunk.
    #[rkyv()]
    pub prev_state_root: B256,
    /// The state root after applying the chunk.
    #[rkyv()]
    pub post_state_root: B256,
    /// The withdrawals root after applying the chunk.
    #[rkyv()]
    pub withdraw_root: B256,
    /// Digest of L1 message txs force included in the chunk.
    /// It is a legacy field (used by da-codec@v6) and can be omitted in new
    /// definitions; hence the serde default.
    #[rkyv()]
    #[serde(default)]
    pub data_hash: B256,
    /// Digest of L2 tx data flattened over all L2 txs in the chunk.
    #[rkyv()]
    pub tx_data_digest: B256,
    /// The L1 msg queue hash at the end of the previous chunk.
    #[rkyv()]
    pub prev_msg_queue_hash: B256,
    /// The L1 msg queue hash at the end of the current chunk.
    #[rkyv()]
    pub post_msg_queue_hash: B256,
    /// The length of rlp encoded L2 tx bytes flattened over all L2 txs in the chunk.
    #[rkyv()]
    pub tx_data_length: u64,
    /// The block number of the first block in the chunk.
    #[rkyv()]
    pub initial_block_number: u64,
    /// The block contexts of the blocks in the chunk.
    #[rkyv()]
    pub block_ctxs: Vec<BlockContextV2>,
}
|
||||
|
||||
impl ChunkInfo {
|
||||
/// Public input hash for a given chunk (euclidv1 or da-codec@v6) is defined as
|
||||
///
|
||||
/// keccak(
|
||||
/// chain id ||
|
||||
/// prev state root ||
|
||||
/// post state root ||
|
||||
/// withdraw root ||
|
||||
/// chunk data hash ||
|
||||
/// tx data hash
|
||||
/// )
|
||||
pub fn pi_hash_euclidv1(&self) -> B256 {
|
||||
keccak256(
|
||||
std::iter::empty()
|
||||
.chain(&self.chain_id.to_be_bytes())
|
||||
.chain(self.prev_state_root.as_slice())
|
||||
.chain(self.post_state_root.as_slice())
|
||||
.chain(self.withdraw_root.as_slice())
|
||||
.chain(self.data_hash.as_slice())
|
||||
.chain(self.tx_data_digest.as_slice())
|
||||
.cloned()
|
||||
.collect::<Vec<u8>>(),
|
||||
)
|
||||
}
|
||||
|
||||
/// Public input hash for a given chunk (euclidv2 or da-codec@v7) is defined as
|
||||
///
|
||||
/// keccak(
|
||||
/// chain id ||
|
||||
/// prev state root ||
|
||||
/// post state root ||
|
||||
/// withdraw root ||
|
||||
/// tx data digest ||
|
||||
/// prev msg queue hash ||
|
||||
/// post msg queue hash ||
|
||||
/// initial block number ||
|
||||
/// block_ctx for block_ctx in block_ctxs
|
||||
/// )
|
||||
pub fn pi_hash_euclidv2(&self) -> B256 {
|
||||
keccak256(
|
||||
std::iter::empty()
|
||||
.chain(&self.chain_id.to_be_bytes())
|
||||
.chain(self.prev_state_root.as_slice())
|
||||
.chain(self.post_state_root.as_slice())
|
||||
.chain(self.withdraw_root.as_slice())
|
||||
.chain(self.tx_data_digest.as_slice())
|
||||
.chain(self.prev_msg_queue_hash.as_slice())
|
||||
.chain(self.post_msg_queue_hash.as_slice())
|
||||
.chain(&self.initial_block_number.to_be_bytes())
|
||||
.chain(
|
||||
self.block_ctxs
|
||||
.iter()
|
||||
.flat_map(|block_ctx| block_ctx.to_bytes())
|
||||
.collect::<Vec<u8>>()
|
||||
.as_slice(),
|
||||
)
|
||||
.cloned()
|
||||
.collect::<Vec<u8>>(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert the rkyv-archived form into the owned [`ChunkInfo`].
///
/// Scalar and hash fields convert via `.into()`; the block contexts are
/// rebuilt element-by-element through `BlockContextV2::from`.
impl From<&ArchivedChunkInfo> for ChunkInfo {
    fn from(archived: &ArchivedChunkInfo) -> Self {
        Self {
            chain_id: archived.chain_id.into(),
            prev_state_root: archived.prev_state_root.into(),
            post_state_root: archived.post_state_root.into(),
            withdraw_root: archived.withdraw_root.into(),
            data_hash: archived.data_hash.into(),
            tx_data_digest: archived.tx_data_digest.into(),
            prev_msg_queue_hash: archived.prev_msg_queue_hash.into(),
            post_msg_queue_hash: archived.post_msg_queue_hash.into(),
            tx_data_length: archived.tx_data_length.into(),
            initial_block_number: archived.initial_block_number.into(),
            block_ctxs: archived
                .block_ctxs
                .iter()
                .map(BlockContextV2::from)
                .collect(),
        }
    }
}
|
||||
|
||||
/// A chunk's public inputs paired with the hard-fork they are computed under.
pub type VersionedChunkInfo = (ChunkInfo, ForkName);

impl MultiVersionPublicInputs for ChunkInfo {
    /// Compute the public input hash for the chunk, dispatching on the fork:
    /// euclidv1 maps to the da-codec@v6 hash, euclidv2 to da-codec@v7.
    fn pi_hash_by_fork(&self, fork_name: ForkName) -> B256 {
        match fork_name {
            ForkName::EuclidV1 => {
                // v6 commits to data_hash, so a zero value would indicate an
                // unset/invalid field.
                assert_ne!(self.data_hash, B256::ZERO, "v6 must has valid data hash");
                self.pi_hash_euclidv1()
            }
            ForkName::EuclidV2 => self.pi_hash_euclidv2(),
        }
    }

    /// Validate public inputs between 2 contiguous chunks.
    ///
    /// - chain id MUST match
    /// - state roots MUST be chained
    /// - L1 msg queue hash MUST be chained
    ///
    /// Panics (via assert) on any violation.
    fn validate(&self, prev_pi: &Self, fork_name: ForkName) {
        assert_eq!(self.chain_id, prev_pi.chain_id);
        assert_eq!(self.prev_state_root, prev_pi.post_state_root);
        assert_eq!(self.prev_msg_queue_hash, prev_pi.post_msg_queue_hash);

        // message queue hash is used only after euclidv2 (da-codec@v7), so on
        // euclidv1 every queue hash (both chunks, both ends) must be zero.
        if fork_name == ForkName::EuclidV1 {
            assert_eq!(self.prev_msg_queue_hash, B256::ZERO);
            assert_eq!(prev_pi.prev_msg_queue_hash, B256::ZERO);
            assert_eq!(self.post_msg_queue_hash, B256::ZERO);
            assert_eq!(prev_pi.post_msg_queue_hash, B256::ZERO);
        }
    }
}
|
||||
35
common/types-rs/base/src/utils/hash.rs
Normal file
35
common/types-rs/base/src/utils/hash.rs
Normal file
@@ -0,0 +1,35 @@
|
||||
use alloy_primitives::B256;
|
||||
use tiny_keccak::{Hasher, Keccak};
|
||||
|
||||
/// From the utility of ether-rs
|
||||
///
|
||||
/// Computes the Keccak-256 hash of input bytes.
|
||||
///
|
||||
/// Note that strings are interpreted as UTF-8 bytes,
|
||||
pub fn keccak256<T: AsRef<[u8]>>(bytes: T) -> B256 {
|
||||
let mut output = [0u8; 32];
|
||||
|
||||
let mut hasher = Keccak::v256();
|
||||
hasher.update(bytes.as_ref());
|
||||
hasher.finalize(&mut output);
|
||||
|
||||
B256::from(output)
|
||||
}
|
||||
|
||||
pub fn keccak256_rv32<T: AsRef<[u8]>>(bytes: T) -> B256 {
|
||||
use sha3::{Digest, Keccak256};
|
||||
let mut output = [0u8; 32];
|
||||
let mut hasher = Keccak256::new();
|
||||
hasher.update(bytes.as_ref());
|
||||
output.copy_from_slice(hasher.finalize().as_ref());
|
||||
B256::from(output)
|
||||
}
|
||||
|
||||
pub fn sha256_rv32<T: AsRef<[u8]>>(bytes: T) -> B256 {
|
||||
use sha2::{Digest, Sha256};
|
||||
let mut output = [0u8; 32];
|
||||
let mut hasher = Sha256::new();
|
||||
hasher.update(bytes.as_ref());
|
||||
output.copy_from_slice(hasher.finalize().as_ref());
|
||||
B256::from(output)
|
||||
}
|
||||
2
common/types-rs/base/src/utils/mod.rs
Normal file
2
common/types-rs/base/src/utils/mod.rs
Normal file
@@ -0,0 +1,2 @@
|
||||
// Hashing helpers shared across the circuit-input types.
mod hash;
pub use hash::{keccak256, keccak256_rv32, sha256_rv32};
|
||||
21
common/types-rs/batch/Cargo.toml
Normal file
21
common/types-rs/batch/Cargo.toml
Normal file
@@ -0,0 +1,21 @@
|
||||
# Batch-level circuit input types (headers, payloads, witnesses) for the
# Scroll zkVM prover. Most package metadata is inherited from the workspace.
[package]
name = "scroll-zkvm-circuit-input-types-batch"
authors.workspace = true
edition.workspace = true
homepage.workspace = true
readme.workspace = true
repository.workspace = true
version = "0.2.0"

[dependencies]
alloy-primitives = { workspace = true, default-features = false, features = ["std", "map-hashbrown", "map-fxhash", "rkyv"] }
rkyv.workspace = true
serde.workspace = true
itertools.workspace = true
# zstd decoder usable inside the zkVM guest (blob envelope decompression).
vm-zstd = { workspace = true }

# Sibling crates providing the base public-input and aggregation types.
types-base = { path = "../base", package = "scroll-zkvm-circuit-input-types-base"}
types-agg = { path = "../aggregation", package = "scroll-zkvm-circuit-input-types-aggregation"}

[features]
default = []
|
||||
30
common/types-rs/batch/src/header/mod.rs
Normal file
30
common/types-rs/batch/src/header/mod.rs
Normal file
@@ -0,0 +1,30 @@
|
||||
use alloy_primitives::B256;
|
||||
|
||||
pub mod v6;
|
||||
|
||||
pub mod v7;
|
||||
|
||||
/// Common interface over the versioned batch headers (v6, v7), exposing the
/// fields needed to chain batches and compute batch hashes.
pub trait BatchHeader {
    /// The DA-codec version for the batch header.
    fn version(&self) -> u8;

    /// The incremental index of the batch.
    fn index(&self) -> u64;

    /// The batch header digest of the parent batch.
    fn parent_batch_hash(&self) -> B256;

    /// The batch header digest.
    fn batch_hash(&self) -> B256;
}
|
||||
|
||||
/// Reference header indicate the version of batch header base on which batch hash
/// should be calculated.
#[derive(Clone, Debug, rkyv::Archive, rkyv::Deserialize, rkyv::Serialize)]
#[rkyv(derive(Debug))]
pub enum ReferenceHeader {
    /// Represents DA-codec v6.
    V6(v6::BatchHeaderV6),
    /// Represents DA-codec v7.
    V7(v7::BatchHeaderV7),
}
|
||||
151
common/types-rs/batch/src/header/v6.rs
Normal file
151
common/types-rs/batch/src/header/v6.rs
Normal file
@@ -0,0 +1,151 @@
|
||||
use super::BatchHeader;
|
||||
use alloy_primitives::B256;
|
||||
use types_base::utils::keccak256;
|
||||
|
||||
/// Represents the header summarising the batch of chunks as per DA-codec v6.
#[derive(
    Clone,
    Copy,
    Debug,
    Default,
    rkyv::Archive,
    rkyv::Deserialize,
    rkyv::Serialize,
    serde::Deserialize,
    serde::Serialize,
)]
#[rkyv(derive(Debug))]
pub struct BatchHeaderV6 {
    /// The DA-codec version for the batch.
    #[rkyv()]
    pub version: u8,
    /// The index of the batch
    #[rkyv()]
    pub batch_index: u64,
    /// Number of L1 messages popped in the batch
    #[rkyv()]
    pub l1_message_popped: u64,
    /// Number of total L1 messages popped after the batch
    #[rkyv()]
    pub total_l1_message_popped: u64,
    /// The parent batch hash
    #[rkyv()]
    pub parent_batch_hash: B256,
    /// The timestamp of the last block in this batch
    #[rkyv()]
    pub last_block_timestamp: u64,
    /// The data hash of the batch
    #[rkyv()]
    pub data_hash: B256,
    /// The versioned hash of the blob with this batch's data
    #[rkyv()]
    pub blob_versioned_hash: B256,
    /// The blob data proof: z (32), y (32)
    #[rkyv()]
    pub blob_data_proof: [B256; 2],
}
|
||||
|
||||
impl BatchHeader for BatchHeaderV6 {
|
||||
fn version(&self) -> u8 {
|
||||
self.version
|
||||
}
|
||||
|
||||
fn index(&self) -> u64 {
|
||||
self.batch_index
|
||||
}
|
||||
|
||||
fn parent_batch_hash(&self) -> B256 {
|
||||
self.parent_batch_hash
|
||||
}
|
||||
|
||||
/// Batch hash as per DA-codec v6:
|
||||
///
|
||||
/// keccak(
|
||||
/// version ||
|
||||
/// batch index ||
|
||||
/// l1 message popped ||
|
||||
/// total l1 message popped ||
|
||||
/// batch data hash ||
|
||||
/// versioned hash ||
|
||||
/// parent batch hash ||
|
||||
/// last block timestamp ||
|
||||
/// z ||
|
||||
/// y
|
||||
/// )
|
||||
fn batch_hash(&self) -> B256 {
|
||||
keccak256(
|
||||
std::iter::empty()
|
||||
.chain(vec![self.version].as_slice())
|
||||
.chain(self.batch_index.to_be_bytes().as_slice())
|
||||
.chain(self.l1_message_popped.to_be_bytes().as_slice())
|
||||
.chain(self.total_l1_message_popped.to_be_bytes().as_slice())
|
||||
.chain(self.data_hash.as_slice())
|
||||
.chain(self.blob_versioned_hash.as_slice())
|
||||
.chain(self.parent_batch_hash.as_slice())
|
||||
.chain(self.last_block_timestamp.to_be_bytes().as_slice())
|
||||
.chain(self.blob_data_proof[0].as_slice())
|
||||
.chain(self.blob_data_proof[1].as_slice())
|
||||
.cloned()
|
||||
.collect::<Vec<u8>>(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl BatchHeader for ArchivedBatchHeaderV6 {
|
||||
fn version(&self) -> u8 {
|
||||
self.version
|
||||
}
|
||||
|
||||
fn index(&self) -> u64 {
|
||||
self.batch_index.into()
|
||||
}
|
||||
|
||||
fn parent_batch_hash(&self) -> B256 {
|
||||
self.parent_batch_hash.into()
|
||||
}
|
||||
|
||||
fn batch_hash(&self) -> B256 {
|
||||
let batch_index: u64 = self.batch_index.into();
|
||||
let l1_message_popped: u64 = self.l1_message_popped.into();
|
||||
let total_l1_message_popped: u64 = self.total_l1_message_popped.into();
|
||||
let data_hash: B256 = self.data_hash.into();
|
||||
let blob_versioned_hash: B256 = self.blob_versioned_hash.into();
|
||||
let parent_batch_hash: B256 = self.parent_batch_hash.into();
|
||||
let last_block_timestamp: u64 = self.last_block_timestamp.into();
|
||||
let blob_data_proof: [B256; 2] = self.blob_data_proof.map(|h| h.into());
|
||||
keccak256(
|
||||
std::iter::empty()
|
||||
.chain(vec![self.version].as_slice())
|
||||
.chain(batch_index.to_be_bytes().as_slice())
|
||||
.chain(l1_message_popped.to_be_bytes().as_slice())
|
||||
.chain(total_l1_message_popped.to_be_bytes().as_slice())
|
||||
.chain(data_hash.as_slice())
|
||||
.chain(blob_versioned_hash.as_slice())
|
||||
.chain(parent_batch_hash.as_slice())
|
||||
.chain(last_block_timestamp.to_be_bytes().as_slice())
|
||||
.chain(blob_data_proof[0].as_slice())
|
||||
.chain(blob_data_proof[1].as_slice())
|
||||
.cloned()
|
||||
.collect::<Vec<u8>>(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert the rkyv-archived form into the owned [`BatchHeaderV6`].
impl From<&ArchivedBatchHeaderV6> for BatchHeaderV6 {
    fn from(archived: &ArchivedBatchHeaderV6) -> Self {
        Self {
            version: archived.version,
            batch_index: archived.batch_index.into(),
            l1_message_popped: archived.l1_message_popped.into(),
            total_l1_message_popped: archived.total_l1_message_popped.into(),
            parent_batch_hash: archived.parent_batch_hash.into(),
            last_block_timestamp: archived.last_block_timestamp.into(),
            data_hash: archived.data_hash.into(),
            blob_versioned_hash: archived.blob_versioned_hash.into(),
            // The proof pair (z, y) is converted element-wise.
            blob_data_proof: [
                archived.blob_data_proof[0].into(),
                archived.blob_data_proof[1].into(),
            ],
        }
    }
}
|
||||
106
common/types-rs/batch/src/header/v7.rs
Normal file
106
common/types-rs/batch/src/header/v7.rs
Normal file
@@ -0,0 +1,106 @@
|
||||
use alloy_primitives::B256;
|
||||
|
||||
use super::BatchHeader;
|
||||
use types_base::utils::keccak256;
|
||||
|
||||
/// Represents the header summarising the batch of chunks as per DA-codec v7.
///
/// v7 is deliberately minimal compared to v6: L1-message counters, data hash
/// and blob proof fields are no longer part of the header.
#[derive(
    Clone,
    Copy,
    Debug,
    Default,
    rkyv::Archive,
    rkyv::Deserialize,
    rkyv::Serialize,
    serde::Deserialize,
    serde::Serialize,
)]
#[rkyv(derive(Debug))]
pub struct BatchHeaderV7 {
    /// The DA-codec version for the batch.
    #[rkyv()]
    pub version: u8,
    /// The index of the batch
    #[rkyv()]
    pub batch_index: u64,
    /// The parent batch hash
    #[rkyv()]
    pub parent_batch_hash: B256,
    /// The versioned hash of the blob with this batch's data
    #[rkyv()]
    pub blob_versioned_hash: B256,
}
|
||||
|
||||
impl BatchHeader for BatchHeaderV7 {
|
||||
fn version(&self) -> u8 {
|
||||
self.version
|
||||
}
|
||||
|
||||
fn index(&self) -> u64 {
|
||||
self.batch_index
|
||||
}
|
||||
|
||||
fn parent_batch_hash(&self) -> B256 {
|
||||
self.parent_batch_hash
|
||||
}
|
||||
|
||||
/// Batch hash as per DA-codec v7:
|
||||
///
|
||||
/// keccak(
|
||||
/// version ||
|
||||
/// batch index ||
|
||||
/// versioned hash ||
|
||||
/// parent batch hash
|
||||
/// )
|
||||
fn batch_hash(&self) -> B256 {
|
||||
keccak256(
|
||||
std::iter::empty()
|
||||
.chain(vec![self.version].as_slice())
|
||||
.chain(self.batch_index.to_be_bytes().as_slice())
|
||||
.chain(self.blob_versioned_hash.as_slice())
|
||||
.chain(self.parent_batch_hash.as_slice())
|
||||
.cloned()
|
||||
.collect::<Vec<u8>>(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl BatchHeader for ArchivedBatchHeaderV7 {
|
||||
fn version(&self) -> u8 {
|
||||
self.version
|
||||
}
|
||||
|
||||
fn index(&self) -> u64 {
|
||||
self.batch_index.into()
|
||||
}
|
||||
|
||||
fn parent_batch_hash(&self) -> B256 {
|
||||
self.parent_batch_hash.into()
|
||||
}
|
||||
|
||||
fn batch_hash(&self) -> B256 {
|
||||
let batch_index: u64 = self.batch_index.into();
|
||||
let blob_versioned_hash: B256 = self.blob_versioned_hash.into();
|
||||
let parent_batch_hash: B256 = self.parent_batch_hash.into();
|
||||
keccak256(
|
||||
std::iter::empty()
|
||||
.chain(vec![self.version].as_slice())
|
||||
.chain(batch_index.to_be_bytes().as_slice())
|
||||
.chain(blob_versioned_hash.as_slice())
|
||||
.chain(parent_batch_hash.as_slice())
|
||||
.cloned()
|
||||
.collect::<Vec<u8>>(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert the rkyv-archived form into the owned [`BatchHeaderV7`].
impl From<&ArchivedBatchHeaderV7> for BatchHeaderV7 {
    fn from(archived: &ArchivedBatchHeaderV7) -> Self {
        Self {
            version: archived.version,
            batch_index: archived.batch_index.into(),
            parent_batch_hash: archived.parent_batch_hash.into(),
            blob_versioned_hash: archived.blob_versioned_hash.into(),
        }
    }
}
|
||||
17
common/types-rs/batch/src/lib.rs
Normal file
17
common/types-rs/batch/src/lib.rs
Normal file
@@ -0,0 +1,17 @@
|
||||
// Versioned batch headers (v6/v7) and the common BatchHeader trait.
mod header;
pub use header::{
    ArchivedReferenceHeader, BatchHeader, ReferenceHeader,
    v6::{ArchivedBatchHeaderV6, BatchHeaderV6},
    v7::{ArchivedBatchHeaderV7, BatchHeaderV7},
};

// Blob envelope/payload parsing for each DA-codec version.
mod payload;
pub use payload::{
    v6::{EnvelopeV6, PayloadV6},
    v7::{EnvelopeV7, PayloadV7},
};

// Blob geometry constants shared by all payload versions.
pub use payload::{BLOB_WIDTH, N_BLOB_BYTES, N_DATA_BYTES_PER_COEFFICIENT};

// Witness types consumed by the batch circuit.
mod witness;
pub use witness::{ArchivedBatchWitness, BatchWitness, Bytes48, PointEvalWitness};
|
||||
15
common/types-rs/batch/src/payload/mod.rs
Normal file
15
common/types-rs/batch/src/payload/mod.rs
Normal file
@@ -0,0 +1,15 @@
|
||||
pub mod v6;
pub mod v7;

/// The number data bytes we pack each BLS12-381 scalar into. The most-significant byte is 0.
pub const N_DATA_BYTES_PER_COEFFICIENT: usize = 31;

/// The number of BLS12-381 scalar fields that effectively represent an EIP-4844 blob.
pub const BLOB_WIDTH: usize = 4096;

/// The effective (reduced) number of bytes we can use within a blob.
///
/// EIP-4844 requires that each 32-bytes chunk of bytes represent a BLS12-381 scalar field element
/// in its canonical form. As a result, we set the most-significant byte in each such chunk to 0.
/// This allows us to use only up to 31 bytes in each such chunk, hence the reduced capacity.
pub const N_BLOB_BYTES: usize = BLOB_WIDTH * N_DATA_BYTES_PER_COEFFICIENT;
|
||||
212
common/types-rs/batch/src/payload/v6.rs
Normal file
212
common/types-rs/batch/src/payload/v6.rs
Normal file
@@ -0,0 +1,212 @@
|
||||
use alloy_primitives::B256;
|
||||
use itertools::Itertools;
|
||||
|
||||
use crate::BatchHeaderV6;
|
||||
use types_base::{public_inputs::chunk::ChunkInfo, utils::keccak256};
|
||||
|
||||
/// The default max chunks for v6 payload
|
||||
pub const N_MAX_CHUNKS: usize = 45;
|
||||
|
||||
/// The number of bytes to encode number of chunks in a batch.
|
||||
const N_BYTES_NUM_CHUNKS: usize = 2;
|
||||
|
||||
/// The number of rows to encode chunk size (u32).
|
||||
const N_BYTES_CHUNK_SIZE: usize = 4;
|
||||
|
||||
impl From<&[u8]> for EnvelopeV6 {
|
||||
fn from(blob_bytes: &[u8]) -> Self {
|
||||
let is_encoded = blob_bytes[0] & 1 == 1;
|
||||
Self {
|
||||
is_encoded,
|
||||
envelope_bytes: if blob_bytes[0] & 1 == 1 {
|
||||
vm_zstd::process(&blob_bytes[1..]).unwrap().decoded_data
|
||||
} else {
|
||||
Vec::from(&blob_bytes[1..])
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents the data contained within an EIP-4844 blob as per DA-codec v6.
#[derive(Debug, Clone)]
pub struct EnvelopeV6 {
    /// The original envelope bytes supplied.
    ///
    /// Caching just for re-use later in challenge digest computation.
    pub envelope_bytes: Vec<u8>,
    /// If the enveloped bytes is encoded (compressed) in envelop
    pub is_encoded: bool,
}
|
||||
|
||||
impl EnvelopeV6 {
    /// Parse payload bytes and obtain challenge digest.
    ///
    /// Derives the [`Payload`] from the envelope bytes and binds it to the
    /// blob's KZG `versioned_hash`.
    pub fn challenge_digest(&self, versioned_hash: B256) -> B256 {
        let payload = Payload::from(self);
        payload.get_challenge_digest(versioned_hash)
    }
}
|
||||
|
||||
/// Build a [`Payload`] by parsing the (already decompressed) envelope bytes.
impl From<&EnvelopeV6> for Payload {
    fn from(envelope: &EnvelopeV6) -> Self {
        Self::from_payload(&envelope.envelope_bytes)
    }
}
|
||||
|
||||
/// Payload that describes a batch.
#[derive(Clone, Debug, Default)]
pub struct Payload {
    /// Metadata that encodes the sizes of every chunk in the batch.
    ///
    /// This is the keccak digest of the metadata section, not the raw bytes.
    pub metadata_digest: B256,
    /// The Keccak digests of transaction bytes for every chunk in the batch.
    ///
    /// The `chunk_data_digest` is a part of the chunk-circuit's public input and hence used to
    /// verify that the transaction bytes included in the chunk-circuit indeed match the
    /// transaction bytes made available in the batch.
    pub chunk_data_digests: Vec<B256>,
}

/// v6 alias for the batch payload.
pub type PayloadV6 = Payload;
|
||||
|
||||
impl Payload {
    /// For raw payload data (read from decompressed enveloped data), which is raw batch bytes
    /// with metadata, this function segments the byte stream into chunk segments.
    ///
    /// This method is used INSIDE OF zkvm since we can not generate (compress) batch data within
    /// the vm program
    ///
    /// The structure of batch bytes is as follows:
    ///
    /// | Byte Index                                                   | Size                          | Hint                                |
    /// |--------------------------------------------------------------|-------------------------------|-------------------------------------|
    /// | 0                                                            | N_BYTES_NUM_CHUNKS            | Number of chunks                    |
    /// | N_BYTES_NUM_CHUNKS                                           | N_BYTES_CHUNK_SIZE            | Size of chunks[0]                   |
    /// | N_BYTES_NUM_CHUNKS + N_BYTES_CHUNK_SIZE                      | N_BYTES_CHUNK_SIZE            | Size of chunks[1]                   |
    /// | N_BYTES_NUM_CHUNKS + (i * N_BYTES_CHUNK_SIZE)                | N_BYTES_CHUNK_SIZE            | Size of chunks[i]                   |
    /// | N_BYTES_NUM_CHUNKS + ((N_MAX_CHUNKS-1) * N_BYTES_CHUNK_SIZE) | N_BYTES_CHUNK_SIZE            | Size of chunks[N_MAX_CHUNKS-1]      |
    /// | N_BYTES_NUM_CHUNKS + (N_MAX_CHUNKS * N_BYTES_CHUNK_SIZE)     | Size of chunks[0]             | L2 tx bytes of chunks[0]            |
    /// | "" + Size_of_chunks[0]                                       | Size of chunks[1]             | L2 tx bytes of chunks[1]            |
    /// | "" + Size_of_chunks[i-1]                                     | Size of chunks[i]             | L2 tx bytes of chunks[i]            |
    /// | "" + Size_of_chunks[Num_chunks-1]                            | Size of chunks[Num_chunks-1]  | L2 tx bytes of chunks[Num_chunks-1] |
    ///
    /// Panics (via assert) if unused chunk sizes are non-zero or if the
    /// declared chunk sizes do not consume the byte stream exactly.
    pub fn from_payload(batch_bytes_with_metadata: &[u8]) -> Self {
        // Get the metadata bytes and metadata digest.
        let n_bytes_metadata = Self::n_bytes_metadata();
        let metadata_bytes = &batch_bytes_with_metadata[..n_bytes_metadata];
        let metadata_digest = keccak256(metadata_bytes);

        // The remaining bytes represent the chunk data (L2 tx bytes) segmented as chunks.
        let batch_bytes = &batch_bytes_with_metadata[n_bytes_metadata..];

        // The number of chunks in the batch, decoded as a big-endian integer.
        let valid_chunks = metadata_bytes[..N_BYTES_NUM_CHUNKS]
            .iter()
            .fold(0usize, |acc, &d| acc * 256usize + d as usize);

        // The size of each chunk in the batch: N_MAX_CHUNKS big-endian u32s.
        let chunk_sizes = metadata_bytes[N_BYTES_NUM_CHUNKS..]
            .iter()
            .chunks(N_BYTES_CHUNK_SIZE)
            .into_iter()
            .map(|bytes| bytes.fold(0usize, |acc, &d| acc * 256usize + d as usize))
            .collect::<Vec<usize>>();

        // For every unused chunk, the chunk size should be set to 0.
        for &unused_chunk_size in chunk_sizes.iter().skip(valid_chunks) {
            assert_eq!(unused_chunk_size, 0, "unused chunk has size 0");
        }

        // Segment the batch bytes based on the chunk sizes: fold over the
        // declared sizes, slicing each chunk's bytes off the front.
        let (segmented_batch_data, remaining_bytes) =
            chunk_sizes.into_iter().take(valid_chunks).fold(
                (Vec::new(), batch_bytes),
                |(mut datas, rest_bytes), size| {
                    datas.push(Vec::from(&rest_bytes[..size]));
                    (datas, &rest_bytes[size..])
                },
            );

        // After segmenting the batch data into chunks, no bytes should be left.
        assert!(
            remaining_bytes.is_empty(),
            "chunk segmentation len must add up to the correct value"
        );

        // Compute the chunk data digests based on the segmented data.
        let chunk_data_digests = segmented_batch_data
            .iter()
            .map(|bytes| B256::from(keccak256(bytes)))
            .collect();

        Self {
            metadata_digest,
            chunk_data_digests,
        }
    }

    /// Compute the challenge digest from blob bytes. which is the combination of
    /// digest for bytes in each chunk
    pub fn get_challenge_digest(&self, versioned_hash: B256) -> B256 {
        keccak256(self.get_challenge_digest_preimage(versioned_hash))
    }

    /// The number of bytes in payload Data to represent the "payload metadata" section: a u16 to
    /// represent the size of chunks and max_chunks * u32 to represent chunk sizes
    const fn n_bytes_metadata() -> usize {
        N_BYTES_NUM_CHUNKS + (N_MAX_CHUNKS * N_BYTES_CHUNK_SIZE)
    }

    /// Validate the payload contents.
    ///
    /// Checks that each chunk's tx-data digest matches the corresponding
    /// chunk public input, and that the batch data hash in the header equals
    /// keccak over the concatenated per-chunk data hashes.
    ///
    /// Returns references to the first and last chunk infos for downstream
    /// batch-info construction. Panics (via assert) on any mismatch.
    pub fn validate<'a>(
        &self,
        header: &BatchHeaderV6,
        chunk_infos: &'a [ChunkInfo],
    ) -> (&'a ChunkInfo, &'a ChunkInfo) {
        // There should be at least 1 chunk info.
        assert!(!chunk_infos.is_empty(), "at least 1 chunk info");

        // Get the first and last chunks' info, to construct the batch info.
        let (first_chunk, last_chunk) = (
            chunk_infos.first().expect("at least one chunk in batch"),
            chunk_infos.last().expect("at least one chunk in batch"),
        );

        // zip_eq also enforces that the digest count equals the chunk count.
        for (&chunk_data_digest, chunk_info) in self.chunk_data_digests.iter().zip_eq(chunk_infos) {
            assert_eq!(chunk_data_digest, chunk_info.tx_data_digest)
        }

        // Validate the l1-msg identifier data_hash for the batch.
        let batch_data_hash_preimage = chunk_infos
            .iter()
            .flat_map(|chunk_info| chunk_info.data_hash.0)
            .collect::<Vec<_>>();
        let batch_data_hash = keccak256(batch_data_hash_preimage);
        assert_eq!(batch_data_hash, header.data_hash);

        (first_chunk, last_chunk)
    }

    /// Get the preimage for the challenge digest.
    pub(crate) fn get_challenge_digest_preimage(&self, versioned_hash: B256) -> Vec<u8> {
        // preimage =
        //     metadata_digest ||
        //     chunk[0].chunk_data_digest || ...
        //     chunk[N_SNARKS-1].chunk_data_digest ||
        //     blob_versioned_hash
        //
        // where chunk_data_digest for a padded chunk is set equal to the "last valid chunk"'s
        // chunk_data_digest.
        let mut preimage = self.metadata_digest.to_vec();
        let last_digest = self
            .chunk_data_digests
            .last()
            .expect("at least we have one");
        // Pad up to N_MAX_CHUNKS by repeating the last valid chunk's digest.
        for chunk_digest in self
            .chunk_data_digests
            .iter()
            .chain(std::iter::repeat(last_digest))
            .take(N_MAX_CHUNKS)
        {
            preimage.extend_from_slice(chunk_digest.as_slice());
        }
        preimage.extend_from_slice(versioned_hash.as_slice());
        preimage
    }
}
|
||||
256
common/types-rs/batch/src/payload/v7.rs
Normal file
256
common/types-rs/batch/src/payload/v7.rs
Normal file
@@ -0,0 +1,256 @@
|
||||
use alloy_primitives::B256;
|
||||
|
||||
use crate::BatchHeaderV7;
|
||||
use types_base::{
|
||||
public_inputs::chunk::{BlockContextV2, ChunkInfo, SIZE_BLOCK_CTX},
|
||||
utils::keccak256,
|
||||
};
|
||||
|
||||
use super::N_BLOB_BYTES;
|
||||
|
||||
/// da-codec@v7
|
||||
const DA_CODEC_VERSION: u8 = 7;
|
||||
|
||||
/// Represents the data contained within an EIP-4844 blob that is published on-chain.
///
/// The bytes following some metadata represent zstd-encoded [`PayloadV7`] if the envelope is
/// indicated as `is_encoded == true`.
#[derive(Debug, Clone)]
pub struct EnvelopeV7 {
    /// The original envelope bytes supplied.
    ///
    /// Caching just for re-use later in challenge digest computation.
    pub envelope_bytes: Vec<u8>,
    /// The version from da-codec, i.e. v7 in this case.
    pub version: u8,
    /// A single byte boolean flag (value is 0 or 1) to denote whether or not the following blob
    /// bytes represent a batch in its zstd-encoded or raw form.
    pub is_encoded: u8,
    /// The unpadded bytes that possibly encode the [`PayloadV7`].
    pub unpadded_bytes: Vec<u8>,
}
|
||||
|
||||
/// Parse and sanity-check raw blob bytes into a v7 envelope.
///
/// Expected layout: version (1) || unpadded size as u24 BE (3) ||
/// is_encoded flag (1) || unpadded payload bytes || zero padding.
/// Panics (via assert) on any layout violation.
impl From<&[u8]> for EnvelopeV7 {
    fn from(blob_bytes: &[u8]) -> Self {
        // The number of bytes is as expected.
        assert_eq!(blob_bytes.len(), N_BLOB_BYTES);

        // The version of the blob encoding was as expected, i.e. da-codec@v7.
        let version = blob_bytes[0];
        assert_eq!(version, DA_CODEC_VERSION);

        // Calculate the unpadded size of the encoded payload (3-byte
        // big-endian integer).
        //
        // It should be at most the maximum number of bytes allowed, i.e. the
        // blob capacity minus the 5 metadata bytes.
        let unpadded_size = (blob_bytes[1] as usize) * 256 * 256
            + (blob_bytes[2] as usize) * 256
            + blob_bytes[3] as usize;
        assert!(unpadded_size <= N_BLOB_BYTES - 5);

        // Whether the envelope represents encoded payload or raw payload.
        //
        // Is a boolean.
        let is_encoded = blob_bytes[4];
        assert!(is_encoded <= 1);

        // The padded bytes are all 0s.
        for &padded_byte in blob_bytes.iter().skip(5 + unpadded_size) {
            assert_eq!(padded_byte, 0);
        }

        Self {
            version,
            is_encoded,
            unpadded_bytes: blob_bytes[5..(5 + unpadded_size)].to_vec(),
            envelope_bytes: blob_bytes.to_vec(),
        }
    }
}
|
||||
|
||||
impl EnvelopeV7 {
|
||||
/// The verification of the EIP-4844 blob is done via point-evaluation precompile
|
||||
/// implemented in-circuit.
|
||||
///
|
||||
/// We require a random challenge point for this, and using Fiat-Shamir we compute it with
|
||||
/// every byte in the blob along with the blob's versioned hash, i.e. an identifier for its KZG
|
||||
/// commitment.
|
||||
///
|
||||
/// keccak256(
|
||||
/// keccak256(envelope) ||
|
||||
/// versioned hash
|
||||
/// )
|
||||
pub fn challenge_digest(&self, versioned_hash: B256) -> B256 {
|
||||
keccak256(
|
||||
std::iter::empty()
|
||||
.chain(keccak256(&self.envelope_bytes))
|
||||
.chain(versioned_hash.0)
|
||||
.collect::<Vec<u8>>(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents the batch data, eventually encoded into an [`EnvelopeV7`].
///
/// | Field                  | # Bytes | Type           | Index         |
/// |------------------------|---------|----------------|---------------|
/// | prevL1MessageQueueHash | 32      | bytes32        | 0             |
/// | postL1MessageQueueHash | 32      | bytes32        | 32            |
/// | initialL2BlockNumber   | 8       | u64            | 64            |
/// | numBlocks              | 2       | u16            | 72            |
/// | blockCtxs[0]           | 52      | BlockContextV2 | 74            |
/// | ... blockCtxs[i] ...   | 52      | BlockContextV2 | 74 + 52*i     |
/// | blockCtxs[n-1]         | 52      | BlockContextV2 | 74 + 52*(n-1) |
/// | l2TxsData              | dynamic | bytes          | 74 + 52*n     |
#[derive(Debug, Clone)]
pub struct PayloadV7 {
    /// The version from da-codec, i.e. v7 in this case.
    ///
    /// Note: This is not really a part of payload, simply copied from the envelope for
    /// convenience.
    pub version: u8,
    /// Message queue hash at the end of the previous batch.
    pub prev_msg_queue_hash: B256,
    /// Message queue hash at the end of the current batch.
    pub post_msg_queue_hash: B256,
    /// The block number of the first block in the batch.
    pub initial_block_number: u64,
    /// The number of blocks in the batch.
    pub num_blocks: u16,
    /// The block contexts of each block in the batch.
    pub block_contexts: Vec<BlockContextV2>,
    /// The L2 tx data flattened over every tx in every block in the batch.
    pub tx_data: Vec<u8>,
}
|
||||
|
||||
const INDEX_PREV_MSG_QUEUE_HASH: usize = 0;
|
||||
const INDEX_POST_MSG_QUEUE_HASH: usize = INDEX_PREV_MSG_QUEUE_HASH + 32;
|
||||
const INDEX_L2_BLOCK_NUM: usize = INDEX_POST_MSG_QUEUE_HASH + 32;
|
||||
const INDEX_NUM_BLOCKS: usize = INDEX_L2_BLOCK_NUM + 8;
|
||||
const INDEX_BLOCK_CTX: usize = INDEX_NUM_BLOCKS + 2;
|
||||
|
||||
impl From<&EnvelopeV7> for PayloadV7 {
|
||||
fn from(envelope: &EnvelopeV7) -> Self {
|
||||
// Conditionally decode depending on the flag set in the envelope.
|
||||
let payload_bytes = if envelope.is_encoded & 1 == 1 {
|
||||
vm_zstd::process(&envelope.unpadded_bytes)
|
||||
.expect("zstd decode should succeed")
|
||||
.decoded_data
|
||||
} else {
|
||||
envelope.unpadded_bytes.to_vec()
|
||||
};
|
||||
|
||||
// Sanity check on the payload size.
|
||||
assert!(payload_bytes.len() >= INDEX_BLOCK_CTX);
|
||||
let num_blocks = u16::from_be_bytes(
|
||||
payload_bytes[INDEX_NUM_BLOCKS..INDEX_BLOCK_CTX]
|
||||
.try_into()
|
||||
.expect("should not fail"),
|
||||
);
|
||||
assert!(payload_bytes.len() >= INDEX_BLOCK_CTX + ((num_blocks as usize) * SIZE_BLOCK_CTX));
|
||||
|
||||
// Deserialize the other fields.
|
||||
let prev_msg_queue_hash =
|
||||
B256::from_slice(&payload_bytes[INDEX_PREV_MSG_QUEUE_HASH..INDEX_POST_MSG_QUEUE_HASH]);
|
||||
let post_msg_queue_hash =
|
||||
B256::from_slice(&payload_bytes[INDEX_POST_MSG_QUEUE_HASH..INDEX_L2_BLOCK_NUM]);
|
||||
let initial_block_number = u64::from_be_bytes(
|
||||
payload_bytes[INDEX_L2_BLOCK_NUM..INDEX_NUM_BLOCKS]
|
||||
.try_into()
|
||||
.expect("should not fail"),
|
||||
);
|
||||
|
||||
// Deserialize block contexts depending on the number of blocks in the batch.
|
||||
let mut block_contexts = Vec::with_capacity(num_blocks as usize);
|
||||
for i in 0..num_blocks {
|
||||
let start = (i as usize) * SIZE_BLOCK_CTX + INDEX_BLOCK_CTX;
|
||||
block_contexts.push(BlockContextV2::from(
|
||||
&payload_bytes[start..(start + SIZE_BLOCK_CTX)],
|
||||
));
|
||||
}
|
||||
|
||||
// All remaining bytes are flattened L2 txs.
|
||||
let tx_data =
|
||||
payload_bytes[INDEX_BLOCK_CTX + ((num_blocks as usize) * SIZE_BLOCK_CTX)..].to_vec();
|
||||
|
||||
Self {
|
||||
version: envelope.version,
|
||||
prev_msg_queue_hash,
|
||||
post_msg_queue_hash,
|
||||
initial_block_number,
|
||||
num_blocks,
|
||||
block_contexts,
|
||||
tx_data,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PayloadV7 {
|
||||
/// Validate the payload contents.
|
||||
pub fn validate<'a>(
|
||||
&self,
|
||||
header: &BatchHeaderV7,
|
||||
chunk_infos: &'a [ChunkInfo],
|
||||
) -> (&'a ChunkInfo, &'a ChunkInfo) {
|
||||
// Get the first and last chunks' info, to construct the batch info.
|
||||
let (first_chunk, last_chunk) = (
|
||||
chunk_infos.first().expect("at least one chunk in batch"),
|
||||
chunk_infos.last().expect("at least one chunk in batch"),
|
||||
);
|
||||
|
||||
// version from payload is what's present in the on-chain batch header
|
||||
assert_eq!(self.version, header.version);
|
||||
|
||||
// number of blocks in the batch
|
||||
assert_eq!(
|
||||
usize::from(self.num_blocks),
|
||||
chunk_infos
|
||||
.iter()
|
||||
.flat_map(|chunk_info| &chunk_info.block_ctxs)
|
||||
.count()
|
||||
);
|
||||
assert_eq!(usize::from(self.num_blocks), self.block_contexts.len());
|
||||
|
||||
// the block number of the first block in the batch
|
||||
assert_eq!(self.initial_block_number, first_chunk.initial_block_number);
|
||||
|
||||
// prev message queue hash
|
||||
assert_eq!(self.prev_msg_queue_hash, first_chunk.prev_msg_queue_hash);
|
||||
|
||||
// post message queue hash
|
||||
assert_eq!(self.post_msg_queue_hash, last_chunk.post_msg_queue_hash);
|
||||
|
||||
// for each chunk, the tx_data_digest, i.e. keccak digest of the rlp-encoded L2 tx bytes
|
||||
// flattened over every tx in the chunk, should be re-computed and matched against the
|
||||
// public input of the chunk-circuit.
|
||||
//
|
||||
// first check that the total size of rlp-encoded tx data flattened over all txs in the
|
||||
// chunk is in fact the size available from the payload.
|
||||
assert_eq!(
|
||||
u64::try_from(self.tx_data.len()).expect("len(tx-data) is u64"),
|
||||
chunk_infos
|
||||
.iter()
|
||||
.map(|chunk_info| chunk_info.tx_data_length)
|
||||
.sum::<u64>(),
|
||||
);
|
||||
let mut index: usize = 0;
|
||||
for chunk_info in chunk_infos.iter() {
|
||||
let chunk_size = chunk_info.tx_data_length as usize;
|
||||
let chunk_tx_data_digest =
|
||||
keccak256(&self.tx_data.as_slice()[index..(index + chunk_size)]);
|
||||
assert_eq!(chunk_tx_data_digest, chunk_info.tx_data_digest);
|
||||
index += chunk_size;
|
||||
}
|
||||
|
||||
// for each block in the batch, check that the block context matches what's provided as
|
||||
// witness.
|
||||
for (block_ctx, witness_block_ctx) in self.block_contexts.iter().zip(
|
||||
chunk_infos
|
||||
.iter()
|
||||
.flat_map(|chunk_info| &chunk_info.block_ctxs),
|
||||
) {
|
||||
assert_eq!(block_ctx, witness_block_ctx);
|
||||
}
|
||||
|
||||
(first_chunk, last_chunk)
|
||||
}
|
||||
}
|
||||
57
common/types-rs/batch/src/witness.rs
Normal file
57
common/types-rs/batch/src/witness.rs
Normal file
@@ -0,0 +1,57 @@
|
||||
use crate::header::ReferenceHeader;
|
||||
use types_agg::{AggregationInput, ProgramCommitment, ProofCarryingWitness};
|
||||
use types_base::public_inputs::{ForkName, chunk::ChunkInfo};
|
||||
|
||||
/// Simply rewrap byte48 to avoid unnecessary dep
|
||||
pub type Bytes48 = [u8; 48];
|
||||
|
||||
/// Witness required by applying point evaluation
|
||||
#[derive(Clone, Debug, rkyv::Archive, rkyv::Deserialize, rkyv::Serialize)]
|
||||
#[rkyv(derive(Debug))]
|
||||
pub struct PointEvalWitness {
|
||||
/// kzg commitment
|
||||
#[rkyv()]
|
||||
pub kzg_commitment: Bytes48,
|
||||
/// kzg proof
|
||||
#[rkyv()]
|
||||
pub kzg_proof: Bytes48,
|
||||
}
|
||||
|
||||
/// Witness to the batch circuit.
|
||||
#[derive(Clone, Debug, rkyv::Archive, rkyv::Deserialize, rkyv::Serialize)]
|
||||
#[rkyv(derive(Debug))]
|
||||
pub struct BatchWitness {
|
||||
/// Flattened root proofs from all chunks in the batch.
|
||||
#[rkyv()]
|
||||
pub chunk_proofs: Vec<AggregationInput>,
|
||||
/// Chunk infos.
|
||||
#[rkyv()]
|
||||
pub chunk_infos: Vec<ChunkInfo>,
|
||||
/// Blob bytes.
|
||||
#[rkyv()]
|
||||
pub blob_bytes: Vec<u8>,
|
||||
/// Witness for point evaluation
|
||||
pub point_eval_witness: PointEvalWitness,
|
||||
/// Header for reference.
|
||||
#[rkyv()]
|
||||
pub reference_header: ReferenceHeader,
|
||||
/// The code version specify the chain spec
|
||||
#[rkyv()]
|
||||
pub fork_name: ForkName,
|
||||
}
|
||||
|
||||
impl ProofCarryingWitness for ArchivedBatchWitness {
|
||||
fn get_proofs(&self) -> Vec<AggregationInput> {
|
||||
self.chunk_proofs
|
||||
.iter()
|
||||
.map(|archived| AggregationInput {
|
||||
public_values: archived
|
||||
.public_values
|
||||
.iter()
|
||||
.map(|u32_le| u32_le.to_native())
|
||||
.collect(),
|
||||
commitment: ProgramCommitment::from(&archived.commitment),
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
21
common/types-rs/bundle/Cargo.toml
Normal file
21
common/types-rs/bundle/Cargo.toml
Normal file
@@ -0,0 +1,21 @@
|
||||
[package]
|
||||
name = "scroll-zkvm-circuit-input-types-bundle"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
homepage.workspace = true
|
||||
readme.workspace = true
|
||||
repository.workspace = true
|
||||
version = "0.2.0"
|
||||
|
||||
[dependencies]
|
||||
alloy-primitives = { workspace = true, default-features = false, features = ["std", "map-hashbrown", "map-fxhash", "rkyv"] }
|
||||
rkyv.workspace = true
|
||||
serde.workspace = true
|
||||
itertools.workspace = true
|
||||
vm-zstd = { workspace = true }
|
||||
|
||||
types-base = { path = "../base", package = "scroll-zkvm-circuit-input-types-base"}
|
||||
types-agg = { path = "../aggregation", package = "scroll-zkvm-circuit-input-types-aggregation"}
|
||||
|
||||
[features]
|
||||
default = []
|
||||
2
common/types-rs/bundle/src/lib.rs
Normal file
2
common/types-rs/bundle/src/lib.rs
Normal file
@@ -0,0 +1,2 @@
|
||||
mod witness;
|
||||
pub use witness::{ArchivedBundleWitness, BundleWitness};
|
||||
30
common/types-rs/bundle/src/witness.rs
Normal file
30
common/types-rs/bundle/src/witness.rs
Normal file
@@ -0,0 +1,30 @@
|
||||
use types_agg::{AggregationInput, ProgramCommitment, ProofCarryingWitness};
|
||||
use types_base::public_inputs::batch::BatchInfo;
|
||||
|
||||
/// The witness for the bundle circuit.
|
||||
#[derive(Clone, Debug, rkyv::Archive, rkyv::Deserialize, rkyv::Serialize)]
|
||||
#[rkyv(derive(Debug))]
|
||||
pub struct BundleWitness {
|
||||
/// Batch proofs being aggregated in the bundle.
|
||||
#[rkyv()]
|
||||
pub batch_proofs: Vec<AggregationInput>,
|
||||
/// Public-input values for the corresponding batch proofs.
|
||||
#[rkyv()]
|
||||
pub batch_infos: Vec<BatchInfo>,
|
||||
}
|
||||
|
||||
impl ProofCarryingWitness for ArchivedBundleWitness {
|
||||
fn get_proofs(&self) -> Vec<AggregationInput> {
|
||||
self.batch_proofs
|
||||
.iter()
|
||||
.map(|archived| AggregationInput {
|
||||
public_values: archived
|
||||
.public_values
|
||||
.iter()
|
||||
.map(|u32_le| u32_le.to_native())
|
||||
.collect(),
|
||||
commitment: ProgramCommitment::from(&archived.commitment),
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
28
common/types-rs/chunk/Cargo.toml
Normal file
28
common/types-rs/chunk/Cargo.toml
Normal file
@@ -0,0 +1,28 @@
|
||||
[package]
|
||||
name = "scroll-zkvm-circuit-input-types-chunk"
|
||||
authors.workspace = true
|
||||
edition.workspace = true
|
||||
homepage.workspace = true
|
||||
readme.workspace = true
|
||||
repository.workspace = true
|
||||
version = "0.2.0"
|
||||
|
||||
[dependencies]
|
||||
alloy-primitives = { workspace = true, default-features = false, features = ["std", "map-hashbrown", "map-fxhash", "rkyv"] }
|
||||
rkyv.workspace = true
|
||||
sbv-trie = { workspace = true }
|
||||
sbv-core = { workspace = true }
|
||||
sbv-primitives = { workspace = true }
|
||||
sbv-kv = { workspace = true }
|
||||
serde.workspace = true
|
||||
itertools.workspace = true
|
||||
|
||||
openvm = { workspace = true, features = ["std"] }
|
||||
openvm-rv32im-guest = { workspace = true }
|
||||
openvm-custom-insn = { workspace = true }
|
||||
|
||||
types-base = { path = "../base", package = "scroll-zkvm-circuit-input-types-base"}
|
||||
|
||||
[features]
|
||||
default = []
|
||||
openvm = ["sbv-trie/openvm", "sbv-core/openvm", "sbv-primitives/openvm"]
|
||||
167
common/types-rs/chunk/src/execute.rs
Normal file
167
common/types-rs/chunk/src/execute.rs
Normal file
@@ -0,0 +1,167 @@
|
||||
use sbv_core::{EvmDatabase, EvmExecutor};
|
||||
use sbv_primitives::{
|
||||
BlockWitness,
|
||||
chainspec::{
|
||||
BaseFeeParams, BaseFeeParamsKind, Chain, MAINNET,
|
||||
reth_chainspec::ChainSpec,
|
||||
scroll::{ScrollChainConfig, ScrollChainSpec},
|
||||
},
|
||||
ext::{BlockWitnessChunkExt, TxBytesHashExt},
|
||||
hardforks::SCROLL_DEV_HARDFORKS,
|
||||
types::{
|
||||
consensus::BlockHeader,
|
||||
reth::{Block, BlockWitnessRethExt, RecoveredBlock},
|
||||
scroll::ChunkInfoBuilder,
|
||||
},
|
||||
};
|
||||
|
||||
use crate::{ArchivedChunkWitness, make_providers, manually_drop_on_zkvm};
|
||||
use types_base::public_inputs::{
|
||||
ForkName,
|
||||
chunk::{BlockContextV2, ChunkInfo},
|
||||
};
|
||||
|
||||
fn block_ctxv2_from_block(value: &RecoveredBlock<Block>) -> BlockContextV2 {
|
||||
use alloy_primitives::U256;
|
||||
BlockContextV2 {
|
||||
timestamp: value.timestamp,
|
||||
gas_limit: value.gas_limit,
|
||||
base_fee: U256::from(value.base_fee_per_gas().expect("base_fee_expected")),
|
||||
num_txs: u16::try_from(value.body().transactions.len()).expect("num txs u16"),
|
||||
num_l1_msgs: u16::try_from(
|
||||
value
|
||||
.body()
|
||||
.transactions
|
||||
.iter()
|
||||
.filter(|tx| tx.is_l1_message())
|
||||
.count(),
|
||||
)
|
||||
.expect("num l1 msgs u16"),
|
||||
}
|
||||
}
|
||||
|
||||
type Witness = ArchivedChunkWitness;
|
||||
|
||||
pub fn execute(witness: &Witness) -> Result<ChunkInfo, String> {
|
||||
if witness.blocks.is_empty() {
|
||||
return Err("At least one witness must be provided in chunk mode".into());
|
||||
}
|
||||
if !witness.blocks.has_same_chain_id() {
|
||||
return Err("All witnesses must have the same chain id in chunk mode".into());
|
||||
}
|
||||
if !witness.blocks.has_seq_block_number() {
|
||||
return Err("All witnesses must have sequential block numbers in chunk mode".into());
|
||||
}
|
||||
// Get the blocks to build the basic chunk-info.
|
||||
let blocks = manually_drop_on_zkvm!(
|
||||
witness
|
||||
.blocks
|
||||
.iter()
|
||||
.map(|w| w.build_reth_block())
|
||||
.collect::<Result<Vec<RecoveredBlock<Block>>, _>>()
|
||||
.map_err(|e| e.to_string())?
|
||||
);
|
||||
let pre_state_root = witness.blocks[0].pre_state_root;
|
||||
|
||||
let fork_name = ForkName::from(&witness.fork_name);
|
||||
let chain = Chain::from_id(witness.blocks[0].chain_id());
|
||||
|
||||
// SCROLL_DEV_HARDFORKS will enable all forks
|
||||
let mut hardforks = (*SCROLL_DEV_HARDFORKS).clone();
|
||||
if fork_name == ForkName::EuclidV1 {
|
||||
// disable EuclidV2 fork for legacy chunk
|
||||
use sbv_primitives::{chainspec::ForkCondition, hardforks::ScrollHardfork};
|
||||
hardforks.insert(ScrollHardfork::EuclidV2, ForkCondition::Never);
|
||||
}
|
||||
|
||||
let inner = ChainSpec {
|
||||
chain,
|
||||
genesis_hash: Default::default(),
|
||||
genesis: Default::default(),
|
||||
genesis_header: Default::default(),
|
||||
paris_block_and_final_difficulty: Default::default(),
|
||||
hardforks,
|
||||
deposit_contract: Default::default(),
|
||||
base_fee_params: BaseFeeParamsKind::Constant(BaseFeeParams::ethereum()),
|
||||
prune_delete_limit: 20000,
|
||||
blob_params: Default::default(),
|
||||
};
|
||||
let config = ScrollChainConfig::mainnet();
|
||||
let chain_spec: ScrollChainSpec = ScrollChainSpec { inner, config };
|
||||
|
||||
let (code_db, nodes_provider, block_hashes) = make_providers(&witness.blocks);
|
||||
let nodes_provider = manually_drop_on_zkvm!(nodes_provider);
|
||||
|
||||
let prev_state_root = witness.blocks[0].pre_state_root();
|
||||
let mut db = manually_drop_on_zkvm!(
|
||||
EvmDatabase::new_from_root(code_db, prev_state_root, &nodes_provider, block_hashes)
|
||||
.map_err(|e| format!("failed to create EvmDatabase: {}", e))?
|
||||
);
|
||||
for block in blocks.iter() {
|
||||
let output = manually_drop_on_zkvm!(
|
||||
EvmExecutor::new(std::sync::Arc::new(chain_spec.clone()), &db, block)
|
||||
.execute()
|
||||
.map_err(|e| format!("failed to execute block: {}", e))?
|
||||
);
|
||||
db.update(&nodes_provider, output.state.state.iter())
|
||||
.map_err(|e| format!("failed to update db: {}", e))?;
|
||||
}
|
||||
|
||||
let post_state_root = db.commit_changes();
|
||||
|
||||
let withdraw_root = db
|
||||
.withdraw_root()
|
||||
.map_err(|e| format!("failed to get withdraw root: {}", e))?;
|
||||
|
||||
let mut rlp_buffer = manually_drop_on_zkvm!(Vec::with_capacity(2048));
|
||||
let (tx_data_length, tx_data_digest) = blocks
|
||||
.iter()
|
||||
.flat_map(|b| b.body().transactions.iter())
|
||||
.tx_bytes_hash_in(rlp_buffer.as_mut());
|
||||
let _ = tx_data_length;
|
||||
|
||||
let sbv_chunk_info = {
|
||||
#[allow(unused_mut)]
|
||||
let mut builder = ChunkInfoBuilder::new(&chain_spec, pre_state_root.into(), &blocks);
|
||||
if fork_name == ForkName::EuclidV2 {
|
||||
builder.set_prev_msg_queue_hash(witness.prev_msg_queue_hash.into());
|
||||
}
|
||||
builder.build(withdraw_root)
|
||||
};
|
||||
if post_state_root != sbv_chunk_info.post_state_root() {
|
||||
return Err(format!(
|
||||
"state root mismatch: expected={}, found={}",
|
||||
sbv_chunk_info.post_state_root(),
|
||||
post_state_root
|
||||
));
|
||||
}
|
||||
|
||||
let chunk_info = ChunkInfo {
|
||||
chain_id: sbv_chunk_info.chain_id(),
|
||||
prev_state_root: sbv_chunk_info.prev_state_root(),
|
||||
post_state_root: sbv_chunk_info.post_state_root(),
|
||||
data_hash: sbv_chunk_info
|
||||
.clone()
|
||||
.into_legacy()
|
||||
.map(|x| x.data_hash)
|
||||
.unwrap_or_default(),
|
||||
withdraw_root,
|
||||
tx_data_digest,
|
||||
tx_data_length: u64::try_from(tx_data_length).expect("tx_data_length: u64"),
|
||||
initial_block_number: blocks[0].header().number,
|
||||
prev_msg_queue_hash: witness.prev_msg_queue_hash.into(),
|
||||
post_msg_queue_hash: sbv_chunk_info
|
||||
.into_euclid_v2()
|
||||
.map(|x| x.post_msg_queue_hash)
|
||||
.unwrap_or_default(),
|
||||
block_ctxs: blocks.iter().map(block_ctxv2_from_block).collect(),
|
||||
};
|
||||
|
||||
openvm::io::println(format!("withdraw_root = {:?}", withdraw_root));
|
||||
openvm::io::println(format!("tx_bytes_hash = {:?}", tx_data_digest));
|
||||
|
||||
// We should never touch that lazy lock... Or else we introduce 40M useless cycles.
|
||||
assert!(std::sync::LazyLock::get(&MAINNET).is_none());
|
||||
|
||||
Ok(chunk_info)
|
||||
}
|
||||
11
common/types-rs/chunk/src/lib.rs
Normal file
11
common/types-rs/chunk/src/lib.rs
Normal file
@@ -0,0 +1,11 @@
|
||||
#![feature(lazy_get)]
|
||||
|
||||
mod utils;
|
||||
|
||||
mod witness;
|
||||
|
||||
pub use utils::make_providers;
|
||||
pub use witness::{ArchivedChunkWitness, ChunkWitness};
|
||||
|
||||
mod execute;
|
||||
pub use execute::execute;
|
||||
27
common/types-rs/chunk/src/public_inputs.rs
Normal file
27
common/types-rs/chunk/src/public_inputs.rs
Normal file
@@ -0,0 +1,27 @@
|
||||
use alloy_primitives::{B256, U256};
|
||||
use sbv_primitives::types::{
|
||||
consensus::BlockHeader,
|
||||
reth::{Block, RecoveredBlock},
|
||||
};
|
||||
|
||||
use types_base::public_inputs::chunk::BlockContextV2;
|
||||
|
||||
impl From<&RecoveredBlock<Block>> for BlockContextV2 {
|
||||
fn from(value: &RecoveredBlock<Block>) -> Self {
|
||||
Self {
|
||||
timestamp: value.timestamp,
|
||||
gas_limit: value.gas_limit,
|
||||
base_fee: U256::from(value.base_fee_per_gas().expect("base_fee_expected")),
|
||||
num_txs: u16::try_from(value.body().transactions.len()).expect("num txs u16"),
|
||||
num_l1_msgs: u16::try_from(
|
||||
value
|
||||
.body()
|
||||
.transactions
|
||||
.iter()
|
||||
.filter(|tx| tx.is_l1_message())
|
||||
.count(),
|
||||
)
|
||||
.expect("num l1 msgs u16"),
|
||||
}
|
||||
}
|
||||
}
|
||||
48
common/types-rs/chunk/src/utils.rs
Normal file
48
common/types-rs/chunk/src/utils.rs
Normal file
@@ -0,0 +1,48 @@
|
||||
use sbv_kv::nohash::NoHashMap;
|
||||
use sbv_primitives::{B256, BlockWitness, Bytes, ext::BlockWitnessExt};
|
||||
use sbv_trie::{BlockWitnessTrieExt, TrieNode};
|
||||
|
||||
type CodeDb = NoHashMap<B256, Bytes>;
|
||||
|
||||
type NodesProvider = NoHashMap<B256, TrieNode>;
|
||||
|
||||
type BlockHashProvider = sbv_kv::null::NullProvider;
|
||||
|
||||
pub fn make_providers<W: BlockWitness>(
|
||||
witnesses: &[W],
|
||||
) -> (CodeDb, NodesProvider, BlockHashProvider) {
|
||||
let code_db = {
|
||||
// build code db
|
||||
let num_codes = witnesses.iter().map(|w| w.codes_iter().len()).sum();
|
||||
let mut code_db =
|
||||
NoHashMap::<B256, Bytes>::with_capacity_and_hasher(num_codes, Default::default());
|
||||
witnesses.import_codes(&mut code_db);
|
||||
code_db
|
||||
};
|
||||
let nodes_provider = {
|
||||
let num_states = witnesses.iter().map(|w| w.states_iter().len()).sum();
|
||||
let mut nodes_provider =
|
||||
NoHashMap::<B256, TrieNode>::with_capacity_and_hasher(num_states, Default::default());
|
||||
witnesses.import_nodes(&mut nodes_provider).unwrap();
|
||||
nodes_provider
|
||||
};
|
||||
let block_hashes = sbv_kv::null::NullProvider;
|
||||
|
||||
(code_db, nodes_provider, block_hashes)
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
#[cfg(not(any(target_arch = "x86", target_arch = "x86_64", target_arch = "aarch64")))]
|
||||
macro_rules! manually_drop_on_zkvm {
|
||||
($e:expr) => {
|
||||
std::mem::ManuallyDrop::new($e)
|
||||
};
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
#[cfg(any(target_arch = "x86", target_arch = "x86_64", target_arch = "aarch64"))]
|
||||
macro_rules! manually_drop_on_zkvm {
|
||||
($e:expr) => {
|
||||
$e
|
||||
};
|
||||
}
|
||||
71
common/types-rs/chunk/src/witness.rs
Normal file
71
common/types-rs/chunk/src/witness.rs
Normal file
@@ -0,0 +1,71 @@
|
||||
use alloy_primitives::B256;
|
||||
use sbv_primitives::types::BlockWitness;
|
||||
use std::collections::HashSet;
|
||||
|
||||
use types_base::public_inputs::ForkName;
|
||||
|
||||
/// The witness type accepted by the chunk-circuit.
|
||||
#[derive(
|
||||
Clone,
|
||||
Debug,
|
||||
serde::Deserialize,
|
||||
serde::Serialize,
|
||||
rkyv::Archive,
|
||||
rkyv::Deserialize,
|
||||
rkyv::Serialize,
|
||||
)]
|
||||
#[rkyv(derive(Debug))]
|
||||
pub struct ChunkWitness {
|
||||
/// The block witness for each block in the chunk.
|
||||
pub blocks: Vec<BlockWitness>,
|
||||
/// The on-chain rolling L1 message queue hash before enqueueing any L1 msg tx from the chunk.
|
||||
pub prev_msg_queue_hash: B256,
|
||||
/// The code version specify the chain spec
|
||||
pub fork_name: ForkName,
|
||||
}
|
||||
|
||||
impl ChunkWitness {
|
||||
pub fn new(blocks: &[BlockWitness], prev_msg_queue_hash: B256, fork_name: ForkName) -> Self {
|
||||
let num_codes = blocks.iter().map(|w| w.codes.len()).sum();
|
||||
let num_states = blocks.iter().map(|w| w.states.len()).sum();
|
||||
let mut codes = HashSet::with_capacity(num_codes);
|
||||
let mut states = HashSet::with_capacity(num_states);
|
||||
|
||||
let blocks = blocks
|
||||
.iter()
|
||||
.map(|block| BlockWitness {
|
||||
chain_id: block.chain_id,
|
||||
header: block.header.clone(),
|
||||
pre_state_root: block.pre_state_root,
|
||||
transaction: block.transaction.clone(),
|
||||
withdrawals: block.withdrawals.clone(),
|
||||
states: block
|
||||
.states
|
||||
.iter()
|
||||
.filter(|s| states.insert(*s))
|
||||
.cloned()
|
||||
.collect(),
|
||||
codes: block
|
||||
.codes
|
||||
.iter()
|
||||
.filter(|c| codes.insert(*c))
|
||||
.cloned()
|
||||
.collect(),
|
||||
})
|
||||
.collect();
|
||||
|
||||
Self {
|
||||
blocks,
|
||||
prev_msg_queue_hash,
|
||||
fork_name,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new_v1(blocks: &[BlockWitness]) -> Self {
|
||||
Self::new(blocks, Default::default(), ForkName::EuclidV1)
|
||||
}
|
||||
|
||||
pub fn new_v2(blocks: &[BlockWitness], prev_msg_queue_hash: B256) -> Self {
|
||||
Self::new(blocks, prev_msg_queue_hash, ForkName::EuclidV2)
|
||||
}
|
||||
}
|
||||
21
common/types-rs/src/lib.rs
Normal file
21
common/types-rs/src/lib.rs
Normal file
@@ -0,0 +1,21 @@
|
||||
// re-export for a compatible interface with old circuit/types for prover
|
||||
|
||||
pub mod bundle {
|
||||
pub use types_base::public_inputs::bundle::{BundleInfo, BundleInfoV1, BundleInfoV2};
|
||||
pub use types_bundle::*;
|
||||
}
|
||||
|
||||
pub mod batch {
|
||||
pub use types_base::public_inputs::batch::{ArchivedBatchInfo, BatchInfo, VersionedBatchInfo};
|
||||
pub use types_batch::*;
|
||||
}
|
||||
|
||||
pub mod chunk {
|
||||
pub use types_base::public_inputs::chunk::{
|
||||
ArchivedChunkInfo, BlockContextV2, ChunkInfo, SIZE_BLOCK_CTX, VersionedChunkInfo,
|
||||
};
|
||||
pub use types_chunk::*;
|
||||
}
|
||||
|
||||
pub use types_agg;
|
||||
pub use types_base::{public_inputs, utils};
|
||||
1
common/types/message/batch-proof-sample.json
Normal file
1
common/types/message/batch-proof-sample.json
Normal file
File diff suppressed because one or more lines are too long
1
common/types/message/batch-task-sample.json
Normal file
1
common/types/message/batch-task-sample.json
Normal file
File diff suppressed because one or more lines are too long
1
common/types/message/batch-task-test-out.json
Normal file
1
common/types/message/batch-task-test-out.json
Normal file
File diff suppressed because one or more lines are too long
1
common/types/message/bundle-proof-sample.json
Normal file
1
common/types/message/bundle-proof-sample.json
Normal file
@@ -0,0 +1 @@
|
||||
{"metadata":{"bundle_info":{"chain_id":333333,"msg_queue_hash":"0x0101010101010101010101010101010101010101010101010101010101010101","num_batches":2,"prev_state_root":"0x5302a56cbbec7d14d48d592b805d4ec3c7011439dfaa90d44deee02a9326d203","prev_batch_hash":"0xabacadaeaf000000000000000000000000000000000000000000000000000000","post_state_root":"0xaf6696afb2e11052490051f0f9f6444be6e9f5bb82beb3c3dae846cfa59ed6e0","batch_hash":"0xf0ee5d6b9cd739eb1ff816a58486af8b08d42a8c50d6e5998e7a3947c7aae2a9","withdraw_root":"0x0000000000000000000000000000000000000000000000000000000000000000"},"bundle_pi_hash":"0x2028510c403837c6ed77660fd92814ba61d7b746e7268cc8dfc14d163d45e6bd"},"proof":{"proof":"CfpNiL6UpegsK3VcoAj9ey5daMbZDFiF1XpCKvrOeN0MPPLNCDrllJL/gN0E3qmq20kGLYpBQ8aZ3sgUrxpSyA+9GKK8NhZoIM75adOnV8AYCLXpmxfS81MxIai/+ghxDIUvJQJVgWKJPsMQp4lO/Qltc4eCNWeoR2jHua/VzSASQXDDQ5ozD6i448TVkjKiyEcHwFFMMuOebFUzDc85hA4AJGM1T9bPl5VVQkEpijdNF+1lzUfi27U0XRQbYz8aE8hiCLxR8Z2bHg65dvfa+TsaDI8oAlz33Q1yIadZBtceKsH53P5u6vwWp0dQvw8DGNv8G5zvsayHPNCvy4xz8hRT3E4G0Ome8voqqOxrc/A8u2fE6LoXKswvU6Uquv+LHwGMbTugRvQ0BBXlLQ29Hvj18rDzS6ll0OnEcRiaaEkGOZy7Kq1PGiF7ZxMZsJYCbhyPgg4TKpesYDUJygEN0iGNX90dmyzGLTTgJATMYBGD2U+XP/T+UOMbxFTl3TFNHWlCPhEAu5LBwZ0pD3XV1xNW1iUqwTSfg7Qz1SOUYkot10Q8EAKeXk3hluHK+nSQhOMfWC4tnvfQdMqepfymwwArzA/9GMA/Two7yuzgCz7vHb+56YKPZiDrh4cqSvpVI92hCF8GWHaTqWDR0fikx2Y7GLX8YBM3Rx8reQE+LYYGEJHJzD4cIc0MKiuet605ZPSAaKpb8JM2EgrCAfw+QAhBiwXQ3HOQkrt17tzqNJH7IeHF761v43D9w+IeqvetKEgYXEH3fHmN00dLV2Uws8C4956qze+SG81ScnZzbrIeiO9lnmUXSFzrL40K+3NqCZcFnfLhVidyEJepzJi50yOK5BUJdMFdNtvHtprICqLKyb7aRg39qoZ7RqyJTg5nAjQQBGelvRu/AN6zdyxja73Jo5gEovdIiMybi/IhfMwKGWgiRaOGxyHx9KZ/ZA/w7r3rce6vuDsUhk5hsgVj4wUW3BqoZ8iRIH8X6AjK1xli+S/HfgAkfmUVwNNBOcgYEcrqEbswsfYKOcoFn71DISLK0jmB44LTNyGxoWBMpIAOf/gGhQSNk0ojd4n4UXxShsqmJ57Kudw/mGimMm+Crhr5asxeiFH0eJNBgUEXDuveqE1d20UTRJ1UJ/hZGomsDLebTojSTtsMLWTtx/4Mqg+g3Odte1WKN6CgxF4kGRcW2tE3D1jiBys5FTHMAhmka3mUBwlciT7syDWBDlYVuSmwppCghdBMQfQL4s3Uh0vRG28LkU+UXcwYXwh3UK6cA1bBnKfAa9k7P5BuMxVh8p6he6EZr0kGNjKGPS
xuVxgczO/C32GP+HVVsWlIMNmgB4GeMHIN3yJampOrLZIMlQuP9d9kOicvRia1ge5sFtT+Vmthnp1F7sR3P+ADB/WxKSxVbiLaVBo+zm/rZbyM9vU0CVLD69lzPC6xKcFkxewlWJU6o7rOz1qzh47fT+8qUcVYfpCSEtT/U8eX2JFnXCb0PPXWivofI28tnsuS8GjwUiOyzCoxxuIEOyz1HNRXBcO2dSKR2qM41zUs0btA2JkA3hTVW8YWn8czHxrZyooooaumzbUPQBOqO3fewnLLyQ9etBcjZJ8Xm/B1EBk9cRPWDjgx5Hq8C0soA+EsoNoaSQJu67HuFTRd/OWvKSliCoj1XVcqBobnJWmTU7kAgi73pMaq/G4ot2rRFSL9MbkJgHCyxBkrl9nkCVUJC5GphsrDS5P5/bmRS3iTNdxiXAzdwOIQqJpEO54oN+3CHZuZuUOgCcWTI3uxWq/gBDJrBTsv8EUqtNQJve0qwIh2PUuJl5DIqF0CvswN649gywc=","instances":"AAAAAAAAAAAAAAAAAAAAAAAAAAAApvhdIlw19IwSvukAAAAAAAAAAAAAAAAAAAAAAAAAAAAl72fyrHk3TaguHQAAAAAAAAAAAAAAAAAAAAAAAAAAAAALh9HvEG69AvDlAAAAAAAAAAAAAAAAAAAAAAAAAAAAkGY9R6S+t36FIrAAAAAAAAAAAAAAAAAAAAAAAAAAAACoNqt7QwZoXUpj/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAdaREhypq22OmnAAAAAAAAAAAAAAAAAAAAAAAAAAAAOXf2Vj0jGD1q4xQAAAAAAAAAAAAAAAAAAAAAAAAAAADZYAdKTg7m4hBHGgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAll4nXKE0us1IMAAAAAAAAAAAAAAAAAAAAAAAAAAAAFfnJ8YXlwczTsyEAAAAAAAAAAAAAAAAAAAAAAAAAAAArXqULkWYvNST9PQAAAAAAAAAAAAAAAAAAAAAAAAAAAAArqteSdJMySnbMAC5TUWus+SXtvRWUNmCSMiMb4aZvb4hpJ5yXqjtih6gAIn9WQUOx/Z/rbbdComU0hCSwKwrewQgB3KolXKensAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAUQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA3AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA7QAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB3AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGYAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADZAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAC6AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA1wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAC3AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEYAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAA5wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAyAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADfAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAATQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAWAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD0AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAARQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADmAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAL0="},"vk":"AhYAAAAABAAAAD2PumVP6pqldS0PKWW8Q4IvnE/rvtm5/2fXvG196sYhKtVFtg+WFGYJrU+eMUKZVjPurMpM8kbYiXvE18bnsU4Nu8s47Xabxy0EViND1dzsu5HicdAWl0xG5C+VpO2faJdK4nGwtD4WHtbdqWY72nSY5aKSDxAYO85vLy+9cJZlQsMNQlhTi/2q9PYQpC4D3Uf8E+yZ7gvLhd6cFdErlg4Oq/nthQkfxPAarVYLUFNGW80SgIloMDhutrky34D+Csw8T9j5UXpHz3K/2yuVSXK6OvMG4/058TXG09qKgXYP","git_version":"9f48bc4"}
|
||||
1
common/types/message/chunk-proof-sample.json
Normal file
1
common/types/message/chunk-proof-sample.json
Normal file
File diff suppressed because one or more lines are too long
4
rust-toolchain
Normal file
4
rust-toolchain
Normal file
@@ -0,0 +1,4 @@
|
||||
[toolchain]
|
||||
channel = "nightly-2025-02-14"
|
||||
targets = ["riscv32im-unknown-none-elf", "x86_64-unknown-linux-gnu"]
|
||||
components = ["llvm-tools", "rustc-dev"]
|
||||
8
rustfmt.toml
Normal file
8
rustfmt.toml
Normal file
@@ -0,0 +1,8 @@
|
||||
comment_width = 300
|
||||
edition = "2024"
|
||||
imports_granularity = "Crate"
|
||||
max_width = 100
|
||||
newline_style = "Unix"
|
||||
normalize_comments = true
|
||||
style_edition = "2024"
|
||||
wrap_comments = false
|
||||
@@ -1,78 +0,0 @@
|
||||
[package]
|
||||
name = "prover"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[patch.crates-io]
|
||||
alloy-primitives = { git = "https://github.com/scroll-tech/alloy-core", branch = "v0.8.21" }
|
||||
ruint = { git = "https://github.com/scroll-tech/uint.git", branch = "v1.12.3" }
|
||||
tiny-keccak = { git = "https://github.com/scroll-tech/tiny-keccak", branch = "scroll-patch-v2.0.2-openvm-v1.0.0-rc.1" }
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1.0"
|
||||
log = "0.4"
|
||||
env_logger = "0.11.3"
|
||||
serde = { version = "1.0.198", features = ["derive"] }
|
||||
serde_json = "1.0.116"
|
||||
futures = "0.3.30"
|
||||
|
||||
scroll-zkvm-prover-euclid = { git = "https://github.com/scroll-tech/zkvm-prover", tag = "v0.3.0", package = "scroll-zkvm-prover" }
|
||||
ethers-core = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
|
||||
ethers-providers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
|
||||
scroll-proving-sdk = { git = "https://github.com/scroll-tech/scroll-proving-sdk.git", branch = "main", features = [
|
||||
"openvm",
|
||||
] }
|
||||
sbv-primitives = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "zkvm/euclid-upgrade", features = [
|
||||
"scroll",
|
||||
] }
|
||||
base64 = "0.13.1"
|
||||
reqwest = { version = "0.12.4", features = ["gzip"] }
|
||||
reqwest-middleware = "0.3"
|
||||
reqwest-retry = "0.5"
|
||||
once_cell = "1.19.0"
|
||||
hex = "0.4.3"
|
||||
tiny-keccak = { version = "2.0.0", features = ["sha3", "keccak"] }
|
||||
rand = "0.8.5"
|
||||
eth-keystore = "0.5.0"
|
||||
rlp = "0.5.2"
|
||||
tokio = "1.37.0"
|
||||
async-trait = "0.1"
|
||||
sled = "0.34.7"
|
||||
http = "1.1.0"
|
||||
clap = { version = "4.5", features = ["derive"] }
|
||||
ctor = "0.2.8"
|
||||
url = "2.5.4"
|
||||
serde_bytes = "0.11.15"
|
||||
|
||||
[patch."https://github.com/openvm-org/stark-backend.git"]
|
||||
openvm-stark-backend = { git = "ssh://git@github.com/scroll-tech/openvm-stark-gpu.git", branch = "main", features = ["gpu"] }
|
||||
openvm-stark-sdk = { git = "ssh://git@github.com/scroll-tech/openvm-stark-gpu.git", branch = "main", features = ["gpu"] }
|
||||
|
||||
[patch."https://github.com/Plonky3/Plonky3.git"]
|
||||
p3-air = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.1" }
|
||||
p3-field = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.1" }
|
||||
p3-commit = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.1" }
|
||||
p3-matrix = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.1" }
|
||||
p3-baby-bear = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", features = [
|
||||
"nightly-features",
|
||||
], tag = "v0.1.1" }
|
||||
p3-util = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.1" }
|
||||
p3-challenger = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.1" }
|
||||
p3-dft = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.1" }
|
||||
p3-fri = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.1" }
|
||||
p3-goldilocks = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.1" }
|
||||
p3-keccak = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.1" }
|
||||
p3-keccak-air = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.1" }
|
||||
p3-blake3 = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.1" }
|
||||
p3-mds = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.1" }
|
||||
p3-merkle-tree = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.1" }
|
||||
p3-monty-31 = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.1" }
|
||||
p3-poseidon = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.1" }
|
||||
p3-poseidon2 = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.1" }
|
||||
p3-poseidon2-air = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.1" }
|
||||
p3-symmetric = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.1" }
|
||||
p3-uni-stark = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.1" }
|
||||
p3-maybe-rayon = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.1" } # the "parallel" feature is NOT on by default to allow single-threaded benchmarking
|
||||
p3-bn254-fr = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.1.1" }
|
||||
18
zkvm-prover/Dockerfile
Normal file
18
zkvm-prover/Dockerfile
Normal file
@@ -0,0 +1,18 @@
|
||||
FROM rust:1.85
|
||||
|
||||
RUN rustup toolchain install nightly-2025-02-14-x86_64-unknown-linux-gnu
|
||||
RUN rustup component add rust-src --toolchain nightly-2025-02-14-x86_64-unknown-linux-gnu
|
||||
|
||||
WORKDIR /app
|
||||
RUN git clone --branch refactor/scroll_mono --single-branch --depth 1 https://github.com/scroll-tech/zkvm-prover.git zkvm-circuits
|
||||
|
||||
ADD common/types-rs /app/common/types-rs
|
||||
ADD zkvm-prover /app/zkvm-prover
|
||||
ADD Cargo.* /app/
|
||||
ADD rust-toolchain /app/
|
||||
ADD build/.cargo /app/zkvm-circuits/.cargo
|
||||
WORKDIR /app/zkvm-circuits
|
||||
|
||||
RUN cargo fetch
|
||||
|
||||
ENTRYPOINT ["/app/zkvm-circuits/build-guest-actions-entrypoint.sh"]
|
||||
@@ -1,48 +1,92 @@
|
||||
.PHONY: prover lint tests_binary
|
||||
RUST_MIN_STACK ?= 16777216
|
||||
export RUST_MIN_STACK
|
||||
|
||||
ifeq (4.3,$(firstword $(sort $(MAKE_VERSION) 4.3)))
|
||||
PLONKY3_VERSION=$(shell grep -m 1 "Plonky3.git" ./Cargo.lock | cut -d "#" -f2 | cut -c-7)
|
||||
RUST_BACKTRACE ?= 1
|
||||
export RUST_BACKTRACE
|
||||
|
||||
RUST_LOG ?= off,scroll_zkvm_integration=debug,scroll_zkvm_verifier=debug,scroll_zkvm_prover=debug,openvm_circuit=debug
|
||||
export RUST_LOG
|
||||
|
||||
ifdef LEGACY
|
||||
FEATURE := --no-default-features
|
||||
TESTDATA_PATH := integration/testdata/phase1
|
||||
CHUNK_PROOF := 12508460-12508463
|
||||
else
|
||||
PLONKY3_VERSION=$(shell grep -m 1 "Plonky3.git" ./Cargo.lock | cut -d "\#" -f2 | cut -c-7)
|
||||
FEATURE :=
|
||||
TESTDATA_PATH := integration/testdata/phase2
|
||||
CHUNK_PROOF := 1-4
|
||||
endif
|
||||
|
||||
ZKVM_VERSION=$(shell ./print_high_zkvm_version.sh)
|
||||
ifeq (${ZKVM_VERSION},)
|
||||
$(error ZKVM_VERSION not set)
|
||||
else
|
||||
$(info ZKVM_VERSION is ${ZKVM_VERSION})
|
||||
endif
|
||||
$(info FEATURE set to: $(FEATURE))
|
||||
$(info TESTDATA_PATH set to: $(TESTDATA_PATH))
|
||||
|
||||
ZKVM_COMMIT=$(shell echo ${ZKVM_VERSION} | cut -d " " -f2)
|
||||
$(info ZKVM_COMMIT is ${ZKVM_COMMIT})
|
||||
download-release:
|
||||
sh download-release.sh
|
||||
|
||||
PLONKY3_GPU_VERSION=$(shell ./print_plonky3gpu_version.sh | sed -n '2p')
|
||||
fmt:
|
||||
@cargo fmt --all
|
||||
|
||||
GIT_REV=$(shell git rev-parse --short HEAD)
|
||||
GO_TAG=$(shell grep "var tag = " ../common/version/version.go | cut -d "\"" -f2)
|
||||
clippy:
|
||||
# @cargo clippy --tests --manifest-path crates/circuits/types/Cargo.toml -- -D warnings
|
||||
# sh openvm-clippy.sh
|
||||
@cargo clippy --tests --all-features --manifest-path verifier/Cargo.toml -- -D warnings
|
||||
@cargo clippy --tests --all-features --manifest-path prover/Cargo.toml -- -D warnings
|
||||
@cargo clippy --tests --all-features --manifest-path integration/Cargo.toml -- -D warnings
|
||||
# @cargo clippy --tests --all-features --manifest-path build-guest/Cargo.toml -- -D warnings
|
||||
|
||||
ifeq (${GO_TAG},)
|
||||
$(error GO_TAG not set)
|
||||
else
|
||||
$(info GO_TAG is ${GO_TAG})
|
||||
endif
|
||||
clean-guest:
|
||||
docker rmi build-guest:local
|
||||
|
||||
ifeq (${PLONKY3_GPU_VERSION},)
|
||||
# use plonky3 with CPU
|
||||
ZK_VERSION=${ZKVM_COMMIT}-${PLONKY3_VERSION}
|
||||
else
|
||||
# use halo2_gpu
|
||||
ZK_VERSION=${ZKVM_COMMIT}-${PLONKY3_GPU_VERSION}
|
||||
endif
|
||||
build-guest:
|
||||
sh build-guest.sh
|
||||
|
||||
prover:
|
||||
GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo build --release
|
||||
clean-build-guest: clean-guest build-guest
|
||||
|
||||
tests_binary:
|
||||
cargo clean && cargo test --release --no-run
|
||||
ls target/release/deps/prover* | grep -v "\.d" | xargs -I{} ln -sf {} ./prover.test
|
||||
clean-test-cache:
|
||||
@rm -f $(TESTDATA_PATH)/proofs/*.json
|
||||
|
||||
lint:
|
||||
cargo check --all-features
|
||||
cargo clippy --all-features --all-targets -- -D warnings
|
||||
cargo fmt --all
|
||||
$(TESTDATA_PATH)/proofs/chunk-%.json:
|
||||
@OUTPUT_DIR=$(realpath $(TESTDATA_PATH)/proofs) $(MAKE) test-single-chunk
|
||||
cp -f $(TESTDATA_PATH)/proofs/chunk/proofs/*.json $(TESTDATA_PATH)/proofs
|
||||
|
||||
profile-chunk:
|
||||
@GUEST_PROFILING=true cargo test --release -p scroll-zkvm-integration --test chunk_circuit guest_profiling -- --exact --nocapture
|
||||
|
||||
test-execute-chunk:
|
||||
@cargo test --release -p scroll-zkvm-integration $(FEATURE) --test chunk_circuit test_execute -- --exact --nocapture
|
||||
|
||||
test-execute-chunk-multi:
|
||||
@cargo test --release -p scroll-zkvm-integration $(FEATURE) --test chunk_circuit test_execute_multi -- --exact --nocapture
|
||||
|
||||
test-cycle:
|
||||
@cargo test --release -p scroll-zkvm-integration $(FEATURE) --test chunk_circuit test_cycle -- --exact --nocapture
|
||||
|
||||
test-execute-batch: $(TESTDATA_PATH)/proofs/chunk-$(CHUNK_PROOF).json
|
||||
@cargo test --release -p scroll-zkvm-integration $(FEATURE) --test batch_circuit test_e2e_execute -- --exact --nocapture
|
||||
|
||||
test-execute-batch-fast: $(TESTDATA_PATH)/tasks/batch-task.json
|
||||
@cargo test --release -p scroll-zkvm-integration $(FEATURE) --test batch_circuit test_execute -- --exact --nocapture
|
||||
|
||||
test-execute-bundle:
|
||||
@cargo test --release -p scroll-zkvm-integration $(FEATURE) --test bundle_circuit test_execute -- --exact --nocapture
|
||||
|
||||
test-single-chunk:
|
||||
@cargo test --release -p scroll-zkvm-integration $(FEATURE) --test chunk_circuit setup_prove_verify_single -- --exact --nocapture
|
||||
|
||||
test-multi-chunk:
|
||||
@cargo test --release -p scroll-zkvm-integration $(FEATURE) --test chunk_circuit setup_prove_verify_multi -- --exact --nocapture
|
||||
|
||||
test-single-batch: $(TESTDATA_PATH)/tasks/batch-task.json
|
||||
@cargo test --release -p scroll-zkvm-integration $(FEATURE) --test batch_circuit setup_prove_verify_single -- --exact --nocapture
|
||||
|
||||
test-e2e-batch:
|
||||
@cargo test --release -p scroll-zkvm-integration $(FEATURE) --test batch_circuit e2e -- --exact --nocapture
|
||||
|
||||
test-bundle:
|
||||
@cargo test --release -p scroll-zkvm-integration $(FEATURE) --test bundle_circuit setup_prove_verify -- --exact --nocapture
|
||||
|
||||
test-bundle-local:
|
||||
@cargo test --release -p scroll-zkvm-integration $(FEATURE) --test bundle_circuit setup_prove_verify_local_task -- --exact --nocapture
|
||||
|
||||
test-e2e-bundle:
|
||||
@cargo test --release -p scroll-zkvm-integration $(FEATURE) --test bundle_circuit e2e -- --exact --nocapture
|
||||
|
||||
168
zkvm-prover/README.md
Normal file
168
zkvm-prover/README.md
Normal file
@@ -0,0 +1,168 @@
|
||||
# Scroll zkVM
|
||||
|
||||
**zkVM-based Circuits (Guest Programs) with a complete Scroll Prover implementation**
|
||||
|
||||
## Repository
|
||||
|
||||
This repository contains the following member crates:
|
||||
|
||||
- [scroll-zkvm-circuit-types](./crates/circuits/types): Primitive and Common types used by the circuits
|
||||
- [scroll-zkvm-chunk-circuit](./crates/circuits/chunk-circuit): Circuit for verification of a Scroll [chunk](TODO:doc)
|
||||
- [scroll-zkvm-batch-circuit](./crates/circuits/batch-circuit): Circuit for verification of a Scroll [batch](TODO:doc)
|
||||
- [scroll-zkvm-bundle-circuit](./crates/circuits/bundle-circuit): Circuit for verification of a Scroll [bundle](TODO:doc)
|
||||
- [scroll-zkvm-prover](./crates/prover): Implementation for a Scroll Prover
|
||||
- [scroll-zkvm-verifier](./crates/verifier): Implementation for a Verifier-only mode
|
||||
- [scroll-zkvm-integration](./crates/integration): Integration tests for the Scroll Prover
|
||||
|
||||
## Overview
|
||||
|
||||
The Scroll zkVM Circuits are [openvm](https://book.openvm.dev/) based Guest Programs.
|
||||
|
||||
The [prover](./crates/prover) crate offers a minimalistic API for setting up, generating and verifying proofs for Scroll's zk-rollup.
|
||||
|
||||
For a deeper dive into our implementation, please refer the [interfaces](./docs/interfaces.md) doc.
|
||||
|
||||
## Testing
|
||||
|
||||
For more commands please refer the [Makefile](./Makefile).
|
||||
|
||||
### Build Guest Programs
|
||||
|
||||
In case you have made any changes to the guest programs, it is important to build them before running the tests.
|
||||
|
||||
```shell
|
||||
$ make build-guest
|
||||
```
|
||||
|
||||
Upon building the guest programs, the child commitments in [batch-circuit](./crates/circuits/batch-circuit/src/child_commitments.rs) and [bundle-circuit](./crates/circuits/bundle-circuit/src/child_commitments.rs) will be overwritten by `build-guest`.
|
||||
|
||||
### End-to-end tests for chunk-prover
|
||||
|
||||
```shell
|
||||
$ RUST_MIN_STACK=16777216 make test-single-chunk
|
||||
```
|
||||
|
||||
### End-to-end tests for batch-prover
|
||||
|
||||
```shell
|
||||
$ RUST_MIN_STACK=16777216 make test-e2e-batch
|
||||
```
|
||||
|
||||
### End-to-end tests for bundle-prover
|
||||
|
||||
```shell
|
||||
$ RUST_MIN_STACK=16777216 make test-e2e-bundle
|
||||
```
|
||||
|
||||
*Note*: Configure `RUST_LOG=debug` for debug logs or `RUST_LOG=none,scroll_zkvm_prover=debug` for logs specifically from the `scroll-zkvm-prover` crate.
|
||||
|
||||
## Usage of Prover API
|
||||
|
||||
### Dependency
|
||||
|
||||
Add the following dependency in your `Cargo.toml`:
|
||||
|
||||
```toml
|
||||
[dependencies]
|
||||
scroll-zkvm-prover = { git = "https://github.com/scroll-tech/zkvm-prover", branch = "master" }
|
||||
```
|
||||
|
||||
### Chunk Prover
|
||||
|
||||
Prover capable of generating STARK proofs for a Scroll [chunk](TODO:doc):
|
||||
|
||||
```rust
|
||||
use std::path::Path;
|
||||
|
||||
use scroll_zkvm_prover::{ChunkProver, task::ChunkProvingTask};
|
||||
|
||||
// Paths to the application exe and application config.
|
||||
let path_exe = Path::new("./path/to/app.vmexe");
|
||||
let path_app_config = Path::new("./path/to/openvm.toml");
|
||||
|
||||
// Optional directory to cache generated proofs on disk.
|
||||
let cache_dir = Path::new("./path/to/cache/proofs");
|
||||
|
||||
// Setup prover.
|
||||
let prover = ChunkProver::setup(&path_exe, &path_app_config, Some(&cache_dir))?;
|
||||
|
||||
// Proving task of a chunk with 3 blocks.
|
||||
let block_witnesses = vec![
|
||||
sbv::primitives::types::BlockWitness { /* */ },
|
||||
sbv::primitives::types::BlockWitness { /* */ },
|
||||
sbv::primitives::types::BlockWitness { /* */ },
|
||||
];
|
||||
let task = ChunkProvingTask { block_witnesses };
|
||||
|
||||
// Generate a proof.
|
||||
let proof = prover.gen_proof(&task)?;
|
||||
|
||||
// Verify proof.
|
||||
prover.verify_proof(&proof)?;
|
||||
```
|
||||
|
||||
### Batch Prover
|
||||
|
||||
Prover capable of generating STARK proofs for a Scroll [batch](TODO:doc):
|
||||
|
||||
```rust
|
||||
use std::path::Path;
|
||||
|
||||
use scroll_zkvm_prover::{BatchProver, task::BatchProvingTask};
|
||||
|
||||
// Paths to the application exe and application config.
|
||||
let path_exe = Path::new("./path/to/app.vmexe");
|
||||
let path_app_config = Path::new("./path/to/openvm.toml");
|
||||
|
||||
// Optional directory to cache generated proofs on disk.
|
||||
let cache_dir = Path::new("./path/to/cache/proofs");
|
||||
|
||||
// Setup prover.
|
||||
let prover = BatchProver::setup(&path_exe, &path_app_config, Some(&cache_dir))?;
|
||||
|
||||
// Task that proves batching of a number of chunks.
|
||||
let task = BatchProvingTask {
|
||||
chunk_proofs, // chunk proofs being aggregated in this batch
|
||||
batch_header, // the header for the batch
|
||||
blob_bytes, // the EIP-4844 blob that makes this batch data available on L1
|
||||
};
|
||||
|
||||
// Generate a proof.
|
||||
let proof = prover.gen_proof(&task)?;
|
||||
|
||||
// Verify proof.
|
||||
prover.verify_proof(&proof)?;
|
||||
```
|
||||
|
||||
### Bundle Prover
|
||||
|
||||
Prover capable of generating EVM-verifiable halo2-based SNARK proofs for a Scroll [bundle](TODO:doc):
|
||||
|
||||
```rust
|
||||
use std::path::Path;
|
||||
|
||||
use scroll_zkvm_prover::{BundleProver, task::BundleProvingTask};
|
||||
|
||||
// Paths to the application exe and application config.
|
||||
let path_exe = Path::new("./path/to/app.vmexe");
|
||||
let path_app_config = Path::new("./path/to/openvm.toml");
|
||||
|
||||
// Optional directory to cache generated proofs on disk.
|
||||
let cache_dir = Path::new("./path/to/cache/proofs");
|
||||
|
||||
// Setup prover.
|
||||
//
|
||||
// The Bundle Prover's setup also looks into $HOME/.openvm for halo2-based setup parameters.
|
||||
let prover = BundleProver::setup(&path_exe, &path_app_config, Some(&cache_dir))?;
|
||||
|
||||
// Task that proves batching of a number of chunks.
|
||||
let task = BundleProvingTask {
|
||||
batch_proofs, // batch proofs being aggregated in this bundle
|
||||
};
|
||||
|
||||
// Generate a proof.
|
||||
let evm_proof = prover.gen_proof_evm(&task)?;
|
||||
|
||||
// Verify proof.
|
||||
prover.verify_proof_evm(&evm_proof)?;
|
||||
```
|
||||
37
zkvm-prover/bin/Cargo.toml
Normal file
37
zkvm-prover/bin/Cargo.toml
Normal file
@@ -0,0 +1,37 @@
|
||||
[package]
|
||||
name = "zkvm-prover-bin"
|
||||
version = "0.1.0"
|
||||
edition.workspace = true
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
env_logger = "0.11.3"
|
||||
clap = { version = "4.5", features = ["derive"] }
|
||||
base64.workspace = true
|
||||
|
||||
# Re-export from sbv
|
||||
anyhow = "1.0"
|
||||
tracing = "0.1"
|
||||
futures = "0.3"
|
||||
alloy = { workspace = true, features = ["provider-http", "transport-http", "reqwest", "reqwest-rustls-tls", "json-rpc"] }
|
||||
itertools.workspace = true
|
||||
url = ">=2.5.3"
|
||||
|
||||
# Re-export from proving-sdk
|
||||
reqwest = { version = "0.12.4", features = ["gzip"] }
|
||||
reqwest-middleware = "0.3"
|
||||
reqwest-retry = "0.5"
|
||||
hex = "0.4.3"
|
||||
tokio = "1.37"
|
||||
rlp = "0.5"
|
||||
rand = "0.8"
|
||||
async-trait = "0.1"
|
||||
tiny-keccak = { workspace = true, features = ["sha3", "keccak"] }
|
||||
|
||||
scroll-zkvm-prover.workspace = true
|
||||
scroll-proving-sdk = { git = "https://github.com/scroll-tech/scroll-proving-sdk.git", branch = "refactor/scroll"}
|
||||
sbv-primitives.workspace = true
|
||||
sbv-utils = { workspace = true, features = ["scroll"] }
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
48
zkvm-prover/bin/Makefile
Normal file
48
zkvm-prover/bin/Makefile
Normal file
@@ -0,0 +1,48 @@
|
||||
.PHONY: prover lint tests_binary
|
||||
|
||||
ifeq (4.3,$(firstword $(sort $(MAKE_VERSION) 4.3)))
|
||||
PLONKY3_VERSION=$(shell grep -m 1 "Plonky3.git" ./Cargo.lock | cut -d "#" -f2 | cut -c-7)
|
||||
else
|
||||
PLONKY3_VERSION=$(shell grep -m 1 "Plonky3.git" ./Cargo.lock | cut -d "\#" -f2 | cut -c-7)
|
||||
endif
|
||||
|
||||
ZKVM_VERSION=$(shell ./print_high_zkvm_version.sh)
|
||||
ifeq (${ZKVM_VERSION},)
|
||||
$(error ZKVM_VERSION not set)
|
||||
else
|
||||
$(info ZKVM_VERSION is ${ZKVM_VERSION})
|
||||
endif
|
||||
|
||||
ZKVM_COMMIT=$(shell echo ${ZKVM_VERSION} | cut -d " " -f2)
|
||||
$(info ZKVM_COMMIT is ${ZKVM_COMMIT})
|
||||
|
||||
PLONKY3_GPU_VERSION=$(shell ./print_plonky3gpu_version.sh | sed -n '2p')
|
||||
|
||||
GIT_REV=$(shell git rev-parse --short HEAD)
|
||||
GO_TAG=$(shell grep "var tag = " ../common/version/version.go | cut -d "\"" -f2)
|
||||
|
||||
ifeq (${GO_TAG},)
|
||||
$(error GO_TAG not set)
|
||||
else
|
||||
$(info GO_TAG is ${GO_TAG})
|
||||
endif
|
||||
|
||||
ifeq (${PLONKY3_GPU_VERSION},)
|
||||
# use plonky3 with CPU
|
||||
ZK_VERSION=${ZKVM_COMMIT}-${PLONKY3_VERSION}
|
||||
else
|
||||
# use halo2_gpu
|
||||
ZK_VERSION=${ZKVM_COMMIT}-${PLONKY3_GPU_VERSION}
|
||||
endif
|
||||
|
||||
prover:
|
||||
GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo build --release
|
||||
|
||||
tests_binary:
|
||||
cargo clean && cargo test --release --no-run
|
||||
ls target/release/deps/prover* | grep -v "\.d" | xargs -I{} ln -sf {} ./prover.test
|
||||
|
||||
lint:
|
||||
cargo check --all-features
|
||||
cargo clippy --all-features --all-targets -- -D warnings
|
||||
cargo fmt --all
|
||||
@@ -1,13 +1,12 @@
|
||||
mod prover;
|
||||
mod types;
|
||||
mod zk_circuits_handler;
|
||||
|
||||
use clap::{ArgAction, Parser};
|
||||
use prover::{LocalProver, LocalProverConfig};
|
||||
use scroll_proving_sdk::{
|
||||
prover::ProverBuilder,
|
||||
utils::{get_version, init_tracing},
|
||||
};
|
||||
mod prover;
|
||||
mod types;
|
||||
mod zk_circuits_handler;
|
||||
use prover::{LocalProver, LocalProverConfig};
|
||||
|
||||
#[derive(Parser, Debug)]
|
||||
#[command(disable_version_flag = true)]
|
||||
@@ -1,16 +1,17 @@
|
||||
use crate::zk_circuits_handler::{
|
||||
euclid::EuclidHandler, euclidV2::EuclidV2Handler, CircuitsHandler,
|
||||
CircuitsHandler, RequestPreHandler, RpcConfig, euclid::EuclidHandler, euclidV2::EuclidV2Handler,
|
||||
};
|
||||
use anyhow::{anyhow, Result};
|
||||
use anyhow::{Result, anyhow};
|
||||
use async_trait::async_trait;
|
||||
use base64::{Engine, prelude::BASE64_STANDARD};
|
||||
use scroll_proving_sdk::{
|
||||
config::Config as SdkConfig,
|
||||
prover::{
|
||||
ProvingService,
|
||||
proving_service::{
|
||||
GetVkRequest, GetVkResponse, ProveRequest, ProveResponse, QueryTaskRequest,
|
||||
QueryTaskResponse, TaskStatus,
|
||||
},
|
||||
ProvingService,
|
||||
},
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
@@ -25,6 +26,8 @@ use tokio::{runtime::Handle, sync::Mutex, task::JoinHandle};
|
||||
#[derive(Clone, Serialize, Deserialize)]
|
||||
pub struct LocalProverConfig {
|
||||
pub sdk_config: SdkConfig,
|
||||
#[serde(alias = "l2geth")]
|
||||
pub rpc_config: RpcConfig,
|
||||
pub circuits: HashMap<String, CircuitConfig>,
|
||||
}
|
||||
|
||||
@@ -54,6 +57,7 @@ pub struct LocalProver {
|
||||
current_task: Option<JoinHandle<Result<String>>>,
|
||||
|
||||
active_handler: Option<(String, Arc<dyn CircuitsHandler>)>,
|
||||
pre_handler: Option<RequestPreHandler>,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
@@ -69,7 +73,7 @@ impl ProvingService for LocalProver {
|
||||
let vk = handler.get_vk(*proof_type).await;
|
||||
|
||||
if let Some(vk) = vk {
|
||||
vks.push(base64::encode(vk));
|
||||
vks.push(BASE64_STANDARD.encode(vk));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -139,6 +143,7 @@ impl LocalProver {
|
||||
next_task_id: 0,
|
||||
current_task: None,
|
||||
active_handler: None,
|
||||
pre_handler: None,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -147,10 +152,21 @@ impl LocalProver {
|
||||
req: ProveRequest,
|
||||
handler: Arc<dyn CircuitsHandler>,
|
||||
) -> Result<ProveResponse> {
|
||||
if self.pre_handler.is_none() {
|
||||
self.pre_handler
|
||||
.replace(RequestPreHandler::create(&self.config.rpc_config)?);
|
||||
}
|
||||
|
||||
self.next_task_id += 1;
|
||||
let duration = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();
|
||||
let created_at = duration.as_secs() as f64 + duration.subsec_nanos() as f64 * 1e-9;
|
||||
|
||||
let req = self
|
||||
.pre_handler
|
||||
.as_ref()
|
||||
.expect("has been created")
|
||||
.on_request(req)
|
||||
.await?;
|
||||
let req_clone = req.clone();
|
||||
let handle = Handle::current();
|
||||
let task_handle =
|
||||
@@ -1,21 +1,27 @@
|
||||
use alloy::primitives::B256;
|
||||
use serde::{Deserialize, Deserializer, Serialize, Serializer};
|
||||
|
||||
use scroll_proving_sdk::prover::types::CircuitType;
|
||||
|
||||
#[derive(Serialize, Deserialize, Default)]
|
||||
pub struct Task {
|
||||
#[serde(rename = "type", default)]
|
||||
pub task_type: CircuitType,
|
||||
pub task_data: String,
|
||||
#[serde(default)]
|
||||
pub hard_fork_name: String,
|
||||
}
|
||||
|
||||
type CommonHash = B256;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ChunkTaskDetail {
|
||||
pub block_hashes: Vec<CommonHash>,
|
||||
pub prev_msg_queue_hash: CommonHash,
|
||||
pub fork_name: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Default)]
|
||||
pub struct ProofDetail {
|
||||
pub id: String,
|
||||
#[serde(rename = "type", default)]
|
||||
pub proof_type: CircuitType,
|
||||
pub proof_data: String,
|
||||
pub error: String,
|
||||
}
|
||||
140
zkvm-prover/bin/src/zk_circuits_handler.rs
Normal file
140
zkvm-prover/bin/src/zk_circuits_handler.rs
Normal file
@@ -0,0 +1,140 @@
|
||||
pub mod euclid;
|
||||
#[allow(non_snake_case)]
|
||||
pub mod euclidV2;
|
||||
|
||||
use anyhow::Result;
|
||||
use async_trait::async_trait;
|
||||
use scroll_proving_sdk::prover::{ProofType, proving_service::ProveRequest};
|
||||
|
||||
#[async_trait]
|
||||
pub trait CircuitsHandler: Sync + Send {
|
||||
async fn get_vk(&self, task_type: ProofType) -> Option<Vec<u8>>;
|
||||
|
||||
async fn get_proof_data(&self, prove_request: ProveRequest) -> Result<String>;
|
||||
}
|
||||
|
||||
use alloy::{
|
||||
providers::{Provider, ProviderBuilder, RootProvider},
|
||||
rpc::client::ClientBuilder,
|
||||
transports::layers::RetryBackoffLayer,
|
||||
};
|
||||
use sbv_primitives::types::Network;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
fn default_max_retry() -> u32 {
|
||||
10
|
||||
}
|
||||
fn default_backoff() -> u64 {
|
||||
100
|
||||
}
|
||||
fn default_cups() -> u64 {
|
||||
100
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone, Debug)]
|
||||
pub struct RpcConfig {
|
||||
#[serde(alias = "endpoint")]
|
||||
pub rpc_url: String,
|
||||
// Concurrency Limit, default 10
|
||||
// pub max_concurrency: usize,
|
||||
// Retry parameters
|
||||
#[serde(default = "default_max_retry")]
|
||||
pub max_retry: u32,
|
||||
// backoff duration in milliseconds, default 100ms
|
||||
#[serde(default = "default_backoff")]
|
||||
pub backoff: u64,
|
||||
// compute units per second: default 100
|
||||
#[serde(default = "default_cups")]
|
||||
pub cups: u64,
|
||||
}
|
||||
|
||||
pub struct RequestPreHandler {
|
||||
provider: RootProvider<Network>,
|
||||
}
|
||||
|
||||
impl RequestPreHandler {
|
||||
pub fn create(config: &RpcConfig) -> Result<Self> {
|
||||
let rpc = url::Url::parse(&config.rpc_url)?;
|
||||
tracing::info!("Using RPC: {}", rpc);
|
||||
let retry_layer = RetryBackoffLayer::new(config.max_retry, config.backoff, config.cups);
|
||||
let client = ClientBuilder::default().layer(retry_layer).http(rpc);
|
||||
|
||||
Ok(Self {
|
||||
provider: ProviderBuilder::<_, _, Network>::default().on_client(client),
|
||||
})
|
||||
}
|
||||
|
||||
async fn on_chunk_request(&self, input: String) -> Result<String> {
|
||||
use crate::types::ChunkTaskDetail;
|
||||
use alloy::network::primitives::BlockTransactionsKind;
|
||||
use sbv_utils::{rpc::ProviderExt, witness::WitnessBuilder};
|
||||
use scroll_zkvm_prover::task::chunk::ChunkProvingTask;
|
||||
|
||||
let chunk_task: ChunkTaskDetail = serde_json::from_str(&input)?;
|
||||
|
||||
let chain_id = self.provider.get_chain_id().await?;
|
||||
|
||||
// we need block number but only get hashes, which cause much extra cost for query the block
|
||||
// number from hash according to https://github.com/scroll-tech/scroll/blob/932be72b88ba2ebb6f9457e8480ee08d612d35a7/coordinator/internal/orm/l2_block.go#L53
|
||||
// the hashes is ordered by ascending in block number so a heuristic way is applied
|
||||
|
||||
let mut block_witnesses = Vec::new();
|
||||
|
||||
for block_hash in chunk_task.block_hashes {
|
||||
// grep `dump_block_witness` in sbv here,
|
||||
// TODO: we do not need to do that
|
||||
// if we have block number or `dump_block_witness` support block hashes
|
||||
let block = self
|
||||
.provider
|
||||
.get_block_by_hash(block_hash, BlockTransactionsKind::Full)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow::anyhow!("Block not found"))?;
|
||||
|
||||
let number = block.header.number;
|
||||
if number == 0 {
|
||||
anyhow::bail!("no number in header or use block 0");
|
||||
}
|
||||
|
||||
let builder = WitnessBuilder::new()
|
||||
.block(block)
|
||||
.chain_id(chain_id)
|
||||
.execution_witness(self.provider.debug_execution_witness(number.into()).await?);
|
||||
|
||||
let builder = builder
|
||||
.state_root(
|
||||
self.provider
|
||||
.scroll_disk_root(number.into())
|
||||
.await?
|
||||
.disk_root,
|
||||
)
|
||||
.unwrap()
|
||||
.prev_state_root(
|
||||
self.provider
|
||||
.scroll_disk_root((number - 1).into())
|
||||
.await?
|
||||
.disk_root,
|
||||
);
|
||||
|
||||
block_witnesses.push(builder.build()?);
|
||||
}
|
||||
|
||||
let input_repack = ChunkProvingTask {
|
||||
fork_name: chunk_task.fork_name,
|
||||
prev_msg_queue_hash: chunk_task.prev_msg_queue_hash,
|
||||
block_witnesses,
|
||||
};
|
||||
// self.provider.dump_block_witness(number)
|
||||
|
||||
Ok(serde_json::to_string(&input_repack)?)
|
||||
}
|
||||
|
||||
pub async fn on_request(&self, mut prove_request: ProveRequest) -> Result<ProveRequest> {
|
||||
match prove_request.proof_type {
|
||||
ProofType::Chunk => {
|
||||
prove_request.input = self.on_chunk_request(prove_request.input).await?;
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
Ok(prove_request)
|
||||
}
|
||||
}
|
||||
@@ -1,12 +1,12 @@
|
||||
use std::{path::Path, sync::Arc};
|
||||
|
||||
use super::CircuitsHandler;
|
||||
use anyhow::{anyhow, Result};
|
||||
use anyhow::{Result, anyhow};
|
||||
use async_trait::async_trait;
|
||||
use scroll_proving_sdk::prover::{proving_service::ProveRequest, ProofType};
|
||||
use scroll_zkvm_prover_euclid::{
|
||||
task::{batch::BatchProvingTask, bundle::BundleProvingTask, chunk::ChunkProvingTask},
|
||||
use scroll_proving_sdk::prover::{ProofType, proving_service::ProveRequest};
|
||||
use scroll_zkvm_prover::{
|
||||
BatchProver, BundleProverEuclidV1, ChunkProver, ProverConfig,
|
||||
task::{batch::BatchProvingTask, bundle::BundleProvingTask, chunk::ChunkProvingTask},
|
||||
};
|
||||
use tokio::sync::Mutex;
|
||||
pub struct EuclidHandler {
|
||||
@@ -1,12 +1,12 @@
|
||||
use std::{path::Path, sync::Arc};
|
||||
|
||||
use super::{euclid::Phase, CircuitsHandler};
|
||||
use anyhow::{anyhow, Result};
|
||||
use super::{CircuitsHandler, euclid::Phase};
|
||||
use anyhow::{Result, anyhow};
|
||||
use async_trait::async_trait;
|
||||
use scroll_proving_sdk::prover::{proving_service::ProveRequest, ProofType};
|
||||
use scroll_zkvm_prover_euclid::{
|
||||
task::{batch::BatchProvingTask, bundle::BundleProvingTask, chunk::ChunkProvingTask},
|
||||
use scroll_proving_sdk::prover::{ProofType, proving_service::ProveRequest};
|
||||
use scroll_zkvm_prover::{
|
||||
BatchProver, BundleProverEuclidV2, ChunkProver,
|
||||
task::{batch::BatchProvingTask, bundle::BundleProvingTask, chunk::ChunkProvingTask},
|
||||
};
|
||||
use tokio::sync::Mutex;
|
||||
pub struct EuclidV2Handler {
|
||||
4
zkvm-prover/build-guest-actions-entrypoint.sh
Executable file
4
zkvm-prover/build-guest-actions-entrypoint.sh
Executable file
@@ -0,0 +1,4 @@
|
||||
#!/bin/bash
|
||||
|
||||
# run crates/build-guest
|
||||
cargo run --release -p scroll-zkvm-build-guest
|
||||
69
zkvm-prover/build-guest.sh
Executable file
69
zkvm-prover/build-guest.sh
Executable file
@@ -0,0 +1,69 @@
|
||||
#!/usr/bin/env bash
|
||||
set -ex
|
||||
|
||||
[ -f "crates/build-guest/.env" ] && . crates/build-guest/.env
|
||||
|
||||
# if BUILD_STAGES if empty, set it to stage1,stage2,stage3
|
||||
if [ -z "${BUILD_STAGES}" ]; then
|
||||
BUILD_STAGES="stage1,stage2,stage3"
|
||||
fi
|
||||
|
||||
# build docker image
|
||||
docker build --platform linux/amd64 -t build-guest:local -f ./Dockerfile ../
|
||||
|
||||
# run docker image
|
||||
docker run --cidfile ./build-guest.cid --platform linux/amd64 -e FEATURE=${FEATURE} build-guest:local
|
||||
|
||||
container_id=$(cat ./build-guest.cid)
|
||||
|
||||
# copy staffs from sources in container to local
|
||||
for f in chunk-circuit \
|
||||
batch-circuit \
|
||||
bundle-circuit; do
|
||||
docker cp ${container_id}:/app/zkvm-circuits/crates/circuits/${f}/openvm.toml build/${f}/
|
||||
done
|
||||
|
||||
if [ -n "$(echo ${BUILD_STAGES} | grep stage1)" ]; then
|
||||
# copy leaf commitments from container to local
|
||||
for f in chunk-circuit/chunk_leaf_commit.rs \
|
||||
batch-circuit/batch_leaf_commit.rs \
|
||||
bundle-circuit/bundle_leaf_commit.rs; do
|
||||
docker cp ${container_id}:/app/zkvm-circuits/crates/circuits/${f} build/${f}
|
||||
done
|
||||
docker cp ${container_id}:/app/zkvm-circuits/crates/circuits/bundle-circuit/digest_2 build/bundle-circuit/digest_2
|
||||
fi
|
||||
|
||||
if [ -n "$(echo ${BUILD_STAGES} | grep stage2)" ]; then
|
||||
# copy root verifier
|
||||
docker cp ${container_id}:/app/zkvm-circuits/crates/build-guest/root_verifier.asm build/root_verifier.asm
|
||||
fi
|
||||
|
||||
if [ -n "$(echo ${BUILD_STAGES} | grep stage3)" ]; then
|
||||
# copy exe commitments from container to local
|
||||
for f in chunk-circuit/chunk_exe_commit.rs \
|
||||
chunk-circuit/chunk_exe_rv32_commit.rs \
|
||||
batch-circuit/batch_exe_commit.rs \
|
||||
bundle-circuit/bundle_exe_commit.rs \
|
||||
bundle-circuit/bundle_exe_euclidv1_commit.rs; do
|
||||
docker cp ${container_id}:/app/zkvm-circuits/crates/circuits/${f} build/${f}
|
||||
done
|
||||
|
||||
# copy digests from container to local
|
||||
docker cp ${container_id}:/app/zkvm-circuits/crates/circuits/bundle-circuit/digest_1 build/bundle-circuit/digest_1
|
||||
docker cp ${container_id}:/app/zkvm-circuits/crates/circuits/bundle-circuit/digest_1_euclidv1 build/bundle-circuit/digest_1_euclidv1
|
||||
|
||||
# copy app.vmexe from container to local
|
||||
mkdir -p build/chunk-circuit/openvm
|
||||
mkdir -p build/batch-circuit/openvm
|
||||
mkdir -p build/bundle-circuit/openvm
|
||||
docker cp ${container_id}:/app/zkvm-circuits/crates/circuits/chunk-circuit/openvm/app.vmexe build/chunk-circuit/openvm/app.vmexe
|
||||
docker cp ${container_id}:/app/zkvm-circuits/crates/circuits/chunk-circuit/openvm/app_rv32.vmexe build/chunk-circuit/openvm/app_rv32.vmexe
|
||||
docker cp ${container_id}:/app/zkvm-circuits/crates/circuits/batch-circuit/openvm/app.vmexe build/batch-circuit/openvm/app.vmexe
|
||||
docker cp ${container_id}:/app/zkvm-circuits/crates/circuits/bundle-circuit/openvm/app.vmexe build/bundle-circuit/openvm/app.vmexe
|
||||
docker cp ${container_id}:/app/zkvm-circuits/crates/circuits/bundle-circuit/openvm/app_euclidv1.vmexe build/bundle-circuit/openvm/app_euclidv1.vmexe
|
||||
|
||||
fi
|
||||
|
||||
# remove docker container
|
||||
docker rm ${container_id}
|
||||
rm ./build-guest.cid
|
||||
3
zkvm-prover/build/.gitignore
vendored
Normal file
3
zkvm-prover/build/.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
*.vmexe
|
||||
root_verifier.asm
|
||||
bundle-circuit/digest_*
|
||||
4
zkvm-prover/build/batch-circuit/batch_exe_commit.rs
Normal file
4
zkvm-prover/build/batch-circuit/batch_exe_commit.rs
Normal file
@@ -0,0 +1,4 @@
|
||||
#![cfg_attr(rustfmt, rustfmt_skip)]
|
||||
//! Generated by crates/build-guest. DO NOT EDIT!
|
||||
|
||||
pub const COMMIT: [u32; 8] = [1660301305, 168653291, 489155857, 877082518, 574909420, 273656918, 548174885, 1859120536];
|
||||
4
zkvm-prover/build/batch-circuit/batch_leaf_commit.rs
Normal file
4
zkvm-prover/build/batch-circuit/batch_leaf_commit.rs
Normal file
@@ -0,0 +1,4 @@
|
||||
#![cfg_attr(rustfmt, rustfmt_skip)]
|
||||
//! Generated by crates/build-guest. DO NOT EDIT!
|
||||
|
||||
pub const COMMIT: [u32; 8] = [1583597683, 1891396505, 1658359221, 937358707, 1082503370, 1627277738, 322886937, 1126664665];
|
||||
34
zkvm-prover/build/batch-circuit/openvm.toml
Normal file
34
zkvm-prover/build/batch-circuit/openvm.toml
Normal file
@@ -0,0 +1,34 @@
|
||||
[app_fri_params.fri_params]
|
||||
log_blowup = 1
|
||||
log_final_poly_len = 0
|
||||
num_queries = 100
|
||||
proof_of_work_bits = 16
|
||||
|
||||
[app_vm_config.rv32i]
|
||||
|
||||
[app_vm_config.rv32m]
|
||||
|
||||
[app_vm_config.io]
|
||||
|
||||
[app_vm_config.keccak]
|
||||
|
||||
[app_vm_config.castf]
|
||||
|
||||
[app_vm_config.modular]
|
||||
supported_modulus = [
|
||||
"4002409555221667393417789825735904156556882819939007885332058136124031650490837864442687629129015664037894272559787",
|
||||
"52435875175126190479447740508185965837690552500527637822603658699938581184513",
|
||||
]
|
||||
[app_vm_config.native]
|
||||
[app_vm_config.pairing]
|
||||
supported_curves = ["Bls12_381"]
|
||||
[app_vm_config.sha256]
|
||||
[app_vm_config.fp2]
|
||||
supported_modulus = [
|
||||
"4002409555221667393417789825735904156556882819939007885332058136124031650490837864442687629129015664037894272559787",
|
||||
]
|
||||
[[app_vm_config.ecc.supported_curves]]
|
||||
modulus = "4002409555221667393417789825735904156556882819939007885332058136124031650490837864442687629129015664037894272559787"
|
||||
scalar = "52435875175126190479447740508185965837690552500527637822603658699938581184513"
|
||||
a = "0"
|
||||
b = "4"
|
||||
4
zkvm-prover/build/bundle-circuit/bundle_exe_commit.rs
Normal file
4
zkvm-prover/build/bundle-circuit/bundle_exe_commit.rs
Normal file
@@ -0,0 +1,4 @@
|
||||
#![cfg_attr(rustfmt, rustfmt_skip)]
|
||||
//! Generated by crates/build-guest. DO NOT EDIT!
|
||||
|
||||
pub const COMMIT: [u32; 8] = [649838675, 786832254, 1395878687, 1458584106, 1291473044, 419188889, 113604861, 1633400343];
|
||||
@@ -0,0 +1,4 @@
|
||||
#![cfg_attr(rustfmt, rustfmt_skip)]
|
||||
//! Generated by crates/build-guest. DO NOT EDIT!
|
||||
|
||||
pub const COMMIT: [u32; 8] = [486184220, 544209032, 1886689311, 570463891, 461318186, 106392984, 525080597, 1270661183];
|
||||
4
zkvm-prover/build/bundle-circuit/bundle_leaf_commit.rs
Normal file
4
zkvm-prover/build/bundle-circuit/bundle_leaf_commit.rs
Normal file
@@ -0,0 +1,4 @@
|
||||
#![cfg_attr(rustfmt, rustfmt_skip)]
|
||||
//! Generated by crates/build-guest. DO NOT EDIT!
|
||||
|
||||
pub const COMMIT: [u32; 8] = [1227657215, 852763880, 97050317, 868662534, 861316317, 1072133630, 767460211, 493765067];
|
||||
17
zkvm-prover/build/bundle-circuit/openvm.toml
Normal file
17
zkvm-prover/build/bundle-circuit/openvm.toml
Normal file
@@ -0,0 +1,17 @@
|
||||
[app_fri_params.fri_params]
|
||||
log_blowup = 1
|
||||
log_final_poly_len = 0
|
||||
num_queries = 100
|
||||
proof_of_work_bits = 16
|
||||
|
||||
[app_vm_config.rv32i]
|
||||
|
||||
[app_vm_config.rv32m]
|
||||
|
||||
[app_vm_config.io]
|
||||
|
||||
[app_vm_config.keccak]
|
||||
|
||||
[app_vm_config.castf]
|
||||
|
||||
[app_vm_config.native]
|
||||
4
zkvm-prover/build/chunk-circuit/chunk_exe_commit.rs
Normal file
4
zkvm-prover/build/chunk-circuit/chunk_exe_commit.rs
Normal file
@@ -0,0 +1,4 @@
|
||||
#![cfg_attr(rustfmt, rustfmt_skip)]
|
||||
//! Generated by crates/build-guest. DO NOT EDIT!
|
||||
|
||||
pub const COMMIT: [u32; 8] = [1098565535, 1693373045, 331673897, 142368506, 1824595882, 1020436445, 4334643, 929005758];
|
||||
4
zkvm-prover/build/chunk-circuit/chunk_exe_rv32_commit.rs
Normal file
4
zkvm-prover/build/chunk-circuit/chunk_exe_rv32_commit.rs
Normal file
@@ -0,0 +1,4 @@
|
||||
#![cfg_attr(rustfmt, rustfmt_skip)]
|
||||
//! Generated by crates/build-guest. DO NOT EDIT!
|
||||
|
||||
pub const COMMIT: [u32; 8] = [1876028461, 1806970388, 1493987331, 600051485, 803969577, 1396836730, 826662536, 1405122028];
|
||||
4
zkvm-prover/build/chunk-circuit/chunk_leaf_commit.rs
Normal file
4
zkvm-prover/build/chunk-circuit/chunk_leaf_commit.rs
Normal file
@@ -0,0 +1,4 @@
|
||||
#![cfg_attr(rustfmt, rustfmt_skip)]
|
||||
//! Generated by crates/build-guest. DO NOT EDIT!
|
||||
|
||||
pub const COMMIT: [u32; 8] = [1443243841, 1004366406, 2003092941, 1325139171, 1093586017, 1842955945, 1478923252, 128760176];
|
||||
53
zkvm-prover/build/chunk-circuit/openvm.toml
Normal file
53
zkvm-prover/build/chunk-circuit/openvm.toml
Normal file
@@ -0,0 +1,53 @@
|
||||
[app_fri_params.fri_params]
|
||||
log_blowup = 1
|
||||
log_final_poly_len = 0
|
||||
num_queries = 100
|
||||
proof_of_work_bits = 16
|
||||
|
||||
[app_vm_config.rv32i]
|
||||
|
||||
[app_vm_config.io]
|
||||
|
||||
[app_vm_config.keccak]
|
||||
|
||||
[app_vm_config.rv32m]
|
||||
range_tuple_checker_sizes = [256, 8192]
|
||||
|
||||
[app_vm_config.bigint]
|
||||
range_tuple_checker_sizes = [256, 8192]
|
||||
|
||||
[app_vm_config.modular]
|
||||
supported_modulus = [
|
||||
"21888242871839275222246405745257275088696311157297823662689037894645226208583",
|
||||
"21888242871839275222246405745257275088548364400416034343698204186575808495617",
|
||||
"115792089237316195423570985008687907853269984665640564039457584007908834671663",
|
||||
"115792089237316195423570985008687907852837564279074904382605163141518161494337",
|
||||
"115792089210356248762697446949407573530086143415290314195533631308867097853951",
|
||||
"115792089210356248762697446949407573529996955224135760342422259061068512044369"
|
||||
]
|
||||
|
||||
[app_vm_config.fp2]
|
||||
supported_modulus = ["21888242871839275222246405745257275088696311157297823662689037894645226208583"]
|
||||
|
||||
[app_vm_config.pairing]
|
||||
supported_curves = ["Bn254"]
|
||||
|
||||
[app_vm_config.sha256]
|
||||
|
||||
[[app_vm_config.ecc.supported_curves]]
|
||||
modulus = "115792089237316195423570985008687907853269984665640564039457584007908834671663"
|
||||
scalar = "115792089237316195423570985008687907852837564279074904382605163141518161494337"
|
||||
a = "0"
|
||||
b = "7"
|
||||
|
||||
[[app_vm_config.ecc.supported_curves]]
|
||||
modulus = "115792089210356248762697446949407573530086143415290314195533631308867097853951"
|
||||
scalar = "115792089210356248762697446949407573529996955224135760342422259061068512044369"
|
||||
a = "115792089210356248762697446949407573530086143415290314195533631308867097853948"
|
||||
b = "41058363725152142129326129780047268409114441015993725554835256314039467401291"
|
||||
|
||||
[[app_vm_config.ecc.supported_curves]]
|
||||
modulus = "21888242871839275222246405745257275088696311157297823662689037894645226208583"
|
||||
scalar = "21888242871839275222246405745257275088548364400416034343698204186575808495617"
|
||||
a = "0"
|
||||
b = "3"
|
||||
@@ -8,9 +8,6 @@
|
||||
"retry_wait_time_sec": 10,
|
||||
"connection_timeout_sec": 30
|
||||
},
|
||||
"l2geth": {
|
||||
"endpoint": "http://localhost:9999"
|
||||
},
|
||||
"prover": {
|
||||
"circuit_type": 2,
|
||||
"supported_proof_types": [
|
||||
@@ -22,6 +19,9 @@
|
||||
},
|
||||
"db_path": "unique-db-path-for-prover-1"
|
||||
},
|
||||
"l2geth": {
|
||||
"endpoint": "http://localhost:9999"
|
||||
},
|
||||
"circuits": {
|
||||
"euclidV2": {
|
||||
"hard_fork_name": "euclidV2",
|
||||
24
zkvm-prover/download-release.sh
Normal file
24
zkvm-prover/download-release.sh
Normal file
@@ -0,0 +1,24 @@
|
||||
#!/bin/bash
|
||||
|
||||
# release version
|
||||
if [ -z "${SCROLL_ZKVM_VERSION}" ]; then
|
||||
echo "SCROLL_ZKVM_VERSION not set"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# chunk-circuit exe
|
||||
wget https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/chunk/app.vmexe -O crates/circuits/chunk-circuit/openvm/app.vmexe
|
||||
|
||||
# batch-circuit exe
|
||||
wget https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/batch/app.vmexe -O crates/circuits/batch-circuit/openvm/app.vmexe
|
||||
|
||||
# bundle-circuit exe
|
||||
wget https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/bundle/app.vmexe -O crates/circuits/bundle-circuit/openvm/app.vmexe
|
||||
|
||||
# bundle-circuit exe, legacy version, may not exist
|
||||
wget https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/bundle/app_euclidv1.vmexe -O crates/circuits/bundle-circuit/openvm/app_euclidv1.vmexe || echo "legacy app not exist for $SCROLL_ZKVM_VERSION"
|
||||
|
||||
# assets for verifier-only mode
|
||||
wget https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/verifier/root-verifier-vm-config -O crates/verifier/testdata/root-verifier-vm-config
|
||||
wget https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/verifier/root-verifier-committed-exe -O crates/verifier/testdata/root-verifier-committed-exe
|
||||
wget https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/verifier/verifier.bin -O crates/verifier/testdata/verifier.bin
|
||||
1
zkvm-prover/integration/.gitignore
vendored
Normal file
1
zkvm-prover/integration/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
*.txt
|
||||
43
zkvm-prover/integration/Cargo.toml
Normal file
43
zkvm-prover/integration/Cargo.toml
Normal file
@@ -0,0 +1,43 @@
|
||||
[package]
|
||||
name = "scroll-zkvm-integration"
|
||||
version.workspace = true
|
||||
edition.workspace = true
|
||||
|
||||
[dependencies]
|
||||
scroll-zkvm-circuit-input-types.workspace = true
|
||||
scroll-zkvm-prover.workspace = true
|
||||
scroll-zkvm-verifier.workspace = true
|
||||
sbv-primitives = { workspace = true }
|
||||
tracing.workspace = true
|
||||
metrics-tracing-context.workspace = true
|
||||
|
||||
openvm-build = { workspace = true, default-features = false }
|
||||
openvm-circuit.workspace = true
|
||||
openvm-sdk = { workspace = true, default-features = false }
|
||||
openvm-native-circuit = { workspace = true, default-features = false }
|
||||
openvm-native-compiler = { workspace = true, default-features = false }
|
||||
openvm-native-recursion = { workspace = true, default-features = false }
|
||||
openvm-native-transpiler = { workspace = true, default-features = false }
|
||||
openvm-transpiler = { workspace = true, default-features = false }
|
||||
|
||||
alloy-primitives.workspace = true
|
||||
eyre.workspace = true
|
||||
serde_json.workspace = true
|
||||
tiny-keccak.workspace = true
|
||||
vm-zstd = { workspace = true, features = ["zstd"] }
|
||||
once_cell.workspace = true
|
||||
|
||||
rayon = "1"
|
||||
chrono = "0.4"
|
||||
#ff = "0.13"
|
||||
glob = "0.3"
|
||||
#sha2 = "0.10"
|
||||
tracing-subscriber = "0.3"
|
||||
|
||||
[dev-dependencies]
|
||||
halo2curves-axiom = "0.7.0"
|
||||
|
||||
[features]
|
||||
default = ["euclidv2"]
|
||||
euclidv2 = []
|
||||
limit-logs = []
|
||||
351
zkvm-prover/integration/src/lib.rs
Normal file
351
zkvm-prover/integration/src/lib.rs
Normal file
@@ -0,0 +1,351 @@
|
||||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
process,
|
||||
};
|
||||
|
||||
use once_cell::sync::OnceCell;
|
||||
use openvm_sdk::{
|
||||
F, Sdk,
|
||||
config::{AppConfig, SdkVmConfig},
|
||||
};
|
||||
use scroll_zkvm_prover::{
|
||||
ProverType, WrappedProof,
|
||||
setup::{read_app_config, read_app_exe},
|
||||
task::ProvingTask,
|
||||
};
|
||||
use tracing::instrument;
|
||||
use tracing_subscriber::{fmt::format::FmtSpan, layer::SubscriberExt, util::SubscriberInitExt};
|
||||
|
||||
pub mod testers;
|
||||
|
||||
pub mod utils;
|
||||
|
||||
/// Path to store release assets, root directory of zkvm-prover repository.
|
||||
const DIR_OUTPUT: &str = "./../../.output";
|
||||
|
||||
/// Directory to store proofs on disc.
|
||||
const DIR_PROOFS: &str = "proofs";
|
||||
|
||||
/// File descriptor for app openvm config.
|
||||
const FD_APP_CONFIG: &str = "openvm.toml";
|
||||
|
||||
/// File descriptor for app exe.
|
||||
const FD_APP_EXE: &str = "app.vmexe";
|
||||
|
||||
/// Environment variable used to set the test-run's output directory for assets.
|
||||
const ENV_OUTPUT_DIR: &str = "OUTPUT_DIR";
|
||||
|
||||
/// Every test run will write assets to a new directory.
|
||||
///
|
||||
/// Possibly one of the following:
|
||||
/// - <DIR_OUTPUT>/chunk-tests-{timestamp}
|
||||
/// - <DIR_OUTPUT>/batch-tests-{timestamp}
|
||||
/// - <DIR_OUTPUT>/bundle-tests-{timestamp}
|
||||
static DIR_TESTRUN: OnceCell<PathBuf> = OnceCell::new();
|
||||
|
||||
/// Circuit that implements functionality required to run e2e tests.
|
||||
pub trait ProverTester {
|
||||
/// Prover type that is being tested.
|
||||
type Prover: ProverType;
|
||||
|
||||
/// Path to the corresponding circuit's project directory.
|
||||
const PATH_PROJECT_ROOT: &str;
|
||||
|
||||
/// Prefix to use while naming app-specific data like app exe, app pk, etc.
|
||||
const DIR_ASSETS: &str;
|
||||
|
||||
/// Setup directory structure for the test suite.
|
||||
fn setup() -> eyre::Result<()> {
|
||||
// Setup tracing subscriber.
|
||||
setup_logger()?;
|
||||
|
||||
// If user has set an output directory, use it.
|
||||
let dir_testrun = if let Ok(env_dir) = std::env::var(ENV_OUTPUT_DIR) {
|
||||
let dir = Path::new(&env_dir);
|
||||
if std::fs::exists(dir).is_err() {
|
||||
tracing::error!("OUTPUT_DIR={dir:?} not found");
|
||||
process::exit(1);
|
||||
}
|
||||
dir.into()
|
||||
} else {
|
||||
// Create the <OUTPUT>/{DIR_ASSETS}-test-{timestamp} for test-run.
|
||||
let testrun = format!(
|
||||
"{}-tests-{}",
|
||||
Self::DIR_ASSETS,
|
||||
chrono::Utc::now().format("%Y%m%d_%H%M%S"),
|
||||
);
|
||||
Path::new(DIR_OUTPUT).join(testrun)
|
||||
};
|
||||
|
||||
// Set the path for the current test-run.
|
||||
DIR_TESTRUN
|
||||
.set(dir_testrun)
|
||||
.map_err(|dir| eyre::eyre!("could not set test-run dir: {dir:?}"))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Load the app config.
|
||||
fn load_with_exe_fd(
|
||||
app_exe_fd: &str,
|
||||
) -> eyre::Result<(PathBuf, AppConfig<SdkVmConfig>, PathBuf)> {
|
||||
let path_app_config = Path::new(Self::PATH_PROJECT_ROOT).join(FD_APP_CONFIG);
|
||||
let app_config = read_app_config(&path_app_config)?;
|
||||
let path_assets = Path::new(Self::PATH_PROJECT_ROOT).join("openvm");
|
||||
let path_app_exe = path_assets.join(app_exe_fd);
|
||||
Ok((path_app_config, app_config, path_app_exe))
|
||||
}
|
||||
|
||||
/// Load the app config.
|
||||
fn load() -> eyre::Result<(PathBuf, AppConfig<SdkVmConfig>, PathBuf)> {
|
||||
Self::load_with_exe_fd(&Self::fd_app_exe())
|
||||
}
|
||||
|
||||
/// Get the path to the app exe.
|
||||
fn fd_app_exe() -> String {
|
||||
FD_APP_EXE.to_string()
|
||||
}
|
||||
|
||||
/// Generate proving task for test purposes.
|
||||
fn gen_proving_task() -> eyre::Result<<Self::Prover as ProverType>::ProvingTask>;
|
||||
|
||||
/// Generate multiple proving tasks for test purposes.
|
||||
fn gen_multi_proving_tasks() -> eyre::Result<Vec<<Self::Prover as ProverType>::ProvingTask>> {
|
||||
unimplemented!("must be implemented by MultiTester");
|
||||
}
|
||||
|
||||
/// Light weight testing to simply execute the vm program for test
|
||||
#[instrument("ProverTester::execute", skip_all, fields(task_id))]
|
||||
fn execute(
|
||||
app_config: AppConfig<SdkVmConfig>,
|
||||
task: &<Self::Prover as ProverType>::ProvingTask,
|
||||
exe_path: impl AsRef<Path>,
|
||||
) -> eyre::Result<Vec<F>> {
|
||||
let stdin = task.build_guest_input()?;
|
||||
|
||||
Ok(Sdk::new().execute(read_app_exe(exe_path)?, app_config.app_vm_config, stdin)?)
|
||||
}
|
||||
|
||||
fn execute_with_proving_task(
|
||||
app_config: AppConfig<SdkVmConfig>,
|
||||
exe_path: impl AsRef<Path>,
|
||||
) -> eyre::Result<Vec<F>> {
|
||||
Self::execute(app_config, &Self::gen_proving_task()?, exe_path)
|
||||
}
|
||||
}
|
||||
|
||||
/// The outcome of a successful prove-verify run.
|
||||
pub struct ProveVerifyOutcome<T, P> {
|
||||
/// Single or multiple proving tasks.
|
||||
pub tasks: Vec<T>,
|
||||
/// Verified proofs for the proving tasks.
|
||||
pub proofs: Vec<P>,
|
||||
}
|
||||
|
||||
impl<T: Clone, P: Clone> ProveVerifyOutcome<T, P> {
|
||||
pub fn single(task: T, proof: P) -> Self {
|
||||
Self {
|
||||
tasks: vec![task],
|
||||
proofs: vec![proof],
|
||||
}
|
||||
}
|
||||
|
||||
pub fn multi(tasks: &[T], proofs: &[P]) -> Self {
|
||||
Self {
|
||||
tasks: tasks.to_vec(),
|
||||
proofs: proofs.to_vec(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Setup test environment
|
||||
fn setup_logger() -> eyre::Result<()> {
|
||||
let fmt_layer = tracing_subscriber::fmt::layer()
|
||||
.pretty()
|
||||
.with_span_events(FmtSpan::CLOSE);
|
||||
|
||||
#[cfg(feature = "limit-logs")]
|
||||
{
|
||||
let filters = tracing_subscriber::filter::Targets::new()
|
||||
.with_target("scroll_zkvm_prover", tracing::Level::INFO)
|
||||
.with_target("scroll_zkvm_integration", tracing::Level::DEBUG);
|
||||
|
||||
tracing_subscriber::registry()
|
||||
.with(fmt_layer)
|
||||
.with(filters)
|
||||
.try_init()?;
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "limit-logs"))]
|
||||
{
|
||||
tracing_subscriber::registry()
|
||||
.with(tracing_subscriber::EnvFilter::from_default_env())
|
||||
.with(fmt_layer)
|
||||
.with(metrics_tracing_context::MetricsLayer::new())
|
||||
.try_init()?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Alias for convenience.
|
||||
type ProveVerifyRes<T> = eyre::Result<
|
||||
ProveVerifyOutcome<
|
||||
<<T as ProverTester>::Prover as ProverType>::ProvingTask,
|
||||
WrappedProof<<<T as ProverTester>::Prover as ProverType>::ProofMetadata>,
|
||||
>,
|
||||
>;
|
||||
|
||||
/// Alias for convenience.
|
||||
type ProveVerifyEvmRes<T> = eyre::Result<(
|
||||
ProveVerifyOutcome<
|
||||
<<T as ProverTester>::Prover as ProverType>::ProvingTask,
|
||||
WrappedProof<<<T as ProverTester>::Prover as ProverType>::ProofMetadata>,
|
||||
>,
|
||||
scroll_zkvm_verifier::verifier::Verifier<scroll_zkvm_verifier::verifier::AnyVerifier>,
|
||||
PathBuf,
|
||||
)>;
|
||||
|
||||
/// End-to-end test for a single proving task.
|
||||
#[instrument(name = "prove_verify_single", skip_all)]
|
||||
pub fn prove_verify_single<T>(
|
||||
task: Option<<T::Prover as ProverType>::ProvingTask>,
|
||||
) -> ProveVerifyRes<T>
|
||||
where
|
||||
T: ProverTester,
|
||||
<T::Prover as ProverType>::ProvingTask: Clone,
|
||||
<T::Prover as ProverType>::ProofMetadata: Clone,
|
||||
<T::Prover as ProverType>::ProofType: Clone,
|
||||
{
|
||||
let (path_app_config, _, path_app_exe) = T::load()?;
|
||||
|
||||
let cache_dir = DIR_TESTRUN
|
||||
.get()
|
||||
.ok_or(eyre::eyre!("missing assets dir"))?
|
||||
.join(T::DIR_ASSETS)
|
||||
.join(DIR_PROOFS);
|
||||
std::fs::create_dir_all(&cache_dir)?;
|
||||
|
||||
// Generate proving task for the circuit.
|
||||
let task = if let Some(t) = task {
|
||||
t
|
||||
} else {
|
||||
T::gen_proving_task()?
|
||||
};
|
||||
|
||||
// Setup prover.
|
||||
let config = scroll_zkvm_prover::ProverConfig {
|
||||
path_app_exe,
|
||||
path_app_config,
|
||||
dir_cache: Some(cache_dir),
|
||||
..Default::default()
|
||||
};
|
||||
let prover = scroll_zkvm_prover::Prover::<T::Prover>::setup(config)?;
|
||||
|
||||
// Construct root proof for the circuit.
|
||||
let proof = prover.gen_proof(&task)?;
|
||||
|
||||
// Verify proof.
|
||||
prover.verify_proof(&proof)?;
|
||||
|
||||
Ok(ProveVerifyOutcome::single(task, proof))
|
||||
}
|
||||
|
||||
/// End-to-end test for multiple proving tasks of the same prover.
|
||||
#[instrument(name = "prove_verify_multi", skip_all)]
|
||||
pub fn prove_verify_multi<T>(
|
||||
tasks: Option<&[<T::Prover as ProverType>::ProvingTask]>,
|
||||
) -> ProveVerifyRes<T>
|
||||
where
|
||||
T: ProverTester,
|
||||
<T::Prover as ProverType>::ProvingTask: Clone,
|
||||
<T::Prover as ProverType>::ProofMetadata: Clone,
|
||||
<T::Prover as ProverType>::ProofType: Clone,
|
||||
{
|
||||
let (path_app_config, _, path_app_exe) = T::load()?;
|
||||
|
||||
// Setup prover.
|
||||
let cache_dir = DIR_TESTRUN
|
||||
.get()
|
||||
.ok_or(eyre::eyre!("missing assets dir"))?
|
||||
.join(T::DIR_ASSETS)
|
||||
.join(DIR_PROOFS);
|
||||
std::fs::create_dir_all(&cache_dir)?;
|
||||
let config = scroll_zkvm_prover::ProverConfig {
|
||||
path_app_exe,
|
||||
path_app_config,
|
||||
dir_cache: Some(cache_dir),
|
||||
..Default::default()
|
||||
};
|
||||
let prover = scroll_zkvm_prover::Prover::<T::Prover>::setup(config)?;
|
||||
|
||||
// Generate proving task for the circuit.
|
||||
let tasks = tasks.map_or_else(|| T::gen_multi_proving_tasks(), |tasks| Ok(tasks.to_vec()))?;
|
||||
|
||||
// For each of the tasks, generate and verify proof.
|
||||
let proofs = tasks
|
||||
.iter()
|
||||
.map(|task| {
|
||||
let proof = prover.gen_proof(task)?;
|
||||
prover.verify_proof(&proof)?;
|
||||
Ok(proof)
|
||||
})
|
||||
.collect::<eyre::Result<Vec<WrappedProof<<T::Prover as ProverType>::ProofMetadata>>>>()?;
|
||||
|
||||
Ok(ProveVerifyOutcome::multi(&tasks, &proofs))
|
||||
}
|
||||
|
||||
/// End-to-end test for a single proving task to generate an EVM-verifiable SNARK proof.
|
||||
#[instrument(name = "prove_verify_single_evm", skip_all)]
|
||||
pub fn prove_verify_single_evm<T>(
|
||||
task: Option<<T::Prover as ProverType>::ProvingTask>,
|
||||
) -> ProveVerifyEvmRes<T>
|
||||
where
|
||||
T: ProverTester,
|
||||
<T::Prover as ProverType>::ProvingTask: Clone,
|
||||
<T::Prover as ProverType>::ProofMetadata: Clone,
|
||||
<T::Prover as ProverType>::ProofType: Clone,
|
||||
{
|
||||
let (path_app_config, _, path_app_exe) = T::load()?;
|
||||
|
||||
// Setup prover.
|
||||
let path_assets = DIR_TESTRUN
|
||||
.get()
|
||||
.ok_or(eyre::eyre!("missing testrun dir"))?
|
||||
.join(T::DIR_ASSETS);
|
||||
let cache_dir = path_assets.join(DIR_PROOFS);
|
||||
std::fs::create_dir_all(&cache_dir)?;
|
||||
let config = scroll_zkvm_prover::ProverConfig {
|
||||
path_app_exe,
|
||||
path_app_config,
|
||||
dir_cache: Some(cache_dir),
|
||||
..Default::default()
|
||||
};
|
||||
let prover = scroll_zkvm_prover::Prover::<T::Prover>::setup(config)?;
|
||||
|
||||
// Dump verifier-only assets to disk.
|
||||
let (path_vm_config, path_root_committed_exe) = prover.dump_verifier(&path_assets)?;
|
||||
let path_verifier_code = Path::new(T::PATH_PROJECT_ROOT)
|
||||
.join("openvm")
|
||||
.join("verifier.bin");
|
||||
let verifier = scroll_zkvm_verifier::verifier::Verifier::setup(
|
||||
&path_vm_config,
|
||||
&path_root_committed_exe,
|
||||
&path_verifier_code,
|
||||
)?;
|
||||
|
||||
// Generate proving task for the circuit.
|
||||
let task = task.map_or_else(|| T::gen_proving_task(), Ok)?;
|
||||
|
||||
// Construct root proof for the circuit.
|
||||
let proof = prover.gen_proof_evm(&task)?;
|
||||
|
||||
// Verify proof.
|
||||
prover.verify_proof_evm(&proof)?;
|
||||
|
||||
Ok((
|
||||
ProveVerifyOutcome::single(task, proof),
|
||||
verifier,
|
||||
path_assets,
|
||||
))
|
||||
}
|
||||
64
zkvm-prover/integration/src/testers/batch.rs
Normal file
64
zkvm-prover/integration/src/testers/batch.rs
Normal file
@@ -0,0 +1,64 @@
|
||||
use std::path::Path;
|
||||
|
||||
use scroll_zkvm_prover::{
|
||||
BatchProverType, ChunkProof, ProverType, task::ProvingTask, utils::read_json_deep,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
ProverTester,
|
||||
testers::{PATH_TESTDATA, chunk::ChunkProverTester},
|
||||
utils::{build_batch_task, phase_base_directory},
|
||||
};
|
||||
|
||||
pub struct BatchProverTester;
|
||||
|
||||
impl ProverTester for BatchProverTester {
|
||||
type Prover = BatchProverType;
|
||||
|
||||
const PATH_PROJECT_ROOT: &str = "./../build/batch-circuit";
|
||||
|
||||
const DIR_ASSETS: &str = "batch";
|
||||
|
||||
fn gen_proving_task() -> eyre::Result<<Self::Prover as ProverType>::ProvingTask> {
|
||||
Ok(read_json_deep(
|
||||
Path::new(PATH_TESTDATA)
|
||||
.join(phase_base_directory())
|
||||
.join("tasks")
|
||||
.join("batch-task.json"),
|
||||
)?)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct BatchTaskBuildingTester;
|
||||
|
||||
impl ProverTester for BatchTaskBuildingTester {
|
||||
type Prover = BatchProverType;
|
||||
|
||||
const PATH_PROJECT_ROOT: &str = "./../build/batch-circuit";
|
||||
|
||||
const DIR_ASSETS: &str = "batch";
|
||||
|
||||
fn gen_proving_task() -> eyre::Result<<Self::Prover as ProverType>::ProvingTask> {
|
||||
let chunk_task = ChunkProverTester::gen_proving_task()?;
|
||||
|
||||
let proof_path = Path::new(PATH_TESTDATA)
|
||||
.join(phase_base_directory())
|
||||
.join("proofs")
|
||||
.join(format!("chunk-{}.json", chunk_task.identifier()));
|
||||
println!("proof_path: {:?}", proof_path);
|
||||
|
||||
let chunk_proof = read_json_deep::<_, ChunkProof>(&proof_path)?;
|
||||
|
||||
let task = build_batch_task(&[chunk_task], &[chunk_proof], Default::default());
|
||||
Ok(task)
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn batch_task_parsing() {
|
||||
use scroll_zkvm_prover::task::ProvingTask;
|
||||
|
||||
let task = BatchProverTester::gen_proving_task().unwrap();
|
||||
|
||||
let _ = task.build_guest_input().unwrap();
|
||||
}
|
||||
66
zkvm-prover/integration/src/testers/bundle.rs
Normal file
66
zkvm-prover/integration/src/testers/bundle.rs
Normal file
@@ -0,0 +1,66 @@
|
||||
use std::path::Path;
|
||||
|
||||
use scroll_zkvm_prover::{ProverType, task::bundle::BundleProvingTask, utils::read_json_deep};
|
||||
|
||||
#[cfg(not(feature = "euclidv2"))]
|
||||
use scroll_zkvm_prover::BundleProverTypeEuclidV1 as BundleProverType;
|
||||
#[cfg(feature = "euclidv2")]
|
||||
use scroll_zkvm_prover::BundleProverTypeEuclidV2 as BundleProverType;
|
||||
|
||||
use crate::{ProverTester, testers::PATH_TESTDATA};
|
||||
|
||||
#[cfg(not(feature = "euclidv2"))]
|
||||
use openvm_sdk::config::{AppConfig, SdkVmConfig};
|
||||
#[cfg(not(feature = "euclidv2"))]
|
||||
use std::path::PathBuf;
|
||||
|
||||
pub struct BundleProverTester;
|
||||
|
||||
impl ProverTester for BundleProverTester {
|
||||
type Prover = BundleProverType;
|
||||
|
||||
const PATH_PROJECT_ROOT: &str = "./../build/bundle-circuit";
|
||||
|
||||
const DIR_ASSETS: &str = "bundle";
|
||||
|
||||
fn gen_proving_task() -> eyre::Result<<Self::Prover as ProverType>::ProvingTask> {
|
||||
Ok(BundleProvingTask {
|
||||
batch_proofs: vec![
|
||||
read_json_deep(Path::new(PATH_TESTDATA).join("proofs").join(
|
||||
"batch-0x6a2d14504ccc86a2d1a3fb00f95e50cf2de80230fc51306d16b5f4ccc17b8e73.json",
|
||||
))?,
|
||||
read_json_deep(Path::new(PATH_TESTDATA).join("proofs").join(
|
||||
"batch-0x5f769da6d14efecf756c2a82c164416f31b3986d6c701479107acb1bcd421b21.json",
|
||||
))?,
|
||||
],
|
||||
bundle_info: None,
|
||||
fork_name: "euclidv1".to_string(),
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "euclidv2"))]
|
||||
fn load() -> eyre::Result<(PathBuf, AppConfig<SdkVmConfig>, PathBuf)> {
|
||||
Self::load_with_exe_fd("app_euclidv1.vmexe")
|
||||
}
|
||||
}
|
||||
|
||||
pub struct BundleLocalTaskTester;
|
||||
|
||||
impl ProverTester for BundleLocalTaskTester {
|
||||
type Prover = BundleProverType;
|
||||
|
||||
const PATH_PROJECT_ROOT: &str = "./../build/bundle-circuit";
|
||||
|
||||
const DIR_ASSETS: &str = "bundle";
|
||||
|
||||
fn gen_proving_task() -> eyre::Result<<Self::Prover as ProverType>::ProvingTask> {
|
||||
Ok(read_json_deep(
|
||||
Path::new(PATH_TESTDATA).join("bundle-task.json"),
|
||||
)?)
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "euclidv2"))]
|
||||
fn load() -> eyre::Result<(PathBuf, AppConfig<SdkVmConfig>, PathBuf)> {
|
||||
Self::load_with_exe_fd("app_euclidv1.vmexe")
|
||||
}
|
||||
}
|
||||
184
zkvm-prover/integration/src/testers/chunk.rs
Normal file
184
zkvm-prover/integration/src/testers/chunk.rs
Normal file
@@ -0,0 +1,184 @@
|
||||
use std::{
|
||||
fs::File,
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use sbv_primitives::{B256, types::BlockWitness};
|
||||
use scroll_zkvm_prover::{
|
||||
ChunkProverType, ChunkProverTypeRv32, ProverType, task::chunk::ChunkProvingTask,
|
||||
};
|
||||
|
||||
use crate::{ProverTester, testers::PATH_TESTDATA, utils::phase_base_directory};
|
||||
|
||||
/// Load a file <block_n>.json in the <PATH_BLOCK_WITNESS> directory.
|
||||
pub fn read_block_witness_from_testdata(block_n: usize) -> eyre::Result<BlockWitness> {
|
||||
read_block_witness(
|
||||
Path::new(PATH_TESTDATA)
|
||||
.join(phase_base_directory())
|
||||
.join("witnesses")
|
||||
.join(format!("{}.json", block_n)),
|
||||
)
|
||||
}
|
||||
|
||||
/// Utility function to read and deserialize block witness given the block number.
|
||||
pub fn read_block_witness<P>(path_witness: P) -> eyre::Result<BlockWitness>
|
||||
where
|
||||
P: AsRef<Path>,
|
||||
{
|
||||
if !path_witness.as_ref().exists() {
|
||||
println!("File not found: {:?}", path_witness.as_ref());
|
||||
return Err(eyre::eyre!("File not found: {:?}", path_witness.as_ref()));
|
||||
}
|
||||
let witness = File::open(path_witness)?;
|
||||
Ok(serde_json::from_reader::<_, BlockWitness>(witness)?)
|
||||
}
|
||||
|
||||
pub struct ChunkProverTester;
|
||||
|
||||
impl ProverTester for ChunkProverTester {
|
||||
type Prover = ChunkProverType;
|
||||
|
||||
const PATH_PROJECT_ROOT: &str = "./../build/chunk-circuit";
|
||||
|
||||
const DIR_ASSETS: &str = "chunk";
|
||||
|
||||
/// [block-1, block-2, block-3, block-4]
|
||||
fn gen_proving_task() -> eyre::Result<<Self::Prover as ProverType>::ProvingTask> {
|
||||
let paths: Vec<PathBuf> = match std::env::var("TRACE_PATH") {
|
||||
Ok(paths) => {
|
||||
let paths: Vec<_> = glob::glob(&paths)?.filter_map(|entry| entry.ok()).collect();
|
||||
if paths.is_empty() {
|
||||
return Err(eyre::eyre!("No files found in the given path"));
|
||||
}
|
||||
paths
|
||||
}
|
||||
Err(_) => {
|
||||
#[cfg(not(feature = "euclidv2"))]
|
||||
let blocks = 12508460usize..=12508463usize;
|
||||
#[cfg(feature = "euclidv2")]
|
||||
let blocks = 1usize..=4usize;
|
||||
blocks
|
||||
.into_iter()
|
||||
.map(|block_n| {
|
||||
Path::new(PATH_TESTDATA)
|
||||
.join(phase_base_directory())
|
||||
.join("witnesses")
|
||||
.join(format!("{}.json", block_n))
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
};
|
||||
|
||||
Ok(ChunkProvingTask {
|
||||
block_witnesses: paths
|
||||
.iter()
|
||||
.map(read_block_witness)
|
||||
.collect::<eyre::Result<Vec<BlockWitness>>>()?,
|
||||
prev_msg_queue_hash: if cfg!(feature = "euclidv2") {
|
||||
B256::repeat_byte(1u8)
|
||||
} else {
|
||||
B256::ZERO
|
||||
},
|
||||
fork_name: if cfg!(feature = "euclidv2") {
|
||||
String::from("euclidv2")
|
||||
} else {
|
||||
String::from("euclidv1")
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ChunkProverRv32Tester;
|
||||
|
||||
impl ProverTester for ChunkProverRv32Tester {
|
||||
type Prover = ChunkProverTypeRv32;
|
||||
|
||||
const PATH_PROJECT_ROOT: &str = "./../build/chunk-circuit";
|
||||
|
||||
const DIR_ASSETS: &str = "chunk";
|
||||
|
||||
fn fd_app_exe() -> String {
|
||||
"app_rv32.vmexe".to_string()
|
||||
}
|
||||
|
||||
fn gen_proving_task() -> eyre::Result<<Self::Prover as ProverType>::ProvingTask> {
|
||||
// Reuse the same implementation as ChunkProverTester
|
||||
ChunkProverTester::gen_proving_task()
|
||||
}
|
||||
}
|
||||
|
||||
/// helper func to gen a series of proving tasks, specified by the block number
|
||||
pub fn gen_multi_tasks(
|
||||
blocks: impl IntoIterator<Item = Vec<i32>>,
|
||||
) -> eyre::Result<Vec<<ChunkProverType as ProverType>::ProvingTask>> {
|
||||
let paths: Vec<Vec<PathBuf>> = match std::env::var("TRACE_PATH") {
|
||||
Ok(paths) => glob::glob(&paths)?
|
||||
.filter_map(|entry| entry.ok())
|
||||
.map(|p| vec![p])
|
||||
.collect(),
|
||||
Err(_) => blocks
|
||||
.into_iter()
|
||||
.map(|block_group| {
|
||||
block_group
|
||||
.into_iter()
|
||||
.map(|block_n| {
|
||||
Path::new(PATH_TESTDATA)
|
||||
.join(phase_base_directory())
|
||||
.join("witnesses")
|
||||
.join(format!("{}.json", block_n))
|
||||
})
|
||||
.collect()
|
||||
})
|
||||
.collect(),
|
||||
};
|
||||
|
||||
let tasks = paths
|
||||
.into_iter()
|
||||
.map(|block_group| -> eyre::Result<_> {
|
||||
let block_witnesses = block_group
|
||||
.iter()
|
||||
.map(read_block_witness)
|
||||
.collect::<eyre::Result<Vec<BlockWitness>>>()?;
|
||||
Ok(ChunkProvingTask {
|
||||
block_witnesses,
|
||||
prev_msg_queue_hash: if cfg!(feature = "euclidv2") {
|
||||
B256::repeat_byte(1u8)
|
||||
} else {
|
||||
B256::ZERO
|
||||
},
|
||||
fork_name: if cfg!(feature = "euclidv2") {
|
||||
String::from("euclidv2")
|
||||
} else {
|
||||
String::from("euclidv1")
|
||||
},
|
||||
})
|
||||
})
|
||||
.collect::<eyre::Result<Vec<ChunkProvingTask>>>()?;
|
||||
|
||||
Ok(tasks)
|
||||
}
|
||||
|
||||
pub struct MultiChunkProverTester;
|
||||
|
||||
impl ProverTester for MultiChunkProverTester {
|
||||
type Prover = ChunkProverType;
|
||||
|
||||
const PATH_PROJECT_ROOT: &str = "./../build/chunk-circuit";
|
||||
|
||||
const DIR_ASSETS: &str = "chunk";
|
||||
|
||||
fn gen_proving_task() -> eyre::Result<<Self::Prover as ProverType>::ProvingTask> {
|
||||
unreachable!("Use gen_multi_proving_tasks");
|
||||
}
|
||||
|
||||
/// [block-1]
|
||||
/// [block-2]
|
||||
/// [block-3, block-4]
|
||||
fn gen_multi_proving_tasks() -> eyre::Result<Vec<<Self::Prover as ProverType>::ProvingTask>> {
|
||||
#[cfg(not(feature = "euclidv2"))]
|
||||
let blocks = [vec![12508460], vec![12508461], vec![12508462, 12508463]];
|
||||
#[cfg(feature = "euclidv2")]
|
||||
let blocks = [vec![1], vec![2], vec![3, 4]];
|
||||
gen_multi_tasks(blocks)
|
||||
}
|
||||
}
|
||||
8
zkvm-prover/integration/src/testers/mod.rs
Normal file
8
zkvm-prover/integration/src/testers/mod.rs
Normal file
@@ -0,0 +1,8 @@
|
||||
pub mod batch;
|
||||
|
||||
pub mod bundle;
|
||||
|
||||
pub mod chunk;
|
||||
|
||||
/// Path to the testdata directory.
|
||||
pub const PATH_TESTDATA: &str = "./testdata";
|
||||
420
zkvm-prover/integration/src/utils/mod.rs
Normal file
420
zkvm-prover/integration/src/utils/mod.rs
Normal file
@@ -0,0 +1,420 @@
|
||||
use sbv_primitives::{
|
||||
B256, U256,
|
||||
types::{BlockWitness, Transaction, eips::Encodable2718, reth::TransactionSigned},
|
||||
};
|
||||
use scroll_zkvm_circuit_input_types::{
|
||||
batch::{BatchHeader, BatchHeaderV6, BatchHeaderV7},
|
||||
utils::keccak256,
|
||||
};
|
||||
use scroll_zkvm_prover::{
|
||||
ChunkProof,
|
||||
task::{
|
||||
batch::{BatchHeaderV, BatchProvingTask},
|
||||
chunk::ChunkProvingTask,
|
||||
},
|
||||
utils::point_eval,
|
||||
};
|
||||
use vm_zstd::zstd_encode;
|
||||
|
||||
fn is_l1_tx(tx: &Transaction) -> bool {
|
||||
// 0x7e is l1 tx
|
||||
tx.transaction_type == 0x7e
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
fn final_l1_index(blk: &BlockWitness) -> u64 {
|
||||
// Get number of l1 txs. L1 txs can be skipped, so just counting is not enough
|
||||
// (The comment copied from scroll-prover, but why the max l1 queue index is always
|
||||
// the last one for a chunk, or, is the last l1 never being skipped?)
|
||||
blk.transaction
|
||||
.iter()
|
||||
.filter(|tx| is_l1_tx(tx))
|
||||
.map(|tx| tx.queue_index.expect("l1 msg should has queue index"))
|
||||
.max()
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
fn blks_tx_bytes<'a>(blks: impl Iterator<Item = &'a BlockWitness>) -> Vec<u8> {
|
||||
blks.flat_map(|blk| &blk.transaction)
|
||||
.filter(|tx| !is_l1_tx(tx))
|
||||
.fold(Vec::new(), |mut tx_bytes, tx| {
|
||||
TransactionSigned::try_from(tx)
|
||||
.unwrap()
|
||||
.encode_2718(&mut tx_bytes);
|
||||
tx_bytes
|
||||
})
|
||||
}
|
||||
|
||||
pub fn phase_base_directory() -> &'static str {
|
||||
if cfg!(feature = "euclidv2") {
|
||||
"phase2"
|
||||
} else {
|
||||
"phase1"
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct LastHeader {
|
||||
pub batch_index: u64,
|
||||
pub batch_hash: B256,
|
||||
pub version: u8,
|
||||
/// legacy field
|
||||
pub l1_message_index: u64,
|
||||
}
|
||||
|
||||
impl Default for LastHeader {
|
||||
fn default() -> Self {
|
||||
// create a default LastHeader according to the dummy value
|
||||
// being set in the e2e test in scroll-prover:
|
||||
// https://github.com/scroll-tech/scroll-prover/blob/82f8ed3fabee5c3001b0b900cda1608413e621f8/integration/tests/e2e_tests.rs#L203C1-L207C8
|
||||
|
||||
Self {
|
||||
batch_index: 123,
|
||||
version: if cfg!(feature = "euclidv2") { 7 } else { 6 },
|
||||
batch_hash: B256::new([
|
||||
0xab, 0xac, 0xad, 0xae, 0xaf, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
0, 0, 0, 0, 0, 0, 0, 0, 0,
|
||||
]),
|
||||
l1_message_index: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&BatchHeaderV> for LastHeader {
|
||||
fn from(value: &BatchHeaderV) -> Self {
|
||||
match value {
|
||||
BatchHeaderV::V6(h) => h.into(),
|
||||
BatchHeaderV::V7(h) => h.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&BatchHeaderV6> for LastHeader {
|
||||
fn from(h: &BatchHeaderV6) -> Self {
|
||||
Self {
|
||||
batch_index: h.batch_index,
|
||||
version: h.version,
|
||||
batch_hash: h.batch_hash(),
|
||||
l1_message_index: h.total_l1_message_popped,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&BatchHeaderV7> for LastHeader {
|
||||
fn from(h: &BatchHeaderV7) -> Self {
|
||||
Self {
|
||||
batch_index: h.batch_index,
|
||||
version: h.version,
|
||||
batch_hash: h.batch_hash(),
|
||||
l1_message_index: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn build_batch_task(
|
||||
chunk_tasks: &[ChunkProvingTask],
|
||||
chunk_proofs: &[ChunkProof],
|
||||
last_header: LastHeader,
|
||||
) -> BatchProvingTask {
|
||||
// Sanity check.
|
||||
assert_eq!(chunk_tasks.len(), chunk_proofs.len());
|
||||
|
||||
// collect tx bytes from chunk tasks
|
||||
let (meta_chunk_sizes, chunk_digests, chunk_tx_bytes) = chunk_tasks.iter().fold(
|
||||
(Vec::new(), Vec::new(), Vec::new()),
|
||||
|(mut meta_chunk_sizes, mut chunk_digests, mut payload_bytes), task| {
|
||||
let tx_bytes = blks_tx_bytes(task.block_witnesses.iter());
|
||||
meta_chunk_sizes.push(tx_bytes.len());
|
||||
chunk_digests.push(keccak256(&tx_bytes));
|
||||
payload_bytes.extend(tx_bytes);
|
||||
(meta_chunk_sizes, chunk_digests, payload_bytes)
|
||||
},
|
||||
);
|
||||
|
||||
// sanity check
|
||||
for (digest, proof) in chunk_digests.iter().zip(chunk_proofs.iter()) {
|
||||
assert_eq!(digest, &proof.metadata.chunk_info.tx_data_digest);
|
||||
}
|
||||
|
||||
const LEGACY_MAX_CHUNKS: usize = 45;
|
||||
|
||||
let meta_chunk_bytes = {
|
||||
let valid_chunk_size = chunk_proofs.len() as u16;
|
||||
meta_chunk_sizes
|
||||
.into_iter()
|
||||
.chain(std::iter::repeat(0))
|
||||
.take(LEGACY_MAX_CHUNKS)
|
||||
.fold(
|
||||
Vec::from(valid_chunk_size.to_be_bytes()),
|
||||
|mut bytes, len| {
|
||||
bytes.extend_from_slice(&(len as u32).to_be_bytes());
|
||||
bytes
|
||||
},
|
||||
)
|
||||
};
|
||||
|
||||
// collect all data together for payload
|
||||
let mut payload = if cfg!(feature = "euclidv2") {
|
||||
Vec::new()
|
||||
} else {
|
||||
meta_chunk_bytes.clone()
|
||||
};
|
||||
#[cfg(feature = "euclidv2")]
|
||||
{
|
||||
let num_blocks = chunk_tasks
|
||||
.iter()
|
||||
.map(|t| t.block_witnesses.len())
|
||||
.sum::<usize>() as u16;
|
||||
let (prev_msg_queue_hash, initial_block_number) = {
|
||||
let first_chunk = &chunk_proofs
|
||||
.first()
|
||||
.expect("at least one chunk")
|
||||
.metadata
|
||||
.chunk_info;
|
||||
(
|
||||
first_chunk.prev_msg_queue_hash,
|
||||
first_chunk.initial_block_number,
|
||||
)
|
||||
};
|
||||
|
||||
let post_msg_queue_hash = chunk_proofs
|
||||
.last()
|
||||
.expect("at least one chunk")
|
||||
.metadata
|
||||
.chunk_info
|
||||
.post_msg_queue_hash;
|
||||
payload.extend_from_slice(prev_msg_queue_hash.as_slice());
|
||||
payload.extend_from_slice(post_msg_queue_hash.as_slice());
|
||||
payload.extend(initial_block_number.to_be_bytes());
|
||||
payload.extend(num_blocks.to_be_bytes());
|
||||
assert_eq!(payload.len(), 74);
|
||||
for proof in chunk_proofs {
|
||||
for ctx in &proof.metadata.chunk_info.block_ctxs {
|
||||
payload.extend(ctx.to_bytes());
|
||||
}
|
||||
}
|
||||
assert_eq!(payload.len(), 74 + 52 * num_blocks as usize);
|
||||
}
|
||||
payload.extend(chunk_tx_bytes);
|
||||
// compress ...
|
||||
let compressed_payload = zstd_encode(&payload);
|
||||
|
||||
let version = 7u32;
|
||||
let heading = compressed_payload.len() as u32 + (version << 24);
|
||||
|
||||
let blob_bytes = if cfg!(feature = "euclidv2") {
|
||||
let mut blob_bytes = Vec::from(heading.to_be_bytes());
|
||||
blob_bytes.push(1u8); // compressed flag
|
||||
blob_bytes.extend(compressed_payload);
|
||||
blob_bytes.resize(4096 * 31, 0);
|
||||
blob_bytes
|
||||
} else {
|
||||
let mut blob_bytes = vec![1];
|
||||
blob_bytes.extend(compressed_payload);
|
||||
blob_bytes
|
||||
};
|
||||
|
||||
let kzg_blob = point_eval::to_blob(&blob_bytes);
|
||||
let kzg_commitment = point_eval::blob_to_kzg_commitment(&kzg_blob);
|
||||
let blob_versioned_hash = point_eval::get_versioned_hash(&kzg_commitment);
|
||||
|
||||
// primage = keccak(payload) + blob_versioned_hash
|
||||
let chg_preimage = if cfg!(feature = "euclidv2") {
|
||||
let mut chg_preimage = keccak256(&blob_bytes).to_vec();
|
||||
chg_preimage.extend(blob_versioned_hash.0);
|
||||
chg_preimage
|
||||
} else {
|
||||
let mut chg_preimage = Vec::from(keccak256(&meta_chunk_bytes).0);
|
||||
let last_digest = chunk_digests.last().expect("at least we have one");
|
||||
chg_preimage.extend(
|
||||
chunk_digests
|
||||
.iter()
|
||||
.chain(std::iter::repeat(last_digest))
|
||||
.take(LEGACY_MAX_CHUNKS)
|
||||
.fold(Vec::new(), |mut ret, digest| {
|
||||
ret.extend_from_slice(&digest.0);
|
||||
ret
|
||||
}),
|
||||
);
|
||||
chg_preimage.extend_from_slice(blob_versioned_hash.as_slice());
|
||||
chg_preimage
|
||||
};
|
||||
let challenge_digest = keccak256(&chg_preimage);
|
||||
|
||||
let x = point_eval::get_x_from_challenge(challenge_digest);
|
||||
let (kzg_proof, z) = point_eval::get_kzg_proof(&kzg_blob, challenge_digest);
|
||||
|
||||
#[cfg(feature = "euclidv2")]
|
||||
let batch_header = {
|
||||
// avoid unused variant warning
|
||||
let _ = x + z;
|
||||
BatchHeaderV::V7(BatchHeaderV7 {
|
||||
version: last_header.version,
|
||||
batch_index: last_header.batch_index + 1,
|
||||
parent_batch_hash: last_header.batch_hash,
|
||||
blob_versioned_hash,
|
||||
})
|
||||
};
|
||||
|
||||
#[cfg(not(feature = "euclidv2"))]
|
||||
let batch_header = {
|
||||
// collect required fields for batch header
|
||||
let last_l1_message_index: u64 = chunk_tasks
|
||||
.iter()
|
||||
.flat_map(|t| &t.block_witnesses)
|
||||
.map(final_l1_index)
|
||||
.reduce(|last, cur| if cur == 0 { last } else { cur })
|
||||
.expect("at least one chunk");
|
||||
let last_l1_message_index = if last_l1_message_index == 0 {
|
||||
last_header.l1_message_index
|
||||
} else {
|
||||
last_l1_message_index
|
||||
};
|
||||
|
||||
let last_block_timestamp = chunk_tasks.last().map_or(0u64, |t| {
|
||||
t.block_witnesses
|
||||
.last()
|
||||
.map_or(0, |trace| trace.header.timestamp)
|
||||
});
|
||||
|
||||
let point_evaluations = [x, z];
|
||||
|
||||
let data_hash = keccak256(
|
||||
chunk_proofs
|
||||
.iter()
|
||||
.map(|proof| &proof.metadata.chunk_info.data_hash)
|
||||
.fold(Vec::new(), |mut bytes, h| {
|
||||
bytes.extend_from_slice(&h.0);
|
||||
bytes
|
||||
}),
|
||||
);
|
||||
|
||||
BatchHeaderV::V6(BatchHeaderV6 {
|
||||
version: last_header.version,
|
||||
batch_index: last_header.batch_index + 1,
|
||||
l1_message_popped: last_l1_message_index - last_header.l1_message_index,
|
||||
last_block_timestamp,
|
||||
total_l1_message_popped: last_l1_message_index,
|
||||
parent_batch_hash: last_header.batch_hash,
|
||||
data_hash,
|
||||
blob_versioned_hash,
|
||||
blob_data_proof: point_evaluations.map(|u| B256::new(u.to_be_bytes())),
|
||||
})
|
||||
};
|
||||
|
||||
BatchProvingTask {
|
||||
chunk_proofs: Vec::from(chunk_proofs),
|
||||
batch_header,
|
||||
blob_bytes,
|
||||
challenge_digest: Some(U256::from_be_bytes(challenge_digest.0)),
|
||||
kzg_commitment: Some(kzg_commitment.to_bytes()),
|
||||
kzg_proof: Some(kzg_proof.to_bytes()),
|
||||
fork_name: if cfg!(feature = "euclidv2") {
|
||||
String::from("euclidv2")
|
||||
} else {
|
||||
String::from("euclidv1")
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_and_parse_batch_task() -> eyre::Result<()> {
|
||||
#[cfg(not(feature = "euclidv2"))]
|
||||
use scroll_zkvm_circuit_input_types::batch::{EnvelopeV6 as Envelope, PayloadV6 as Payload};
|
||||
#[cfg(feature = "euclidv2")]
|
||||
use scroll_zkvm_circuit_input_types::batch::{EnvelopeV7 as Envelope, PayloadV7 as Payload};
|
||||
use scroll_zkvm_prover::utils::{read_json, read_json_deep, write_json};
|
||||
|
||||
// ./testdata/
|
||||
let path_testdata = std::path::Path::new("testdata");
|
||||
|
||||
// read block witnesses.
|
||||
let paths_block_witnesses = if cfg!(feature = "euclidv2") {
|
||||
[
|
||||
path_testdata.join("1.json"),
|
||||
path_testdata.join("2.json"),
|
||||
path_testdata.join("3.json"),
|
||||
path_testdata.join("4.json"),
|
||||
]
|
||||
} else {
|
||||
[
|
||||
path_testdata.join("12508460.json"),
|
||||
path_testdata.join("12508461.json"),
|
||||
path_testdata.join("12508462.json"),
|
||||
path_testdata.join("12508463.json"),
|
||||
]
|
||||
};
|
||||
let read_block_witness = |path| Ok(read_json::<_, BlockWitness>(path)?);
|
||||
let chunk_task = ChunkProvingTask {
|
||||
block_witnesses: paths_block_witnesses
|
||||
.iter()
|
||||
.map(read_block_witness)
|
||||
.collect::<eyre::Result<Vec<BlockWitness>>>()?,
|
||||
prev_msg_queue_hash: Default::default(),
|
||||
fork_name: if cfg!(feature = "euclidv2") {
|
||||
String::from("euclidv2")
|
||||
} else {
|
||||
String::from("euclidv1")
|
||||
},
|
||||
};
|
||||
|
||||
// read chunk proof.
|
||||
let path_chunk_proof = path_testdata
|
||||
.join("proofs")
|
||||
.join(if cfg!(feature = "euclidv2") {
|
||||
"chunk-1-4.json"
|
||||
} else {
|
||||
"chunk-12508460-12508463.json"
|
||||
});
|
||||
let chunk_proof = read_json_deep::<_, ChunkProof>(&path_chunk_proof)?;
|
||||
|
||||
let task = build_batch_task(&[chunk_task], &[chunk_proof], Default::default());
|
||||
|
||||
let chunk_infos = task
|
||||
.chunk_proofs
|
||||
.iter()
|
||||
.map(|proof| proof.metadata.chunk_info.clone())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let enveloped = Envelope::from(task.blob_bytes.as_slice());
|
||||
|
||||
#[cfg(feature = "euclidv2")]
|
||||
let header = task.batch_header.must_v7_header();
|
||||
#[cfg(not(feature = "euclidv2"))]
|
||||
let header = task.batch_header.must_v6_header();
|
||||
Payload::from(&enveloped).validate(header, &chunk_infos);
|
||||
|
||||
// depressed task output for pre-v2
|
||||
#[cfg(feature = "euclidv2")]
|
||||
write_json(path_testdata.join("batch-task-test-out.json"), &task).unwrap();
|
||||
#[cfg(not(feature = "euclidv2"))]
|
||||
write_json(path_testdata.join("batch-task-legacy-test-out.json"), &task).unwrap();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(feature = "euclidv2")]
|
||||
#[test]
|
||||
fn test_batch_task_payload() -> eyre::Result<()> {
|
||||
use scroll_zkvm_circuit_input_types::batch::{EnvelopeV7, PayloadV7};
|
||||
use scroll_zkvm_prover::utils::read_json_deep;
|
||||
|
||||
// ./testdata/
|
||||
let path_testdata = std::path::Path::new("testdata");
|
||||
|
||||
let task =
|
||||
read_json_deep::<_, BatchProvingTask>(path_testdata.join("batch-task-test-out.json"))
|
||||
.unwrap();
|
||||
|
||||
println!("blob {:?}", &task.blob_bytes[..32]);
|
||||
let enveloped = EnvelopeV7::from(task.blob_bytes.as_slice());
|
||||
|
||||
let chunk_infos = task
|
||||
.chunk_proofs
|
||||
.iter()
|
||||
.map(|proof| proof.metadata.chunk_info.clone())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
PayloadV7::from(&enveloped).validate(task.batch_header.must_v7_header(), &chunk_infos);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
321
zkvm-prover/integration/testdata/batch-task-phase-1.json
vendored
Normal file
321
zkvm-prover/integration/testdata/batch-task-phase-1.json
vendored
Normal file
File diff suppressed because one or more lines are too long
1
zkvm-prover/integration/testdata/bundle-task.json
vendored
Normal file
1
zkvm-prover/integration/testdata/bundle-task.json
vendored
Normal file
File diff suppressed because one or more lines are too long
26
zkvm-prover/integration/testdata/flatten-trace.py
vendored
Normal file
26
zkvm-prover/integration/testdata/flatten-trace.py
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
import json
|
||||
import sys
|
||||
|
||||
for f in sys.argv[1:]:
|
||||
print("convert", f)
|
||||
with open(f, 'r') as file:
|
||||
data = json.load(file)
|
||||
|
||||
for transaction in data['transaction']:
|
||||
transaction['signature'] = {
|
||||
'r': transaction.pop('r'),
|
||||
's': transaction.pop('s'),
|
||||
'y_parity': transaction.pop('y_parity')
|
||||
}
|
||||
if "authorization_list" in transaction:
|
||||
for auth in transaction["authorization_list"]:
|
||||
auth['inner'] = {
|
||||
'chain_id': auth.pop('chain_id'),
|
||||
'address': auth.pop('address'),
|
||||
'nonce': auth.pop('nonce')
|
||||
}
|
||||
|
||||
|
||||
with open(f, 'w') as file:
|
||||
json.dump(data, file, indent=2)
|
||||
|
||||
1
zkvm-prover/integration/testdata/phase1/proofs/.gitignore
vendored
Normal file
1
zkvm-prover/integration/testdata/phase1/proofs/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
*.json
|
||||
5
zkvm-prover/integration/testdata/phase1/proofs/README.md
vendored
Normal file
5
zkvm-prover/integration/testdata/phase1/proofs/README.md
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
# Proof files would be automatically generated here
|
||||
|
||||
### Some tests would try to generate the proofs it needed and cache them under this directory
|
||||
|
||||
### Use `make clean-test-cache` to clean the cached proof files
|
||||
7
zkvm-prover/integration/testdata/phase1/tasks/README.md
vendored
Normal file
7
zkvm-prover/integration/testdata/phase1/tasks/README.md
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
# Put task file here to execute some tests
|
||||
|
||||
### Following tests require batch task (naming as `batch-task.json`)
|
||||
+ `test-execute-batch-fast`
|
||||
+ `test-single-batch`
|
||||
|
||||
A task file can be generated into the output dir while a e2e test has completed
|
||||
1
zkvm-prover/integration/testdata/phase1/tasks/batch-task.json
vendored
Normal file
1
zkvm-prover/integration/testdata/phase1/tasks/batch-task.json
vendored
Normal file
File diff suppressed because one or more lines are too long
89
zkvm-prover/integration/testdata/phase1/witnesses/12508460.json
vendored
Normal file
89
zkvm-prover/integration/testdata/phase1/witnesses/12508460.json
vendored
Normal file
@@ -0,0 +1,89 @@
|
||||
{
|
||||
"chain_id": 534352,
|
||||
"header": {
|
||||
"parent_hash": "0x536b4e12eb5f9fe0c4de0057c8524f55fb889d9acf74e6b980f30888e15c786f",
|
||||
"sha3Uncles": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347",
|
||||
"miner": "0x0000000000000000000000000000000000000000",
|
||||
"state_root": "0xb94cab21a2717b8c2d76559cb83bdc292df7b85fcf0e45f9eb20e5e8ebfd219c",
|
||||
"transactions_root": "0x20ab3ee30fc8c89c55a2f603c2e7f6ae96a3c765decd1dd262771d2b3e56cd57",
|
||||
"receipts_root": "0x056b23fbba480696b65fe5a59b8f2148a1299103c4f57df839233af2cf4ca2d2",
|
||||
"logs_bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
|
||||
"difficulty": "0x2",
|
||||
"number": "0xbedd2c",
|
||||
"gas_limit": "0x989680",
|
||||
"gas_used": "0x5208",
|
||||
"timestamp": "0x677c8b8d",
|
||||
"extra_data": "0xd883050800846765746888676f312e32312e31856c696e757800000000000000024eae3ec2bf7cf694db74e95f133043ff0581e2f12c7ed94d061d588684b94368f22eb321de74982f4fcf572a57d9fa6393d810a695512784eac0b0f2ed6ce401",
|
||||
"mix_hash": "0x0000000000000000000000000000000000000000000000000000000000000000",
|
||||
"nonce": "0x0000000000000000",
|
||||
"base_fee_per_gas": "0x2663f15"
|
||||
},
|
||||
"pre_state_root": "0xe3440bcf882852bb1a9d6ba941e53a645220fee2c531ed79fa60481be8078c12",
|
||||
"transaction": [
|
||||
{
|
||||
"hash": "0x6714b0fd339b3626857674f793f5bb830e70c5b1f15aa0e2f49471a27c13afd1",
|
||||
"nonce": "0x0",
|
||||
"from": "0x7e1b980c283dc62cb007c693827057a6f62f2a56",
|
||||
"to": "0x5bbe5ecce966a098a8947dc4fda3f50d553b0309",
|
||||
"value": "0x195886b4cd9405",
|
||||
"gas_price": "0x2663f79",
|
||||
"gas": "0x5208",
|
||||
"max_fee_per_gas": "0x2663f79",
|
||||
"input": "0x",
|
||||
"chain_id": "0x82750",
|
||||
"transaction_type": 0,
|
||||
"signature": {
|
||||
"r": "0x363d7997e0f78aebe571280845938df6f181da5fb3280d32794e0d75080cbb52",
|
||||
"s": "0x7247681bb07d278eed847965e7d054a16094c060051aadb67c1b417f2e48686c",
|
||||
"y_parity": false
|
||||
}
|
||||
}
|
||||
],
|
||||
"withdrawals": null,
|
||||
"block_hashes": [],
|
||||
"states": [
|
||||
"0xe2a0305787fa12a823e0f2b7631cc41b3ba8828b3321ca811111fa75cd3aa3bb5ace38",
|
||||
"0xe7a032575a0e9e593c00f959f8c92f12db2869c3395a3b0502d05e2516446f71f85b85843e95ba80",
|
||||
"0xe7a0336b6384b5eca791c62761152d0c79bb0604c104a5fb6f4eb0703f3154bb3db085840a40af3e",
|
||||
"0xe7a0366cc928b5edb82af9bd49922954155ab7b0942694bea4ce44661d9a8736c6888584168b9aa3",
|
||||
"0xe8a02052222313e28459528d920b65115c16c04f3efc82aaedc97be59f3f377c0d3f868569cf265bfe",
|
||||
"0xe8a0310e2d527612073b26eecdfd717e6a320cf44b4afac2b0732d9fcbe2b7fa0cf686850171fcfbdf",
|
||||
"0xf843a0390decd9548b62a8d60345a988386fc84ba6bc95484008f6362f93160ef3e563a1a07ed4c7d56e2ed40f65d25eecbb0110f3b3f4db68e87700287c7e0cedcb68272c",
|
||||
"0xf851808080a0a87d9bb950836582673aa0eecc0ff64aac607870637a2dd2012b8b1b31981f698080a0c5e057ca6d669933e13616d0610d37d68c093c0849351f7d3937c0bfe334287080808080808080808080",
|
||||
"0xf8518080a0a7f6c4a4c6e10af771e26b49055a46b5bd8dc4b739869159004c0f2192a6c4fea0d5f5d4ac2528d201049a8f56210b12a534232de552bb1ac30b7ae69a3bc72adc80808080808080808080808080",
|
||||
"0xf8518080a0bd195422d46aa8ca2875b4e9ffc7aeec1f75ac1658474d1ea837abb1dbfb5acb8080a0aaead344621dbd339dc4ef9c9e764c616996fcdd02fe7df27bae6fd9a16026508080808080808080808080",
|
||||
"0xf8669d396300e542c7cd2a1c5fca601da42a2739f790ad422b4f59fba0844e61b846f8448080a019bd9b7b70a47c4ed5f4bbd7e1713403429580d43093fba01f26a212836c88d2a07f6f0daf66a63b4d504fabde8e9fa491ff678bf22082d8fee03ac3064fcf7de9",
|
||||
"0xf8679e20b8246d45a5a396db3b461bf4a56bd646d9274adeadb5471dd31e30574fb846f8448080a0ebcb3a5f0e5445ac517d6ddfaef7bc3e530d2f0c80c658202c8e3eaf47e4736ca019e0db18cf25c98a25c9e8eac4c99e0653cd8c395303140b41425e9fa3f9bb65",
|
||||
"0xf86e9e20795d27c289621f293bb4bafc6075b72a88572e0c6cf8496dbba98fcdb9b84df84b8087195fcd59f61406a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470",
|
||||
"0xf86f9e20c17e5c6b28e3f8953c4145163d55830f59344e623aefcb5f02bfff1102b84ef84c018814ea497ffec65068a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470",
|
||||
"0xf86f9e20cdcd164a6b73972dc6625ecb078be7e607b54459f3edfaada99be20644b84ef84c80882914fe9beae4017ca0d7b9b364ea565bdabe900d91907e7e539c5ac850bd391537902b4ed7d95150f9a03733510decd4cdde078e264666d17cac208f9f9e93fc0a43b02921be5cf5726f",
|
||||
"0xf8718080808080a0d4116100655ae6f2d3413af404c6ea9686f8050cce9f15ec036f7e947194b3ec80a0e9980203b39555009ce153d8e0d1ace642b67e4a3467b9bf4f0b4172f2d7909d80808080a0274fabf259998a5942e98652543f0dd33ffb9e2677df2860b0eaca1733072af480808080",
|
||||
"0xf8b18080a043c9dfc8ff06ac8a2739ed9ba777738700570d69692a19b05ef391aba502fd5980a04f76780e0592d4b28c5696f18cb4844d40253166a0ff4eeb86a3d86113bdcc568080a0b6e2b14327ac51db4389bbff9c5e96500d4eb7c01537b33946eb39063c334352808080a0eab6132b439ef10614bbcfb0f1f34f11c386d421c350a76da5b369ce9756e3808080a0953fda035d1bdf52096a9b64b8a67bae9b38868f7efeb52f247fa4610f856e008080",
|
||||
"0xf8d18080a06a828e05a511a4611fa8e71ae7b3741f9a1d872501e09b63ded4dacbffec0068a0c1c8acc35f550864092d4e0e928b2b2388e5a9c7a98ee7f3ec24520d98a961f880808080a0a9e18235fb6182f743e5ab434591e8de6fb1f97998c6cf4dbe1042b8e47b0d578080a068a6dbb31844885f57647c476e4095cc34f860131a7cbcc58c25e893438e345380a01aa4b3047ac72c3ba403d3c0a02d7acb0c6aca31bf3e75c350c32f99d9599e90a071074b719a71d4f3f7a5dd15b8a7516f47875cba3d06e8c254f85e8289d487468080",
|
||||
"0xf8d1a0b5aa868d635218e59e8e0243a933619c2d2e0e2e90f1e9980c78091c72132dd78080a042346ee27cbf6b302ad4128a2cc82fa994f2d99cbd4267727a6b6b0825686a14808080a097afbc5015220ca78655520ae3220f78f7950797ca39d00dac89d59fd82ae45e8080a01068144e9a010a07af3520da28cb5406fb9b0cfc2fa2b513b6169b1a829f778f80a0fab9252160b79a4f67a6052ccfc215623153a9ac81e745b55a5c84379a5dd84d8080a09ee18d125aaf02efba2b36fde04cd8f7dc364dafa6f19d362663236131495a2480",
|
||||
"0xf90111a02a7233cf36b9a82827cbdf451c2ad1ed9f15f33f814cf1109adc8aa298185ea880a02924d941ceb50108681a2b8d66667c81833c53834abfada171a801d82c5ce9ae80a0694c1f68cf9165ac298e3ec30bd6f9a3a404d1267c2e077362cab0e3cd4289a8808080a0b4913e81d4d65b898fd33855cd20cc3d44e495af5d130ecfe54b7f344646159c80a01d74bd99789ccf28ee072a94877b3dda93b9c8ec22dfc898bf20fa8d1e2a9e28a0be9658774dcb7389939a50b30548f21ec11a9c7bb51a4abbb1094ca24dc54438a0615d9e343cd207610b6c5ab36b2d075357715aacbb3818ebd264b1a5629d77838080a05f68c2efcfc38326a2b7a422edea985e8404e08837b48e82ffbdcada98770e5e80",
|
||||
"0xf90211a01626f23548f6a2ace334f25b80d1d4c0262c4169bfb9d1e9a1cba9cd3ec3c892a0b0c6afc9bbc1a6565217c591bc4bf1a4a0ab5cd16e170e0a2a7aa7a5c7a53f37a051c7b3e74bfe3a452e03f96dab09d7b165ea7f267abb531468283ec5f08983d7a021bd8b2703177ea15230380ecbdd84e37d2c33d6749c8ac689126e49432e9d26a0936b1cbbbff21020e3065f0688945855595cea1404342c3d3f6429d16065c6f2a095a72bd7257d8ff6f5f770494677684574c4c17929df9831502b7dd762b8bf9da0038d5f1103d42f8cb3bbd095d87f1856faa886b3dcb5d29d10f0b7f3adc92200a0c93b261ad32b586ffcf3e2b3dfd58f5427f4fbc3bd180f3b564abecd99d49f45a09823f6910fc24aeac5888a5ecb4aa21ae1732bceec894b730735c56e9a2b9c48a0afd3e17bc3e85c0b8c7ce17990ffc8e0741ccf928739ad757a30822d0e2530ada09e0807112c11b8598ca8a3b4f54f1f9caded8f1af924d870d756f7b355dce85da00de813974e1c3f7f3d618b0c44d745b4a5aa294199d6b020c4787eee6b060c17a07f41788d59a7698850aeec9637942aa91cd832fc5866bd7ac360c524e31f6b64a017425c618e53f77a0b1015b723d706ce44ffcc182adf2e7c60440f6f4690e2b2a016f5b309197a34c6eb937047cbeabc7ea753b144ae8d90cedf7017da4c74443ea087faa0b918b5485a1065668d30c9792c7c0add5e84656990734ac55541bc24bd80",
|
||||
"0xf90211a01f4d23739e6fc214fee3fe0369943a3502351331ef6e2c5b8756960e144acfa2a0fd6392b264cfdc30971a59dd05205bce661d198416a00e85a3f114ddb046e97aa0fb187f4d586b5e2caaa752450c087e683661a17a5a81f0bdf962520acf628799a0505741864b825779c9b0473c61c73a4e7852e1118cf5a6230cea976653b4fe9da02e76237101a74c142eb8dda38af46c2076377f3ed9f5b5f02ad32b6cadc73221a0c1bbf8f910f20250eed36d0e08df47dfb61a90cb87e357c234dbed24481ece54a028bd4323b50e77bc152a39c0bd7f3db7930cd08813519da851479699717504d5a0abae70b12676b71e12f7f894b460b9f54ab87f08358d066d6cd2aeb6d75592c5a01433903533a96521238b6db932357317dd5a44cf4b4af9fd593f3e1ab0862fc5a0384441f5258eb7d352e246e46d1cc212000039dd61dee5706301ad3aef3764ada0b658e96d8a414f7ebacfee1bfea1177dd90eb33af6e0ae6130817daac5c9c7c3a05cb19c244cb8b78ad70317a13b316257c2bf17eb312f373d78db6297e566ca53a03e7b900b5c922027b5db847c38bc46dd61969c92bd0992f66a2abfef7d0dfe3ba0dff653adb7cf638aeebdba34d8c737d9085883f882cd517b1373dea8a620613fa0ec028e333211418e0efb847b026703ff8616a85f621aa5a1915305be848a64a9a06ce0bd094426aff20c1ac2959c29d6c363c44412347b52184d7880b341fe891380",
|
||||
"0xf90211a0271d67d1455e8a6f4586019c8afbed6fdfdc997eba2d09ec7beaf72caa3d4335a038e09e79021a5e98f737da0d9c540e4b4c39306b6dc59d9331a0ef9915521b29a051eb77824cb7a94041c6473b9b30ab9a0f59ae7e8bf5627728c0fe5e8a91c2f6a063ff2fb4093f6919a104c19f7672688d3dc53616c010875b264e519188565beaa057d4c48f865ff400560d2a45dbf78361113d886de4050b826892da2e4dfdfd00a07450c8db528c8a12d68929d433533c74510e6e66978ef4188d09c63d166e7eeda0d66ba24706f374d8f6d6dfe98272ee21d77755cc0668f05b6412256eec71bad6a05cd756dd7c1a8dc777225ba914007bbcac5fad3589fa462f79bb5abf7c402035a059249a2e071d0ea0777a869ab0ff224feb06c71e2a150feded0b5ba613a807d1a06643e0da19c7c14e7aa6273f30f3c361a7e6f3a2ca204789f021d82143300090a087e05dafe07c652f1346995b9f658b90fcffb4d9d1096004cf55d361ac9fdb4aa0844c03f9f37ed5faaf10de6a7b35f8a2037e1fb295749964a7f3fd1a361e9042a041290bbaea599dddab8a658a86b1987e80de7d39716c91369e5470dd19f8c4c7a029ed602a4ddf581c7478dd3de21cfe72096e223156cb9aed973f10f0df56cc5ea0d551a1c8cbce0305083e4528f68776b93ced1e9491deb541aec4ab36af09c728a0239b976f0ebfadce6e07626229b9d31b80099e01868bd555415456a28e54a07b80",
|
||||
"0xf90211a032b374dc2b5335b1161704ead9945df7ed4868b58d4bb4f7225fbbb328ce0d2da04b8ea34bf3a3bca72d15a4bc4bb1f812e948f3cab08b0b70eb3509dcd8977e22a08bb2fa4bc083fb9dcc0ec117da5d43151d4779e79aa72287b65559d436f8e362a0dcfb0c04b041a0309c18625e80a82e68f2cf4a9c5bb62f669c2323c96035679fa03c92665b4a1124bd86c635aa79ceb608743a3267f4f334863a3f6e7f05a0852ca0fbf25fa700001dd9e7d9a63e8e2c8a1a12e02fd85bc86b9b537fda95254c453ba0e865af162daf5380022006074abc2c7c50cc9ef7e8e941984cf4c0ef3b464c9ba05e1bba73a8177b09bd344051031f5dd3b5b7a8044ab5404e9c89f223e90ee444a087e509da93ad7d996219ce56aad9c1cd34c9929548e16aa7b72543dd74ea1f91a0343465cc482e6f3527aec7c70a659871e3959ad45e8706dcebe9375e43949cf3a0500062c9985d0ca265267e995d4dd2a5376025ac492d9ef7010d80927357f8daa01e3aed55c555998e4ea6f71a278568cf2b467566168dc9ce42c781cc46c2eeb3a0185777b8c689da505a547e947b2d47acc5dd459a1d9436611214fc6941f9c154a0eb7be51512e38361b403b7426e983eb1824e4ea3417d2318e1a9b9b2c5f07c3ea089e45c676dcbf3cf9f4993a0af98e0c69b3955c30edd3882231273e27e670064a02818f1f8cdd943ef5cc81ee12d788e3a608566d77d362efbcff75dc36fc552c280",
|
||||
"0xf90211a0364fda3959e2889198a813bdf079dc0f7c729fdb032e4751dd3dc04551a60184a00284c802aacd8302b066c4689ec4b5fe90bc5c57230d4e91e4fbda93644291bca0794c3f11f7c1b3234a79a3de30585721f4395db5d985d2c34166d6472a304ac7a0c53d440ca017cd811dff5d516c4ab90c8696a9f34d04d8b7ea13cb0c5fa280d8a06be46efa42ac8fed8772a2127f85e3ccf1cb84af613a2eddab7126a24e4f4266a00c727a671775e9fc09babe6d0107eaceb7d9fbd626842347dc3b604925e52320a0cc4502fba4df950b8d01629185f0a89e0e7432218e43dc9e408b0d864008245ca00e8c76b4703b747894e851db14585b7385250976bf3a4c7ac2dbc2d9dfae3e56a003b550fe6ecb528a68c05de37b2fb972d5d85e8bca5acbc416a3b914c8bbe5b0a0efdc83cf2b4fcd76d6b3310c60b305f06a0505d3357663d8ed2c0a9fe7e203dca05b2c1fbab0302a400260ad163e6f30939038fd24392569b660c6692bc190a931a03df9a967ef3d72a72e11888886320d35491c29a1c33dd6e37b28470297084959a004dee2c5c40c7bc62d36e84fbe15247c5a26b198987ce95dfd2e525e9e4849e7a0f2d9beaec07525bf7180812f470cdf6224fa821f5f2645292df55ec1badd394ba019d9ecd3933a9892352eb29d377438c6c74c331b6925f96f1475d90d07ad81ffa021101aa3b64c3b33cca339980ba54be9a95840bf199c15c3c478c7db0df5718880",
|
||||
"0xf90211a03b6c36387b53b7e289c3c86af2b557140e4014fa048d0d99135743b7d3fbc60ca006cea7b7573ab37a61fdc9e1097dfdbe282e92d25a42c7d3d91dfdb18019a4c7a0012896d62f59b441aefcd509480e35720fd453df2790e29e813d53b8370d1f4ca0895e6b1257f117ff879ed435960eec1bc24468bd38474f000e518ffb0b315950a0a0edc39e928cc94681b2760f132dcdc83fe17d54c5c5b09de7603e5ab7ebf0ada0f8f82b7e0d0e2a8dc7180e858091131fece2647a6c94588a48b9df739cf5a678a054328eec0b3130aa7d9ef3170d8a4daf3b9190e130770ca41e36fcb430c7a4a5a0d5615a40f7049c7a24c3e57fcb0b22d9680d6b51fff7df0f2c5ce91993be93f4a0184f39052bc5b695e74a9b8594f0ef283d777cfe309d31c6a36d6eaf8450d053a0db99d59bb3e588d5216fac40e67a433f0b462c486b4c04919c8568f1eecbe2c9a0578f1081e57db980c5016b9afffd1e72f39380db1fca1a666a452506bf733bd0a0a9e0c2be9d87031d659f8e0073f4cca2ecae47e2d36f8196095b5f399b53191fa0e570cedea435f2587acf6976abd12f876cbb2ca4479bdb54666944773fb43d81a02082b032a40baebfbc6167892907f68b995404484a834c1033dbd03fddec533fa0a79d6dec64de82fa2980f9c4fc50046fb3028c138d2a6411a03642120f6c40aea01d433ff2fea00de0b949c4545bd18d3bff68a489c9236c3823d1f3ba5f9a2c0c80",
|
||||
"0xf90211a0518c9b75a5010e831f9c6b6788781d9d0c716f56b04f2469692dbba96749de24a042ec722680c699362edd21d5607ee134a1dc08744ef16de815376cffe6439bf3a004255bac25f9229ea4d6b008ddf8eb9c3b62ccb60696cb947cc08fa500db57c3a0649297d59b86ab2ed4c961861c18daaaa6605ee88eceb20cdad700369f61912ea00d4f50e67c8b436716e42ba95862eef3589d3f118502bd9cd7eff6a4aa424469a0c49b14a62328bc685ac5840104ec701afefbc1f373d1e359765d19b4be55a134a05fa1898562d0c0a354b68e75396ca815adbbc42c4db803040842eb04b37a47ada012ff4cb31daaa8197ff04a58d1a638d983e62e7629e93a10d54d95a758da8ca0a0a68c1bf83e21cf3fb82f4a1e1cbf0521a7165fa8f10b492f10d26408c53ae0cda0e5ef039778fb70e7bb8d79fa4b46aa4f917bcb1d40f9f269e474fe0be0c38809a020cbfec4dc6ff15b2cf70da23ca7f56adaf05867cb3cf17e0263fa9848a2fef2a0d5bcd767436231902d498f18bdb56956f75c92cd22cb2a4671dc0401f39b1733a087e3329ee141b72ce704d38e99299dc474484b939d4186d398a1e9e1016ad2dfa029ad9603f70dcba445e840b1c368a0ea5b8f781d6bcc9d833eba9ffbf7296eaca093e7416cb048a35f096e70347f21ff7260d53e35f18e8d932147c3572a31cddda0779d0a1504b35e7b82dd3c4c924a0b882bbd711bb62e2315a953c1c53beb255680",
|
||||
"0xf90211a054b5b608769e954e71842312ce4327f5fd0a9de9fe96577e2ad1e2d1004933efa0d1d1778c26081b6b2eb465d542adf6d7c2d51d39495aa913c3a9d1ad334ecfeba07b73c31dbca293022eb4d6c400d51b2d8e4d7570672ff3b56f66e36c6208b361a0afac336a1f5c0c5943fb8d8fd1ed96dd2fc8ab9a725b7ea42bbcc9d624692ba7a00f0dd2fa596c4fa10f3c5e37ea99343781a374151ce999e8208524981a5aec84a096024a2f8de0776948c288f387a93233daaee328fa49238b027018540a884101a02f82958e321660cca9bd28d87df56bbf69352c9ab6724e124d63433fcb475e62a0d9b5ce4dc30df03dbc6c6279a2c13fab46b69e57764c4bde6812c3fc217827bba0ee20a4dba9619655e313bf69f274b466f3e520d4ba33109c0ec799a20ea358b8a0a9475a02a4bfc4508aa8b4450d9167e6aeca8eda09e0dbf9b39555f34f8d6acfa0772aef02d6492beb261c2610d3b3a274955037bb8eb25a51d3907a3d09484578a0d126468067a425eee9464db3fb8656246a7b4eb3c81f019928539d5b1ace8847a0491dd72e855259dbdc1db03f28ee87977a0d5e3415302543f418d72e636170e6a0a1206cafc4c5432fae8bcfb9f8a24b2d8f3245d42366da254c3c163047b00ff0a0fa104626ea4d3717fde44ce56c5bfc77092085c9385e4f17ead39db40bce86efa05a6d8b33fb5e90fcb35c2df9beccbabd8b1d6194594af6c44809d9d2171cec4f80",
|
||||
"0xf90211a06f381b1179db700439021226ea01b5bfb166a725995cb3d1db486b6271a6d4dea08b1f1b1cece3f9c685ad221eb5c748844ae10094b5159413fa78eb67519f5a77a0cd1b58e039d77794f1b0caa9c284f32edc4b9816697b922506e2c6ec4f64001da0e40d611a936b73ba10265f910a2d176974fb05d7672bc3f8fccb821ca6688e23a0d10c5c668af0fc36c8be996a08aebfcf7753779202c18d4093ba1f0029a6ffaca057559963a6adb8aa7409b27a0baac46b3200c90f0a7caa5f92a89441f17b4effa0d569eedffcc0913515c4fc4b8ec99685cafeb96becbfc10b64adf79e14177521a031792dfd7356838be320f0a0bb986c38a2eb8d98683e1f2126cbb61e8a97aa71a0dd8f5a39fc7576c69c14a36bc520469ed765f5b7de70b2c3fb8f7168d2d5733ca03947fc017a3e8299c7940f677178d4840f6130b46361fa0daca2fac083a7c3daa04e721a84d7e82cedb7fc50dcb8daa8d00fc1026d9367637a5410425e02ba5a7ca0abc4a5577bf03642ed6b35e1ea54e6d5dbb88c1246ef0512f876c5be6c911c38a0408797f0d706f3e98e6c371a5136a8c9fd01bb3fb603d8cf2297b41e720b42a1a0e361ef7b3eaf3d44972649fdd1efcf81341789b83b5cd912b4ff5cd34da1bf34a0a0aa27268c56052b547ee121ea7e74811d079bf4c8474424eedf0850969a5746a04c022bb64eb2accb8805669d7a09896c9dda30339552a48b497f11057104c98d80",
|
||||
"0xf90211a0771ac0700d8181d770ece0742e5c801b54f3e07adfdf419206c23ba3f7261603a0dd2bb9d9214beed4ec198d9e454708f498a159b634bbee0e7c0f0cd7c0242547a0d929f6b52cff176b39a4111b185b9ecd2366d5aafa43618fa24fda40c5eb0385a0f54733b1e24c09f23946a6c806d35e1803e36fc8386195f57fe72f6489b8110aa033005e6f2fe20f7d5fcdf45bf2f7d2824ddc3b75e1143c77b75777ef674648f8a026677a4d486d40f91488755c0498a42330a73cd8663ad7eecd6decc28d1b5824a0de27d51a0dea901b465d028e9683791f673e5418fc3b0ab3833e45b3015dc6b7a0f28cff3c618f2ffd0b1d488777d6ea1955100b282b3221bd99495c1f852adbc7a08cb90b6fccb14782241e71d5153152bffaddd3eba14fd64d141eb2dfe8707984a061001f162446533f3a5c99ae1aa688987140014c0a9bfa50edd022e7332d7b2ea0a9ef84251647b79bc5deda6f44582bb3e4ad91248b96320b846dab026e14a8bda0918b8c610350b37d79b685f3125aed46d921d5698597bf7843bbde7036dbb99aa024c37d30d749c476e7934dc7f14edf064c6450ea4802b16d0b4b0f936fcc8f7ea079f2fd9460d767ae6892dd0fcd93080a875f6e9089859bff55c009ef84b3c2efa09c551555c226ff9fe630334dca2ba68b30894143e21edf6601922626972ac80aa015712facc4c0df63a6c4894e633935b1fd6cafce22ebf0ebedc4d7604437c02280",
|
||||
"0xf90211a07802c76240fcf12a215e4f2ad5758b716879b46cdd43cce045d58357d69dbc43a08a18e9b3eea79e7bfb3cb08c6e5f14fcc7423b34760fa59305e40adc30f6b585a0f76e13f31b6e80c8963c04d5c350fc879ac62282f88fac96d7b39bf8545e145ea0730ccaa7c2ac7c9c70536d2b255e1cad86a5d1c2acaf33771de460d724103ddfa0de8f6a60c349b30cf49b8468fbb49ff89668d64171f7b56938b9bf5770229681a0c7a6d666f664352e484a81aae835885f475e82e036052b7c9a64d0f19cba06a5a0e2e9292e706ec16eaaf34a3395bf6f9f01641409b29b78fabd4406b68c8241a8a0d8b132fca2d11181db5227db3d2c13c88d42f189b8fcbbd491afa9e44713b1c2a0d943f41119e5a40a347f5a92f1ad52e7feb2082b3216b23751f2841f1f3ae9f4a052bd7f4c0265234c9938a9e22a5a97f43e6a84260f5188a046e1bc6a048b2c53a0c9ccca5909f78e85c4bb459bcecad28dd62f7d2662b55c5101e2d679e1896aada0b3ba2721eea1ee5e075503505a2739ae46f3f0f7970a067d44c999734f39ab97a0b65bc4f44e2650dd51e5f69c9a1da908a6ab4b50c6403768ece2cd4d204f36cea035470731725e6c0d3f83e3667e87fdf6ae2ead00f4ae0e2fab359160553060a8a0c260928ae9c4164e31f3e5f35eea202bbeea330b1223a0fc8ffb402176578314a0416c7427b7d67ecc52035c62fa484b678ab46e767aae2f659494b383ad0020dc80",
|
||||
"0xf90211a0aa7d90ee03ffb0d9aa5c05c6af67e4db06fd13a68aa063196cd9ef2de85a4602a0e44e93ad41215c9e048f472cc6297cad6e593a3bb920f45c626cf5778f3513c0a083b31bbf31042862ed797a99dbe47700766d17e11590d17e35e6a25a5d4ac717a0495a966f4d95e212d0be84c2b6d3f2d06b23c6a06efff2f330895426dcfe68f6a018cade23e6aee5d87a1240df51cb14dd00dd2d803f174b3df05090f6be892d1ba0fca73f16b68cda56863e90182cf60eaceffb6d56742ffd958cb2b3e037b501eca0445b0acd8638e3d85be1dc7321164c4776ef7114b27a91d6b0deb49b1383e51aa0a4f99f1ef7264f5009049f258d1167f08758710b41d77a6d76959944f1905dc5a02b508827fbc96c609386a36f7a09953e771ebb6156a470b1d016a7b3df496915a0aa18b704ad8f03598d08ea887477e75bac9679821d31fdb4fdbba4851a6163b1a07479712ae14cc1856eea1ccfee6ae7d01f3438bb65c7699c9024c8183c079091a0ec5e4c622a262cf22d692db947082fab6d268842bdb963640488698073f785d3a0c460f055f4c9302fca54f9c0035ef8ee560b65ff7b57fa349e3ec822f479cc6aa0a4c972c19ff26382418848e98f60ca5cc70ea9caecb6449e577dfc7b61f015b8a070c35efcab4ba9cb82a140b599e7122e6d32a1c1e78a0451ee5a7054c6224968a067fb845f6b25fa59ab83bb5d0aa6f3a76419a156f6e4cee6c91552431484042280",
|
||||
"0xf90211a0b164ee35e937b1ef3bd53fb5e29475416f6e5396c5de0c8d2c16b428eaaab5a8a07e6cbc8821a2e5de97c4f9bbf4e73dd263446d2a2eeb8301597c9a0886874d10a05079785ef100652b28c42b4833f38e07263e7e914a6530705ac4af3162894720a0b4ca81febd1d87e706e0a9922922e86f81c743abab1051acad88ac278fc96578a0bc8b52c79317545eb485216a4332bc86f16463537e581b31b076dac67b6e8df3a008e366c85d9dd81b66a74c6b67e517b1e255cbde0948f5741e1271edc8073e7ea0babc1ad7b2f41a3f5c4a36bd6a0dfb5d7a7cad15d54704b6a4af0b47fe021be8a031571bd4d18732c1aa82c28a446415debc5e38c2082af44d2f23ca2fa9269973a0394c973c371a5b4713b47c7db1f7f581b1cc288a9a26ce8223c0d4a3681f5b7ea0dea4675aedfb020e8f4cc35aa140ec3ce54ee5078263f95beeef47be72840af3a07e28868239c2f500a968ac3a66a5158fe47d2cad15dc10f18b6ce8f5220a5f01a0f45d9d2a467fd972b3c8d6606e49726210de96e9bd16affdd4f85abe7e1db8a5a0e3f0d70e138312a090a3336d4520f3ad7a3c04c16449832054bb40ba8a73389ba00fa99d25c82e8367fea365216634abec282456cd0a8b1fc4981367b9a4ced7b6a0cb9ff3c33e7a7ad56d6ab4723b8d64de775b4c2e89514c1848b8b421d9c6932ba00d97e9cf3830772b06d4571ca97210f3ed44872de04fe3ba13e22529f893040c80",
|
||||
"0xf90211a0b4cdccaafe641d0355e7e1cc0cd56a2fb39872451032fba08f2988a63765d077a0dd57f6153380308531d14e91816a292f3f4e468d8a312a59ca16c944605a419da0038cba9b6de1b6b41ed33c557e57143e63fc56efb2e8a295db12270327ab28cea037cff299fb76ee49d5c5c0a9ea484163f7bac014309636e36ec3299b450ca875a033eeadab32723cb3a3b993d4e1c4d51e38426c7e30e51ec6e9797479d37786d1a0e4d22ef856cf62ea6ad0bcd2084160f3e1ed47430fdc073586ce8152a2ab58f2a0cc7396ec90f0f95ec6b599030085141bfcdfa99166567884c33f049b3d86e8c8a056843bb382a7b92c401eba43f63be7c94a3700e3e18f2514fd610a6d3578cad6a02d27a46e44ddc9d9b537493c17f26ec9362cc082d6a83c3492d30c5ff045b076a07a73df7038f66e207e3f82ed6c59da7359edab6558e9be7d1f36ec199c4c9e4da0abed072b4cc049119fa7bcd72f98f99a982e49b65a7af6b7eed7c7a446dc52f4a00ae6a6ec60bbf6760d74c778d894aaa84601afaadf866903697553c021257ef3a034ece2ce6f440b308dbcf083f2ebcade4ddefcd240dc31e624b3c48b3591f938a0ad33a4f4c214d228d0c7746f5cb3110b6db8e3f6ac6f1f37ddc061b853030072a098f4a9ed0d784a894c0e185fbfbd3865416e23b5a28dbff26044fcaf879fa2aaa0b4f4966127ceff6e000609f405eadc1439908c4a49b3fba099f491b05b38c5ae80",
|
||||
"0xf90211a0ba347efbf5162b4e11d5f7165fe5021abc457de8058b9c73c0b795169df11545a017c68515c8b1b221bff5ebaec8464637ba14e2891e94432ccf3041097d0118a0a0f7c4afdeb16199924bb4834165420d7aa943fbca0f914389e782fa9273496831a01a3a92daf9066909e83a82aee504040929015103d8a72f9b650891f6b00b1d9fa03244aacc72a1bb45d3f6a699561aa2df694d93f609fb4bcee36d124e0bcea80fa057bf9ae20c5c5d8cf80fb0a929023022dff90b413d805287579ee88e4a1d92caa01d418d5ea545f96cd14dd6656e6df42ca7043467bc06d544fb732291f478a93ea0622a9627a2f56d5e496a733ef177f406b0af485ba96fcd0f1579aa2d612fbd16a0acfc4dff1ce2d8813d37797174197f3a6f9f3190d211f4848d03e2b4e1d340a3a04de49cde26d97f35246c702c53ed9d76e6c3d8b72fda80748d9a1120bb0a8fe3a0b5a1f9777b51ea0f54621abdd13a37cadb93b8a45aa9b2a2bc5c38e9a4ad81bba01f5b209e743bdd1e8f5a4ee0c4e5e6d4dcf6f7345ca1c3236e720ef588bd69eea0aedd4a6a0143418b2d74dc13b1db9feb8d9d1416cdc2685bbbd7f499e4a33b8aa002187be5a0524065659f97f3aea834b0b7a3929c75d48d3b8fe121a175b7a235a07817a5aea82253ab6d0cc281e3788188288e14a23575f606c3091fc5f16559d0a0b0a50408c69f755f225ae9683803dab5e4c42afe58cd6b3a925a648154e0fb4180",
|
||||
"0xf90211a0c2f526f029e48df8ec25edd1efd6418d357282d05931b3e1497d4a4ad2d353eba0cf13db068e5d1b9536e51b5a2989232764f2b605ec99c96810e1fe65ff4462b8a0645d4f4d2d66a1606aa17e3f5de396d9fdb9f04df929ddf0d6730b50fbd1778aa0a8f5c07a24c1bd9837cd115b2d11f908f0747897e18a58ee22c9447a480d5275a0063491a701726175df69ac1f73e180b900bf9ba2117ad24912989aef0d618872a0b6ec4c12dfb4d2630d02fa9aeb0c27a3e255ee5595a4242cf0c293aabbd800bba04662487b447a9c30ab93240e6fedc7a43143a7b27a87fe1760eb61654156eb3da03b9f7c1d4a7c3fbdf10fb23ff3f8ce986f7b54847f0d360f9ea80c10af438337a013498287bca164b6f9bd4c9817da94dab8b7df95b18ee78eec8e652aa28b5ec5a06aafebac9533485244a71caeef70333aaee0a3c25b82df914a8b671131d4db15a0f449cfde23e6ff4869fbed6c88666d7a842e062ebbee70b5bfa8ba98aedc19fda008f41eed36b3cdc9d1525149a33251739b2758a4bdc7cc3a6f078ad5f8d3f004a0c0f217cb73996292ba1083570e2be499c1b6d0c98347929848a9ff2cc8252dcea0278dac1280f4eb2a01549050395acc80a1764fdf249df4eccef81752c90b5e62a0263a292774fda29bc458c84c291bef8442625a697fe987910b2c53d7df2a0891a0a4cd8dbe81bbcd04124bd0653a947a6f05a0b8969e06ae0b330d2652b83a27d480",
|
||||
"0xf90211a0c67e28944b2099ab9f9a52ea2381c9d551fca2c91a3cf1353d965eb1687598a3a0a4e9f157d9a0439c85ae7c4174ee8e31c5a67eddabe5260d069852575c523bcfa0cd18d510d997718c86d8c033d6d68979af3c685ac42bdd9df86c8d5819ff5594a0b7aaf055e7bac40e0b3af3c14a6793c6dd4946d9a763a853c7624557b25acc9ba06f8c8d354ea3d30fa6dc446ba87635b8e1de4f0edab58af089fe99da972c87f8a007e5bede6bb3d4e9d709dbb9bd90c7303c08e73cfd223395c44e43ec0e9a11c9a02b4c9aa654ea40d595e4c156fc77a175eff7c1add3eeae12c7f9422c224d1e0ca046fa3b7d71f16e16706660be73a9192e6c64f5423d893b3c3677ff9bb95af6d5a0e67932dbe91813788aacc491f658f9619ab720986a9a088ec76b72af0d8cdf7ca00fb01d17f0836e8c62700519ef10b1fd362e01505b349ce51385d752bb24d756a07444e46d6f16f46b8282626a0a4901bfac07be80e5e23c2e370abc3cd93e6ba6a0d8fa47d152be467f5e4f7b0e9e0a2fa59adb31cb4f5c7005f85f68218f6bf0fca054165f828ed52296df6cf1d7a59e4f1eb17bb26269e100069febe7905a7bc6bea0ef6f39d8d4b499e6ffaf8040221ae6c175bb6f04820674a9fe5155524faadb0ea0baf122b732d8ff0cf1dc63c9470952dae92be0861e9067a1a9d2055664b5a05aa01a090926c27aec40aa5a89861891366882ea2aacaa9606501ee9d36cf8684e3e80",
|
||||
"0xf90211a0cac16a0945dc1ed3b12b206aae2918e02005238000e6c77470e290cd6ce5adc9a0fdb9b53f2d0c06329452244886ad88f180e7567812af15e18416a6b25c80a4eca0e32e604327bc0dfab6d84cdacb85952609984fc0667fc7b8a294804e70e67b1da0f30e98a99d91e2bdac11d5d8932b4b68dbe8dfc3fe0e2f8cc569a3d8e615b997a084cfbd52c75adec9f6b5f5ac8cda3bbe2669147b746610f47f6a8b3f5374c3b8a05e66571751d7e060b2ae327cebbee1597ca83007e67e2b90a9e7afb73faafd83a0d8b970af74d40cae3fde42b5336e97c4aad39de2660e13edf24e242b97d18683a0782f321b326ee4bcb92184a9394d168cff530967e885398efe1e66a408a4140aa04318518b358f53f0255b7ecd29a608f8d9a257a2663c4fa8b059a79c06b65edca0e875655349a871e8abb829581a4b682b6ae0ffbc09d396504abac6971b998caba07d4ab6f178130caa0b7d67367fec6a425ef631bdf7f5437f2bc43655ec67ce77a079a025ecae8bf06e8f98219a5300c82fd6d9d4e387c03610cddf855ac0c431e5a04385669154a5183ff5c7b14a80e1be83473cc72648b193244c7b29397c182c73a03a9d5417d71e23ebfa6b59b68c239aa311550475a383e4dc8fc37589b7406a4da0219fcfe9542b9c8505bae3e91dea6ae0743e89f89eec01a870a9c476668406dda011eb2b169a4299c7c7e197c13e7770179232733a31c4824e823ab1107f36ad1b80",
|
||||
"0xf90211a0d7cc59bde78dd5ad440b70829a2e56ce85cbf6f530d857db6af010e3c5b1b3a6a063bb40b9c449c923fa14dc1a0c56faa8c9a818559b04326d7f99351f123916caa01cbbd11ff629bd096d74537fa9a47238556e045873bf0f998e65632adedc80a8a0a1e80a3a2bf9f82ac93b47dbe6fe9f6b5fd1c30f3bad21d51e9f75a3a43bf018a0c29d54c9f01601ef5a0e825997bf31de39ecc93d899874283e66abf7f9b05a80a063d944bbb9bbdb23d9edd96565b60d07d4c66f20c7d1bbf910f04953928c5489a008ee14fddd575da911f736ae52ad6601bdf9143a211ae53bf6704935894e36b1a0a1e412fd6f4ec5d8e394a6adae2a79882755f37e6306cfd4e2f82c11b6d153cda0fa299f611ec39e8912668b777a3ecf2f07fb5861f508f87f12807c55264e2726a0d848708ff25a31f8983357aeb53fa9522c89c160b63b774e52aed5f3c739d399a047ad093dbcb6f8d68608bce232f915b6add1b91ed7fba5a05555ce51bdc71adda08e7c079976e3b4800d10b432984219970022a1249619502d346c048ddd2e29bfa09d02b0e08643a18654b1890de70333286e8dfc2953329385b58eaac87db49d54a0d7ab64823443d6e4001c199a5ea79e2241404297183f61c0e401dde9465482a8a07bd208ebd532ef6eed9a7278123934417ae8941359362c2812b189bef84f6d22a0d54bf9c103aeb56f669c9162401a1262eda875d817053f9ee631713ed7e40bb480",
|
||||
"0xf90211a0f2abd2af7753e4bd6cd5f411325ea79516808fa57b00c9c67f595a9030960505a0a13ff95e57437a4a2f17c5ff718981f926c5c1fe178f1d54f83dfce3240f69b8a0d991114bfec57a4f0872d0b7587ad3beaf8aa49ba9d29432ba199838ec2ff31fa06f8303f582cd7e6f3e73c983d89225944331f36120d4fb923bbcaa39a2384e38a04953b83755138de63e30fca554bebbf9e040111ccaa749bcbdb474a248f2e307a05e6171e1611560ab2e6df6021b0e89d5220a2c5cad022e3a5b1c6f7f955272d4a0a982486985b01d1825053cf79efe29c2c35699c86f4bbe95c60dc4565ccc6a9fa06f61e86e532cb28fada28398118f3b8c26c0e23ec7974018eccf2d7bfe371ce3a0ef54a35cc090ebb4e365cdf2ea84ce8a66495e89a4f2ac2707d4f598cf11c822a02fb1bea578da67148e3044c025c97945385a9df9fe28c10d336e2bbe601d9009a0bd8ec44f6e6eb5e869959713c316b006377daf8db9428f6e96c53e0aa84e8c50a0e25c05f4aa1db15e2e5b9cefa60cb9ec4d87b1b022804973c43d2ac809468671a01f1455319d36468c771c88dbb6a0b8d5e45cdc485dda6d7d51da8c47e71a3d36a05820eb6059279eeba5d25447c29a521efccbbc85950938e7965b30922bd6da12a0f4a2d5294c65ee4dc170301e745353cf8bdc25105591a1352e7266605020e024a09c43d911641bc3420eb09bc1a1609226d9f39d4a082297587bd795d5b7b793ee80",
|
||||
"0xf90211a0f5509071821d2953b53448397715aab3c8436b35cc877e14ddb459a25ab3a690a055371a19dd0d60219312ed8199f918bdbff9e1b13386c454651dcd0e495b84aca0683add264b2e57df174d9feb3604e723c02383a2b2f7cf6098a913a216898369a011d7b15bb3bcab534178efb9596f94f5791687ee1d8e94c2203975b4662e38c6a08fdc2dac03bc46861810c8c420e2760e8f496a47dedc5e55f20bd148d39ac7e1a03e860edf66403d134ea6f254e5b983f34575d1ccfbcdb6c6d332b45da5ab7207a052fee2c677cdc5c21985e1a139b250672546149fdf5746e2b63367204dbfbebca033aa92f9efe3a1c2c8b306e5c86f2eee374ce188b70ae40d891a641526790c54a0b51a1d87c051be047e7f4bb4b76d051ad76db258426e1902ad38c859db41dd76a095707bb25b4f2c902d2ee2749673b6a15f4e4021fea65b96cb73f6df9f50f003a0baaba1431ca2782928c1a0233ea36879b151f6b357bfee9ab169f7a55b912873a0f428bd164c57c2f55288239b66eba55d54d722606100fdff4011bff479851cb1a034d1624be6579ba1402b640ef12f61a9517b5dc9748dd094e5b95d336e3a0bcea02e0b641713136c69efb6609c6d6ea4686707549e310cb8cb50fda538e7e5aa45a07b85a5161815b739a0e6fe6c6e12bf600af6b6aa406e4a4c1dbf3ee88db2a3e9a0306dbf2a7ae616d4aa59e0b705e1fb7e9bb32d07a74fd851c5b01719235d089080",
|
||||
"0xf90211a0ff2b7bd3db5215fcd3a4addc48b260341e921e6944ea98f7bff59e9554eb7b2ca080c40d4ca01a0373def3b1311a1bb634d2bc6839f3d573e6d5b18ca067686235a02a4897763285f0e008e9c8f55211c2798c7bf64bc8fafe4c61f3e27c3cd4745ba0098cf6533b598b591f216405db78c3405f32e987afbbf87097bca7523fd1f4e6a0eb4bf78601251bcf5e868a93e749ed96a9ccf363dfbd6f325203895d6fbb5724a05a3b0ce2f90da950b41e3246e1a5d9e4aff19f1488d91113476b1c714096fcf4a0331131a4dc0369ae59b015b221bceba18b91cef7e0e1f47c92d71fd0157e6aa8a0eb698bf722feb26900414ed3005e47dd0e91459965a6d5bb0a499b9fa038bd7ea0814daea7607d3579cf6cba1666f8a80e4522cfc5b5da376597de723ccfdbc8e1a08f5495d416ed022366315b94737ca5ad598c4427225df1114dd4e9698ad7e3b5a04327b6652aa132c83bd0bc926f8434d77ff7e316d0eb61cdb9419a2fa226602ba01c6174a86d350c17a74c2680f821d2f347b339c0aea8cb0f5e35d196ccfb4760a0fa8add051d8ab4c3de1e9f4993c934ad169a425823f2291740680f62cc7446bda099ad8e6fb81685c54bd3d97e6e941dec74a16b67b76ab8c6551189331d782fd5a069d9ce64cab8ee4a2a160aeb3c1d2e8183b963b1ccec3f6513368c86a1fbb7daa06c64ae718be6389dfc5d9d41f3755f11772ffdcabab41d9316cc60d9694989fa80"
|
||||
],
|
||||
"codes": []
|
||||
}
|
||||
87
zkvm-prover/integration/testdata/phase1/witnesses/12508461.json
vendored
Normal file
87
zkvm-prover/integration/testdata/phase1/witnesses/12508461.json
vendored
Normal file
@@ -0,0 +1,87 @@
|
||||
{
|
||||
"chain_id": 534352,
|
||||
"header": {
|
||||
"parent_hash": "0x19072160cd1086a67605ceee8249774005e952166189d970da6e40614bb71b06",
|
||||
"sha3Uncles": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347",
|
||||
"miner": "0x0000000000000000000000000000000000000000",
|
||||
"state_root": "0xde2c2b3abf6a595d470e03d8bc1eb15c09cdf609a9c409d7ee36988c1bfd9327",
|
||||
"transactions_root": "0x0346294fd78da3786d6248722a8e9c7141281049b95dfe098e5318263bc83454",
|
||||
"receipts_root": "0x056b23fbba480696b65fe5a59b8f2148a1299103c4f57df839233af2cf4ca2d2",
|
||||
"logs_bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
|
||||
"difficulty": "0x2",
|
||||
"number": "0xbedd2d",
|
||||
"gas_limit": "0x989680",
|
||||
"gas_used": "0x5208",
|
||||
"timestamp": "0x677c8b90",
|
||||
"extra_data": "0xd883050800846765746888676f312e32312e31856c696e7578000000000000006611a5c11506cfae48c8ab05d4e1040afaf08694647cd570dd2cf5c3abfa2825610ae11d0add0f476fb5d6002fd6b76cba92fe46eee5639ebc131a7efb6246f400",
|
||||
"mix_hash": "0x0000000000000000000000000000000000000000000000000000000000000000",
|
||||
"nonce": "0x0000000000000000",
|
||||
"base_fee_per_gas": "0x2663f15"
|
||||
},
|
||||
"pre_state_root": "0xb94cab21a2717b8c2d76559cb83bdc292df7b85fcf0e45f9eb20e5e8ebfd219c",
|
||||
"transaction": [
|
||||
{
|
||||
"hash": "0x34cbb20335ee817d89d7a810f396d25df30367e582c09b164c8a5982f677fcb4",
|
||||
"nonce": "0x0",
|
||||
"from": "0x03cfb12dcf979beb6caa71b7559ce8fff7f448aa",
|
||||
"to": "0x6833ccb4605e7c468f543fe3f98614423d33fe68",
|
||||
"value": "0x156a93a6acc105",
|
||||
"gas_price": "0x3db5aae",
|
||||
"gas": "0x5208",
|
||||
"max_fee_per_gas": "0x3db5aae",
|
||||
"input": "0x",
|
||||
"chain_id": "0x82750",
|
||||
"transaction_type": 0,
|
||||
"signature": {
|
||||
"r": "0x19d75699a35a0a088af109acc6f88dc89f928b7b92d03e69d3bb116a8dec7035",
|
||||
"s": "0x25b519cd5c137477b129cdbdeb4eab8cb6e81c5058767546aa3a27b6ccebefd",
|
||||
"y_parity": false
|
||||
}
|
||||
}
|
||||
],
|
||||
"withdrawals": null,
|
||||
"block_hashes": [],
|
||||
"states": [
|
||||
"0xe2a0305787fa12a823e0f2b7631cc41b3ba8828b3321ca811111fa75cd3aa3bb5ace38",
|
||||
"0xe7a032575a0e9e593c00f959f8c92f12db2869c3395a3b0502d05e2516446f71f85b85843e95ba80",
|
||||
"0xe7a0336b6384b5eca791c62761152d0c79bb0604c104a5fb6f4eb0703f3154bb3db085840a40af3e",
|
||||
"0xe7a0366cc928b5edb82af9bd49922954155ab7b0942694bea4ce44661d9a8736c6888584168b9aa3",
|
||||
"0xe8a02052222313e28459528d920b65115c16c04f3efc82aaedc97be59f3f377c0d3f868569cf265bfe",
|
||||
"0xe8a0310e2d527612073b26eecdfd717e6a320cf44b4afac2b0732d9fcbe2b7fa0cf686850171fcfbdf",
|
||||
"0xf843a0390decd9548b62a8d60345a988386fc84ba6bc95484008f6362f93160ef3e563a1a07ed4c7d56e2ed40f65d25eecbb0110f3b3f4db68e87700287c7e0cedcb68272c",
|
||||
"0xf851808080a0a87d9bb950836582673aa0eecc0ff64aac607870637a2dd2012b8b1b31981f698080a0c5e057ca6d669933e13616d0610d37d68c093c0849351f7d3937c0bfe334287080808080808080808080",
|
||||
"0xf8518080a0bd195422d46aa8ca2875b4e9ffc7aeec1f75ac1658474d1ea837abb1dbfb5acb8080a0aaead344621dbd339dc4ef9c9e764c616996fcdd02fe7df27bae6fd9a16026508080808080808080808080",
|
||||
"0xf8669d396300e542c7cd2a1c5fca601da42a2739f790ad422b4f59fba0844e61b846f8448080a019bd9b7b70a47c4ed5f4bbd7e1713403429580d43093fba01f26a212836c88d2a07f6f0daf66a63b4d504fabde8e9fa491ff678bf22082d8fee03ac3064fcf7de9",
|
||||
"0xf8679e20b8246d45a5a396db3b461bf4a56bd646d9274adeadb5471dd31e30574fb846f8448080a0ebcb3a5f0e5445ac517d6ddfaef7bc3e530d2f0c80c658202c8e3eaf47e4736ca019e0db18cf25c98a25c9e8eac4c99e0653cd8c395303140b41425e9fa3f9bb65",
|
||||
"0xf86e9e204aefd40aa8af649cfb9ebf7de3d4a668aa82965c2cef0f1815d6107271b84df84b80871571da4bd54106a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470",
|
||||
"0xf86f9e20343e3e7797d2f3fbe0d79255e0831e623e99f579d87491ac30085fd6eab84ef84c80882a82f989c37b8fe9a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470",
|
||||
"0xf86f9e20cdcd164a6b73972dc6625ecb078be7e607b54459f3edfaada99be20644b84ef84c8088291501f338e73324a0d7b9b364ea565bdabe900d91907e7e539c5ac850bd391537902b4ed7d95150f9a03733510decd4cdde078e264666d17cac208f9f9e93fc0a43b02921be5cf5726f",
|
||||
"0xf8718080808080a0d4116100655ae6f2d3413af404c6ea9686f8050cce9f15ec036f7e947194b3ec80a0e9980203b39555009ce153d8e0d1ace642b67e4a3467b9bf4f0b4172f2d7909d80808080a0274fabf259998a5942e98652543f0dd33ffb9e2677df2860b0eaca1733072af480808080",
|
||||
"0xf891a09efe8b87cbc50a75f253ff7b3394538b5f7219befd17d4184e24bf45c14371568080808080a04a0a1c32cba82aaa4550828b9726141a989a513859cd28eecbb4594934cfd11aa009bd6ac5cc9f56a277e08c4e7c1590d8ea5d715f2e319d21d0425ec6895b6a4180a01fd7ddd42d5efde9f5ebf8aedda65ee137530348c6d021cb507e7fdb82f17db180808080808080",
|
||||
"0xf8b18080a043c9dfc8ff06ac8a2739ed9ba777738700570d69692a19b05ef391aba502fd5980a0cff9ab07901e8403c5c36b4d1aaa7e5a7aee78f583dc4b0a6f960b8baea7e9cd8080a0b6e2b14327ac51db4389bbff9c5e96500d4eb7c01537b33946eb39063c334352808080a0eab6132b439ef10614bbcfb0f1f34f11c386d421c350a76da5b369ce9756e3808080a0953fda035d1bdf52096a9b64b8a67bae9b38868f7efeb52f247fa4610f856e008080",
|
||||
"0xf8d1808080a04c7cd7da2d9d98645d0cb3ae7edb59b3f1754f1cd54cf24d542de3030865a80b808080a0ea5ff2da5e87ccedac3a2046c5f30e852dd078add7fe9e8bfde928ddc9f9214e8080a04c7e69f654c909579a621de894b5e8cb67b98e5cefdbae89deacab1299d0435a8080a03e893e553c0901f468baf92f55315425fb17b2dad29822770b1fcaeb0dfedcefa0ce91655b30f10511aca098122022f8da5fde7cfae7bb3a2548a5f90b60a96378a000547d8eb855e318aa96ba76980e3f4a40391e5083635c35d49451f8a59627fe80",
|
||||
"0xf8d1a0b5aa868d635218e59e8e0243a933619c2d2e0e2e90f1e9980c78091c72132dd78080a042346ee27cbf6b302ad4128a2cc82fa994f2d99cbd4267727a6b6b0825686a14808080a097afbc5015220ca78655520ae3220f78f7950797ca39d00dac89d59fd82ae45e8080a01068144e9a010a07af3520da28cb5406fb9b0cfc2fa2b513b6169b1a829f778f80a0fab9252160b79a4f67a6052ccfc215623153a9ac81e745b55a5c84379a5dd84d8080a09ee18d125aaf02efba2b36fde04cd8f7dc364dafa6f19d362663236131495a2480",
|
||||
"0xf90111a02a7233cf36b9a82827cbdf451c2ad1ed9f15f33f814cf1109adc8aa298185ea880a02924d941ceb50108681a2b8d66667c81833c53834abfada171a801d82c5ce9ae80a0694c1f68cf9165ac298e3ec30bd6f9a3a404d1267c2e077362cab0e3cd4289a8808080a0b4913e81d4d65b898fd33855cd20cc3d44e495af5d130ecfe54b7f344646159c80a01d74bd99789ccf28ee072a94877b3dda93b9c8ec22dfc898bf20fa8d1e2a9e28a0be9658774dcb7389939a50b30548f21ec11a9c7bb51a4abbb1094ca24dc54438a0615d9e343cd207610b6c5ab36b2d075357715aacbb3818ebd264b1a5629d77838080a05f68c2efcfc38326a2b7a422edea985e8404e08837b48e82ffbdcada98770e5e80",
|
||||
"0xf90211a003dbcf12f7e9fe161a55cb485a7d62a4e917ea935d4071fb5598e35d53cbdaafa066dacd1591f9467282500aebf53081dbbdf7059404b2a3bb7834178e9144e561a03b346b6aba41091fffd56d217a5f0715ead2f00188e7972fd8ffe9b1a5d70bf2a0126571e4b6d6ac74cdbe9dd57e816a744522f256834cc0b2a0ec373988642a08a01788f176f11c366530780860e0537e987712ebc671fe805a032a9b663ca82d10a0e4448b601a6a77ca630ca98c2202a24bb1ce89a5450344d7e71f9e937a391778a0d9eab7214bb00666829bd6edf2047dfb6760849e214429913e8aacb3f0cb48c3a089f0b4fa7c3977de28132e873c9fa55469d32378c471e03cbc7a5d04df77cb3fa0d64c68214edd8c6fde11e22906838fca017cd75461d1f1e40f4b012270f44f4ca0407886c3ff0f7b38b544e662ebb8105611910c573df998ebf701cfe5bfef43c5a08a9b93535f1f8a6f99f4a73fe60ae9b1b479ec773d4fd8f66669ea2bff7124c6a0754287e493666225bac64fa7a25f33e20597b47417d337121b986ff2759cc5cfa08c0b5b7855878462d2ca53ed61cf9334d0c3a43f085994da5b5377c6fc891abfa09671ac55eddac36025e3cf3bec43b1a2ab485e3af3454b48954e1ec55aad902ca05eb52ac3702f94a86959ca017e899fcd6646171821710380970e990100316685a09687be70cbc142f0eae0826be4ebc20da694f0cb244794c095271f9ea36a8bf280",
|
||||
"0xf90211a01626f23548f6a2ace334f25b80d1d4c0262c4169bfb9d1e9a1cba9cd3ec3c892a0b0c6afc9bbc1a6565217c591bc4bf1a4a0ab5cd16e170e0a2a7aa7a5c7a53f37a051c7b3e74bfe3a452e03f96dab09d7b165ea7f267abb531468283ec5f08983d7a021bd8b2703177ea15230380ecbdd84e37d2c33d6749c8ac689126e49432e9d26a0936b1cbbbff21020e3065f0688945855595cea1404342c3d3f6429d16065c6f2a095a72bd7257d8ff6f5f770494677684574c4c17929df9831502b7dd762b8bf9da0038d5f1103d42f8cb3bbd095d87f1856faa886b3dcb5d29d10f0b7f3adc92200a0c93b261ad32b586ffcf3e2b3dfd58f5427f4fbc3bd180f3b564abecd99d49f45a09823f6910fc24aeac5888a5ecb4aa21ae1732bceec894b730735c56e9a2b9c48a0afd3e17bc3e85c0b8c7ce17990ffc8e0741ccf928739ad757a30822d0e2530ada09e0807112c11b8598ca8a3b4f54f1f9caded8f1af924d870d756f7b355dce85da00de813974e1c3f7f3d618b0c44d745b4a5aa294199d6b020c4787eee6b060c17a07f41788d59a7698850aeec9637942aa91cd832fc5866bd7ac360c524e31f6b64a017425c618e53f77a0b1015b723d706ce44ffcc182adf2e7c60440f6f4690e2b2a016f5b309197a34c6eb937047cbeabc7ea753b144ae8d90cedf7017da4c74443ea087faa0b918b5485a1065668d30c9792c7c0add5e84656990734ac55541bc24bd80",
|
||||
"0xf90211a01f4d23739e6fc214fee3fe0369943a3502351331ef6e2c5b8756960e144acfa2a0fd6392b264cfdc30971a59dd05205bce661d198416a00e85a3f114ddb046e97aa0fb187f4d586b5e2caaa752450c087e683661a17a5a81f0bdf962520acf628799a0505741864b825779c9b0473c61c73a4e7852e1118cf5a6230cea976653b4fe9da02e76237101a74c142eb8dda38af46c2076377f3ed9f5b5f02ad32b6cadc73221a0c1bbf8f910f20250eed36d0e08df47dfb61a90cb87e357c234dbed24481ece54a028bd4323b50e77bc152a39c0bd7f3db7930cd08813519da851479699717504d5a0abae70b12676b71e12f7f894b460b9f54ab87f08358d066d6cd2aeb6d75592c5a01433903533a96521238b6db932357317dd5a44cf4b4af9fd593f3e1ab0862fc5a0384441f5258eb7d352e246e46d1cc212000039dd61dee5706301ad3aef3764ada0b658e96d8a414f7ebacfee1bfea1177dd90eb33af6e0ae6130817daac5c9c7c3a05cb19c244cb8b78ad70317a13b316257c2bf17eb312f373d78db6297e566ca53a03e7b900b5c922027b5db847c38bc46dd61969c92bd0992f66a2abfef7d0dfe3ba0dff653adb7cf638aeebdba34d8c737d9085883f882cd517b1373dea8a620613fa0ec028e333211418e0efb847b026703ff8616a85f621aa5a1915305be848a64a9a06ce0bd094426aff20c1ac2959c29d6c363c44412347b52184d7880b341fe891380",
|
||||
"0xf90211a0271d67d1455e8a6f4586019c8afbed6fdfdc997eba2d09ec7beaf72caa3d4335a038e09e79021a5e98f737da0d9c540e4b4c39306b6dc59d9331a0ef9915521b29a051eb77824cb7a94041c6473b9b30ab9a0f59ae7e8bf5627728c0fe5e8a91c2f6a063ff2fb4093f6919a104c19f7672688d3dc53616c010875b264e519188565beaa057d4c48f865ff400560d2a45dbf78361113d886de4050b826892da2e4dfdfd00a07450c8db528c8a12d68929d433533c74510e6e66978ef4188d09c63d166e7eeda0d66ba24706f374d8f6d6dfe98272ee21d77755cc0668f05b6412256eec71bad6a05cd756dd7c1a8dc777225ba914007bbcac5fad3589fa462f79bb5abf7c402035a059249a2e071d0ea0777a869ab0ff224feb06c71e2a150feded0b5ba613a807d1a06643e0da19c7c14e7aa6273f30f3c361a7e6f3a2ca204789f021d82143300090a087e05dafe07c652f1346995b9f658b90fcffb4d9d1096004cf55d361ac9fdb4aa0844c03f9f37ed5faaf10de6a7b35f8a2037e1fb295749964a7f3fd1a361e9042a041290bbaea599dddab8a658a86b1987e80de7d39716c91369e5470dd19f8c4c7a029ed602a4ddf581c7478dd3de21cfe72096e223156cb9aed973f10f0df56cc5ea0d551a1c8cbce0305083e4528f68776b93ced1e9491deb541aec4ab36af09c728a0239b976f0ebfadce6e07626229b9d31b80099e01868bd555415456a28e54a07b80",
|
||||
"0xf90211a032b374dc2b5335b1161704ead9945df7ed4868b58d4bb4f7225fbbb328ce0d2da0416ecbbbca9184ff77c781ab0458d4c10fc18112f215eeb042d15ad8133e529ba08bb2fa4bc083fb9dcc0ec117da5d43151d4779e79aa72287b65559d436f8e362a0dcfb0c04b041a0309c18625e80a82e68f2cf4a9c5bb62f669c2323c96035679fa07a5b3d2d07a3e86f14f85106fa71bee28784167062e5523701369c72a8a0e850a0fbf25fa700001dd9e7d9a63e8e2c8a1a12e02fd85bc86b9b537fda95254c453ba0ab13ce4f0eb18eb58c356e0500a29eba38ed990f61129f713b35518f5f1a5d0ca05e1bba73a8177b09bd344051031f5dd3b5b7a8044ab5404e9c89f223e90ee444a087e509da93ad7d996219ce56aad9c1cd34c9929548e16aa7b72543dd74ea1f91a0343465cc482e6f3527aec7c70a659871e3959ad45e8706dcebe9375e43949cf3a0500062c9985d0ca265267e995d4dd2a5376025ac492d9ef7010d80927357f8daa01e3aed55c555998e4ea6f71a278568cf2b467566168dc9ce42c781cc46c2eeb3a0185777b8c689da505a547e947b2d47acc5dd459a1d9436611214fc6941f9c154a0eb7be51512e38361b403b7426e983eb1824e4ea3417d2318e1a9b9b2c5f07c3ea089e45c676dcbf3cf9f4993a0af98e0c69b3955c30edd3882231273e27e670064a02818f1f8cdd943ef5cc81ee12d788e3a608566d77d362efbcff75dc36fc552c280",
|
||||
"0xf90211a03b6c36387b53b7e289c3c86af2b557140e4014fa048d0d99135743b7d3fbc60ca006cea7b7573ab37a61fdc9e1097dfdbe282e92d25a42c7d3d91dfdb18019a4c7a0012896d62f59b441aefcd509480e35720fd453df2790e29e813d53b8370d1f4ca0895e6b1257f117ff879ed435960eec1bc24468bd38474f000e518ffb0b315950a0a0edc39e928cc94681b2760f132dcdc83fe17d54c5c5b09de7603e5ab7ebf0ada0f8f82b7e0d0e2a8dc7180e858091131fece2647a6c94588a48b9df739cf5a678a054328eec0b3130aa7d9ef3170d8a4daf3b9190e130770ca41e36fcb430c7a4a5a0d5615a40f7049c7a24c3e57fcb0b22d9680d6b51fff7df0f2c5ce91993be93f4a035d13160b0f982a8bdb7a20bf228e19e36b19848fbfb6aaf6d79c9fdc904d61ca0db99d59bb3e588d5216fac40e67a433f0b462c486b4c04919c8568f1eecbe2c9a0578f1081e57db980c5016b9afffd1e72f39380db1fca1a666a452506bf733bd0a0a9e0c2be9d87031d659f8e0073f4cca2ecae47e2d36f8196095b5f399b53191fa0e570cedea435f2587acf6976abd12f876cbb2ca4479bdb54666944773fb43d81a02082b032a40baebfbc6167892907f68b995404484a834c1033dbd03fddec533fa0a79d6dec64de82fa2980f9c4fc50046fb3028c138d2a6411a03642120f6c40aea01d433ff2fea00de0b949c4545bd18d3bff68a489c9236c3823d1f3ba5f9a2c0c80",
|
||||
"0xf90211a045844b8f6d999428dd3a2f7e994388726da9126b7db6384ac77b37590d35eb25a09c686ed46871c61366631d299cb84b3c0381ecfc611db515c428a08a0346171aa0f762c8c18304c3e8f4fee62bd90636e2d8b247983f687337472d6097d38f0b53a07478957ce2bee4a17e0784074ab285f55c6c4046650a81f5d8f27c08238745d6a0f17709344cd7cee379b3e233a6afd07bbfa43ace7fe167e1f026f4b29dc63313a0d3d50e580c2e7f33d30d2a4113e4ecbfaeb3d78a5ea004af8486d41bef2c39c4a0636ebe1f4994d7ad28e2740a8e82c9b5e863fd0025fb00faaa575cbb7cb86926a034cf3cb76eb799eb8355d81a75b1be99212359d378fc61733f232190966dc669a0bfbb64f75f3f8e9e0f23889367208d990155c6ab01e8ee1e3a4a532b4304e3c9a0b71ce66da13ef62de1289159e2bdfaff127b707c98e4a89c385982ee60ffc72ba0f31d1792554607e4d6b4749ac9d56251ec6f216981e33919c90f828794b557bea0fff134206151e2a3ec9d87ff2df9dbc266ad7adff08e942c8864be08d47bd02fa061abb7adbe68bb47cb76e277f422d8ae8c8246d6f8624da4ee3970da4015d4faa09ecfb4f8ce948df16b0c5bd62c4142e2cc62904c65d6603a0c58bb0271654e44a0dde6db590bf16f22cd42ae30be58b5b40f2829ea8457a99a52956cdb40e2234aa0b5d3cb31f37c5a59a975b0fb7170a3307bb463cbaa88ad02816d237eb3d97a3b80",
|
||||
"0xf90211a054b5b608769e954e71842312ce4327f5fd0a9de9fe96577e2ad1e2d1004933efa0d1d1778c26081b6b2eb465d542adf6d7c2d51d39495aa913c3a9d1ad334ecfeba07b73c31dbca293022eb4d6c400d51b2d8e4d7570672ff3b56f66e36c6208b361a0afac336a1f5c0c5943fb8d8fd1ed96dd2fc8ab9a725b7ea42bbcc9d624692ba7a01859d15cb033fbd2da53eb2eb862b8a978fab780cc0f3a238a9c354f1b0c6e9da096024a2f8de0776948c288f387a93233daaee328fa49238b027018540a884101a02f82958e321660cca9bd28d87df56bbf69352c9ab6724e124d63433fcb475e62a0d9b5ce4dc30df03dbc6c6279a2c13fab46b69e57764c4bde6812c3fc217827bba0ee20a4dba9619655e313bf69f274b466f3e520d4ba33109c0ec799a20ea358b8a0a9475a02a4bfc4508aa8b4450d9167e6aeca8eda09e0dbf9b39555f34f8d6acfa0772aef02d6492beb261c2610d3b3a274955037bb8eb25a51d3907a3d09484578a0d126468067a425eee9464db3fb8656246a7b4eb3c81f019928539d5b1ace8847a0491dd72e855259dbdc1db03f28ee87977a0d5e3415302543f418d72e636170e6a0a1206cafc4c5432fae8bcfb9f8a24b2d8f3245d42366da254c3c163047b00ff0a0fa104626ea4d3717fde44ce56c5bfc77092085c9385e4f17ead39db40bce86efa05a6d8b33fb5e90fcb35c2df9beccbabd8b1d6194594af6c44809d9d2171cec4f80",
|
||||
"0xf90211a0771ac0700d8181d770ece0742e5c801b54f3e07adfdf419206c23ba3f7261603a0dd2bb9d9214beed4ec198d9e454708f498a159b634bbee0e7c0f0cd7c0242547a0d929f6b52cff176b39a4111b185b9ecd2366d5aafa43618fa24fda40c5eb0385a0f54733b1e24c09f23946a6c806d35e1803e36fc8386195f57fe72f6489b8110aa033005e6f2fe20f7d5fcdf45bf2f7d2824ddc3b75e1143c77b75777ef674648f8a026677a4d486d40f91488755c0498a42330a73cd8663ad7eecd6decc28d1b5824a0de27d51a0dea901b465d028e9683791f673e5418fc3b0ab3833e45b3015dc6b7a0f28cff3c618f2ffd0b1d488777d6ea1955100b282b3221bd99495c1f852adbc7a08cb90b6fccb14782241e71d5153152bffaddd3eba14fd64d141eb2dfe8707984a061001f162446533f3a5c99ae1aa688987140014c0a9bfa50edd022e7332d7b2ea0a9ef84251647b79bc5deda6f44582bb3e4ad91248b96320b846dab026e14a8bda0918b8c610350b37d79b685f3125aed46d921d5698597bf7843bbde7036dbb99aa024c37d30d749c476e7934dc7f14edf064c6450ea4802b16d0b4b0f936fcc8f7ea079f2fd9460d767ae6892dd0fcd93080a875f6e9089859bff55c009ef84b3c2efa09c551555c226ff9fe630334dca2ba68b30894143e21edf6601922626972ac80aa015712facc4c0df63a6c4894e633935b1fd6cafce22ebf0ebedc4d7604437c02280",
|
||||
"0xf90211a07802c76240fcf12a215e4f2ad5758b716879b46cdd43cce045d58357d69dbc43a08a18e9b3eea79e7bfb3cb08c6e5f14fcc7423b34760fa59305e40adc30f6b585a0f76e13f31b6e80c8963c04d5c350fc879ac62282f88fac96d7b39bf8545e145ea0730ccaa7c2ac7c9c70536d2b255e1cad86a5d1c2acaf33771de460d724103ddfa0de8f6a60c349b30cf49b8468fbb49ff89668d64171f7b56938b9bf5770229681a0c7a6d666f664352e484a81aae835885f475e82e036052b7c9a64d0f19cba06a5a0e2e9292e706ec16eaaf34a3395bf6f9f01641409b29b78fabd4406b68c8241a8a0d8b132fca2d11181db5227db3d2c13c88d42f189b8fcbbd491afa9e44713b1c2a0d943f41119e5a40a347f5a92f1ad52e7feb2082b3216b23751f2841f1f3ae9f4a052bd7f4c0265234c9938a9e22a5a97f43e6a84260f5188a046e1bc6a048b2c53a0c9ccca5909f78e85c4bb459bcecad28dd62f7d2662b55c5101e2d679e1896aada0b3ba2721eea1ee5e075503505a2739ae46f3f0f7970a067d44c999734f39ab97a0b65bc4f44e2650dd51e5f69c9a1da908a6ab4b50c6403768ece2cd4d204f36cea035470731725e6c0d3f83e3667e87fdf6ae2ead00f4ae0e2fab359160553060a8a0c260928ae9c4164e31f3e5f35eea202bbeea330b1223a0fc8ffb402176578314a0416c7427b7d67ecc52035c62fa484b678ab46e767aae2f659494b383ad0020dc80",
|
||||
"0xf90211a0992e74061d09ad0b2e377b345e50d25dc89b6e460974430ae67770ec8ab70e13a0d2c1747790b372c44a99427ad7261288928c6cb30a7b6cec5475c5fa8c871c6ca0275e0b2a6c04463f5dff1d4f7c4e831ea8ffd985fb54af570f7fb2c6cc4795b7a064517bc86ddf30d1d296bbfdb9ccd4e13f344273a5689f9244a0ea9e1f5c3e10a023c6f1dd8e3491edcfd84422636471c178149ecbbbad0207ce851c9b36dbb663a05f9ea08c1244c64ecd8ebb0275221e3f43fc60d6399ba9d84f3e8e99cd83c43fa097977bf671d59f7d3fa9f4fe804942ad939173a846989f4c72fbb47bc3c4055fa0e6def10cda5900b192319a6fd5996cf3dece5b45ea2f1ffc59bd5d7a6db1050fa0fb77cfbcc8df30f39290b3d38f60032cbb27db82b56064dc3fc1c4a088af9dd3a04bace0cc199ec5bbeacea504d9a0053e675ae58e9bffd3570b783a6bd7af20daa0cc6b6d773e97815d572da89187550550cb6195b1af29abb677b30ccba2900cdba03d017d00ef550cc56e60c282dd5a2d5cb498f9084de8d894541b914fed380184a081ec7fce9de00d451fa3601e94bf9f9953c7e367c01405f7375f70e629702a5fa0a24573a1feddc6fa279698e45a491859db32104101d5d28a9f4e19abbbb413f2a08827746343729a15dbe279824bcc1543088782397874c8fa9952ae78e5a9336aa0cfa318d8d7b8f0806e914579a22a02097365f8a4dc62e223b5dd0d82f497652380",
|
||||
"0xf90211a0aa7d90ee03ffb0d9aa5c05c6af67e4db06fd13a68aa063196cd9ef2de85a4602a0e44e93ad41215c9e048f472cc6297cad6e593a3bb920f45c626cf5778f3513c0a083b31bbf31042862ed797a99dbe47700766d17e11590d17e35e6a25a5d4ac717a0495a966f4d95e212d0be84c2b6d3f2d06b23c6a06efff2f330895426dcfe68f6a018cade23e6aee5d87a1240df51cb14dd00dd2d803f174b3df05090f6be892d1ba0fca73f16b68cda56863e90182cf60eaceffb6d56742ffd958cb2b3e037b501eca0445b0acd8638e3d85be1dc7321164c4776ef7114b27a91d6b0deb49b1383e51aa0a4f99f1ef7264f5009049f258d1167f08758710b41d77a6d76959944f1905dc5a02b508827fbc96c609386a36f7a09953e771ebb6156a470b1d016a7b3df496915a0aa18b704ad8f03598d08ea887477e75bac9679821d31fdb4fdbba4851a6163b1a07479712ae14cc1856eea1ccfee6ae7d01f3438bb65c7699c9024c8183c079091a0ec5e4c622a262cf22d692db947082fab6d268842bdb963640488698073f785d3a0c460f055f4c9302fca54f9c0035ef8ee560b65ff7b57fa349e3ec822f479cc6aa0a4c972c19ff26382418848e98f60ca5cc70ea9caecb6449e577dfc7b61f015b8a070c35efcab4ba9cb82a140b599e7122e6d32a1c1e78a0451ee5a7054c6224968a067fb845f6b25fa59ab83bb5d0aa6f3a76419a156f6e4cee6c91552431484042280",
|
||||
"0xf90211a0b164ee35e937b1ef3bd53fb5e29475416f6e5396c5de0c8d2c16b428eaaab5a8a07e6cbc8821a2e5de97c4f9bbf4e73dd263446d2a2eeb8301597c9a0886874d10a05079785ef100652b28c42b4833f38e07263e7e914a6530705ac4af3162894720a0b4ca81febd1d87e706e0a9922922e86f81c743abab1051acad88ac278fc96578a0bc8b52c79317545eb485216a4332bc86f16463537e581b31b076dac67b6e8df3a008e366c85d9dd81b66a74c6b67e517b1e255cbde0948f5741e1271edc8073e7ea0babc1ad7b2f41a3f5c4a36bd6a0dfb5d7a7cad15d54704b6a4af0b47fe021be8a031571bd4d18732c1aa82c28a446415debc5e38c2082af44d2f23ca2fa9269973a0394c973c371a5b4713b47c7db1f7f581b1cc288a9a26ce8223c0d4a3681f5b7ea0dea4675aedfb020e8f4cc35aa140ec3ce54ee5078263f95beeef47be72840af3a07e28868239c2f500a968ac3a66a5158fe47d2cad15dc10f18b6ce8f5220a5f01a0f45d9d2a467fd972b3c8d6606e49726210de96e9bd16affdd4f85abe7e1db8a5a0e3f0d70e138312a090a3336d4520f3ad7a3c04c16449832054bb40ba8a73389ba00fa99d25c82e8367fea365216634abec282456cd0a8b1fc4981367b9a4ced7b6a0cb9ff3c33e7a7ad56d6ab4723b8d64de775b4c2e89514c1848b8b421d9c6932ba00d97e9cf3830772b06d4571ca97210f3ed44872de04fe3ba13e22529f893040c80",
|
||||
"0xf90211a0b4cdccaafe641d0355e7e1cc0cd56a2fb39872451032fba08f2988a63765d077a0dd57f6153380308531d14e91816a292f3f4e468d8a312a59ca16c944605a419da0038cba9b6de1b6b41ed33c557e57143e63fc56efb2e8a295db12270327ab28cea037cff299fb76ee49d5c5c0a9ea484163f7bac014309636e36ec3299b450ca875a033eeadab32723cb3a3b993d4e1c4d51e38426c7e30e51ec6e9797479d37786d1a0e4d22ef856cf62ea6ad0bcd2084160f3e1ed47430fdc073586ce8152a2ab58f2a0cc7396ec90f0f95ec6b599030085141bfcdfa99166567884c33f049b3d86e8c8a056843bb382a7b92c401eba43f63be7c94a3700e3e18f2514fd610a6d3578cad6a0bc30d8698e2c962d8b6fc1e4cb3b7daf6a6e21f11aa30c7db7f955223acc2bfea07a73df7038f66e207e3f82ed6c59da7359edab6558e9be7d1f36ec199c4c9e4da0abed072b4cc049119fa7bcd72f98f99a982e49b65a7af6b7eed7c7a446dc52f4a00ae6a6ec60bbf6760d74c778d894aaa84601afaadf866903697553c021257ef3a034ece2ce6f440b308dbcf083f2ebcade4ddefcd240dc31e624b3c48b3591f938a0ad33a4f4c214d228d0c7746f5cb3110b6db8e3f6ac6f1f37ddc061b853030072a098f4a9ed0d784a894c0e185fbfbd3865416e23b5a28dbff26044fcaf879fa2aaa0b4f4966127ceff6e000609f405eadc1439908c4a49b3fba099f491b05b38c5ae80",
|
||||
"0xf90211a0c67e28944b2099ab9f9a52ea2381c9d551fca2c91a3cf1353d965eb1687598a3a0a4e9f157d9a0439c85ae7c4174ee8e31c5a67eddabe5260d069852575c523bcfa0cd18d510d997718c86d8c033d6d68979af3c685ac42bdd9df86c8d5819ff5594a0b7aaf055e7bac40e0b3af3c14a6793c6dd4946d9a763a853c7624557b25acc9ba06f8c8d354ea3d30fa6dc446ba87635b8e1de4f0edab58af089fe99da972c87f8a007e5bede6bb3d4e9d709dbb9bd90c7303c08e73cfd223395c44e43ec0e9a11c9a02b4c9aa654ea40d595e4c156fc77a175eff7c1add3eeae12c7f9422c224d1e0ca046fa3b7d71f16e16706660be73a9192e6c64f5423d893b3c3677ff9bb95af6d5a0e67932dbe91813788aacc491f658f9619ab720986a9a088ec76b72af0d8cdf7ca00fb01d17f0836e8c62700519ef10b1fd362e01505b349ce51385d752bb24d756a07444e46d6f16f46b8282626a0a4901bfac07be80e5e23c2e370abc3cd93e6ba6a0d8fa47d152be467f5e4f7b0e9e0a2fa59adb31cb4f5c7005f85f68218f6bf0fca054165f828ed52296df6cf1d7a59e4f1eb17bb26269e100069febe7905a7bc6bea0ef6f39d8d4b499e6ffaf8040221ae6c175bb6f04820674a9fe5155524faadb0ea0baf122b732d8ff0cf1dc63c9470952dae92be0861e9067a1a9d2055664b5a05aa01a090926c27aec40aa5a89861891366882ea2aacaa9606501ee9d36cf8684e3e80",
|
||||
"0xf90211a0d82c7dbd73a354eaa1d4015eda31ca4ebec6724803e1e6cd022f73ab5e159362a0b0b09054f6d509d3980835457cbcee8dfda94112d8205603e617f28e6c218bc1a048358d0de8fce7afa19df81c94af141bafec2253ba2cda99f397a6fc0676f649a02fc30b24dd6698e18539357d127b8bfbd64d6933a216bcaad684786799e1e1a9a024af4ba09ae00a47e0cbdc6a5809e91eddaab43b1d0460137f7615edc8a64eaea04fdfe1cea3247bf25cb98113e4c7c43f2c89ea57eceb37ba71c2fdac2365e6dda0970865de2a58db9ab4363f9777de9824a8975f31ae5a3fcf04d9ab4e1086c3e4a04fdef1f905bbc566da9e114869972e0ffb61d57fcfcefa65b20b060a6ec72df4a00edaf68b2dc2c1ee60ccf82c8c410a03f3d93e13a2c772c3cac9aefcd8f1880da0a3bc4c9fea2aa7a524d0180d884bcfd62364b02359766493a75214e08f07503fa0ae8af931784eba8e20e79deda58f5f610798a9d035d86da791966f038fb0a5b1a0ca33a3464b1440a3812a0f905570902a71a599c3bb1ee6937bb56fee7523eae3a0ad2b62e853670d8ee8aba018cb9a91e66dd7cf35fe45670c363239efd15028a3a0e3bb904874a72c1d372b3b3216773d738430a02c36100ad03539f654daf34c0fa027fd7c6c51e1b4f9be9dbb4ba55f8cb3a68ce0d9dcaf89d8e96a9f0f558565e6a051801b9784a4e0a592bc6e406f4462cb9802d4d5af660849aed03580cca34ac480",
|
||||
"0xf90211a0da4e7c4b2d9b50f67cd62cc7e092e15915f0a730f4899f8f01cad0b36e85d7daa056cbcbbacef95f98996b1dc413913094d8d5fc9fc8669e6573283dbb2eb67de8a0fe52cda97a0f567279c0174377218a400de3f33c6683915ccf4d71d3793c2661a0e453e6f7a826c9f56a0b8ce8798cfe6579166f0a761d67e84dff5a0424ef4a44a0ae4850e05d0c8aaee6136c7ccaacc69e8df63f01ad8e29faae25c6cadfa0d7e2a08bb26872bb2e30bca838942bf76e5fdcf2d9dc0ef1ccc6a9cd6a2e9bfffdd268a0c8422485ba932bdb4df3b2dec0b7721d4f9b0b5dc6bccb400dc08cfed89c4547a0af7d55cd831f5945ef5c79fccf3fa4c3dc01a178d405a3120abaf0a8d69b46a5a03686e08503e820ca347176b6b4dd967bdcbdd6cf89337f600f3184b0316769e9a06523bf953e9db2fdcdbe757c850045377aea37f8f580f18f2c872ae8c652ea4fa0a8503dbabfabcc5c015f809870614d18b7afc05f58604a36c3c250781410b879a0341a26cde3cf254a45f7a1ac50484a19366f3a698743fa0b4000c0d5471efbb9a0cb64116f1f6692c93e0db00888747310c3f46e183fa0288618fc2b89db33730aa0c1c4ef7540d0b873b0bcb2639cfd3eeb8ba9cad0ed793a48676e3723db902556a0d4f0df415b962b19a7368c82fe95c71a2e8b855ce7839f0cafc9f8281cdbe572a01e8a1303a9136d20d02976f32ee4eafb451411b991279016e42279f831968e9b80",
|
||||
"0xf90211a0f1809d9cd0bed31701f35eadb5a340248143fe2030f3dda377a077a0957cd794a030bc33b86056be6ddf071031456b42ecd40c830bdaaada429084187c1141f07ca0ee2ac300bbd4b82fce2de4936b69af00c204cdf0e0c045a828e2a34aa37fabcfa0254623044846d132a38937773e0bc3c80a751af25af27680b6a4714f501eff92a09644a4e376a69f50041fefc4cd0a6529aece86ff3aa10712556a845d12ae7c84a0ec6c9c86d9b143acaf5a798d7dd205e86a1443479fd4c1d9b38709e983f9747ca03db0518b17665d039cf0267ab8a26b4573cfc9207c4a138a7db6bb64e7cb1ba6a04af294bdc12fe98d8dc5e2df067f8ecd6b279cf6eed21632161a345aacfdf08aa00282a01b3787c1270c3b25d9aa90194689a632c808421ecbe6b0d03801c17892a0f782e61728167d1836aa24dfcfaa614424a5bb7412d2c29aa97d159d5ffb670ea04a3cfe65e43037cb703a0027c0ecdec1990bebbbb63cfaf7e025d26562b270f9a010046cb0e125c251f4dc3f59dfef5527fd81c5e4bbf8e9e3f10526b4dcaf910ca06ddec94e30040c03676efc842f22b25248a5a6d35a02bffd882fe37a56211d88a0e094343c92e8841c92b52e55d0d4cb90787f02fd62b28008a63a57f00a2d3d42a09594f26fc4ab8be623f51c4e05d27998ed193e04a2012296803d879889e6317da0070e661f8274d9300b57604595727bda7dcf809294324afc7da1e751cc8b799380",
|
||||
"0xf90211a0f2abd2af7753e4bd6cd5f411325ea79516808fa57b00c9c67f595a9030960505a0a13ff95e57437a4a2f17c5ff718981f926c5c1fe178f1d54f83dfce3240f69b8a0d991114bfec57a4f0872d0b7587ad3beaf8aa49ba9d29432ba199838ec2ff31fa06f8303f582cd7e6f3e73c983d89225944331f36120d4fb923bbcaa39a2384e38a04953b83755138de63e30fca554bebbf9e040111ccaa749bcbdb474a248f2e307a05e6171e1611560ab2e6df6021b0e89d5220a2c5cad022e3a5b1c6f7f955272d4a0a982486985b01d1825053cf79efe29c2c35699c86f4bbe95c60dc4565ccc6a9fa06f61e86e532cb28fada28398118f3b8c26c0e23ec7974018eccf2d7bfe371ce3a0ef54a35cc090ebb4e365cdf2ea84ce8a66495e89a4f2ac2707d4f598cf11c822a02fb1bea578da67148e3044c025c97945385a9df9fe28c10d336e2bbe601d9009a0bd8ec44f6e6eb5e869959713c316b006377daf8db9428f6e96c53e0aa84e8c50a0e25c05f4aa1db15e2e5b9cefa60cb9ec4d87b1b022804973c43d2ac809468671a01f1455319d36468c771c88dbb6a0b8d5e45cdc485dda6d7d51da8c47e71a3d36a05820eb6059279eeba5d25447c29a521efccbbc85950938e7965b30922bd6da12a0f4a2d5294c65ee4dc170301e745353cf8bdc25105591a1352e7266605020e024a088d41a40d7b37e8d459c418851cd759f3182e01e728de93e2812967875ef2c2580",
|
||||
"0xf90211a0f5509071821d2953b53448397715aab3c8436b35cc877e14ddb459a25ab3a690a055371a19dd0d60219312ed8199f918bdbff9e1b13386c454651dcd0e495b84aca0683add264b2e57df174d9feb3604e723c02383a2b2f7cf6098a913a216898369a011d7b15bb3bcab534178efb9596f94f5791687ee1d8e94c2203975b4662e38c6a08fdc2dac03bc46861810c8c420e2760e8f496a47dedc5e55f20bd148d39ac7e1a03e860edf66403d134ea6f254e5b983f34575d1ccfbcdb6c6d332b45da5ab7207a052fee2c677cdc5c21985e1a139b250672546149fdf5746e2b63367204dbfbebca033aa92f9efe3a1c2c8b306e5c86f2eee374ce188b70ae40d891a641526790c54a0b51a1d87c051be047e7f4bb4b76d051ad76db258426e1902ad38c859db41dd76a095707bb25b4f2c902d2ee2749673b6a15f4e4021fea65b96cb73f6df9f50f003a0baaba1431ca2782928c1a0233ea36879b151f6b357bfee9ab169f7a55b912873a0f428bd164c57c2f55288239b66eba55d54d722606100fdff4011bff479851cb1a034d1624be6579ba1402b640ef12f61a9517b5dc9748dd094e5b95d336e3a0bcea02e0b641713136c69efb6609c6d6ea4686707549e310cb8cb50fda538e7e5aa45a07b85a5161815b739a0e6fe6c6e12bf600af6b6aa406e4a4c1dbf3ee88db2a3e9a0306dbf2a7ae616d4aa59e0b705e1fb7e9bb32d07a74fd851c5b01719235d089080"
|
||||
],
|
||||
"codes": []
|
||||
}
|
||||
319
zkvm-prover/integration/testdata/phase1/witnesses/12508462.json
vendored
Normal file
319
zkvm-prover/integration/testdata/phase1/witnesses/12508462.json
vendored
Normal file
File diff suppressed because one or more lines are too long
200
zkvm-prover/integration/testdata/phase1/witnesses/12508463.json
vendored
Normal file
200
zkvm-prover/integration/testdata/phase1/witnesses/12508463.json
vendored
Normal file
File diff suppressed because one or more lines are too long
1
zkvm-prover/integration/testdata/phase2/proofs/.gitignore
vendored
Normal file
1
zkvm-prover/integration/testdata/phase2/proofs/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
*.json
|
||||
5
zkvm-prover/integration/testdata/phase2/proofs/README.md
vendored
Normal file
5
zkvm-prover/integration/testdata/phase2/proofs/README.md
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
# Proof files would be automatically generated here
|
||||
|
||||
### Some tests would try to generate the proofs it needed and cache them under this directory
|
||||
|
||||
### Use `make clean-test-cache` to clean the cached proof files
|
||||
7
zkvm-prover/integration/testdata/phase2/tasks/README.md
vendored
Normal file
7
zkvm-prover/integration/testdata/phase2/tasks/README.md
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
# Put task file here to execute some tests
|
||||
|
||||
### Following tests require batch task (naming as `batch-task.json`)
|
||||
+ `test-execute-batch-fast`
|
||||
+ `test-single-batch`
|
||||
|
||||
A task file can be generated into the output dir while a e2e test has completed
|
||||
1
zkvm-prover/integration/testdata/phase2/tasks/batch-task.json
vendored
Normal file
1
zkvm-prover/integration/testdata/phase2/tasks/batch-task.json
vendored
Normal file
File diff suppressed because one or more lines are too long
61
zkvm-prover/integration/testdata/phase2/witnesses/1.json
vendored
Normal file
61
zkvm-prover/integration/testdata/phase2/witnesses/1.json
vendored
Normal file
@@ -0,0 +1,61 @@
|
||||
{
|
||||
"chain_id": 333333,
|
||||
"header": {
|
||||
"parent_hash": "0x818c7d06855afe7516b06b46edec2ed0029537e8548ffc161686d1ad0afd763a",
|
||||
"sha3Uncles": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347",
|
||||
"miner": "0x0000000000000000000000000000000000000000",
|
||||
"state_root": "0x10ab3e5d45de7df81428a0c36426ee62abff54c2e16705148b94c93c0cefc4cd",
|
||||
"transactions_root": "0xfd3a328ddc0e4067263d8f5451793b245bd1cbcff4c901e4ca0b283011ef621b",
|
||||
"receipts_root": "0x8c521bf03b05e8859adb63ccd364c747b1c1830168dd612d45f6c64197de6c3a",
|
||||
"logs_bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
|
||||
"difficulty": "0x2",
|
||||
"number": "0x1",
|
||||
"gas_limit": "0x989680",
|
||||
"gas_used": "0x4b9a5",
|
||||
"timestamp": "0x67b4399f",
|
||||
"extra_data": "0xd883050808846765746888676f312e32312e31856c696e757800000000000000a472959ca8719182ddf67a63708f35598c145b2738b0b1445bcd02def3087e192a8bf45eccbf476ac2c7b0ff1b00046fe69dfff921de373c5cf7fe074134872901",
|
||||
"mix_hash": "0x0000000000000000000000000000000000000000000000000000000000000000",
|
||||
"nonce": "0x0000000000000000",
|
||||
"base_fee_per_gas": "0x2562500"
|
||||
},
|
||||
"pre_state_root": "0x5302a56cbbec7d14d48d592b805d4ec3c7011439dfaa90d44deee02a9326d203",
|
||||
"transaction": [
|
||||
{
|
||||
"hash": "0xd85c7e1a4f29929093fc1c376c7047972a7bbf74d39b1478753eeb68bb767e3f",
|
||||
"nonce": "0x0",
|
||||
"from": "0x6f4c950442e1af093bcff730381e63ae9171b87a",
|
||||
"to": null,
|
||||
"value": "0x0",
|
||||
"gas": "0xf4240",
|
||||
"max_fee_per_gas": "0x3b9aca00",
|
||||
"max_priority_fee_per_gas": "0x1",
|
||||
"input": "0x6080604052348015600e575f80fd5b506104a58061001c5f395ff3fe608060405234801561000f575f80fd5b5060043610610029575f3560e01c8063c566b2091461002d575b5f80fd5b61004760048036038101906100429190610298565b61005e565b604051610055929190610359565b60405180910390f35b5f606061010073ffffffffffffffffffffffffffffffffffffffff168360405161008891906103c1565b5f604051808303815f865af19150503d805f81146100c1576040519150601f19603f3d011682016040523d82523d5f602084013e6100c6565b606091505b5080925081935050508161010f576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040161010690610431565b60405180910390fd5b7fa82cc8725dfeb485badf0c830fc554a37ed268855e35805056c77c86946342098160405161013e919061044f565b60405180910390a1915091565b5f604051905090565b5f80fd5b5f80fd5b5f80fd5b5f80fd5b5f601f19601f8301169050919050565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52604160045260245ffd5b6101aa82610164565b810181811067ffffffffffffffff821117156101c9576101c8610174565b5b80604052505050565b5f6101db61014b565b90506101e782826101a1565b919050565b5f67ffffffffffffffff82111561020657610205610174565b5b61020f82610164565b9050602081019050919050565b828183375f83830152505050565b5f61023c610237846101ec565b6101d2565b90508281526020810184848401111561025857610257610160565b5b61026384828561021c565b509392505050565b5f82601f83011261027f5761027e61015c565b5b813561028f84826020860161022a565b91505092915050565b5f602082840312156102ad576102ac610154565b5b5f82013567ffffffffffffffff8111156102ca576102c9610158565b5b6102d68482850161026b565b91505092915050565b5f8115159050919050565b6102f3816102df565b82525050565b5f81519050919050565b5f82825260208201905092915050565b8281835e5f83830152505050565b5f61032b826102f9565b6103358185610303565b9350610345818560208601610313565b61034e81610164565b840191505092915050565b5f60408201905061036c5f8301856102ea565b818103602083015261037e8184610321565b90509392505050565b5f81905092915050565b5f61039b826102f9565b6103a58185610387565b93506103b5818560208601610313565b80840191505092915050565b5f6103cc
8284610391565b915081905092915050565b5f82825260208201905092915050565b7f507265636f6d70696c652063616c6c206661696c6564000000000000000000005f82015250565b5f61041b6016836103d7565b9150610426826103e7565b602082019050919050565b5f6020820190508181035f8301526104488161040f565b9050919050565b5f6020820190508181035f8301526104678184610321565b90509291505056fea26469706673582212200f74a92387ae88fee4fc7c9238aa9bb6091756accf22046271bf9d976083ed3664736f6c63430008190033",
|
||||
"chain_id": "0x51615",
|
||||
"access_list": [],
|
||||
"transaction_type": 2,
|
||||
"signature": {
|
||||
"r": "0xe6de9d0c2690cb78e410ee6a968110d5a713de6b9f613986ff5a987d32afea31",
|
||||
"s": "0x4e07c4fe9b184edef8c2c7a58e7b252fc19542c11474cd428d24c9da11bb9fcb",
|
||||
"y_parity": true
|
||||
}
|
||||
}
|
||||
],
|
||||
"withdrawals": null,
|
||||
"states": [
|
||||
"0xf851808080a0a87d9bb950836582673aa0eecc0ff64aac607870637a2dd2012b8b1b31981f698080a08da6d5c36a404670c553a2c9052df7cd604f04e3863c4c7b9e0027bfd54206d680808080808080808080",
|
||||
"0xf8689f3067596300e542c7cd2a1c5fca601da42a2739f790ad422b4f59fba0844e61b846f8448080a0762f18853f9b095d3cbb34af725cf2ec2be8a969a7b8d4cf70b8ede38d457a5da07f6f0daf66a63b4d504fabde8e9fa491ff678bf22082d8fee03ac3064fcf7de9",
|
||||
"0xe2a0336b6384b5eca791c62761152d0c79bb0604c104a5fb6f4eb0703f3154bb3db001",
|
||||
"0xf871a0335d1857aacd2be4f5447c6ce16b3d0c2c5aa79ebf97f920200333b536e86861b84ef84c80888ac7230489e80000a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470",
|
||||
"0xe2a02052222313e28459528d920b65115c16c04f3efc82aaedc97be59f3f377c0d3f01",
|
||||
"0xe2a0366cc928b5edb82af9bd49922954155ab7b0942694bea4ce44661d9a8736c68801",
|
||||
"0xf851808080808080a032d1c4a6391dd38d71841fa7d5b3e4305e5ffb91227d7b303321d575e413b8f18080808080808080a03123f9aaae9cba26fb0f2407f39e8bce12878d354fc7370ffd4e9b4979411e1780",
|
||||
"0xf891a08857639e3944eefb53fc927fd82a371bd6fd6ada8f9fe9cbc2b6381c1aeb5fec80a01cd100c375b88cd4909b69a1c187a22169124e1dcbf910d68d391b79814a45c080808080808080a0c5d54b915b56a888eee4e6eeb3141e778f9b674d1d322962eed900f02c29990a80808080a03340bbaeafcda3a8672eb83099231dbbfab8dae02a1e8ec2f7180538fac207e080",
|
||||
"0xf838a120a9144a5e7efd259b8b0d55467f4696ed47ec83317d61501b76366dbcca65ce7395946f4c950442e1af093bcff730381e63ae9171b87a",
|
||||
"0xf8b180a03672d4a4951dbf05a8d18c33bd880a640aeb4dc1082bc96c489e3d658659c34080a082cc85168147c333f706c2b5691598335b1cd5cdb2e9ceb2549ada975b361aba80a039229c28de6ccaf16914959cbcd0a623a25303cde5643b98741689d36bc6a3a3808080a0394b537dac70183c9ae73cdc9170d23f63e2eab2c9c0558d8168c4a9b60882a78080808080a019f31f2ec09a6c71f568934c6b0a9d12198b8f2ef22d82a3283b4bca1b8ee60a80",
|
||||
"0xf85180808080a0166a095be91b1f2ffc9d1a8abc0522264f67121086a4ea0b22a0a6bef07b000a808080a0aae809664601487c8ce4c127ff74497983e8183aae5a17760bd024f62f2731668080808080808080",
|
||||
"0xf869a03b18a0b8246d45a5a396db3b461bf4a56bd646d9274adeadb5471dd31e30574fb846f8448080a0c12326139a2ceb734a1d1042b13dd94b7a2ea2b13cbd7fa0e2944790f4f70d7da019e0db18cf25c98a25c9e8eac4c99e0653cd8c395303140b41425e9fa3f9bb65",
|
||||
"0xe21ba05e56e3347565c4f1aed8369789b49a5fec2bb4cfda21c1de954ecee3b91ce3c2",
|
||||
"0xf869a020f884cdcd164a6b73972dc6625ecb078be7e607b54459f3edfaada99be20644b846f8448080a0f83a2f0b95b00ea04100cadcd981b0cc2edbf64dd961adf01fc51c9169394880a03733510decd4cdde078e264666d17cac208f9f9e93fc0a43b02921be5cf5726f"
|
||||
],
|
||||
"codes": []
|
||||
}
|
||||
197
zkvm-prover/integration/testdata/phase2/witnesses/2.json
vendored
Normal file
197
zkvm-prover/integration/testdata/phase2/witnesses/2.json
vendored
Normal file
@@ -0,0 +1,197 @@
|
||||
{
|
||||
"chain_id": 333333,
|
||||
"header": {
|
||||
"parent_hash": "0xf70b99aa36c6b7ba368ba4002a3e573f8febdf3a608546d43e881528e7a78b19",
|
||||
"sha3Uncles": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347",
|
||||
"miner": "0x0000000000000000000000000000000000000000",
|
||||
"state_root": "0xd4821c5b832756d9c64d9a259f3f37dd77d6f4dbf2c674b8ff65481644073007",
|
||||
"transactions_root": "0xc76de8800df34b226e2b653e59689848f40871873f4bad5f7194419e763d3b57",
|
||||
"receipts_root": "0xd8929b81f48473852941e257e0602129439ffe49752e1ca01debf70a384e4972",
|
||||
"logs_bloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000400000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000",
|
||||
"difficulty": "0x2",
|
||||
"number": "0x2",
|
||||
"gas_limit": "0x989680",
|
||||
"gas_used": "0x38b8a",
|
||||
"timestamp": "0x67b5a65c",
|
||||
"extra_data": "0xd883050808846765746888676f312e32312e31856c696e75780000000000000083bd1baac85c025480491f842e34574821751583bfb54f0be6165f920f6072df508990305a23d50fb0dacd54358954b173846131126e12cb17b739791abc6f6200",
|
||||
"mix_hash": "0x0000000000000000000000000000000000000000000000000000000000000000",
|
||||
"nonce": "0x0000000000000000",
|
||||
"base_fee_per_gas": "0x2562500"
|
||||
},
|
||||
"pre_state_root": "0x10ab3e5d45de7df81428a0c36426ee62abff54c2e16705148b94c93c0cefc4cd",
|
||||
"transaction": [
|
||||
{
|
||||
"hash": "0xfd069949d5885167ee8aba441f604172e41d49ae50d1f577740c79ac4a54452c",
|
||||
"nonce": "0x1",
|
||||
"from": "0x6f4c950442e1af093bcff730381e63ae9171b87a",
|
||||
"to": "0x0000000000000000000000000000000000000100",
|
||||
"value": "0x0",
|
||||
"gas": "0x186a0",
|
||||
"max_fee_per_gas": "0x3b9aca00",
|
||||
"max_priority_fee_per_gas": "0x1",
|
||||
"input": "0x4cee90eb86eaa050036147a12d49004b6b9c72bd725d39d4785011fe190f0b4da73bd4903f0ce3b639bbbf6e8e80d16931ff4bcf5993d58468e8fb19086e8cac36dbcd03009df8c59286b162af3bd7fcc0450c9aa81be5d10d312af6c66b1d604aebd3099c618202fcfe16ae7770b0c49ab5eadf74b754204a3bb6060e44eff37618b065f9832de4ca6ca971a7a1adc826d0f7c00181a5fb2ddf79ae00b4e10e",
|
||||
"chain_id": "0x51615",
|
||||
"access_list": [],
|
||||
"transaction_type": 2,
|
||||
"signature": {
|
||||
"r": "0x46ebf50743ce9a1d90ab42952373e50cc6fadd5b7c6e0331962feb9648afb030",
|
||||
"s": "0x497b36360dbaf92ded051f34e4b45883bfdc881d91dbd9b56d6ea636d30d824a",
|
||||
"y_parity": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"hash": "0xb9e273887813361e65dc9404133e53d073aebf31fc88723d0901c8e04ce0b087",
|
||||
"nonce": "0x2",
|
||||
"from": "0x6f4c950442e1af093bcff730381e63ae9171b87a",
|
||||
"to": "0x0000000000000000000000000000000000000100",
|
||||
"value": "0x0",
|
||||
"gas": "0x186a0",
|
||||
"max_fee_per_gas": "0x3b9aca00",
|
||||
"max_priority_fee_per_gas": "0x1",
|
||||
"input": "0x4cee90eb86eaa050036147a12d49004b6b9c72bd725d39d4785011fe190f0b4da73bd4903f0ce3b639bbbf6e8e80d16931ff4bcf5993d58468e8fb19086e8cac36dbcd03009df8c59286b162af3bd7fcc0450c9aa81be5d10d312af6c66b1d604aebd3099c618202fcfe16ae7770b0c49ab5eadf74b754204a3bb6060e44eff37618b065f9832de4ca6ca971a7a1adc826d0f7c00181a5fb2ddf79ae00b4e1",
|
||||
"chain_id": "0x51615",
|
||||
"access_list": [],
|
||||
"transaction_type": 2,
|
||||
"signature": {
|
||||
"r": "0xd2a81c1a107acefda7512ff57124e64430e6976ae04d4591517ea554fab96577",
|
||||
"s": "0x754aaea2817491db269d1189e8cf4cebb5acc462cc91c4045b83a92bbb05d9b",
|
||||
"y_parity": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"hash": "0xa403604b2fe028b4de480157e543d02d77272b4ffe2325cb8c54e51938de4e42",
|
||||
"nonce": "0x3",
|
||||
"from": "0x6f4c950442e1af093bcff730381e63ae9171b87a",
|
||||
"to": "0x0000000000000000000000000000000000000100",
|
||||
"value": "0x0",
|
||||
"gas": "0x186a0",
|
||||
"max_fee_per_gas": "0x3b9aca00",
|
||||
"max_priority_fee_per_gas": "0x1",
|
||||
"input": "0x4cee90eb86eaa050036147a12d49004b6b9c72bd725d39d4785011fe190f0b4da73bd4903f0ce3b639bbbf6e8e80d16931ff4bcf5993d58468e8fb19086e8cac36dbcd03009df8c59286b162af3bd7fcc0450c9aa81be5d10d312af6c66b1d604aebd3099c618202fcfe16ae7770b0c49ab5eadf74b754204a3bb6060e44eff37618b065f9832de4ca6ca971a7a1adc826d0f7c00181a5fb2ddf79ae00b4e10eff",
|
||||
"chain_id": "0x51615",
|
||||
"access_list": [],
|
||||
"transaction_type": 2,
|
||||
"signature": {
|
||||
"r": "0x4a228148c6cc653ac74b1c45d20aabe8686106bbacfb023f52f07be26fcf9d1e",
|
||||
"s": "0x68350a31097b54db9370b78d0d91738682a747f310bd3a954c3440ca45fb0e0b",
|
||||
"y_parity": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"hash": "0xf553c9392d6d9caa98861b99119745deaa936f5d0077972edad60e4e23cdb40c",
|
||||
"nonce": "0x4",
|
||||
"from": "0x6f4c950442e1af093bcff730381e63ae9171b87a",
|
||||
"to": "0x0000000000000000000000000000000000000100",
|
||||
"value": "0x0",
|
||||
"gas": "0x186a0",
|
||||
"max_fee_per_gas": "0x3b9aca00",
|
||||
"max_priority_fee_per_gas": "0x1",
|
||||
"input": "0x4cee90eb86eaa050036147a12d49004b6b9c72bd725d39d4785011fe190f0b4da73bd4903f0ce3b639bbbf6e8e80d16931ff4bcf5993d58468e8fb19086e8cac36dbcd03009df8c59286b162af3bd7fcc0450c9aa81be5d10d312af6c66b1d604aebd3099c618202fcfe16ae7770b0c49ab5eadf74b754204a3bb6060e44eff37618b065f9832de4ca6ca971a7a1adc826d0f7c00181a5fb2ddf79ae00b4e100",
|
||||
"chain_id": "0x51615",
|
||||
"access_list": [],
|
||||
"transaction_type": 2,
|
||||
"signature": {
|
||||
"r": "0x385c30794917338d3301c9ca3a93f56d6f6d0d2dfff136cd0a9c597980eb493b",
|
||||
"s": "0x14f07248e6a8dbd676c3576fcb7d49ab4d6f8bcd124eb071907831ceb52bfe50",
|
||||
"y_parity": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"hash": "0xe62c94089fb8f7ed2349b120fb2d24b03df3481298067dff671f3015d931760d",
|
||||
"nonce": "0x5",
|
||||
"from": "0x6f4c950442e1af093bcff730381e63ae9171b87a",
|
||||
"to": "0x8eebfef33eb00149852cadb631838ad9bfcce848",
|
||||
"value": "0x0",
|
||||
"gas": "0x186a0",
|
||||
"max_fee_per_gas": "0x3b9aca00",
|
||||
"max_priority_fee_per_gas": "0x1",
|
||||
"input": "0xc566b209000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000a04cee90eb86eaa050036147a12d49004b6b9c72bd725d39d4785011fe190f0b4da73bd4903f0ce3b639bbbf6e8e80d16931ff4bcf5993d58468e8fb19086e8cac36dbcd03009df8c59286b162af3bd7fcc0450c9aa81be5d10d312af6c66b1d604aebd3099c618202fcfe16ae7770b0c49ab5eadf74b754204a3bb6060e44eff37618b065f9832de4ca6ca971a7a1adc826d0f7c00181a5fb2ddf79ae00b4e10e",
|
||||
"chain_id": "0x51615",
|
||||
"access_list": [],
|
||||
"transaction_type": 2,
|
||||
"signature": {
|
||||
"r": "0xc2de0d5111ed370802ec6549c63eb4cd64f0b36628b74773cf21908a9a043e38",
|
||||
"s": "0x6eadaf8e1cf402578bf4695f4d40477aa1c1327757013f7e92311789a0e3191a",
|
||||
"y_parity": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"hash": "0x1409d2efd1f4052f63535421d13678f3664f9f7513de493947019bbced0ad416",
|
||||
"nonce": "0x6",
|
||||
"from": "0x6f4c950442e1af093bcff730381e63ae9171b87a",
|
||||
"to": "0x8eebfef33eb00149852cadb631838ad9bfcce848",
|
||||
"value": "0x0",
|
||||
"gas": "0x186a0",
|
||||
"max_fee_per_gas": "0x3b9aca00",
|
||||
"max_priority_fee_per_gas": "0x1",
|
||||
"input": "0xc566b2090000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000009f4cee90eb86eaa050036147a12d49004b6b9c72bd725d39d4785011fe190f0b4da73bd4903f0ce3b639bbbf6e8e80d16931ff4bcf5993d58468e8fb19086e8cac36dbcd03009df8c59286b162af3bd7fcc0450c9aa81be5d10d312af6c66b1d604aebd3099c618202fcfe16ae7770b0c49ab5eadf74b754204a3bb6060e44eff37618b065f9832de4ca6ca971a7a1adc826d0f7c00181a5fb2ddf79ae00b4e100",
|
||||
"chain_id": "0x51615",
|
||||
"access_list": [],
|
||||
"transaction_type": 2,
|
||||
"signature": {
|
||||
"r": "0x94d7b30351deb61158398597f81c888b3d24db2c88f09091fbfd4a23f1da4999",
|
||||
"s": "0x17a2df719e21b4b23ea06ce52414ee5062283f755c2691e36f7e448ba8677aa6",
|
||||
"y_parity": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"hash": "0xb712acf01736e3f2d973a0c52ed754b97ba1fa122645b08770fb18c992e6af43",
|
||||
"nonce": "0x7",
|
||||
"from": "0x6f4c950442e1af093bcff730381e63ae9171b87a",
|
||||
"to": "0x8eebfef33eb00149852cadb631838ad9bfcce848",
|
||||
"value": "0x0",
|
||||
"gas": "0x186a0",
|
||||
"max_fee_per_gas": "0x3b9aca00",
|
||||
"max_priority_fee_per_gas": "0x1",
|
||||
"input": "0xc566b209000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000a14cee90eb86eaa050036147a12d49004b6b9c72bd725d39d4785011fe190f0b4da73bd4903f0ce3b639bbbf6e8e80d16931ff4bcf5993d58468e8fb19086e8cac36dbcd03009df8c59286b162af3bd7fcc0450c9aa81be5d10d312af6c66b1d604aebd3099c618202fcfe16ae7770b0c49ab5eadf74b754204a3bb6060e44eff37618b065f9832de4ca6ca971a7a1adc826d0f7c00181a5fb2ddf79ae00b4e10eff00000000000000000000000000000000000000000000000000000000000000",
|
||||
"chain_id": "0x51615",
|
||||
"access_list": [],
|
||||
"transaction_type": 2,
|
||||
"signature": {
|
||||
"r": "0xc390036a156f1cbcb06ec04dbb031c9f4d87843399a8dd1b650190ead16150c6",
|
||||
"s": "0x3dea83877425cd265840a6acd1b4a732ecb2d17687b0d2760b35abb4f5fd23fc",
|
||||
"y_parity": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"hash": "0x216326474e436bdfe99ecee1ab6c4a6575dfd531581910ec0d1c7229d2993f5b",
|
||||
"nonce": "0x8",
|
||||
"from": "0x6f4c950442e1af093bcff730381e63ae9171b87a",
|
||||
"to": "0x8eebfef33eb00149852cadb631838ad9bfcce848",
|
||||
"value": "0x0",
|
||||
"gas": "0x186a0",
|
||||
"max_fee_per_gas": "0x3b9aca00",
|
||||
"max_priority_fee_per_gas": "0x1",
|
||||
"input": "0xc566b209000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000a04cee90eb86eaa050036147a12d49004b6b9c72bd725d39d4785011fe190f0b4da73bd4903f0ce3b639bbbf6e8e80d16931ff4bcf5993d58468e8fb19086e8cac36dbcd03009df8c59286b162af3bd7fcc0450c9aa81be5d10d312af6c66b1d604aebd3099c618202fcfe16ae7770b0c49ab5eadf74b754204a3bb6060e44eff37618b065f9832de4ca6ca971a7a1adc826d0f7c00181a5fb2ddf79ae00b4e100",
|
||||
"chain_id": "0x51615",
|
||||
"access_list": [],
|
||||
"transaction_type": 2,
|
||||
"signature": {
|
||||
"r": "0x86bf5a768e794c374445d8df3f504a80fc9fc2aea67f5079e95904d8ff20d222",
|
||||
"s": "0x4f022f362ebfdf3ce82e500c9fd7f0a4fa60858c348660cfa647ce64038b4677",
|
||||
"y_parity": false
|
||||
}
|
||||
}
|
||||
],
|
||||
"withdrawals": null,
|
||||
"states": [
|
||||
"0xe2a0366cc928b5edb82af9bd49922954155ab7b0942694bea4ce44661d9a8736c68801",
|
||||
"0xf838a120a9144a5e7efd259b8b0d55467f4696ed47ec83317d61501b76366dbcca65ce7395946f4c950442e1af093bcff730381e63ae9171b87a",
|
||||
"0xf869a03b18a0b8246d45a5a396db3b461bf4a56bd646d9274adeadb5471dd31e30574fb846f8448080a0c12326139a2ceb734a1d1042b13dd94b7a2ea2b13cbd7fa0e2944790f4f70d7da019e0db18cf25c98a25c9e8eac4c99e0653cd8c395303140b41425e9fa3f9bb65",
|
||||
"0xe2a02052222313e28459528d920b65115c16c04f3efc82aaedc97be59f3f377c0d3f01",
|
||||
"0xf85180808080a0ef794574a4fede3bef9792f9b5bdaecebaea6631734e3aef4463611205c33493808080a0aae809664601487c8ce4c127ff74497983e8183aae5a17760bd024f62f2731668080808080808080",
|
||||
"0xf871a0335d1857aacd2be4f5447c6ce16b3d0c2c5aa79ebf97f920200333b536e86861b84ef84c01888ac717fa33a06d5ba056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470",
|
||||
"0xf869a03df879c73b5eb46a7b14f545af1315425d1e5ff14b5089ce3410a358fbaa89f5b846f8440180a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0027d68dcc1fe2c3d0fdf7a74a782485789d7f9060feb98ce5c8a7e89919f6a7e",
|
||||
"0xf851808080a0a87d9bb950836582673aa0eecc0ff64aac607870637a2dd2012b8b1b31981f698080a08da6d5c36a404670c553a2c9052df7cd604f04e3863c4c7b9e0027bfd54206d680808080808080808080",
|
||||
"0xe2a0336b6384b5eca791c62761152d0c79bb0604c104a5fb6f4eb0703f3154bb3db001",
|
||||
"0xf8d180a012921caa470c5f8a3ce3bad556ec8fed5a412da9c2a14af1ebf93d7fa5606c2780a082cc85168147c333f706c2b5691598335b1cd5cdb2e9ceb2549ada975b361aba80a063b370461da723f22a5ed2799ea7b8a9deff170c96642f51314a8151e9097ba7808080a0394b537dac70183c9ae73cdc9170d23f63e2eab2c9c0558d8168c4a9b60882a78080a009df5c8a1852e34c7e6ac68cbde5c0e51d453cd81e73413944d64fe8e3e464bd8080a019f31f2ec09a6c71f568934c6b0a9d12198b8f2ef22d82a3283b4bca1b8ee60a80",
|
||||
"0xf86fa020f884cdcd164a6b73972dc6625ecb078be7e607b54459f3edfaada99be20644b84cf84a80860b0a564792a5a0f83a2f0b95b00ea04100cadcd981b0cc2edbf64dd961adf01fc51c9169394880a03733510decd4cdde078e264666d17cac208f9f9e93fc0a43b02921be5cf5726f",
|
||||
"0xf8689f3067596300e542c7cd2a1c5fca601da42a2739f790ad422b4f59fba0844e61b846f8448080a0762f18853f9b095d3cbb34af725cf2ec2be8a969a7b8d4cf70b8ede38d457a5da07f6f0daf66a63b4d504fabde8e9fa491ff678bf22082d8fee03ac3064fcf7de9",
|
||||
"0xe21ba05e56e3347565c4f1aed8369789b49a5fec2bb4cfda21c1de954ecee3b91ce3c2",
|
||||
"0xf891a08857639e3944eefb53fc927fd82a371bd6fd6ada8f9fe9cbc2b6381c1aeb5fec80a01cd100c375b88cd4909b69a1c187a22169124e1dcbf910d68d391b79814a45c080808080808080a0c5d54b915b56a888eee4e6eeb3141e778f9b674d1d322962eed900f02c29990a80808080a03340bbaeafcda3a8672eb83099231dbbfab8dae02a1e8ec2f7180538fac207e080",
|
||||
"0xf851808080808080a032d1c4a6391dd38d71841fa7d5b3e4305e5ffb91227d7b303321d575e413b8f18080808080808080a03123f9aaae9cba26fb0f2407f39e8bce12878d354fc7370ffd4e9b4979411e1780"
|
||||
],
|
||||
"codes": [
|
||||
"0x608060405234801561000f575f80fd5b5060043610610029575f3560e01c8063c566b2091461002d575b5f80fd5b61004760048036038101906100429190610298565b61005e565b604051610055929190610359565b60405180910390f35b5f606061010073ffffffffffffffffffffffffffffffffffffffff168360405161008891906103c1565b5f604051808303815f865af19150503d805f81146100c1576040519150601f19603f3d011682016040523d82523d5f602084013e6100c6565b606091505b5080925081935050508161010f576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040161010690610431565b60405180910390fd5b7fa82cc8725dfeb485badf0c830fc554a37ed268855e35805056c77c86946342098160405161013e919061044f565b60405180910390a1915091565b5f604051905090565b5f80fd5b5f80fd5b5f80fd5b5f80fd5b5f601f19601f8301169050919050565b7f4e487b71000000000000000000000000000000000000000000000000000000005f52604160045260245ffd5b6101aa82610164565b810181811067ffffffffffffffff821117156101c9576101c8610174565b5b80604052505050565b5f6101db61014b565b90506101e782826101a1565b919050565b5f67ffffffffffffffff82111561020657610205610174565b5b61020f82610164565b9050602081019050919050565b828183375f83830152505050565b5f61023c610237846101ec565b6101d2565b90508281526020810184848401111561025857610257610160565b5b61026384828561021c565b509392505050565b5f82601f83011261027f5761027e61015c565b5b813561028f84826020860161022a565b91505092915050565b5f602082840312156102ad576102ac610154565b5b5f82013567ffffffffffffffff8111156102ca576102c9610158565b5b6102d68482850161026b565b91505092915050565b5f8115159050919050565b6102f3816102df565b82525050565b5f81519050919050565b5f82825260208201905092915050565b8281835e5f83830152505050565b5f61032b826102f9565b6103358185610303565b9350610345818560208601610313565b61034e81610164565b840191505092915050565b5f60408201905061036c5f8301856102ea565b818103602083015261037e8184610321565b90509392505050565b5f81905092915050565b5f61039b826102f9565b6103a58185610387565b93506103b5818560208601610313565b80840191505092915050565b5f6103cc8284610391565b915081905092915050565b5f828252602082019050929150505
65b7f507265636f6d70696c652063616c6c206661696c6564000000000000000000005f82015250565b5f61041b6016836103d7565b9150610426826103e7565b602082019050919050565b5f6020820190508181035f8301526104488161040f565b9050919050565b5f6020820190508181035f8301526104678184610321565b90509291505056fea26469706673582212200f74a92387ae88fee4fc7c9238aa9bb6091756accf22046271bf9d976083ed3664736f6c63430008190033"
|
||||
]
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user