Mirror of https://github.com/scroll-tech/scroll.git (synced 2026-01-11 23:18:07 -05:00)

Compare commits: 9 commits
| SHA1 |
|---|
| 426c57a5fa |
| b7fdf48c30 |
| ad0c918944 |
| 1098876183 |
| 9e520e7769 |
| de7f6e56a9 |
| 3b323198dc |
| c11e0283e8 |
| a5a7844646 |
.github/workflows/common.yml (vendored, 2 changes)

@@ -29,7 +29,7 @@ jobs:
     steps:
       - uses: actions-rs/toolchain@v1
         with:
-          toolchain: nightly-2025-02-14
+          toolchain: nightly-2025-08-18
           override: true
           components: rustfmt, clippy
       - name: Install Go
.github/workflows/coordinator.yml (vendored, 2 changes)

@@ -33,7 +33,7 @@ jobs:
     steps:
       - uses: actions-rs/toolchain@v1
         with:
-          toolchain: nightly-2025-02-14
+          toolchain: nightly-2025-08-18
           override: true
           components: rustfmt, clippy
       - name: Install Go
.github/workflows/docker.yml (vendored, 18 changes)

@@ -51,9 +49,7 @@ jobs:
           push: true
           tags: |
             scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
-            scrolltech/${{ env.REPOSITORY }}:latest
             ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
-            ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest

   rollup_relayer:
     runs-on:
@@ -97,9 +95,7 @@ jobs:
           push: true
           tags: |
             scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
-            scrolltech/${{ env.REPOSITORY }}:latest
             ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
-            ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest

   blob_uploader:
     runs-on:
@@ -143,9 +139,7 @@ jobs:
           push: true
           tags: |
             scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
-            scrolltech/${{ env.REPOSITORY }}:latest
             ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
-            ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest

   rollup-db-cli:
     runs-on:
@@ -189,9 +183,7 @@ jobs:
           push: true
           tags: |
             scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
-            scrolltech/${{ env.REPOSITORY }}:latest
             ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
-            ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest

   bridgehistoryapi-fetcher:
     runs-on:
@@ -235,9 +227,7 @@ jobs:
           push: true
           tags: |
             scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
-            scrolltech/${{ env.REPOSITORY }}:latest
             ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
-            ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest

   bridgehistoryapi-api:
     runs-on:
@@ -281,9 +271,7 @@ jobs:
           push: true
           tags: |
             scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
-            scrolltech/${{ env.REPOSITORY }}:latest
             ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
-            ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest

   bridgehistoryapi-db-cli:
     runs-on:
@@ -327,9 +315,7 @@ jobs:
           push: true
           tags: |
             scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
-            scrolltech/${{ env.REPOSITORY }}:latest
             ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
-            ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest

   coordinator-api:
     runs-on:
@@ -372,9 +358,7 @@ jobs:
           push: true
           tags: |
             scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
-            scrolltech/${{ env.REPOSITORY }}:latest
             ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
-            ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest

   coordinator-cron:
     runs-on:
@@ -418,6 +402,4 @@ jobs:
           push: true
           tags: |
             scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
-            scrolltech/${{ env.REPOSITORY }}:latest
             ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
-            ${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest
.github/workflows/intermediate-docker.yml (vendored, 9 changes)

@@ -22,11 +22,9 @@ on:
         required: true
         type: choice
         options:
-          - nightly-2023-12-03
-          - nightly-2022-12-10
-          - 1.86.0
           - nightly-2025-02-14
-        default: "nightly-2023-12-03"
+          - nightly-2025-08-18
+        default: "nightly-2025-08-18"
     PYTHON_VERSION:
       description: "Python version"
       required: false
@@ -41,7 +39,8 @@ on:
         options:
           - "11.7.1"
           - "12.2.2"
-        default: "11.7.1"
+          - "12.9.1"
+        default: "12.9.1"
     CARGO_CHEF_TAG:
       description: "Cargo chef version"
       required: true
Cargo.lock (generated, 2016 changes)
File diff suppressed because it is too large.
Cargo.toml (42 changes)

@@ -14,15 +14,16 @@ edition = "2021"
 homepage = "https://scroll.io"
 readme = "README.md"
 repository = "https://github.com/scroll-tech/scroll"
-version = "4.5.8"
+version = "4.5.47"

 [workspace.dependencies]
-scroll-zkvm-prover = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "89a2dc1" }
-scroll-zkvm-verifier = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "89a2dc1" }
-scroll-zkvm-types = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "89a2dc1" }
+scroll-zkvm-prover = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "060be4c" }
+scroll-zkvm-verifier = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "060be4c" }
+scroll-zkvm-types = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "060be4c" }

-sbv-primitives = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "chore/openvm-1.3", features = ["scroll"] }
-sbv-utils = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "chore/openvm-1.3" }
+sbv-primitives = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "master", features = ["scroll", "rkyv"] }
+sbv-utils = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "master" }
+sbv-core = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "master", features = ["scroll"] }

 metrics = "0.23.0"
 metrics-util = "0.17"
@@ -30,7 +31,7 @@ metrics-tracing-context = "0.16.0"

 anyhow = "1.0"
 alloy = { version = "1", default-features = false }
-alloy-primitives = { version = "1.2", default-features = false, features = ["tiny-keccak"] }
+alloy-primitives = { version = "1.3", default-features = false, features = ["tiny-keccak"] }
 # also use this to trigger "serde" feature for primitives
 alloy-serde = { version = "1", default-features = false }
@@ -46,21 +47,20 @@ once_cell = "1.20"
 base64 = "0.22"

 [patch.crates-io]
-revm = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-bytecode = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-context = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-context-interface = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-database = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-database-interface = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-handler = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-inspector = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-interpreter = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-precompile = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-primitives = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
-revm-state = { git = "https://github.com/scroll-tech/revm", branch = "feat/reth-v78" }
+revm = { git = "https://github.com/scroll-tech/revm" }
+revm-bytecode = { git = "https://github.com/scroll-tech/revm" }
+revm-context = { git = "https://github.com/scroll-tech/revm" }
+revm-context-interface = { git = "https://github.com/scroll-tech/revm" }
+revm-database = { git = "https://github.com/scroll-tech/revm" }
+revm-database-interface = { git = "https://github.com/scroll-tech/revm" }
+revm-handler = { git = "https://github.com/scroll-tech/revm" }
+revm-inspector = { git = "https://github.com/scroll-tech/revm" }
+revm-interpreter = { git = "https://github.com/scroll-tech/revm" }
+revm-precompile = { git = "https://github.com/scroll-tech/revm" }
+revm-primitives = { git = "https://github.com/scroll-tech/revm" }
+revm-state = { git = "https://github.com/scroll-tech/revm" }

 ruint = { git = "https://github.com/scroll-tech/uint.git", branch = "v1.15.0" }
-alloy-primitives = { git = "https://github.com/scroll-tech/alloy-core", branch = "v1.2.0" }
+alloy-primitives = { git = "https://github.com/scroll-tech/alloy-core", branch = "feat/rkyv" }

 [profile.maxperf]
 inherits = "release"
@@ -5,7 +5,7 @@ import (
 	"runtime/debug"
 )

-var tag = "v4.5.44"
+var tag = "v4.5.48"

 var commit = func() string {
 	if info, ok := debug.ReadBuildInfo(); ok {
@@ -66,6 +66,7 @@ type AssetConfig struct {
 // VerifierConfig load zk verifier config.
 type VerifierConfig struct {
 	MinProverVersion string        `json:"min_prover_version"`
+	Features         string        `json:"features,omitempty"`
 	Verifiers        []AssetConfig `json:"verifiers"`
 }
@@ -140,3 +140,10 @@ func DumpVk(forkName, filePath string) error {

 	return nil
 }
+
+// Set dynamic feature flags that control libzkp runtime behavior
+func SetDynamicFeature(feats string) {
+	cFeats := goToCString(feats)
+	defer freeCString(cFeats)
+	C.set_dynamic_feature(cFeats)
+}
@@ -54,4 +54,7 @@ char* gen_wrapped_proof(char* proof_json, char* metadata, char* vk, size_t vk_le
 // Release memory allocated for a string returned by gen_wrapped_proof
 void release_string(char* string_ptr);

+void set_dynamic_feature(const char* feats);
+
 #endif /* LIBZKP_H */
@@ -67,6 +67,9 @@ func NewVerifier(cfg *config.VerifierConfig) (*Verifier, error) {
 		return nil, err
 	}

+	if cfg.Features != "" {
+		libzkp.SetDynamicFeature(cfg.Features)
+	}
 	libzkp.InitVerifier(string(configBytes))

 	v := &Verifier{
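Read together, the three hunks above wire a new optional `features` string from the coordinator's verifier config down into libzkp before the verifier is initialized. A minimal sketch of that flow, assuming a hypothetical config snippet and using an illustrative print in place of the real cgo call:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Mirrors the fields added to VerifierConfig in the diff above
// (AssetConfig and the remaining fields are elided here).
type VerifierConfig struct {
	MinProverVersion string `json:"min_prover_version"`
	Features         string `json:"features,omitempty"`
}

func main() {
	// Hypothetical coordinator config fragment; "legacy_witness" is the
	// feature token recognized by libzkp's set_dynamic_feature shown later
	// in this diff.
	raw := []byte(`{"min_prover_version": "v4.5.0", "features": "legacy_witness"}`)

	var cfg VerifierConfig
	if err := json.Unmarshal(raw, &cfg); err != nil {
		panic(err)
	}

	// NewVerifier only forwards a non-empty string, so omitting "features"
	// keeps the default (non-legacy) witness encoding.
	if cfg.Features != "" {
		fmt.Println("would call libzkp.SetDynamicFeature with:", cfg.Features)
	}
}
```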
@@ -1,45 +0,0 @@
[patch."https://github.com/openvm-org/openvm.git"]
openvm-build = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }
openvm-circuit = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }
openvm-continuations = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }
openvm-instructions ={ git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }
openvm-native-circuit = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }
openvm-native-compiler = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }
openvm-native-recursion = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }
openvm-native-transpiler = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }
openvm-rv32im-transpiler = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }
openvm-sdk = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false, features = ["parallel", "bench-metrics", "evm-prove"] }
openvm-transpiler = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }

[patch."https://github.com/openvm-org/stark-backend.git"]
openvm-stark-backend = { git = "ssh://git@github.com/scroll-tech/openvm-stark-gpu.git", branch = "main", features = ["gpu"] }
openvm-stark-sdk = { git = "ssh://git@github.com/scroll-tech/openvm-stark-gpu.git", branch = "main", features = ["gpu"] }

[patch."https://github.com/Plonky3/Plonky3.git"]
p3-air = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-field = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-commit = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-matrix = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-baby-bear = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", features = [
    "nightly-features",
], tag = "v0.2.1" }
p3-koala-bear = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-util = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-challenger = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-dft = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-fri = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-goldilocks = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-keccak = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-keccak-air = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-blake3 = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-mds = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-merkle-tree = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-monty-31 = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-poseidon = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-poseidon2 = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-poseidon2-air = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-symmetric = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-uni-stark = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-maybe-rayon = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" } # the "parallel" feature is NOT on by default to allow single-threaded benchmarking
p3-bn254-fr = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
crates/gpu_override/Cargo.lock (generated, 11021 changes)
File diff suppressed because it is too large.
@@ -1,21 +0,0 @@
.PHONY: build update clean

ZKVM_COMMIT ?= freebuild
PLONKY3_GPU_VERSION=$(shell ./print_plonky3gpu_version.sh | sed -n '2p')
$(info PLONKY3_GPU_VERSION is ${PLONKY3_GPU_VERSION})

GIT_REV ?= $(shell git rev-parse --short HEAD)
GO_TAG ?= $(shell grep "var tag = " ../../common/version/version.go | cut -d "\"" -f2)
ZK_VERSION=${ZKVM_COMMIT}-${PLONKY3_GPU_VERSION}
$(info ZK_GPU_VERSION is ${ZK_VERSION})

clean:
	cargo clean -Z unstable-options --release -p prover --lockfile-path ./Cargo.lock

# build gpu prover, never touch lock file
build:
	GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo build -Z unstable-options --release -p prover --lockfile-path ./Cargo.lock

# update Cargo.lock while override config has been updated
#update:
#	GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo build -Z unstable-options --release -p prover --lockfile-path ./Cargo.lock
@@ -1,15 +0,0 @@
#!/bin/bash

config_file=.cargo/config.toml
plonky3_gpu_path=$(grep 'path.*plonky3-gpu' "$config_file" | cut -d'"' -f2 | head -n 1)
plonky3_gpu_path=$(dirname "$plonky3_gpu_path")

if [ -z $plonky3_gpu_path ]; then
    exit 0
else
    pushd $plonky3_gpu_path
    commit_hash=$(git log --pretty=format:%h -n 1)
    echo "${commit_hash:0:7}"

    popd
fi
@@ -13,6 +13,7 @@ libzkp = { path = "../libzkp" }
 alloy = { workspace = true, features = ["provider-http", "transport-http", "reqwest", "reqwest-rustls-tls", "json-rpc"] }
 sbv-primitives = { workspace = true, features = ["scroll"] }
 sbv-utils = { workspace = true, features = ["scroll"] }
+sbv-core = { workspace = true, features = ["scroll"] }

 eyre.workspace = true
@@ -11,7 +11,7 @@ pub fn init(config: &str) -> eyre::Result<()> {
     Ok(())
 }

-pub fn get_client() -> rpc_client::RpcClient<'static> {
+pub fn get_client() -> impl libzkp::tasks::ChunkInterpreter {
     GLOBAL_L2GETH_CLI
         .get()
         .expect("must has been inited")
@@ -1,5 +1,5 @@
 use alloy::{
-    providers::{Provider, ProviderBuilder, RootProvider},
+    providers::{Provider, ProviderBuilder},
     rpc::client::ClientBuilder,
     transports::layers::RetryBackoffLayer,
 };
@@ -49,13 +49,13 @@ pub struct RpcConfig {
 /// so it can be run in block mode (i.e. inside dynamic library without a global entry)
 pub struct RpcClientCore {
     /// rpc prover
-    provider: RootProvider<Network>,
+    client: alloy::rpc::client::RpcClient,
     rt: tokio::runtime::Runtime,
 }

 #[derive(Clone, Copy)]
-pub struct RpcClient<'a> {
-    provider: &'a RootProvider<Network>,
+pub struct RpcClient<'a, T: Provider<Network>> {
+    provider: T,
     handle: &'a tokio::runtime::Handle,
 }
@@ -75,80 +75,78 @@ impl RpcClientCore {
        let retry_layer = RetryBackoffLayer::new(config.max_retry, config.backoff, config.cups);
        let client = ClientBuilder::default().layer(retry_layer).http(rpc);

        Ok(Self {
            provider: ProviderBuilder::<_, _, Network>::default().connect_client(client),
            rt,
        })
        Ok(Self { client, rt })
    }

    pub fn get_client(&self) -> RpcClient {
    pub fn get_client(&self) -> RpcClient<'_, impl Provider<Network>> {
        RpcClient {
            provider: &self.provider,
            provider: ProviderBuilder::<_, _, Network>::default()
                .connect_client(self.client.clone()),
            handle: self.rt.handle(),
        }
    }
}

impl ChunkInterpreter for RpcClient<'_> {
impl<T: Provider<Network>> ChunkInterpreter for RpcClient<'_, T> {
    fn try_fetch_block_witness(
        &self,
        block_hash: sbv_primitives::B256,
        prev_witness: Option<&sbv_primitives::types::BlockWitness>,
    ) -> Result<sbv_primitives::types::BlockWitness> {
        prev_witness: Option<&sbv_core::BlockWitness>,
    ) -> Result<sbv_core::BlockWitness> {
        async fn fetch_witness_async(
            provider: &RootProvider<Network>,
            provider: impl Provider<Network>,
            block_hash: sbv_primitives::B256,
            prev_witness: Option<&sbv_primitives::types::BlockWitness>,
        ) -> Result<sbv_primitives::types::BlockWitness> {
            use sbv_utils::{rpc::ProviderExt, witness::WitnessBuilder};
            prev_witness: Option<&sbv_core::BlockWitness>,
        ) -> Result<sbv_core::BlockWitness> {
            use sbv_utils::rpc::ProviderExt;

            let chain_id = provider.get_chain_id().await?;
            let (chain_id, block_num, prev_state_root) = if let Some(w) = prev_witness {
                (w.chain_id, w.header.number + 1, w.header.state_root)
            } else {
                let chain_id = provider.get_chain_id().await?;
                let block = provider
                    .get_block_by_hash(block_hash)
                    .full()
                    .await?
                    .ok_or_else(|| eyre::eyre!("Block {block_hash} not found"))?;

            let block = provider
                .get_block_by_hash(block_hash)
                .full()
                .await?
                .ok_or_else(|| eyre::eyre!("Block {block_hash} not found"))?;
                let parent_block = provider
                    .get_block_by_hash(block.header.parent_hash)
                    .await?
                    .ok_or_else(|| {
                        eyre::eyre!(
                            "parent block for block {} should exist",
                            block.header.number
                        )
                    })?;

            let number = block.header.number;
            let parent_hash = block.header.parent_hash;
            if number == 0 {
                eyre::bail!("no number in header or use block 0");
            }

            let mut witness_builder = WitnessBuilder::new()
                .block(block)
                .chain_id(chain_id)
                .execution_witness(provider.debug_execution_witness(number.into()).await?);

            let prev_state_root = match prev_witness {
                Some(witness) => {
                    if witness.header.number != number - 1 {
                        eyre::bail!(
                            "the ref witness is not the previous block, expected {} get {}",
                            number - 1,
                            witness.header.number,
                        );
                    }
                    witness.header.state_root
                }
                None => {
                    let parent_block = provider
                        .get_block_by_hash(parent_hash)
                        .await?
                        .expect("parent block should exist");

                    parent_block.header.state_root
                }
                (
                    chain_id,
                    block.header.number,
                    parent_block.header.state_root,
                )
            };
            witness_builder = witness_builder.prev_state_root(prev_state_root);

            Ok(witness_builder.build()?)
            let req = provider
                .dump_block_witness(block_num)
                .with_chain_id(chain_id)
                .with_prev_state_root(prev_state_root);

            let witness = req
                .send()
                .await
                .transpose()
                .ok_or_else(|| eyre::eyre!("Block witness {block_num} not available"))??;

            Ok(witness)
        }

        tracing::debug!("fetch witness for {block_hash}");
        self.handle
            .block_on(fetch_witness_async(self.provider, block_hash, prev_witness))
        self.handle.block_on(fetch_witness_async(
            &self.provider,
            block_hash,
            prev_witness,
        ))
    }

    fn try_fetch_storage_node(
@@ -156,7 +154,7 @@ impl ChunkInterpreter for RpcClient<'_> {
        node_hash: sbv_primitives::B256,
    ) -> Result<sbv_primitives::Bytes> {
        async fn fetch_storage_node_async(
            provider: &RootProvider<Network>,
            provider: impl Provider<Network>,
            node_hash: sbv_primitives::B256,
        ) -> Result<sbv_primitives::Bytes> {
            let ret = provider
@@ -168,7 +166,7 @@ impl ChunkInterpreter for RpcClient<'_> {

        tracing::debug!("fetch storage node for {node_hash}");
        self.handle
            .block_on(fetch_storage_node_async(self.provider, node_hash))
            .block_on(fetch_storage_node_async(&self.provider, node_hash))
    }
}
@@ -194,10 +192,10 @@ mod tests {
         let client_core = RpcClientCore::create(&config).expect("Failed to create RPC client");
         let client = client_core.get_client();

-        // latest - 1 block in 2025.6.15
+        // latest - 1 block in 2025.9.11
         let block_hash = B256::from(
             hex::const_decode_to_array(
-                b"0x9535a6970bc4db9031749331a214e35ed8c8a3f585f6f456d590a0bc780a1368",
+                b"0x093fb6bf2e556a659b35428ac447cd9f0635382fc40ffad417b5910824f9e932",
             )
             .unwrap(),
         );
@@ -207,10 +205,10 @@ mod tests {
             .try_fetch_block_witness(block_hash, None)
             .expect("should success");

-        // latest block in 2025.6.15
+        // block selected in 2025.9.11
         let block_hash = B256::from(
             hex::const_decode_to_array(
-                b"0xd47088cdb6afc68aa082e633bb7da9340d29c73841668afacfb9c1e66e557af0",
+                b"0x77cc84dd7a4dedf6fe5fb9b443aeb5a4fb0623ad088a365d3232b7b23fc848e5",
             )
             .unwrap(),
         );
@@ -220,26 +218,4 @@ mod tests {

         println!("{}", serde_json::to_string_pretty(&wit2).unwrap());
     }

-    #[test]
-    #[ignore = "Requires L2GETH_ENDPOINT environment variable"]
-    fn test_try_fetch_storage_node() {
-        let config = create_config_from_env();
-        let client_core = RpcClientCore::create(&config).expect("Failed to create RPC client");
-        let client = client_core.get_client();
-
-        // the root node (state root) of the block in unittest above
-        let node_hash = B256::from(
-            hex::const_decode_to_array(
-                b"0xb9e67403a2eb35afbb0475fe942918cf9a330a1d7532704c24554506be62b27c",
-            )
-            .unwrap(),
-        );
-
-        // This is expected to fail since we're using a dummy hash, but it tests the code path
-        let node = client
-            .try_fetch_storage_node(node_hash)
-            .expect("should success");
-        println!("{}", serde_json::to_string_pretty(&node).unwrap());
-    }
 }
@@ -10,6 +10,7 @@ scroll-zkvm-verifier.workspace = true

 alloy-primitives.workspace = true #depress the effect of "native-keccak"
 sbv-primitives = {workspace = true, features = ["scroll-compress-ratio", "scroll"]}
+sbv-core = { workspace = true, features = ["scroll"] }
 base64.workspace = true
 serde.workspace = true
 serde_derive.workspace = true
@@ -18,6 +19,7 @@ tracing.workspace = true
 eyre.workspace = true

 git-version = "0.3.5"
+bincode = { version = "2", features = ["serde"] }
 serde_stacker = "0.1"
 regex = "1.11"
 c-kzg = { version = "2.0", features = ["serde"] }
@@ -11,6 +11,27 @@ use serde_json::value::RawValue;
 use std::path::Path;
 use tasks::chunk_interpreter::{ChunkInterpreter, TryFromWithInterpreter};

+/// global features: use legacy encoding for witness
+static mut LEGACY_WITNESS_ENCODING: bool = false;
+pub(crate) fn witness_use_legacy_mode() -> bool {
+    unsafe { LEGACY_WITNESS_ENCODING }
+}
+
+pub fn set_dynamic_feature(feats: &str) {
+    for feat_s in feats.split(':') {
+        match feat_s.trim().to_lowercase().as_str() {
+            "legacy_witness" => {
+                tracing::info!("set witness encoding for legacy mode");
+                unsafe {
+                    // the function is only called while initialize step
+                    LEGACY_WITNESS_ENCODING = true;
+                }
+            }
+            s => tracing::warn!("unrecognized dynamic feature: {s}"),
+        }
+    }
+}
+
 /// Turn the coordinator's chunk task into a json string for formal chunk proving
 /// task (with full witnesses)
 pub fn checkout_chunk_task(
@@ -32,7 +53,6 @@ pub fn gen_universal_task(
     task_json: &str,
     fork_name_str: &str,
     expected_vk: &[u8],
-    interpreter: Option<impl ChunkInterpreter>,
 ) -> eyre::Result<(B256, String, String)> {
     use proofs::*;
     use tasks::*;
@@ -56,10 +76,9 @@ pub fn gen_universal_task(
             if fork_name_str != task.fork_name.as_str() {
                 eyre::bail!("fork name in chunk task not match the calling arg, expected {fork_name_str}, get {}", task.fork_name);
             }
-            let (pi_hash, metadata, u_task) = utils::panic_catch(move || {
-                gen_universal_chunk_task(task, fork_name_str.into(), interpreter)
-            })
-            .map_err(|e| eyre::eyre!("caught panic in chunk task{e}"))??;
+            let (pi_hash, metadata, u_task) =
+                utils::panic_catch(move || gen_universal_chunk_task(task, fork_name_str.into()))
+                    .map_err(|e| eyre::eyre!("caught panic in chunk task{e}"))??;
             (pi_hash, AnyMetaData::Chunk(metadata), u_task)
         }
         x if x == TaskType::Batch as i32 => {
@@ -9,8 +9,8 @@ use scroll_zkvm_types::{
     chunk::ChunkInfo,
     proof::{EvmProof, OpenVmEvmProof, ProofEnum, StarkProof},
     public_inputs::{ForkName, MultiVersionPublicInputs},
-    types_agg::{AggregationInput, ProgramCommitment},
-    utils::vec_as_base64,
+    types_agg::AggregationInput,
+    utils::{serialize_vk, vec_as_base64},
 };
 use serde::{de::DeserializeOwned, Deserialize, Serialize};
@@ -172,7 +172,7 @@ impl<Metadata> From<&WrappedProof<Metadata>> for AggregationInput {
     fn from(value: &WrappedProof<Metadata>) -> Self {
         Self {
             public_values: value.proof.public_values(),
-            commitment: ProgramCommitment::deserialize(&value.vk),
+            commitment: serialize_vk::deserialize(&value.vk),
         }
     }
 }
@@ -16,6 +16,11 @@ use crate::{
 use sbv_primitives::B256;
 use scroll_zkvm_types::public_inputs::{ForkName, MultiVersionPublicInputs};

+fn encode_task_to_witness<T: serde::Serialize>(task: &T) -> eyre::Result<Vec<u8>> {
+    let config = bincode::config::standard();
+    Ok(bincode::serde::encode_to_vec(task, config)?)
+}
+
 fn check_aggregation_proofs<Metadata>(
     proofs: &[proofs::WrappedProof<Metadata>],
     fork_name: ForkName,
@@ -37,13 +42,9 @@ where

 /// Generate required staff for chunk proving
 pub fn gen_universal_chunk_task(
-    mut task: ChunkProvingTask,
+    task: ChunkProvingTask,
     fork_name: ForkName,
-    interpreter: Option<impl ChunkInterpreter>,
 ) -> eyre::Result<(B256, ChunkProofMetadata, ProvingTask)> {
-    if let Some(interpreter) = interpreter {
-        task.prepare_task_via_interpret(interpreter)?;
-    }
     let chunk_total_gas = task.stats().total_gas_used;
     let chunk_info = task.precheck_and_build_metadata()?;
     let proving_task = task.try_into()?;
@@ -4,9 +4,9 @@ use eyre::Result;
 use sbv_primitives::{B256, U256};
 use scroll_zkvm_types::{
     batch::{
-        BatchHeader, BatchHeaderV6, BatchHeaderV7, BatchHeaderV8, BatchInfo, BatchWitness,
-        Envelope, EnvelopeV6, EnvelopeV7, EnvelopeV8, PointEvalWitness, ReferenceHeader,
-        ToArchievedWitness, N_BLOB_BYTES,
+        build_point_eval_witness, BatchHeader, BatchHeaderV6, BatchHeaderV7, BatchHeaderV8,
+        BatchInfo, BatchWitness, Envelope, EnvelopeV6, EnvelopeV7, EnvelopeV8, LegacyBatchWitness,
+        ReferenceHeader, N_BLOB_BYTES,
     },
     public_inputs::ForkName,
     task::ProvingTask,
@@ -84,6 +84,12 @@ impl TryFrom<BatchProvingTask> for ProvingTask {

     fn try_from(value: BatchProvingTask) -> Result<Self> {
         let witness = value.build_guest_input();
+        let serialized_witness = if crate::witness_use_legacy_mode() {
+            let legacy_witness = LegacyBatchWitness::from(witness);
+            to_rkyv_bytes::<RancorError>(&legacy_witness)?.into_vec()
+        } else {
+            super::encode_task_to_witness(&witness)?
+        };

         Ok(ProvingTask {
             identifier: value.batch_header.batch_hash().to_string(),
@@ -93,7 +99,7 @@ impl TryFrom<BatchProvingTask> for ProvingTask {
                 .into_iter()
                 .map(|w_proof| w_proof.proof.into_stark_proof().expect("expect root proof"))
                 .collect(),
-            serialized_witness: vec![to_rkyv_bytes::<RancorError>(&witness)?.into_vec()],
+            serialized_witness: vec![serialized_witness],
             vk: Vec::new(),
         })
     }
@@ -161,10 +167,10 @@ impl BatchProvingTask {
             assert_eq!(p, kzg_proof);
         }

-        let point_eval_witness = PointEvalWitness {
-            kzg_commitment: kzg_commitment.into_inner(),
-            kzg_proof: kzg_proof.into_inner(),
-        };
+        let point_eval_witness = Some(build_point_eval_witness(
+            kzg_commitment.into_inner(),
+            kzg_proof.into_inner(),
+        ));

         let reference_header = match fork_name {
             ForkName::EuclidV1 => ReferenceHeader::V6(*self.batch_header.must_v6_header()),
@@ -192,12 +198,7 @@ impl BatchProvingTask {
         // 1. generate data for metadata from the witness
         // 2. validate every adjacent proof pair
         let witness = self.build_guest_input();
-        let archieved = ToArchievedWitness::create(&witness)
-            .map_err(|e| eyre::eyre!("archieve batch witness fail: {e}"))?;
-        let archieved_witness = archieved
-            .access()
-            .map_err(|e| eyre::eyre!("access archieved batch witness fail: {e}"))?;
-        let metadata: BatchInfo = archieved_witness.into();
+        let metadata = BatchInfo::from(&witness);

         super::check_aggregation_proofs(self.chunk_proofs.as_slice(), fork_name)?;
@@ -1,9 +1,10 @@
 use crate::proofs::BatchProof;
 use eyre::Result;
 use scroll_zkvm_types::{
-    bundle::{BundleInfo, BundleWitness, ToArchievedWitness},
+    bundle::{BundleInfo, BundleWitness},
     public_inputs::ForkName,
     task::ProvingTask,
+    utils::{to_rkyv_bytes, RancorError},
 };

 /// Message indicating a sanity check failure.
@@ -56,12 +57,7 @@ impl BundleProvingTask {
         // 1. generate data for metadata from the witness
         // 2. validate every adjacent proof pair
         let witness = self.build_guest_input();
-        let archieved = ToArchievedWitness::create(&witness)
-            .map_err(|e| eyre::eyre!("archieve bundle witness fail: {e}"))?;
-        let archieved_witness = archieved
-            .access()
-            .map_err(|e| eyre::eyre!("access archieved bundle witness fail: {e}"))?;
-        let metadata: BundleInfo = archieved_witness.into();
+        let metadata = BundleInfo::from(&witness);

         super::check_aggregation_proofs(self.batch_proofs.as_slice(), fork_name)?;
@@ -74,6 +70,11 @@ impl TryFrom<BundleProvingTask> for ProvingTask {

     fn try_from(value: BundleProvingTask) -> Result<Self> {
         let witness = value.build_guest_input();
+        let serialized_witness = if crate::witness_use_legacy_mode() {
+            to_rkyv_bytes::<RancorError>(&witness)?.into_vec()
+        } else {
+            super::encode_task_to_witness(&witness)?
+        };

         Ok(ProvingTask {
             identifier: value.identifier(),
@@ -83,7 +84,7 @@ impl TryFrom<BundleProvingTask> for ProvingTask {
                 .into_iter()
                 .map(|w_proof| w_proof.proof.into_stark_proof().expect("expect root proof"))
                 .collect(),
-            serialized_witness: vec![witness.rkyv_serialize(None)?.to_vec()],
+            serialized_witness: vec![serialized_witness],
             vk: Vec::new(),
         })
     }
@@ -1,9 +1,11 @@
 use super::chunk_interpreter::*;
 use eyre::Result;
-use sbv_primitives::{types::BlockWitness, B256};
+use sbv_core::BlockWitness;
+use sbv_primitives::B256;
 use scroll_zkvm_types::{
-    chunk::{execute, ChunkInfo, ChunkWitness, ToArchievedWitness},
+    chunk::{execute, ChunkInfo, ChunkWitness, LegacyChunkWitness},
     task::ProvingTask,
+    utils::{to_rkyv_bytes, RancorError},
 };

 /// The type aligned with coordinator's defination
@@ -66,12 +68,18 @@ impl TryFrom<ChunkProvingTask> for ProvingTask {

     fn try_from(value: ChunkProvingTask) -> Result<Self> {
         let witness = value.build_guest_input();
+        let serialized_witness = if crate::witness_use_legacy_mode() {
+            let legacy_witness = LegacyChunkWitness::from(witness);
+            to_rkyv_bytes::<RancorError>(&legacy_witness)?.into_vec()
+        } else {
+            super::encode_task_to_witness(&witness)?
+        };

         Ok(ProvingTask {
             identifier: value.identifier(),
             fork_name: value.fork_name,
             aggregated_proofs: Vec::new(),
-            serialized_witness: vec![witness.rkyv_serialize(None)?.to_vec()],
+            serialized_witness: vec![serialized_witness],
             vk: Vec::new(),
         })
     }
@@ -83,7 +91,7 @@ impl ChunkProvingTask {
         let num_txs = self
             .block_witnesses
             .iter()
-            .map(|b| b.transaction.len())
+            .map(|b| b.transactions.len())
             .sum::<usize>();
         let total_gas_used = self
             .block_witnesses
@@ -131,18 +139,14 @@ impl ChunkProvingTask {

     pub fn precheck_and_build_metadata(&self) -> Result<ChunkInfo> {
         let witness = self.build_guest_input();
-        let archieved = ToArchievedWitness::create(&witness)
-            .map_err(|e| eyre::eyre!("archieve chunk witness fail: {e}"))?;
-        let archieved_witness = archieved
-            .access()
-            .map_err(|e| eyre::eyre!("access archieved chunk witness fail: {e}"))?;

-        let ret = ChunkInfo::try_from(archieved_witness).map_err(|e| eyre::eyre!("{e}"))?;
+        let ret = ChunkInfo::try_from(witness).map_err(|e| eyre::eyre!("{e}"))?;
         Ok(ret)
     }

     /// this method check the validate of current task (there may be missing storage node)
     /// and try fixing it until everything is ok
+    #[deprecated]
     pub fn prepare_task_via_interpret(
         &mut self,
         interpreter: impl ChunkInterpreter,
@@ -166,13 +170,8 @@ impl ChunkProvingTask {
         let mut attempts = 0;
         loop {
             let witness = self.build_guest_input();
-            let archieved = ToArchievedWitness::create(&witness)
-                .map_err(|e| eyre::eyre!("archieve chunk witness fail: {e}"))?;
-            let archieved_witness = archieved
-                .access()
-                .map_err(|e| eyre::eyre!("access archieved chunk witness fail: {e}"))?;

-            match execute(archieved_witness) {
+            match execute(witness) {
                 Ok(_) => return Ok(()),
                 Err(e) => {
                     if let Some(caps) = err_parse_re.captures(&e) {
@@ -1,5 +1,6 @@
 use eyre::Result;
-use sbv_primitives::{types::BlockWitness, Bytes, B256};
+use sbv_core::BlockWitness;
+use sbv_primitives::{Bytes, B256};

 /// An interpreter which is cirtical in translating chunk data
 /// since we need to grep block witness and storage node data
@@ -17,10 +17,10 @@ pub struct Verifier {

 impl Verifier {
     pub fn new(assets_dir: &str, fork: ForkName) -> Self {
-        let verifier_bin = Path::new(assets_dir).join("verifier.bin");
+        let verifier_bin = Path::new(assets_dir);

         Self {
-            verifier: UniversalVerifier::setup(&verifier_bin).expect("Setting up chunk verifier"),
+            verifier: UniversalVerifier::setup(verifier_bin).expect("Setting up chunk verifier"),
             fork,
         }
     }
@@ -32,12 +32,16 @@ impl ProofVerifier for Verifier {
             TaskType::Chunk => {
                 let proof = serde_json::from_slice::<ChunkProof>(proof).unwrap();
                 assert!(proof.pi_hash_check(self.fork));
-                UniversalVerifier::verify_stark_proof(proof.as_root_proof(), &proof.vk).unwrap()
+                self.verifier
+                    .verify_stark_proof(proof.as_root_proof(), &proof.vk)
+                    .unwrap()
             }
             TaskType::Batch => {
                 let proof = serde_json::from_slice::<BatchProof>(proof).unwrap();
                 assert!(proof.pi_hash_check(self.fork));
-                UniversalVerifier::verify_stark_proof(proof.as_root_proof(), &proof.vk).unwrap()
+                self.verifier
+                    .verify_stark_proof(proof.as_root_proof(), &proof.vk)
+                    .unwrap()
             }
             TaskType::Bundle => {
                 let proof = serde_json::from_slice::<BundleProof>(proof).unwrap();
@@ -153,17 +153,12 @@ pub unsafe extern "C" fn gen_universal_task(
     expected_vk: *const u8,
     expected_vk_len: usize,
 ) -> HandlingResult {
-    let mut interpreter = None;
     let task_json = if task_type == TaskType::Chunk as i32 {
         let pre_task_str = c_char_to_str(task);
         let cli = l2geth::get_client();
         match libzkp::checkout_chunk_task(pre_task_str, cli) {
-            Ok(str) => {
-                interpreter.replace(cli);
-                str
-            }
+            Ok(str) => str,
             Err(e) => {
-                println!("gen_universal_task failed at pre interpret step, error: {e}");
                 tracing::error!("gen_universal_task failed at pre interpret step, error: {e}");
                 return failed_handling_result();
             }
@@ -178,13 +173,8 @@ pub unsafe extern "C" fn gen_universal_task(
         &[]
     };

-    let ret = libzkp::gen_universal_task(
-        task_type,
-        &task_json,
-        c_char_to_str(fork_name),
-        expected_vk,
-        interpreter,
-    );
+    let ret =
+        libzkp::gen_universal_task(task_type, &task_json, c_char_to_str(fork_name), expected_vk);

     if let Ok((pi_hash, meta_json, task_json)) = ret {
         let expected_pi_hash = pi_hash.0.map(|byte| byte as c_char);
@@ -255,3 +245,10 @@ pub unsafe extern "C" fn release_string(ptr: *mut c_char) {
         let _ = CString::from_raw(ptr);
     }
 }
+
+/// # Safety
+#[no_mangle]
+pub unsafe extern "C" fn set_dynamic_feature(feats: *const c_char) {
+    let feats_str = c_char_to_str(feats);
+    libzkp::set_dynamic_feature(feats_str);
+}
@@ -33,3 +33,7 @@ clap = { version = "4.5", features = ["derive"] }
 ctor = "0.2.8"
 url = { version = "2.5.4", features = ["serde",] }
 serde_bytes = "0.11.15"
+
+[features]
+default = []
+cuda = ["scroll-zkvm-prover/cuda"]
@@ -1,3 +1,5 @@
+#![allow(dead_code)]
+
 use serde::{Deserialize, Deserializer, Serialize, Serializer};

 #[derive(Serialize, Deserialize, Default)]
@@ -2,7 +2,6 @@ use std::path::Path;

 use super::CircuitsHandler;
 use async_trait::async_trait;
-use base64::{prelude::BASE64_STANDARD, Engine};
 use eyre::Result;
 use scroll_proving_sdk::prover::ProofType;
 use scroll_zkvm_prover::{Prover, ProverConfig};
@@ -12,10 +11,12 @@ pub struct UniversalHandler {
     prover: Prover,
 }

+/// Safe for current usage as `CircuitsHandler` trait (protected inside of Mutex and NEVER extract
+/// the instance out by `into_inner`)
 unsafe impl Send for UniversalHandler {}

 impl UniversalHandler {
-    pub fn new(workspace_path: impl AsRef<Path>, proof_type: ProofType) -> Result<Self> {
+    pub fn new(workspace_path: impl AsRef<Path>, _proof_type: ProofType) -> Result<Self> {
         let path_app_exe = workspace_path.as_ref().join("app.vmexe");
         let path_app_config = workspace_path.as_ref().join("openvm.toml");
         let segment_len = Some((1 << 22) - 100);
@@ -25,16 +26,14 @@ impl UniversalHandler {
             segment_len,
         };

-        let use_evm = proof_type == ProofType::Bundle;
-
-        let prover = Prover::setup(config, use_evm, None)?;
+        let prover = Prover::setup(config, None)?;
         Ok(Self { prover })
     }

     /// get_prover get the inner prover, later we would replace chunk/batch/bundle_prover with
     /// universal prover, before that, use bundle_prover as the represent one
-    pub fn get_prover(&self) -> &Prover {
-        &self.prover
+    pub fn get_prover(&mut self) -> &mut Prover {
+        &mut self.prover
     }

     pub fn get_task_from_input(input: &str) -> Result<ProvingTask> {
@@ -45,14 +44,7 @@ impl UniversalHandler {
 #[async_trait]
 impl CircuitsHandler for Mutex<UniversalHandler> {
     async fn get_proof_data(&self, u_task: &ProvingTask, need_snark: bool) -> Result<String> {
-        let handler_self = self.lock().await;
-
-        if need_snark && handler_self.prover.evm_prover.is_none() {
-            eyre::bail!(
-                "do not init prover for evm (vk: {})",
-                BASE64_STANDARD.encode(handler_self.get_prover().get_app_vk())
-            )
-        }
+        let mut handler_self = self.lock().await;

         let proof = handler_self
             .get_prover()
@@ -15,7 +15,6 @@
     },
     "chunk_proposer_config": {
       "propose_interval_milliseconds": 100,
      "max_block_num_per_chunk": 100,
      "max_l2_gas_per_chunk": 20000000,
      "chunk_timeout_sec": 300,
      "max_uncompressed_batch_bytes_size": 4194304
@@ -92,7 +92,6 @@
     },
     "chunk_proposer_config": {
      "propose_interval_milliseconds": 100,
      "max_block_num_per_chunk": 100,
      "max_l2_gas_per_chunk": 20000000,
      "chunk_timeout_sec": 300,
      "max_uncompressed_batch_bytes_size": 4194304
@@ -31,7 +31,6 @@ type L2Config struct {
 // ChunkProposerConfig loads chunk_proposer configuration items.
 type ChunkProposerConfig struct {
 	ProposeIntervalMilliseconds   uint64 `json:"propose_interval_milliseconds"`
 	MaxBlockNumPerChunk           uint64 `json:"max_block_num_per_chunk"`
 	MaxL2GasPerChunk              uint64 `json:"max_l2_gas_per_chunk"`
 	ChunkTimeoutSec               uint64 `json:"chunk_timeout_sec"`
 	MaxUncompressedBatchBytesSize uint64 `json:"max_uncompressed_batch_bytes_size"`
@@ -7,8 +7,14 @@ import (

 // SenderConfig The config for transaction sender
 type SenderConfig struct {
-	// The RPC endpoint of the ethereum or scroll public node.
+	// The RPC endpoint of the ethereum or scroll public node (for backward compatibility).
+	// If WriteEndpoints is specified, this endpoint will be used only for reading.
+	// If WriteEndpoints is empty, this endpoint will be used for both reading and writing.
 	Endpoint string `json:"endpoint"`
+	// The RPC endpoints to send transactions to (optional).
+	// If specified, transactions will be sent to all these endpoints in parallel.
+	// If empty, transactions will be sent to the Endpoint.
+	WriteEndpoints []string `json:"write_endpoints,omitempty"`
 	// The time to trigger check pending txs in sender.
 	CheckPendingTime uint64 `json:"check_pending_time"`
 	// The number of blocks to wait to escalate increase gas price of the transaction.
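The new `write_endpoints` field is additive: leaving it out preserves the old single-endpoint behavior. A minimal sketch of how such a sender config might look, with hypothetical endpoint URLs, parsed into a struct that mirrors only the fields shown above:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Mirrors the updated SenderConfig fields shown in the diff above
// (the remaining fields are elided).
type SenderConfig struct {
	Endpoint       string   `json:"endpoint"`
	WriteEndpoints []string `json:"write_endpoints,omitempty"`
}

func main() {
	// Hypothetical config: one read endpoint, two write endpoints.
	raw := []byte(`{
		"endpoint": "https://l1-read.example.com",
		"write_endpoints": [
			"https://l1-write-a.example.com",
			"https://l1-write-b.example.com"
		]
	}`)

	var cfg SenderConfig
	if err := json.Unmarshal(raw, &cfg); err != nil {
		panic(err)
	}

	// NewSender dials Endpoint for reads; with WriteEndpoints set, signed
	// transactions are broadcast to every write endpoint in parallel, and
	// with WriteEndpoints empty the read client is reused for writes.
	fmt.Println("read:", cfg.Endpoint, "write:", cfg.WriteEndpoints)
}
```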
@@ -345,8 +345,16 @@ func (r *Layer2Relayer) commitGenesisBatch(batchHash string, batchHeader []byte,
 // - backlogCount > r.cfg.BatchSubmission.BacklogMax -> forceSubmit
 // - we have at least minBatches AND price hits a desired target price
 func (r *Layer2Relayer) ProcessPendingBatches() {
-	// Get effective batch limits based on whether validium mode is enabled.
-	minBatches, maxBatches := r.getEffectiveBatchLimits()
+	// First, get the backlog count to determine batch submission strategy
+	backlogCount, err := r.batchOrm.GetFailedAndPendingBatchesCount(r.ctx)
+	if err != nil {
+		log.Error("Failed to fetch pending L2 batches count", "err", err)
+		return
+	}
+	r.metrics.rollupL2RelayerBacklogCounts.Set(float64(backlogCount))

+	// Get effective batch limits based on validium mode and backlog size.
+	minBatches, maxBatches := r.getEffectiveBatchLimits(backlogCount)

 	// get pending batches from database in ascending order by their index.
 	dbBatches, err := r.batchOrm.GetFailedAndPendingBatches(r.ctx, maxBatches)
@@ -360,15 +368,6 @@
 		return
 	}

-	// if backlog outgrow max size, force-submit enough oldest batches
-	backlogCount, err := r.batchOrm.GetFailedAndPendingBatchesCount(r.ctx)
-	r.metrics.rollupL2RelayerBacklogCounts.Set(float64(backlogCount))
-
-	if err != nil {
-		log.Error("Failed to fetch pending L2 batches", "err", err)
-		return
-	}
-
 	var forceSubmit bool

 	startChunk, err := r.chunkOrm.GetChunkByIndex(r.ctx, dbBatches[0].StartChunkIndex)
@@ -563,12 +562,22 @@
 	log.Info("Sent the commitBatches tx to layer1", "batches count", len(batchesToSubmit), "start index", firstBatch.Index, "start hash", firstBatch.Hash, "end index", lastBatch.Index, "end hash", lastBatch.Hash, "tx hash", txHash.String())
 }

-// getEffectiveBatchLimits returns the effective min and max batch limits based on whether validium mode is enabled.
-func (r *Layer2Relayer) getEffectiveBatchLimits() (int, int) {
+// getEffectiveBatchLimits returns the effective min and max batch limits based on whether validium mode is enabled
+// and the current backlog size.
+// When backlogCount >= backlog_max: submit min_batches for fast inclusion at slightly higher price.
+// When backlogCount < backlog_max: submit max_batches for better cost amortization.
+func (r *Layer2Relayer) getEffectiveBatchLimits(backlogCount int64) (int, int) {
 	if r.cfg.ValidiumMode {
 		return 1, 1 // minBatches=1, maxBatches=1
 	}
-	return r.cfg.BatchSubmission.MinBatches, r.cfg.BatchSubmission.MaxBatches
+
+	// If backlog is at or above max, prioritize fast inclusion by submitting min_batches
+	if backlogCount >= r.cfg.BatchSubmission.BacklogMax {
+		return r.cfg.BatchSubmission.MinBatches, r.cfg.BatchSubmission.MinBatches
+	}
+
+	// Otherwise, prioritize cost efficiency by trying to submit max_batches
+	return r.cfg.BatchSubmission.MaxBatches, r.cfg.BatchSubmission.MaxBatches
 }

 func (r *Layer2Relayer) contextIDFromBatches(codecVersion encoding.CodecVersion, batches []*dbBatchWithChunks) string {
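The net effect of the relayer change is that the backlog count is read once, exported as a metric, and then drives how many batches go into each commit transaction. A small self-contained sketch of that selection rule, using hypothetical config values to show the three cases:

```go
package main

import "fmt"

// Hypothetical stand-ins for the relayer's batch submission config fields.
type batchSubmission struct {
	MinBatches int
	MaxBatches int
	BacklogMax int64
}

// effectiveBatchLimits mirrors the decision logic of getEffectiveBatchLimits:
// validium mode always submits one batch at a time; a large backlog favors
// fast inclusion (min_batches); otherwise cost amortization wins (max_batches).
func effectiveBatchLimits(validium bool, cfg batchSubmission, backlogCount int64) (int, int) {
	if validium {
		return 1, 1
	}
	if backlogCount >= cfg.BacklogMax {
		return cfg.MinBatches, cfg.MinBatches
	}
	return cfg.MaxBatches, cfg.MaxBatches
}

func main() {
	cfg := batchSubmission{MinBatches: 2, MaxBatches: 6, BacklogMax: 20}

	for _, backlog := range []int64{5, 20, 50} {
		minB, maxB := effectiveBatchLimits(false, cfg, backlog)
		fmt.Printf("backlog=%d -> min=%d max=%d\n", backlog, minB, maxB)
	}

	// Validium mode ignores the backlog entirely.
	minB, maxB := effectiveBatchLimits(true, cfg, 50)
	fmt.Printf("validium -> min=%d max=%d\n", minB, maxB)
}
```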
@@ -56,6 +56,7 @@ func setupEnv(t *testing.T) {

 	cfg.L2Config.RelayerConfig.SenderConfig.Endpoint, err = testApps.GetPoSL1EndPoint()
 	assert.NoError(t, err)
+	cfg.L2Config.RelayerConfig.SenderConfig.WriteEndpoints = []string{cfg.L2Config.RelayerConfig.SenderConfig.Endpoint, cfg.L2Config.RelayerConfig.SenderConfig.Endpoint}
 	cfg.L1Config.RelayerConfig.SenderConfig.Endpoint, err = testApps.GetL2GethEndPoint()
 	assert.NoError(t, err)
@@ -7,6 +7,7 @@ import (
|
||||
"fmt"
|
||||
"math/big"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/holiman/uint256"
|
||||
@@ -67,7 +68,8 @@ type FeeData struct {
|
||||
type Sender struct {
|
||||
config *config.SenderConfig
|
||||
gethClient *gethclient.Client
|
||||
client *ethclient.Client // The client to retrieve on chain data or send transaction.
|
||||
client *ethclient.Client // The client to retrieve on chain data (read-only)
|
||||
writeClients []*ethclient.Client // The clients to send transactions to (write operations)
|
||||
transactionSigner *TransactionSigner
|
||||
chainID *big.Int // The chain id of the endpoint
|
||||
ctx context.Context
|
||||
@@ -90,9 +92,10 @@ func NewSender(ctx context.Context, config *config.SenderConfig, signerConfig *c
|
||||
return nil, fmt.Errorf("invalid params, EscalateMultipleNum; %v, EscalateMultipleDen: %v", config.EscalateMultipleNum, config.EscalateMultipleDen)
|
||||
}
|
||||
|
||||
// Initialize read client
|
||||
rpcClient, err := rpc.Dial(config.Endpoint)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to dial eth client, err: %w", err)
|
||||
return nil, fmt.Errorf("failed to dial read client, err: %w", err)
|
||||
}
|
||||
|
||||
client := ethclient.NewClient(rpcClient)
|
||||
@@ -105,12 +108,42 @@ func NewSender(ctx context.Context, config *config.SenderConfig, signerConfig *c
|
||||
return nil, fmt.Errorf("failed to create transaction signer, err: %w", err)
|
||||
}
|
||||
|
||||
// Initialize write clients
|
||||
var writeClients []*ethclient.Client
|
||||
if len(config.WriteEndpoints) > 0 {
|
||||
// Use specified write endpoints
|
||||
for i, endpoint := range config.WriteEndpoints {
|
||||
writeRpcClient, err := rpc.Dial(endpoint)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to dial write client %d (endpoint: %s), err: %w", i, endpoint, err)
|
||||
}
|
||||
writeClient := ethclient.NewClient(writeRpcClient)
|
||||
|
||||
// Verify the write client is connected to the same chain
|
||||
writeChainID, err := writeClient.ChainID(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get chain ID from write client %d (endpoint: %s), err: %w", i, endpoint, err)
|
||||
}
|
||||
if writeChainID.Cmp(chainID) != 0 {
|
||||
return nil, fmt.Errorf("write client %d (endpoint: %s) has different chain ID %s, expected %s", i, endpoint, writeChainID.String(), chainID.String())
|
||||
}
|
||||
|
||||
writeClients = append(writeClients, writeClient)
|
||||
}
|
||||
log.Info("initialized sender with multiple write clients", "service", service, "name", name, "readEndpoint", config.Endpoint, "writeEndpoints", config.WriteEndpoints)
|
||||
} else {
|
||||
// Use read client for writing (backward compatibility)
|
||||
writeClients = append(writeClients, client)
|
||||
log.Info("initialized sender with single client", "service", service, "name", name, "endpoint", config.Endpoint)
|
||||
}
|
||||
|
||||
// Create sender instance first and then initialize nonce
|
||||
sender := &Sender{
|
||||
ctx: ctx,
|
||||
config: config,
|
||||
gethClient: gethclient.New(rpcClient),
|
||||
client: client,
|
||||
writeClients: writeClients,
|
||||
chainID: chainID,
|
||||
transactionSigner: transactionSigner,
|
||||
db: db,
|
||||
@@ -169,6 +202,82 @@ func (s *Sender) getFeeData(target *common.Address, data []byte, sidecar *gethTy
|
||||
}
|
||||
}
|
||||
|
||||
// sendTransactionToMultipleClients sends a transaction to all write clients in parallel
// and returns success if at least one client succeeds
func (s *Sender) sendTransactionToMultipleClients(signedTx *gethTypes.Transaction) error {
	ctx, cancel := context.WithTimeout(s.ctx, 15*time.Second)
	defer cancel()

	if len(s.writeClients) == 1 {
		// Single client - use direct approach
		return s.writeClients[0].SendTransaction(ctx, signedTx)
	}

	// Multiple clients - send in parallel
	type result struct {
		endpoint string
		err      error
	}

	resultChan := make(chan result, len(s.writeClients))
	var wg sync.WaitGroup

	// Send transaction to all write clients in parallel
	for i, client := range s.writeClients {
		wg.Add(1)
		// Determine endpoint URL for this client
		endpoint := s.config.WriteEndpoints[i]

		go func(ep string, writeClient *ethclient.Client) {
			defer wg.Done()
			err := writeClient.SendTransaction(ctx, signedTx)
			resultChan <- result{endpoint: ep, err: err}
		}(endpoint, client)
	}

	// Wait for all goroutines to finish
	go func() {
		wg.Wait()
		close(resultChan)
	}()

	// Collect results
	var errs []error
	for res := range resultChan {
		if res.err != nil {
			errs = append(errs, fmt.Errorf("%s: %w", res.endpoint, res.err))
			log.Warn("failed to send transaction to write client",
				"endpoint", res.endpoint,
				"txHash", signedTx.Hash().Hex(),
				"nonce", signedTx.Nonce(),
				"from", s.transactionSigner.GetAddr().String(),
				"error", res.err)
		} else {
			log.Info("successfully sent transaction to write client",
				"endpoint", res.endpoint,
				"txHash", signedTx.Hash().Hex(),
				"nonce", signedTx.Nonce(),
				"from", s.transactionSigner.GetAddr().String())
		}
	}

	// Check if at least one client succeeded
	if len(errs) < len(s.writeClients) {
		successCount := len(s.writeClients) - len(errs)
		if len(errs) > 0 {
			log.Info("transaction partially succeeded",
				"txHash", signedTx.Hash().Hex(),
				"successCount", successCount,
				"totalClients", len(s.writeClients),
				"failures", errors.Join(errs...))
		}
		return nil
	}

	// All clients failed
	return fmt.Errorf("failed to send transaction to all %d write clients: %w", len(s.writeClients), errors.Join(errs...))
}

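The aggregation rule above counts the broadcast as successful as soon as any write client accepts the transaction; a joined error is returned only when every client fails. A dependency-free sketch of that any-success rule, sequential rather than parallel, with function values standing in for RPC clients:

package main

import (
	"errors"
	"fmt"
)

// broadcastAny applies the same "at least one success" rule as
// sendTransactionToMultipleClients: nil if any sender succeeds, a joined
// error only when every sender fails. Illustrative sketch only.
func broadcastAny(senders []func() error) error {
	var errs []error
	for _, send := range senders {
		if err := send(); err != nil {
			errs = append(errs, err)
		}
	}
	if len(errs) < len(senders) {
		return nil // at least one sender succeeded
	}
	return fmt.Errorf("all %d senders failed: %w", len(senders), errors.Join(errs...))
}

func main() {
	ok := func() error { return nil }
	fail := func() error { return errors.New("connection refused") }

	fmt.Println(broadcastAny([]func() error{fail, ok}))   // <nil>: partial failure is tolerated
	fmt.Println(broadcastAny([]func() error{fail, fail})) // joined error: total failure
}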
// SendTransaction sends a signed L2-to-L1 transaction.
func (s *Sender) SendTransaction(contextID string, target *common.Address, data []byte, blobs []*kzg4844.Blob) (common.Hash, uint64, error) {
	s.metrics.sendTransactionTotal.WithLabelValues(s.service, s.name).Inc()

@@ -230,7 +339,7 @@ func (s *Sender) SendTransaction(contextID string, target *common.Address, data
		return common.Hash{}, 0, fmt.Errorf("failed to insert transaction, err: %w", err)
	}

	if err := s.client.SendTransaction(s.ctx, signedTx); err != nil {
	if err := s.sendTransactionToMultipleClients(signedTx); err != nil {
		// Delete the transaction from the pending transaction table if it fails to send.
		if updateErr := s.pendingTransactionOrm.DeleteTransactionByTxHash(s.ctx, signedTx.Hash()); updateErr != nil {
			log.Error("failed to delete transaction", "tx hash", signedTx.Hash().String(), "from", s.transactionSigner.GetAddr().String(), "nonce", signedTx.Nonce(), "err", updateErr)

@@ -645,7 +754,7 @@ func (s *Sender) checkPendingTransaction() {
			return
		}

		if err := s.client.SendTransaction(s.ctx, newSignedTx); err != nil {
		if err := s.sendTransactionToMultipleClients(newSignedTx); err != nil {
			if strings.Contains(err.Error(), "nonce too low") {
				// When we receive a 'nonce too low' error but cannot find the transaction receipt, it indicates another transaction with this nonce has already been processed, so this transaction will never be mined and should be marked as failed.
				log.Warn("nonce too low detected, marking all non-confirmed transactions with same nonce as failed", "nonce", originalTx.Nonce(), "address", s.transactionSigner.GetAddr().Hex(), "txHash", originalTx.Hash().Hex(), "newTxHash", newSignedTx.Hash().Hex(), "err", err)

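One consequence of routing checkPendingTransaction through sendTransactionToMultipleClients: the "nonce too low" branch above only sees an error when every write client failed, and that error is a join of per-endpoint messages, so the substring check still matches if any endpoint reported the low nonce. A small sketch of that behaviour; the endpoint URLs are hypothetical:

package main

import (
	"errors"
	"fmt"
	"strings"
)

// On total failure the sender returns a wrapped errors.Join of per-endpoint
// errors prefixed with the endpoint, so the substring check used above still
// fires when any endpoint saw "nonce too low".
func main() {
	joined := fmt.Errorf("failed to send transaction to all 2 write clients: %w",
		errors.Join(
			errors.New("http://writer-1:8545: nonce too low"),
			errors.New("http://writer-2:8545: connection refused"),
		))
	fmt.Println(strings.Contains(joined.Error(), "nonce too low")) // true
}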
@@ -36,7 +36,7 @@ func testBatchProposerLimitsCodecV7(t *testing.T) {
			name: "Timeout",
			batchTimeoutSec: 0,
			expectedBatchesLen: 1,
			expectedChunksInFirstBatch: 2,
			expectedChunksInFirstBatch: 1,
		},
	}

@@ -72,8 +72,7 @@ func testBatchProposerLimitsCodecV7(t *testing.T) {
	assert.NoError(t, err)

	cp := NewChunkProposer(context.Background(), &config.ChunkProposerConfig{
		MaxBlockNumPerChunk: 1,
		MaxL2GasPerChunk: 20000000,
		MaxL2GasPerChunk: math.MaxUint64,
		ChunkTimeoutSec: 300,
		MaxUncompressedBatchBytesSize: math.MaxUint64,
	}, encoding.CodecV7, &params.ChainConfig{

@@ -154,7 +153,6 @@ func testBatchProposerBlobSizeLimitCodecV7(t *testing.T) {
	chainConfig := &params.ChainConfig{LondonBlock: big.NewInt(0), BernoulliBlock: big.NewInt(0), CurieBlock: big.NewInt(0), DarwinTime: new(uint64), DarwinV2Time: new(uint64), EuclidTime: new(uint64), EuclidV2Time: new(uint64)}

	cp := NewChunkProposer(context.Background(), &config.ChunkProposerConfig{
		MaxBlockNumPerChunk: math.MaxUint64,
		MaxL2GasPerChunk: math.MaxUint64,
		ChunkTimeoutSec: 0,
		MaxUncompressedBatchBytesSize: math.MaxUint64,

@@ -227,7 +225,6 @@ func testBatchProposerMaxChunkNumPerBatchLimitCodecV7(t *testing.T) {
	chainConfig := &params.ChainConfig{LondonBlock: big.NewInt(0), BernoulliBlock: big.NewInt(0), CurieBlock: big.NewInt(0), DarwinTime: new(uint64), DarwinV2Time: new(uint64), EuclidTime: new(uint64), EuclidV2Time: new(uint64)}

	cp := NewChunkProposer(context.Background(), &config.ChunkProposerConfig{
		MaxBlockNumPerChunk: math.MaxUint64,
		MaxL2GasPerChunk: math.MaxUint64,
		ChunkTimeoutSec: 0,
		MaxUncompressedBatchBytesSize: math.MaxUint64,

@@ -309,15 +306,14 @@ func testBatchProposerUncompressedBatchBytesLimitCodecV8(t *testing.T) {

	// Create chunk proposer with no uncompressed batch bytes limit for chunks
	cp := NewChunkProposer(context.Background(), &config.ChunkProposerConfig{
		MaxBlockNumPerChunk: 1, // One block per chunk
		MaxL2GasPerChunk: math.MaxUint64,
		MaxL2GasPerChunk: 1200000, // One block per chunk via gas limit
		ChunkTimeoutSec: math.MaxUint32,
		MaxUncompressedBatchBytesSize: math.MaxUint64,
	}, encoding.CodecV8, chainConfig, db, nil)

	// Insert 2 blocks with large calldata and create 2 chunks
	l2BlockOrm := orm.NewL2Block(db)
	for i := uint64(1); i <= 2; i++ {
	for i := uint64(1); i <= 3; i++ {
		blockCopy := *block
		blockCopy.Header = &gethTypes.Header{}
		*blockCopy.Header = *block.Header

@@ -326,7 +322,9 @@ func testBatchProposerUncompressedBatchBytesLimitCodecV8(t *testing.T) {
		err := l2BlockOrm.InsertL2Blocks(context.Background(), []*encoding.Block{&blockCopy})
		assert.NoError(t, err)

		cp.TryProposeChunk() // Each call creates one chunk with one block
		cp.TryProposeChunk() // Each chunk will contain 1 block (~3KiB)
		// We create 2 chunks here, as we have 3 blocks and reach the gas limit for the 1st chunk with the 2nd block
		// and the 2nd chunk with the 3rd block.
	}

	// Create batch proposer with 4KiB uncompressed batch bytes limit

@@ -86,15 +86,19 @@ func testBundleProposerLimitsCodecV7(t *testing.T) {
	_, err = batchOrm.InsertBatch(context.Background(), batch, encoding.CodecV0, utils.BatchMetrics{})
	assert.NoError(t, err)

	block3 := *block1
	block3.Header = &gethTypes.Header{}
	*block3.Header = *block1.Header
	block3.Header.Number = new(big.Int).SetUint64(block2.Header.Number.Uint64() + 1)

	l2BlockOrm := orm.NewL2Block(db)
	err = l2BlockOrm.InsertL2Blocks(context.Background(), []*encoding.Block{block1, block2})
	err = l2BlockOrm.InsertL2Blocks(context.Background(), []*encoding.Block{block1, block2, &block3})
	assert.NoError(t, err)

	chainConfig := &params.ChainConfig{LondonBlock: big.NewInt(0), BernoulliBlock: big.NewInt(0), CurieBlock: big.NewInt(0), DarwinTime: new(uint64), DarwinV2Time: new(uint64), EuclidTime: new(uint64), EuclidV2Time: new(uint64)}

	cp := NewChunkProposer(context.Background(), &config.ChunkProposerConfig{
		MaxBlockNumPerChunk: 1,
		MaxL2GasPerChunk: math.MaxUint64,
		MaxL2GasPerChunk: 1152994, // One block per chunk via gas limit
		ChunkTimeoutSec: math.MaxUint32,
		MaxUncompressedBatchBytesSize: math.MaxUint64,
	}, encoding.CodecV7, chainConfig, db, nil)

@@ -54,7 +54,6 @@ type ChunkProposer struct {
// NewChunkProposer creates a new ChunkProposer instance.
func NewChunkProposer(ctx context.Context, cfg *config.ChunkProposerConfig, minCodecVersion encoding.CodecVersion, chainCfg *params.ChainConfig, db *gorm.DB, reg prometheus.Registerer) *ChunkProposer {
	log.Info("new chunk proposer",
		"maxBlockNumPerChunk", cfg.MaxBlockNumPerChunk,
		"maxL2GasPerChunk", cfg.MaxL2GasPerChunk,
		"chunkTimeoutSec", cfg.ChunkTimeoutSec,
		"maxBlobSize", maxBlobSize)

@@ -232,10 +231,9 @@ func (p *ChunkProposer) ProposeChunk() error {
		return err
	}

	maxBlocksThisChunk := p.cfg.MaxBlockNumPerChunk

	// select at most maxBlocksThisChunk blocks
	blocks, err := p.l2BlockOrm.GetL2BlocksGEHeight(p.ctx, unchunkedBlockHeight, int(maxBlocksThisChunk))
	// select blocks without a hard limit on count in practice (use a large value)
	// The actual limits will be enforced by gas, timeout, and blob size constraints
	blocks, err := p.l2BlockOrm.GetL2BlocksGEHeight(p.ctx, unchunkedBlockHeight, 1000)
	if err != nil {
		return err
	}

@@ -251,7 +249,7 @@ func (p *ChunkProposer) ProposeChunk() error {
		currentHardfork := encoding.GetHardforkName(p.chainCfg, blocks[i].Header.Number.Uint64(), blocks[i].Header.Time)
		if currentHardfork != hardforkName {
			blocks = blocks[:i]
			maxBlocksThisChunk = uint64(i) // update maxBlocksThisChunk to trigger chunking, because these blocks are the last blocks before the hardfork
			// Truncate blocks at hardfork boundary
			break
		}
	}

@@ -324,8 +322,8 @@ func (p *ChunkProposer) ProposeChunk() error {
	}

	currentTimeSec := uint64(time.Now().Unix())
	if metrics.FirstBlockTimestamp+p.cfg.ChunkTimeoutSec < currentTimeSec || metrics.NumBlocks == maxBlocksThisChunk {
		log.Info("reached maximum number of blocks in chunk or first block timeout",
	if metrics.FirstBlockTimestamp+p.cfg.ChunkTimeoutSec < currentTimeSec {
		log.Info("first block timeout reached",
			"block count", len(chunk.Blocks),
			"start block number", chunk.Blocks[0].Header.Number,
			"start block timestamp", metrics.FirstBlockTimestamp,

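With the block-count trigger removed, a pending chunk is now sealed by the first-block timeout, alongside the gas, blob-size and uncompressed-size limits enforced elsewhere in ProposeChunk. A minimal sketch of that predicate with illustrative numbers:

package main

import (
	"fmt"
	"time"
)

// chunkTimedOut mirrors the sealing condition above: the chunk is sealed once
// its first block is older than ChunkTimeoutSec. The maxBlocksThisChunk
// trigger is gone; other limits are checked separately. Values are illustrative.
func chunkTimedOut(firstBlockTimestamp, chunkTimeoutSec, nowSec uint64) bool {
	return firstBlockTimestamp+chunkTimeoutSec < nowSec
}

func main() {
	now := uint64(time.Now().Unix())
	fmt.Println(chunkTimedOut(now-301, 300, now)) // true: first block is older than the 300s timeout
	fmt.Println(chunkTimedOut(now-10, 300, now))  // false: keep accumulating blocks
}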
@@ -22,7 +22,6 @@ import (
|
||||
func testChunkProposerLimitsCodecV7(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
maxBlockNum uint64
|
||||
maxL2Gas uint64
|
||||
chunkTimeoutSec uint64
|
||||
expectedChunksLen int
|
||||
@@ -30,14 +29,12 @@ func testChunkProposerLimitsCodecV7(t *testing.T) {
|
||||
}{
|
||||
{
|
||||
name: "NoLimitReached",
|
||||
maxBlockNum: 100,
|
||||
maxL2Gas: 20_000_000,
|
||||
chunkTimeoutSec: 1000000000000,
|
||||
expectedChunksLen: 0,
|
||||
},
|
||||
{
|
||||
name: "Timeout",
|
||||
maxBlockNum: 100,
|
||||
maxL2Gas: 20_000_000,
|
||||
chunkTimeoutSec: 0,
|
||||
expectedChunksLen: 1,
|
||||
@@ -45,15 +42,13 @@ func testChunkProposerLimitsCodecV7(t *testing.T) {
|
||||
},
|
||||
{
|
||||
name: "MaxL2GasPerChunkIs0",
|
||||
maxBlockNum: 10,
|
||||
maxL2Gas: 0,
|
||||
chunkTimeoutSec: 1000000000000,
|
||||
expectedChunksLen: 0,
|
||||
},
|
||||
{
|
||||
name: "MaxBlockNumPerChunkIs1",
|
||||
maxBlockNum: 1,
|
||||
maxL2Gas: 20_000_000,
|
||||
name: "SingleBlockByGasLimit",
|
||||
maxL2Gas: 1_100_000,
|
||||
chunkTimeoutSec: 1000000000000,
|
||||
expectedChunksLen: 1,
|
||||
expectedBlocksInFirstChunk: 1,
|
||||
@@ -62,7 +57,6 @@ func testChunkProposerLimitsCodecV7(t *testing.T) {
|
||||
// In this test the second block is not included in the chunk because together
|
||||
// with the first block it exceeds the maxL2GasPerChunk limit.
|
||||
name: "MaxL2GasPerChunkIsSecondBlock",
|
||||
maxBlockNum: 10,
|
||||
maxL2Gas: 1_153_000,
|
||||
chunkTimeoutSec: 1000000000000,
|
||||
expectedChunksLen: 1,
|
||||
@@ -85,7 +79,6 @@ func testChunkProposerLimitsCodecV7(t *testing.T) {
|
||||
assert.NoError(t, err)
|
||||
|
||||
cp := NewChunkProposer(context.Background(), &config.ChunkProposerConfig{
|
||||
MaxBlockNumPerChunk: tt.maxBlockNum,
|
||||
MaxL2GasPerChunk: tt.maxL2Gas,
|
||||
ChunkTimeoutSec: tt.chunkTimeoutSec,
|
||||
MaxUncompressedBatchBytesSize: math.MaxUint64,
|
||||
@@ -110,53 +103,6 @@ func testChunkProposerLimitsCodecV7(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func testChunkProposerBlobSizeLimitCodecV7(t *testing.T) {
|
||||
db := setupDB(t)
|
||||
defer database.CloseDB(db)
|
||||
block := readBlockFromJSON(t, "../../../testdata/blockTrace_03.json")
|
||||
for i := uint64(0); i < 510; i++ {
|
||||
l2BlockOrm := orm.NewL2Block(db)
|
||||
block.Header.Number = new(big.Int).SetUint64(i + 1)
|
||||
block.Header.Time = i + 1
|
||||
err := l2BlockOrm.InsertL2Blocks(context.Background(), []*encoding.Block{block})
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
|
||||
// Add genesis chunk.
|
||||
chunkOrm := orm.NewChunk(db)
|
||||
_, err := chunkOrm.InsertChunk(context.Background(), &encoding.Chunk{Blocks: []*encoding.Block{{Header: &gethTypes.Header{Number: big.NewInt(0)}}}}, encoding.CodecV0, utils.ChunkMetrics{})
|
||||
assert.NoError(t, err)
|
||||
|
||||
chainConfig := &params.ChainConfig{LondonBlock: big.NewInt(0), BernoulliBlock: big.NewInt(0), CurieBlock: big.NewInt(0), DarwinTime: new(uint64), DarwinV2Time: new(uint64), EuclidTime: new(uint64), EuclidV2Time: new(uint64)}
|
||||
|
||||
cp := NewChunkProposer(context.Background(), &config.ChunkProposerConfig{
|
||||
MaxBlockNumPerChunk: 255,
|
||||
MaxL2GasPerChunk: math.MaxUint64,
|
||||
ChunkTimeoutSec: math.MaxUint32,
|
||||
MaxUncompressedBatchBytesSize: math.MaxUint64,
|
||||
}, encoding.CodecV7, chainConfig, db, nil)
|
||||
|
||||
for i := 0; i < 2; i++ {
|
||||
cp.TryProposeChunk()
|
||||
}
|
||||
|
||||
chunkOrm = orm.NewChunk(db)
|
||||
chunks, err := chunkOrm.GetChunksGEIndex(context.Background(), 1, 0)
|
||||
assert.NoError(t, err)
|
||||
|
||||
var expectedNumChunks int = 2
|
||||
var numBlocksMultiplier uint64 = 255
|
||||
assert.Len(t, chunks, expectedNumChunks)
|
||||
|
||||
for i, chunk := range chunks {
|
||||
expected := numBlocksMultiplier * (uint64(i) + 1)
|
||||
if expected > 2000 {
|
||||
expected = 2000
|
||||
}
|
||||
assert.Equal(t, expected, chunk.EndBlockNumber)
|
||||
}
|
||||
}
|
||||
|
||||
func testChunkProposerUncompressedBatchBytesLimitCodecV8(t *testing.T) {
|
||||
db := setupDB(t)
|
||||
defer database.CloseDB(db)
|
||||
@@ -204,7 +150,6 @@ func testChunkProposerUncompressedBatchBytesLimitCodecV8(t *testing.T) {
|
||||
// Set max_uncompressed_batch_bytes_size to 4KiB (4 * 1024)
|
||||
// One block (~3KiB) should fit, but two blocks (~6KiB) should exceed the limit
|
||||
cp := NewChunkProposer(context.Background(), &config.ChunkProposerConfig{
|
||||
MaxBlockNumPerChunk: math.MaxUint64, // No block number limit
|
||||
MaxL2GasPerChunk: math.MaxUint64, // No gas limit
|
||||
ChunkTimeoutSec: math.MaxUint32, // No timeout limit
|
||||
MaxUncompressedBatchBytesSize: 4 * 1024, // 4KiB limit
|
||||
|
||||
@@ -102,7 +102,6 @@ func TestFunction(t *testing.T) {
|
||||
|
||||
// Run chunk proposer test cases.
|
||||
t.Run("TestChunkProposerLimitsCodecV7", testChunkProposerLimitsCodecV7)
|
||||
t.Run("TestChunkProposerBlobSizeLimitCodecV7", testChunkProposerBlobSizeLimitCodecV7)
|
||||
t.Run("TestChunkProposerUncompressedBatchBytesLimitCodecV8", testChunkProposerUncompressedBatchBytesLimitCodecV8)
|
||||
|
||||
// Run batch proposer test cases.
|
||||
|
||||
@@ -2,7 +2,6 @@
	"l2_config": {
		"endpoint": "https://rpc.scroll.io",
		"chunk_proposer_config": {
			"max_block_num_per_chunk": 100,
			"max_l2_gas_per_chunk": 20000000,
			"chunk_timeout_sec": 72000000000,
			"max_uncompressed_batch_bytes_size": 4194304

215
rollup/tests/integration_tool/imports.go
Normal file
@@ -0,0 +1,215 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"math/rand"
|
||||
"sort"
|
||||
|
||||
"gorm.io/gorm"
|
||||
|
||||
"github.com/scroll-tech/da-codec/encoding"
|
||||
"github.com/scroll-tech/go-ethereum/common"
|
||||
"github.com/scroll-tech/go-ethereum/log"
|
||||
|
||||
"scroll-tech/common/database"
|
||||
|
||||
"scroll-tech/rollup/internal/orm"
|
||||
"scroll-tech/rollup/internal/utils"
|
||||
)
|
||||
|
||||
type importRecord struct {
|
||||
Chunk []string `json:"chunks"`
|
||||
Batch []string `json:"batches"`
|
||||
Bundle []string `json:"bundles"`
|
||||
}
|
||||
|
||||
func randomPickKfromN(n, k int, rng *rand.Rand) []int {
|
||||
ret := make([]int, n-1)
|
||||
for i := 1; i < n; i++ {
|
||||
ret[i-1] = i
|
||||
}
|
||||
|
||||
rng.Shuffle(len(ret), func(i, j int) {
|
||||
ret[i], ret[j] = ret[j], ret[i]
|
||||
})
|
||||
|
||||
ret = ret[:k-1]
|
||||
sort.Ints(ret)
|
||||
|
||||
return ret
|
||||
}
|
||||
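randomPickKfromN above returns k-1 sorted cut points drawn from 1..n-1, and importData appends a final exclusive border (endBlk+1, chkNum, batchNum), so n consecutive items always split into k non-empty contiguous ranges. A self-contained sketch of the same partitioning idea; names and seed are illustrative, not the tool's code:

package main

import (
	"fmt"
	"math/rand"
	"sort"
)

// pickSeparators draws k-1 distinct cut points from 1..n-1 so that n
// consecutive items split into k non-empty, contiguous segments.
func pickSeparators(n, k int, rng *rand.Rand) []int {
	cuts := rng.Perm(n - 1)[:k-1] // values 0..n-2, keep k-1 of them
	for i := range cuts {
		cuts[i]++ // shift to 1..n-1
	}
	sort.Ints(cuts)
	return cuts
}

func main() {
	rng := rand.New(rand.NewSource(42))
	n, k := 10, 4
	cuts := append(pickSeparators(n, k, rng), n) // final exclusive border, as in importData
	start := 0
	for _, end := range cuts {
		fmt.Printf("segment [%d, %d)\n", start, end) // k half-open ranges covering 0..n-1
		start = end
	}
}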
|
||||
func importData(ctx context.Context, beginBlk, endBlk uint64, chkNum, batchNum, bundleNum int, seed int64) (*importRecord, error) {
|
||||
|
||||
db, err := database.InitDB(cfg.DBConfig)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret := &importRecord{}
|
||||
// Create a new random source with the provided seed
|
||||
source := rand.NewSource(seed)
|
||||
//nolint:all
|
||||
rng := rand.New(source)
|
||||
|
||||
chkSepIdx := randomPickKfromN(int(endBlk-beginBlk)+1, chkNum, rng)
|
||||
chkSep := make([]uint64, len(chkSepIdx))
|
||||
for i, ind := range chkSepIdx {
|
||||
chkSep[i] = beginBlk + uint64(ind)
|
||||
}
|
||||
chkSep = append(chkSep, endBlk+1)
|
||||
|
||||
log.Info("separated chunk", "border", chkSep)
|
||||
head := beginBlk
|
||||
lastMsgHash := common.Hash{}
|
||||
|
||||
ormChks := make([]*orm.Chunk, 0, chkNum)
|
||||
encChks := make([]*encoding.Chunk, 0, chkNum)
|
||||
for _, edBlk := range chkSep {
|
||||
ormChk, chk, err := importChunk(ctx, db, head, edBlk-1, lastMsgHash)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
lastMsgHash = chk.PostL1MessageQueueHash
|
||||
ormChks = append(ormChks, ormChk)
|
||||
encChks = append(encChks, chk)
|
||||
head = edBlk
|
||||
}
|
||||
|
||||
for _, chk := range ormChks {
|
||||
ret.Chunk = append(ret.Chunk, chk.Hash)
|
||||
}
|
||||
|
||||
batchSep := randomPickKfromN(chkNum, batchNum, rng)
|
||||
batchSep = append(batchSep, chkNum)
|
||||
log.Info("separated batch", "border", batchSep)
|
||||
|
||||
headChk := int(0)
|
||||
batches := make([]*orm.Batch, 0, batchNum)
|
||||
var lastBatch *orm.Batch
|
||||
for _, endChk := range batchSep {
|
||||
batch, err := importBatch(ctx, db, ormChks[headChk:endChk], encChks[headChk:endChk], lastBatch)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
lastBatch = batch
|
||||
batches = append(batches, batch)
|
||||
headChk = endChk
|
||||
}
|
||||
|
||||
for _, batch := range batches {
|
||||
ret.Batch = append(ret.Batch, batch.Hash)
|
||||
}
|
||||
|
||||
bundleSep := randomPickKfromN(batchNum, bundleNum, rng)
|
||||
bundleSep = append(bundleSep, batchNum)
|
||||
log.Info("separated bundle", "border", bundleSep)
|
||||
|
||||
headBatch := int(0)
|
||||
for _, endBatch := range bundleSep {
|
||||
hash, err := importBundle(ctx, db, batches[headBatch:endBatch])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ret.Bundle = append(ret.Bundle, hash)
|
||||
headBatch = endBatch
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func importChunk(ctx context.Context, db *gorm.DB, beginBlk, endBlk uint64, prevMsgQueueHash common.Hash) (*orm.Chunk, *encoding.Chunk, error) {
|
||||
nblk := int(endBlk-beginBlk) + 1
|
||||
blockOrm := orm.NewL2Block(db)
|
||||
|
||||
blks, err := blockOrm.GetL2BlocksGEHeight(ctx, beginBlk, nblk)
|
||||
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
postHash, err := encoding.MessageQueueV2ApplyL1MessagesFromBlocks(prevMsgQueueHash, blks)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
theChunk := &encoding.Chunk{
|
||||
Blocks: blks,
|
||||
PrevL1MessageQueueHash: prevMsgQueueHash,
|
||||
PostL1MessageQueueHash: postHash,
|
||||
}
|
||||
chunkOrm := orm.NewChunk(db)
|
||||
|
||||
dbChk, err := chunkOrm.InsertChunk(ctx, theChunk, codecCfg, utils.ChunkMetrics{})
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
err = blockOrm.UpdateChunkHashInRange(ctx, beginBlk, endBlk, dbChk.Hash)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
log.Info("insert chunk", "From", beginBlk, "To", endBlk, "hash", dbChk.Hash)
|
||||
return dbChk, theChunk, nil
|
||||
}
|
||||
|
||||
func importBatch(ctx context.Context, db *gorm.DB, chks []*orm.Chunk, encChks []*encoding.Chunk, last *orm.Batch) (*orm.Batch, error) {
|
||||
|
||||
batchOrm := orm.NewBatch(db)
|
||||
if last == nil {
|
||||
var err error
|
||||
last, err = batchOrm.GetLatestBatch(ctx)
|
||||
if err != nil && !errors.Is(err, gorm.ErrRecordNotFound) {
|
||||
return nil, err
|
||||
} else if last != nil {
|
||||
log.Info("start from last batch", "index", last.Index)
|
||||
}
|
||||
}
|
||||
|
||||
index := uint64(0)
|
||||
var parentHash common.Hash
|
||||
if last != nil {
|
||||
index = last.Index + 1
|
||||
parentHash = common.HexToHash(last.Hash)
|
||||
}
|
||||
|
||||
var blks []*encoding.Block
|
||||
for _, chk := range encChks {
|
||||
blks = append(blks, chk.Blocks...)
|
||||
}
|
||||
|
||||
batch := &encoding.Batch{
|
||||
Index: index,
|
||||
TotalL1MessagePoppedBefore: chks[0].TotalL1MessagesPoppedBefore,
|
||||
ParentBatchHash: parentHash,
|
||||
Chunks: encChks,
|
||||
Blocks: blks,
|
||||
}
|
||||
|
||||
dbBatch, err := batchOrm.InsertBatch(ctx, batch, codecCfg, utils.BatchMetrics{})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
err = orm.NewChunk(db).UpdateBatchHashInRange(ctx, chks[0].Index, chks[len(chks)-1].Index, dbBatch.Hash)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
log.Info("insert batch", "index", index)
|
||||
return dbBatch, nil
|
||||
}
|
||||
|
||||
func importBundle(ctx context.Context, db *gorm.DB, batches []*orm.Batch) (string, error) {
|
||||
|
||||
bundleOrm := orm.NewBundle(db)
|
||||
bundle, err := bundleOrm.InsertBundle(ctx, batches, codecCfg)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
err = orm.NewBatch(db).UpdateBundleHashInRange(ctx, batches[0].Index, batches[len(batches)-1].Index, bundle.Hash)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
log.Info("insert bundle", "hash", bundle.Hash)
|
||||
return bundle.Hash, nil
|
||||
}
|
||||
198
rollup/tests/integration_tool/main.go
Normal file
@@ -0,0 +1,198 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"math/rand"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/scroll-tech/da-codec/encoding"
|
||||
"github.com/scroll-tech/go-ethereum/log"
|
||||
"github.com/urfave/cli/v2"
|
||||
|
||||
"scroll-tech/common/database"
|
||||
"scroll-tech/common/utils"
|
||||
"scroll-tech/common/version"
|
||||
)
|
||||
|
||||
var app *cli.App
|
||||
var cfg *config
|
||||
var codecCfg encoding.CodecVersion = encoding.CodecV8
|
||||
|
||||
var outputNumFlag = cli.StringFlag{
|
||||
Name: "counts",
|
||||
Usage: "Counts for output (chunks,batches,bundles)",
|
||||
Value: "4,2,1",
|
||||
}
|
||||
|
||||
var outputPathFlag = cli.StringFlag{
|
||||
Name: "output",
|
||||
Usage: "output file path",
|
||||
Value: "testset.json",
|
||||
}
|
||||
|
||||
var seedFlag = cli.Int64Flag{
|
||||
Name: "seed",
|
||||
Usage: "random seed; 0 picks a random seed",
|
||||
Value: 0,
|
||||
}
|
||||
|
||||
var codecFlag = cli.IntFlag{
|
||||
Name: "codec",
|
||||
Usage: "codec version (6, 7 or 8); 0 keeps the default",
|
||||
Value: 0,
|
||||
}
|
||||
|
||||
func parseThreeIntegers(value string) (int, int, int, error) {
|
||||
// Split the input string by comma
|
||||
parts := strings.Split(value, ",")
|
||||
|
||||
// Check that we have exactly 3 parts
|
||||
if len(parts) != 3 {
|
||||
return 0, 0, 0, fmt.Errorf("input must contain exactly 3 comma-separated integers, got %s", value)
|
||||
}
|
||||
|
||||
// Parse the three integers
|
||||
values := make([]int, 3)
|
||||
for i, part := range parts {
|
||||
// Trim any whitespace
|
||||
part = strings.TrimSpace(part)
|
||||
|
||||
// Parse the integer
|
||||
val, err := strconv.Atoi(part)
|
||||
if err != nil {
|
||||
return 0, 0, 0, fmt.Errorf("failed to parse '%s' as integer: %w", part, err)
|
||||
}
|
||||
|
||||
// Check that it's positive
|
||||
if val <= 0 {
|
||||
return 0, 0, 0, fmt.Errorf("all integers must be greater than 0, got %d", val)
|
||||
}
|
||||
|
||||
values[i] = val
|
||||
}
|
||||
|
||||
// Check that first >= second >= third
|
||||
if values[0] < values[1] || values[1] < values[2] {
|
||||
return 0, 0, 0, fmt.Errorf("integers must be in descending order: %d >= %d >= %d",
|
||||
values[0], values[1], values[2])
|
||||
}
|
||||
|
||||
return values[0], values[1], values[2], nil
|
||||
}
|
||||
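parseThreeIntegers enforces the --counts contract: exactly three comma-separated positive integers with chunks >= batches >= bundles (default "4,2,1"). A test-style sketch of that contract, assuming a hypothetical main_test.go next to this file:

// main_test.go (hypothetical) — exercises the flag-parsing contract shown above.
package main

import "testing"

func TestParseThreeIntegers(t *testing.T) {
	c, b, u, err := parseThreeIntegers("4,2,1")
	if err != nil || c != 4 || b != 2 || u != 1 {
		t.Fatalf("expected 4,2,1 with no error, got %d,%d,%d, %v", c, b, u, err)
	}
	if _, _, _, err := parseThreeIntegers("2,4,1"); err == nil {
		t.Fatal("expected an error: counts must be non-increasing")
	}
	if _, _, _, err := parseThreeIntegers("4,2"); err == nil {
		t.Fatal("expected an error: exactly three values are required")
	}
}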
|
||||
// load a compatible subset of the rollup config
|
||||
type config struct {
|
||||
DBConfig *database.Config `json:"db_config"`
|
||||
}
|
||||
|
||||
func init() {
|
||||
// Set up integration-test-tool app info.
|
||||
app = cli.NewApp()
|
||||
app.Action = action
|
||||
app.Name = "integration-test-tool"
|
||||
app.Usage = "The Scroll L2 Integration Test Tool"
|
||||
app.Version = version.Version
|
||||
app.Flags = append(app.Flags, &codecFlag, &seedFlag, &outputNumFlag, &outputPathFlag)
|
||||
app.Flags = append(app.Flags, utils.CommonFlags...)
|
||||
app.Before = func(ctx *cli.Context) error {
|
||||
if err := utils.LogSetup(ctx); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
cfgFile := ctx.String(utils.ConfigFileFlag.Name)
|
||||
var err error
|
||||
cfg, err = newConfig(cfgFile)
|
||||
if err != nil {
|
||||
log.Crit("failed to load config file", "config file", cfgFile, "error", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func newConfig(file string) (*config, error) {
|
||||
buf, err := os.ReadFile(filepath.Clean(file))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
cfg := &config{}
|
||||
err = json.Unmarshal(buf, cfg)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return cfg, nil
|
||||
}
|
||||
|
||||
func action(ctx *cli.Context) error {
|
||||
|
||||
if ctx.Args().Len() < 2 {
|
||||
return fmt.Errorf("specify begin and end block number")
|
||||
}
|
||||
|
||||
codecFl := ctx.Int(codecFlag.Name)
|
||||
if codecFl != 0 {
|
||||
switch codecFl {
|
||||
case 6:
|
||||
codecCfg = encoding.CodecV6
|
||||
case 7:
|
||||
codecCfg = encoding.CodecV7
|
||||
case 8:
|
||||
codecCfg = encoding.CodecV8
|
||||
default:
|
||||
return fmt.Errorf("invalid codec version %d", codecFl)
|
||||
}
|
||||
log.Info("set codec", "version", codecCfg)
|
||||
}
|
||||
|
||||
beginBlk, err := strconv.ParseUint(ctx.Args().First(), 10, 64)
|
||||
if err != nil {
|
||||
return fmt.Errorf("invalid begin block number: %w", err)
|
||||
}
|
||||
endBlk, err := strconv.ParseUint(ctx.Args().Get(1), 10, 64)
|
||||
if err != nil {
|
||||
return fmt.Errorf("invalid end block number: %w", err)
|
||||
}
|
||||
|
||||
chkNum, batchNum, bundleNum, err := parseThreeIntegers(ctx.String(outputNumFlag.Name))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
seed := ctx.Int64(seedFlag.Name)
|
||||
//nolint:all
|
||||
if seed == 0 {
|
||||
seed = rand.Int63()
|
||||
}
|
||||
|
||||
outputPath := ctx.String(outputPathFlag.Name)
|
||||
log.Info("output", "Seed", seed, "file", outputPath)
|
||||
ret, err := importData(ctx.Context, beginBlk, endBlk, chkNum, batchNum, bundleNum, seed)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// Marshal the ret variable to JSON with indentation for readability
|
||||
jsonData, err := json.MarshalIndent(ret, "", " ")
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to marshal result data to JSON: %w", err)
|
||||
}
|
||||
|
||||
// Write the JSON data to the specified file
|
||||
err = os.WriteFile(outputPath, jsonData, 0600)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to write result to file %s: %w", outputPath, err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func main() {
|
||||
if err := app.Run(os.Args); err != nil {
|
||||
_, _ = fmt.Fprintln(os.Stderr, err)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
@@ -118,7 +118,6 @@ func testCommitBatchAndFinalizeBundleCodecV7(t *testing.T) {
	}

	cp := watcher.NewChunkProposer(context.Background(), &config.ChunkProposerConfig{
		MaxBlockNumPerChunk: 100,
		MaxL2GasPerChunk: math.MaxUint64,
		ChunkTimeoutSec: 300,
		MaxUncompressedBatchBytesSize: math.MaxUint64,

@@ -1,3 +1,4 @@
[toolchain]
channel = "nightly-2025-02-14"
targets = ["riscv32im-unknown-none-elf", "x86_64-unknown-linux-gnu"]
channel = "nightly-2025-08-18"
targets = ["riscv32im-unknown-none-elf", "x86_64-unknown-linux-gnu"]
components = ["llvm-tools", "rustc-dev"]

3
tests/prover-e2e/.env
Normal file
@@ -0,0 +1,3 @@
|
||||
GOOSE_DRIVER=postgres
|
||||
GOOSE_DBSTRING=postgresql://dev:dev@localhost:5432/scroll?sslmode=disable
|
||||
GOOSE_MIGRATION_DIR=../../database/migrate/migrations
|
||||
2
tests/prover-e2e/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
build/*
|
||||
testset.json
|
||||
132
tests/prover-e2e/00100_import_blocks.sql
Normal file
@@ -0,0 +1,132 @@
|
||||
-- +goose Up
|
||||
-- +goose StatementBegin
|
||||
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
|
||||
state_root, tx_num, gas_used, block_timestamp, row_consumption,
|
||||
chunk_hash, transactions
|
||||
) VALUES ('10973700', '0x29c84f0df09fda2c6c63d314bb6714dbbdeca3b85c91743f9e25d3f81c28b986', '0x01aabb5d1d7edadd10011b4099de7ed703b9ce495717cd48a304ff4db3710d8a', '{"parentHash":"0x01aabb5d1d7edadd10011b4099de7ed703b9ce495717cd48a304ff4db3710d8a","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x347733a157bc7045f6a1d5bfd37d51763f3503b63290576a65b3b83265add2cf","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77204","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36e5","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4209","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x29c84f0df09fda2c6c63d314bb6714dbbdeca3b85c91743f9e25d3f81c28b986"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x347733a157bc7045f6a1d5bfd37d51763f3503b63290576a65b3b83265add2cf', '0', '0', '1753167589', '', '0x206c062cf0991353ba5ebc9888ca224f470ad3edf8e8e01125726a3858ebdd73', '[]');
|
||||
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
|
||||
state_root, tx_num, gas_used, block_timestamp, row_consumption,
|
||||
chunk_hash, transactions
|
||||
) VALUES ('10973701', '0x21ad5215b5b51cb5eb5ea6a19444804ca1628cbf8ef8cf1977660d8c468c0151', '0x29c84f0df09fda2c6c63d314bb6714dbbdeca3b85c91743f9e25d3f81c28b986', '{"parentHash":"0x29c84f0df09fda2c6c63d314bb6714dbbdeca3b85c91743f9e25d3f81c28b986","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x347733a157bc7045f6a1d5bfd37d51763f3503b63290576a65b3b83265add2cf","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77205","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36e6","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4209","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x21ad5215b5b51cb5eb5ea6a19444804ca1628cbf8ef8cf1977660d8c468c0151"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x347733a157bc7045f6a1d5bfd37d51763f3503b63290576a65b3b83265add2cf', '0', '0', '1753167590', '', '0x206c062cf0991353ba5ebc9888ca224f470ad3edf8e8e01125726a3858ebdd73', '[]');
|
||||
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
|
||||
state_root, tx_num, gas_used, block_timestamp, row_consumption,
|
||||
chunk_hash, transactions
|
||||
) VALUES ('10973702', '0x8c9ce33a9b62060b193c01f31518f3bfc5d8132c11569a5b4db03a5b0611f30e', '0x21ad5215b5b51cb5eb5ea6a19444804ca1628cbf8ef8cf1977660d8c468c0151', '{"parentHash":"0x21ad5215b5b51cb5eb5ea6a19444804ca1628cbf8ef8cf1977660d8c468c0151","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x347733a157bc7045f6a1d5bfd37d51763f3503b63290576a65b3b83265add2cf","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77206","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36e7","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4209","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x8c9ce33a9b62060b193c01f31518f3bfc5d8132c11569a5b4db03a5b0611f30e"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x347733a157bc7045f6a1d5bfd37d51763f3503b63290576a65b3b83265add2cf', '0', '0', '1753167591', '', '0x206c062cf0991353ba5ebc9888ca224f470ad3edf8e8e01125726a3858ebdd73', '[]');
|
||||
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
|
||||
state_root, tx_num, gas_used, block_timestamp, row_consumption,
|
||||
chunk_hash, transactions
|
||||
) VALUES ('10973703', '0x73eed8060ca9a36fe8bf8c981f0e44425cd69ade00ff986452f1c02d462194fe', '0x8c9ce33a9b62060b193c01f31518f3bfc5d8132c11569a5b4db03a5b0611f30e', '{"parentHash":"0x8c9ce33a9b62060b193c01f31518f3bfc5d8132c11569a5b4db03a5b0611f30e","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x347733a157bc7045f6a1d5bfd37d51763f3503b63290576a65b3b83265add2cf","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77207","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36e8","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4209","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x73eed8060ca9a36fe8bf8c981f0e44425cd69ade00ff986452f1c02d462194fe"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x347733a157bc7045f6a1d5bfd37d51763f3503b63290576a65b3b83265add2cf', '0', '0', '1753167592', '', '0x206c062cf0991353ba5ebc9888ca224f470ad3edf8e8e01125726a3858ebdd73', '[]');
|
||||
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
|
||||
state_root, tx_num, gas_used, block_timestamp, row_consumption,
|
||||
chunk_hash, transactions
|
||||
) VALUES ('10973704', '0x7a6a1bede8936cfbd677cf38c43e399c8a2d0b62700caf05513bad540541b1b5', '0x73eed8060ca9a36fe8bf8c981f0e44425cd69ade00ff986452f1c02d462194fe', '{"parentHash":"0x73eed8060ca9a36fe8bf8c981f0e44425cd69ade00ff986452f1c02d462194fe","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x347733a157bc7045f6a1d5bfd37d51763f3503b63290576a65b3b83265add2cf","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77208","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36e9","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4209","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x7a6a1bede8936cfbd677cf38c43e399c8a2d0b62700caf05513bad540541b1b5"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x347733a157bc7045f6a1d5bfd37d51763f3503b63290576a65b3b83265add2cf', '0', '0', '1753167593', '', '0x206c062cf0991353ba5ebc9888ca224f470ad3edf8e8e01125726a3858ebdd73', '[]');
|
||||
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
|
||||
state_root, tx_num, gas_used, block_timestamp, row_consumption,
|
||||
chunk_hash, transactions
|
||||
) VALUES ('10973705', '0x1a5306293b7801a42c4402f9d32e7a45d640c49506ee61452da0120f3e242424', '0x7a6a1bede8936cfbd677cf38c43e399c8a2d0b62700caf05513bad540541b1b5', '{"parentHash":"0x7a6a1bede8936cfbd677cf38c43e399c8a2d0b62700caf05513bad540541b1b5","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x347733a157bc7045f6a1d5bfd37d51763f3503b63290576a65b3b83265add2cf","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77209","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36ea","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4209","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x1a5306293b7801a42c4402f9d32e7a45d640c49506ee61452da0120f3e242424"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x347733a157bc7045f6a1d5bfd37d51763f3503b63290576a65b3b83265add2cf', '0', '0', '1753167594', '', '0x206c062cf0991353ba5ebc9888ca224f470ad3edf8e8e01125726a3858ebdd73', '[]');
|
||||
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
|
||||
state_root, tx_num, gas_used, block_timestamp, row_consumption,
|
||||
chunk_hash, transactions
|
||||
) VALUES ('10973706', '0xc89f391a03c7138f676bd8babed6589035b2b7d8c8b99071cb90661f7f996386', '0x1a5306293b7801a42c4402f9d32e7a45d640c49506ee61452da0120f3e242424', '{"parentHash":"0x1a5306293b7801a42c4402f9d32e7a45d640c49506ee61452da0120f3e242424","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x347733a157bc7045f6a1d5bfd37d51763f3503b63290576a65b3b83265add2cf","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa7720a","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36eb","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4209","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0xc89f391a03c7138f676bd8babed6589035b2b7d8c8b99071cb90661f7f996386"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x347733a157bc7045f6a1d5bfd37d51763f3503b63290576a65b3b83265add2cf', '0', '0', '1753167595', '', '0x206c062cf0991353ba5ebc9888ca224f470ad3edf8e8e01125726a3858ebdd73', '[]');
|
||||
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
|
||||
state_root, tx_num, gas_used, block_timestamp, row_consumption,
|
||||
chunk_hash, transactions
|
||||
) VALUES ('10973707', '0x38d72b14ef43e548ab0ffd84892e7c3accdd11e1121c2c7a94b953b4e896eb41', '0xc89f391a03c7138f676bd8babed6589035b2b7d8c8b99071cb90661f7f996386', '{"parentHash":"0xc89f391a03c7138f676bd8babed6589035b2b7d8c8b99071cb90661f7f996386","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0xb920df561f210a617a0c1567cb2f65350818b96b159f5aa4b9ac7915b7af4946","transactionsRoot":"0xf14cf5134833ddb4f42a017e92af371f0a71eaf5d84cb6e681c81fa023662c5d","receiptsRoot":"0x4008fb883088f1ba377310e15221fffc8e5446faf420d6a28e061e9341beb056","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000900000000000000000000000000000000000000000000000000000000000000000000000001000000008000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000020000000000000000000","difficulty":"0x1","number":"0xa7720b","gasLimit":"0x1312d00","gasUsed":"0x9642","timestamp":"0x687f36ec","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4209","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x38d72b14ef43e548ab0ffd84892e7c3accdd11e1121c2c7a94b953b4e896eb41"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0xb920df561f210a617a0c1567cb2f65350818b96b159f5aa4b9ac7915b7af4946', '1', '38466', '1753167596', '', '0x206c062cf0991353ba5ebc9888ca224f470ad3edf8e8e01125726a3858ebdd73', '[{"type":2,"nonce":3392500,"txHash":"0xc15b615906602154131a6c42d7603def4bd2a769881292d831140b0b9b8f8850","gas":45919,"gasPrice":"0x1de8476","gasTipCap":"0x64","gasFeeCap":"0x1de8476","from":"0x0000000000000000000000000000000000000000","to":"0x5300000000000000000000000000000000000002","chainId":"0x8274f","value":"0x0","data":"0x39455d3a0000000000000000000000000000000000000000000000000000000000045b840000000000000000000000000000000000000000000000000000000000000001","isCreate":false,"accessList":[{"address":"0x5300000000000000000000000000000000000003","storageKeys":["0x297c59f20c6b2556a4ed35dccabbdeb8b1cf950f62aefb86b98d19b5a4aff2a2"]}],"authorizationList":null,"v":"0x1","r":"0xa1b888cc9be7990c4f6bd8a9d0d5fa743ea8173196c7ca871464becd133ba0de","s":"0x6bacc3e1a244c62eff3008795e010598d07b95a8bad7a5592ec941e121294885"}]');
|
||||
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
|
||||
state_root, tx_num, gas_used, block_timestamp, row_consumption,
|
||||
chunk_hash, transactions
|
||||
) VALUES ('10973708', '0x230863f98595ba0c83785caf618072ce2a876307102adbeba11b9de9c4af8a08', '0x38d72b14ef43e548ab0ffd84892e7c3accdd11e1121c2c7a94b953b4e896eb41', '{"parentHash":"0x38d72b14ef43e548ab0ffd84892e7c3accdd11e1121c2c7a94b953b4e896eb41","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0xb920df561f210a617a0c1567cb2f65350818b96b159f5aa4b9ac7915b7af4946","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa7720c","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36ed","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4209","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x230863f98595ba0c83785caf618072ce2a876307102adbeba11b9de9c4af8a08"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0xb920df561f210a617a0c1567cb2f65350818b96b159f5aa4b9ac7915b7af4946', '0', '0', '1753167597', '', '0x206c062cf0991353ba5ebc9888ca224f470ad3edf8e8e01125726a3858ebdd73', '[]');
|
||||
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
|
||||
state_root, tx_num, gas_used, block_timestamp, row_consumption,
|
||||
chunk_hash, transactions
|
||||
) VALUES ('10973709', '0xae4af19a7697c2bb10a641f07269ed7df66775d56414567245adc98befdae557', '0x230863f98595ba0c83785caf618072ce2a876307102adbeba11b9de9c4af8a08', '{"parentHash":"0x230863f98595ba0c83785caf618072ce2a876307102adbeba11b9de9c4af8a08","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0xb920df561f210a617a0c1567cb2f65350818b96b159f5aa4b9ac7915b7af4946","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa7720d","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36ee","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4209","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0xae4af19a7697c2bb10a641f07269ed7df66775d56414567245adc98befdae557"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0xb920df561f210a617a0c1567cb2f65350818b96b159f5aa4b9ac7915b7af4946', '0', '0', '1753167598', '', '0x206c062cf0991353ba5ebc9888ca224f470ad3edf8e8e01125726a3858ebdd73', '[]');
|
||||
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
|
||||
state_root, tx_num, gas_used, block_timestamp, row_consumption,
|
||||
chunk_hash, transactions
|
||||
) VALUES ('10973710', '0xd2bc7e24b66940a767abaee485870e9ebd24ff28e72a3096cdccc55b85f84182', '0xae4af19a7697c2bb10a641f07269ed7df66775d56414567245adc98befdae557', '{"parentHash":"0xae4af19a7697c2bb10a641f07269ed7df66775d56414567245adc98befdae557","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0xb920df561f210a617a0c1567cb2f65350818b96b159f5aa4b9ac7915b7af4946","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa7720e","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36ef","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4209","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0xd2bc7e24b66940a767abaee485870e9ebd24ff28e72a3096cdccc55b85f84182"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0xb920df561f210a617a0c1567cb2f65350818b96b159f5aa4b9ac7915b7af4946', '0', '0', '1753167599', '', '0x206c062cf0991353ba5ebc9888ca224f470ad3edf8e8e01125726a3858ebdd73', '[]');
|
||||
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
|
||||
state_root, tx_num, gas_used, block_timestamp, row_consumption,
|
||||
chunk_hash, transactions
|
||||
) VALUES ('10973711', '0x1870b94320db154de4702fe6bfb69ebc98fb531b78bf81a69b8ab658ba9d9af5', '0xd2bc7e24b66940a767abaee485870e9ebd24ff28e72a3096cdccc55b85f84182', '{"parentHash":"0xd2bc7e24b66940a767abaee485870e9ebd24ff28e72a3096cdccc55b85f84182","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x2bdf2906a7bbb398419246c3c77804a204641259b2aeb4f4a806eb772d31c480","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa7720f","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36f0","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4209","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x1870b94320db154de4702fe6bfb69ebc98fb531b78bf81a69b8ab658ba9d9af5"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x2bdf2906a7bbb398419246c3c77804a204641259b2aeb4f4a806eb772d31c480', '0', '0', '1753167600', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
|
||||
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
|
||||
state_root, tx_num, gas_used, block_timestamp, row_consumption,
|
||||
chunk_hash, transactions
|
||||
) VALUES ('10973712', '0x271745e26e3222352fce7052edef9adecba921f4315e48a9d55e46640ac324ce', '0x1870b94320db154de4702fe6bfb69ebc98fb531b78bf81a69b8ab658ba9d9af5', '{"parentHash":"0x1870b94320db154de4702fe6bfb69ebc98fb531b78bf81a69b8ab658ba9d9af5","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x1ec0026bd12fe29d710e5f04e605cdb715d68a2e5bac57416066a7bc6b298762","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77210","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36f1","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4208","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x271745e26e3222352fce7052edef9adecba921f4315e48a9d55e46640ac324ce"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x1ec0026bd12fe29d710e5f04e605cdb715d68a2e5bac57416066a7bc6b298762', '0', '0', '1753167601', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
|
||||
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
|
||||
state_root, tx_num, gas_used, block_timestamp, row_consumption,
|
||||
chunk_hash, transactions
|
||||
) VALUES ('10973713', '0xe193fc4298a6beebbc59b83cc7e2bbdace76c24fe9b7bd76aa415159ecb60914', '0x271745e26e3222352fce7052edef9adecba921f4315e48a9d55e46640ac324ce', '{"parentHash":"0x271745e26e3222352fce7052edef9adecba921f4315e48a9d55e46640ac324ce","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x7e29363a63f54a0e03e08cb515a98f3c416a5ade3ec15d29eddd262baf67a2a1","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77211","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36f2","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0xe193fc4298a6beebbc59b83cc7e2bbdace76c24fe9b7bd76aa415159ecb60914"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x7e29363a63f54a0e03e08cb515a98f3c416a5ade3ec15d29eddd262baf67a2a1', '0', '0', '1753167602', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
state_root, tx_num, gas_used, block_timestamp, row_consumption,
chunk_hash, transactions
) VALUES ('10973714', '0x8e99d9242315a107492520e9255dd77798adbff81393d3de83960dac361bd838', '0xe193fc4298a6beebbc59b83cc7e2bbdace76c24fe9b7bd76aa415159ecb60914', '{"parentHash":"0xe193fc4298a6beebbc59b83cc7e2bbdace76c24fe9b7bd76aa415159ecb60914","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0xd818ac5028fe5aa1abd2d3ffe4693b4e96eabad35e49011e2ce920bcd76d061a","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77212","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36f3","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x8e99d9242315a107492520e9255dd77798adbff81393d3de83960dac361bd838"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0xd818ac5028fe5aa1abd2d3ffe4693b4e96eabad35e49011e2ce920bcd76d061a', '0', '0', '1753167603', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
state_root, tx_num, gas_used, block_timestamp, row_consumption,
chunk_hash, transactions
) VALUES ('10973715', '0x9ee9d75a25a912e7d3907679a1e588021f4d2040c71d2e1c958538466a4fbbd6', '0x8e99d9242315a107492520e9255dd77798adbff81393d3de83960dac361bd838', '{"parentHash":"0x8e99d9242315a107492520e9255dd77798adbff81393d3de83960dac361bd838","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x233fd85bd753e126e4df23a05c56ccde3eb6ec06ce2565a990af3347dc95b0c5","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77213","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36f4","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x9ee9d75a25a912e7d3907679a1e588021f4d2040c71d2e1c958538466a4fbbd6"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x233fd85bd753e126e4df23a05c56ccde3eb6ec06ce2565a990af3347dc95b0c5', '0', '0', '1753167604', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
state_root, tx_num, gas_used, block_timestamp, row_consumption,
chunk_hash, transactions
) VALUES ('10973716', '0x9b25094db21166930008728c487ca9dbbc1e842701c8573eaa6bea4d41c10a7e', '0x9ee9d75a25a912e7d3907679a1e588021f4d2040c71d2e1c958538466a4fbbd6', '{"parentHash":"0x9ee9d75a25a912e7d3907679a1e588021f4d2040c71d2e1c958538466a4fbbd6","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x12be357fcc1fc28e574a7f95a5f9b3aae7e18d8ab8829c676478b4e8953a8502","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77214","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36f5","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x9b25094db21166930008728c487ca9dbbc1e842701c8573eaa6bea4d41c10a7e"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x12be357fcc1fc28e574a7f95a5f9b3aae7e18d8ab8829c676478b4e8953a8502', '0', '0', '1753167605', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
state_root, tx_num, gas_used, block_timestamp, row_consumption,
chunk_hash, transactions
) VALUES ('10973717', '0xeaab0e07b40720f8e961f28ef665f08e67797428dc1eaccba88d5d4f60341284', '0x9b25094db21166930008728c487ca9dbbc1e842701c8573eaa6bea4d41c10a7e', '{"parentHash":"0x9b25094db21166930008728c487ca9dbbc1e842701c8573eaa6bea4d41c10a7e","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x7e49dc33343a54e9afc285155b8a35575e6924d465fe2dc543b5ea8915eb828a","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77215","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36f6","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0xeaab0e07b40720f8e961f28ef665f08e67797428dc1eaccba88d5d4f60341284"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x7e49dc33343a54e9afc285155b8a35575e6924d465fe2dc543b5ea8915eb828a', '0', '0', '1753167606', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
state_root, tx_num, gas_used, block_timestamp, row_consumption,
chunk_hash, transactions
) VALUES ('10973718', '0xd6af3c7bf29f3689b516ed5c8fcf885a4e7eb2df751c35d4ebbb19fcc13628d4', '0xeaab0e07b40720f8e961f28ef665f08e67797428dc1eaccba88d5d4f60341284', '{"parentHash":"0xeaab0e07b40720f8e961f28ef665f08e67797428dc1eaccba88d5d4f60341284","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0xf1a7db2e4f463fa87e3e65b73d2abc5374302855f6af9735d5a11c94c2d93975","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77216","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36f7","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0xd6af3c7bf29f3689b516ed5c8fcf885a4e7eb2df751c35d4ebbb19fcc13628d4"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0xf1a7db2e4f463fa87e3e65b73d2abc5374302855f6af9735d5a11c94c2d93975', '0', '0', '1753167607', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
state_root, tx_num, gas_used, block_timestamp, row_consumption,
chunk_hash, transactions
) VALUES ('10973719', '0x5815f5b91d53d0b5c7d423f06da7cad3d45edeab1c2b590af02ceebfd33b2ce1', '0xd6af3c7bf29f3689b516ed5c8fcf885a4e7eb2df751c35d4ebbb19fcc13628d4', '{"parentHash":"0xd6af3c7bf29f3689b516ed5c8fcf885a4e7eb2df751c35d4ebbb19fcc13628d4","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0xb5d1f420ddc1edb60c7fc3a06929a2014c548d1ddd52a78ab6984faed53a09d1","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77217","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36f8","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x5815f5b91d53d0b5c7d423f06da7cad3d45edeab1c2b590af02ceebfd33b2ce1"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0xb5d1f420ddc1edb60c7fc3a06929a2014c548d1ddd52a78ab6984faed53a09d1', '0', '0', '1753167608', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
state_root, tx_num, gas_used, block_timestamp, row_consumption,
chunk_hash, transactions
) VALUES ('10973720', '0x4d8bbd6a15515cacf18cf9810ca3867442cefc733e9cdeaa1527a008fbca3bd1', '0x5815f5b91d53d0b5c7d423f06da7cad3d45edeab1c2b590af02ceebfd33b2ce1', '{"parentHash":"0x5815f5b91d53d0b5c7d423f06da7cad3d45edeab1c2b590af02ceebfd33b2ce1","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0xf4ca7c941e6ad6a780ad8422a817c6a7916f3f80b5f0d0f95cabcb17b0531299","transactionsRoot":"0x79c3ba4e0fe89ddea0ed8becdbfff86f18dab3ffd21eaf13744b86cb104d664e","receiptsRoot":"0xc8f88931c3c4ca18cb582e490d7acabfbe04fd6fa971549af6bf927aec7bfa1f","logsBloom":"0x00000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000200000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77218","gasLimit":"0x1312d00","gasUsed":"0x7623","timestamp":"0x687f36f9","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x4d8bbd6a15515cacf18cf9810ca3867442cefc733e9cdeaa1527a008fbca3bd1"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0xf4ca7c941e6ad6a780ad8422a817c6a7916f3f80b5f0d0f95cabcb17b0531299', '1', '30243', '1753167609', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[{"type":0,"nonce":13627,"txHash":"0x539962b9584723f919b9f3a0b454622f5f51c195300564116d0cedfec17a1381","gas":30243,"gasPrice":"0xef426b","gasTipCap":"0xef426b","gasFeeCap":"0xef426b","from":"0x0000000000000000000000000000000000000000","to":"0xf07cc6482a24843efe7b42259acbaf8d0a2a6952","chainId":"0x8274f","value":"0x0","data":"0x91b7f5ed0000000000000000000000000000000000000000000018f4c5be1c1407000000","isCreate":false,"accessList":null,"authorizationList":null,"v":"0x104ec2","r":"0xaa309d7e218825160be9a87c9e50d3cbfead9c87e90e984ad0ea2441633092a2","s":"0x438f39c0af058794f320e5578720557af07c5397e363f9628a6c4ffee5bd2487"}]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
state_root, tx_num, gas_used, block_timestamp, row_consumption,
chunk_hash, transactions
) VALUES ('10973721', '0x84c53d922cfe0558c4be02865af5ebd49efe44a458dabc16aea5584e5e06f346', '0x4d8bbd6a15515cacf18cf9810ca3867442cefc733e9cdeaa1527a008fbca3bd1', '{"parentHash":"0x4d8bbd6a15515cacf18cf9810ca3867442cefc733e9cdeaa1527a008fbca3bd1","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0xd4838ba86f5a8e865a41ef7547148b6074235a658dd57ff2296c0badda4760d1","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77219","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36fa","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x84c53d922cfe0558c4be02865af5ebd49efe44a458dabc16aea5584e5e06f346"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0xd4838ba86f5a8e865a41ef7547148b6074235a658dd57ff2296c0badda4760d1', '0', '0', '1753167610', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
state_root, tx_num, gas_used, block_timestamp, row_consumption,
chunk_hash, transactions
) VALUES ('10973722', '0xe1d601522b08d98852b4c7dc3584f292ac246a3dac3c600ba58bd6c20c97be5b', '0x84c53d922cfe0558c4be02865af5ebd49efe44a458dabc16aea5584e5e06f346', '{"parentHash":"0x84c53d922cfe0558c4be02865af5ebd49efe44a458dabc16aea5584e5e06f346","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x8deede75e20423d0495cbdb493d320dddde6df0459df998608a16f658eb7bec3","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa7721a","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36fb","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0xe1d601522b08d98852b4c7dc3584f292ac246a3dac3c600ba58bd6c20c97be5b"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x8deede75e20423d0495cbdb493d320dddde6df0459df998608a16f658eb7bec3', '0', '0', '1753167611', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
state_root, tx_num, gas_used, block_timestamp, row_consumption,
chunk_hash, transactions
) VALUES ('10973723', '0x8579712fc434b401f1ecfcf3ae22611be054480fa882e90f8eecb6c5e97534bd', '0xe1d601522b08d98852b4c7dc3584f292ac246a3dac3c600ba58bd6c20c97be5b', '{"parentHash":"0xe1d601522b08d98852b4c7dc3584f292ac246a3dac3c600ba58bd6c20c97be5b","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0xb4fe51cda0401bb19e8448a2697a49e1fbc25398c2b18a9955d0a8e6f4b153a7","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa7721b","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36fc","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x8579712fc434b401f1ecfcf3ae22611be054480fa882e90f8eecb6c5e97534bd"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0xb4fe51cda0401bb19e8448a2697a49e1fbc25398c2b18a9955d0a8e6f4b153a7', '0', '0', '1753167612', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
state_root, tx_num, gas_used, block_timestamp, row_consumption,
chunk_hash, transactions
) VALUES ('10973724', '0xe13a0b907e044a9df1952acc31dc08a578fb910a0cc224e11692cb84c9c9a9f7', '0x8579712fc434b401f1ecfcf3ae22611be054480fa882e90f8eecb6c5e97534bd', '{"parentHash":"0x8579712fc434b401f1ecfcf3ae22611be054480fa882e90f8eecb6c5e97534bd","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0xcd17c85290d8ec7473357ebe1605f766af6c1356732cc7ad11de0453baca05c6","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa7721c","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36fd","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0xe13a0b907e044a9df1952acc31dc08a578fb910a0cc224e11692cb84c9c9a9f7"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0xcd17c85290d8ec7473357ebe1605f766af6c1356732cc7ad11de0453baca05c6', '0', '0', '1753167613', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
state_root, tx_num, gas_used, block_timestamp, row_consumption,
chunk_hash, transactions
) VALUES ('10973725', '0x2e26fb489f8644b3b5c44cd493ebc140ba3bc716588f37a71b8ba6dc504ccb5f', '0xe13a0b907e044a9df1952acc31dc08a578fb910a0cc224e11692cb84c9c9a9f7', '{"parentHash":"0xe13a0b907e044a9df1952acc31dc08a578fb910a0cc224e11692cb84c9c9a9f7","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0xfd321f4a3e2bc757df89162f730a2e37519dcb29cdb63019665c1fe4dbceeb00","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa7721d","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36fe","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x2e26fb489f8644b3b5c44cd493ebc140ba3bc716588f37a71b8ba6dc504ccb5f"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0xfd321f4a3e2bc757df89162f730a2e37519dcb29cdb63019665c1fe4dbceeb00', '0', '0', '1753167614', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
state_root, tx_num, gas_used, block_timestamp, row_consumption,
chunk_hash, transactions
) VALUES ('10973726', '0x313b0fbb7cbb8bc1ba4fbc50684b516d31f9f7ee6f66d919da01328537a4b0a1', '0x2e26fb489f8644b3b5c44cd493ebc140ba3bc716588f37a71b8ba6dc504ccb5f', '{"parentHash":"0x2e26fb489f8644b3b5c44cd493ebc140ba3bc716588f37a71b8ba6dc504ccb5f","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x1a24ed5ee5e8ca354f583b28bd7f2c4c6fe4dca59fef476578eddab17b857471","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa7721e","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f36ff","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x313b0fbb7cbb8bc1ba4fbc50684b516d31f9f7ee6f66d919da01328537a4b0a1"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x1a24ed5ee5e8ca354f583b28bd7f2c4c6fe4dca59fef476578eddab17b857471', '0', '0', '1753167615', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
state_root, tx_num, gas_used, block_timestamp, row_consumption,
chunk_hash, transactions
) VALUES ('10973727', '0xf9039c9c24ab919066f2eb6f97360cfb727ed032c9e6142ea45e784b19894560', '0x313b0fbb7cbb8bc1ba4fbc50684b516d31f9f7ee6f66d919da01328537a4b0a1', '{"parentHash":"0x313b0fbb7cbb8bc1ba4fbc50684b516d31f9f7ee6f66d919da01328537a4b0a1","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x2927f53f1eaaeaa17a80f048f10474a7cc3b2c96547cc47caad33ff9e5b38da6","transactionsRoot":"0x80fd441b38b6ffb8f9369d8a5179356f9bf5ad332db0da99f7c6efdb90939cd2","receiptsRoot":"0xa262cee7ba62c004c6554e9cf378512a868346c24f8cafc1ac1954250339149e","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000900000000000000000000000000000000000000000000000000000000000000000000000001000000008000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000020000000000000000000","difficulty":"0x1","number":"0xa7721f","gasLimit":"0x1312d00","gasUsed":"0x9642","timestamp":"0x687f3700","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0xf9039c9c24ab919066f2eb6f97360cfb727ed032c9e6142ea45e784b19894560"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x2927f53f1eaaeaa17a80f048f10474a7cc3b2c96547cc47caad33ff9e5b38da6', '1', '38466', '1753167616', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[{"type":2,"nonce":3392501,"txHash":"0xa5231ea1b94eb516575807531763b312d250ee5ad4dfbeea66beab5f448c32b6","gas":45919,"gasPrice":"0x1de8472","gasTipCap":"0x64","gasFeeCap":"0x1de8472","from":"0x0000000000000000000000000000000000000000","to":"0x5300000000000000000000000000000000000002","chainId":"0x8274f","value":"0x0","data":"0x39455d3a000000000000000000000000000000000000000000000000000000000004580f0000000000000000000000000000000000000000000000000000000000000001","isCreate":false,"accessList":[{"address":"0x5300000000000000000000000000000000000003","storageKeys":["0x297c59f20c6b2556a4ed35dccabbdeb8b1cf950f62aefb86b98d19b5a4aff2a2"]}],"authorizationList":null,"v":"0x1","r":"0xa09a97c38c7a58f40ff39ca74f938c63f1ef822cf91926d4fff96b7dc818d3f3","s":"0x77ee7453096794d9cbb206f26077f23b4cc88fe51893cb5eab46714e379ac833"}]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
state_root, tx_num, gas_used, block_timestamp, row_consumption,
chunk_hash, transactions
) VALUES ('10973728', '0xf27ff9223f6bf9a964737d50cb7c005f049cf0f4edfd16d24178a798c21716d6', '0xf9039c9c24ab919066f2eb6f97360cfb727ed032c9e6142ea45e784b19894560', '{"parentHash":"0xf9039c9c24ab919066f2eb6f97360cfb727ed032c9e6142ea45e784b19894560","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x0dbe54818526afaabbce83765eabcd4ec4d437a3497e5d046d599af862ea9850","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77220","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f3701","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0xf27ff9223f6bf9a964737d50cb7c005f049cf0f4edfd16d24178a798c21716d6"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0x0dbe54818526afaabbce83765eabcd4ec4d437a3497e5d046d599af862ea9850', '0', '0', '1753167617', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
state_root, tx_num, gas_used, block_timestamp, row_consumption,
chunk_hash, transactions
) VALUES ('10973729', '0x2b7777eb3ffe5939d6b70883cef69250ef5a2ed62a8b378973e0c3fe84707137', '0xf27ff9223f6bf9a964737d50cb7c005f049cf0f4edfd16d24178a798c21716d6', '{"parentHash":"0xf27ff9223f6bf9a964737d50cb7c005f049cf0f4edfd16d24178a798c21716d6","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0xb89ed319fb9dcaed2df7e72223683cf255f6c1e45742e6caa810938871ce53bf","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77221","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f3702","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x2b7777eb3ffe5939d6b70883cef69250ef5a2ed62a8b378973e0c3fe84707137"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0xb89ed319fb9dcaed2df7e72223683cf255f6c1e45742e6caa810938871ce53bf', '0', '0', '1753167618', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');
INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
state_root, tx_num, gas_used, block_timestamp, row_consumption,
chunk_hash, transactions
) VALUES ('10973730', '0x56318f0a941611fc22640ea7f7d0308ab88a9e23059b5c6983bafc2402003d13', '0x2b7777eb3ffe5939d6b70883cef69250ef5a2ed62a8b378973e0c3fe84707137', '{"parentHash":"0x2b7777eb3ffe5939d6b70883cef69250ef5a2ed62a8b378973e0c3fe84707137","sha3Uncles":"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0xe603d341e958521d3f5df8f37b5144b3c003214c481716cffa4e8d6303d9734f","transactionsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","difficulty":"0x1","number":"0xa77222","gasLimit":"0x1312d00","gasUsed":"0x0","timestamp":"0x687f3703","extraData":"0x","mixHash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x0000000000000000","baseFeePerGas":"0xef4207","withdrawalsRoot":null,"blobGasUsed":null,"excessBlobGas":null,"parentBeaconBlockRoot":null,"requestsHash":null,"hash":"0x56318f0a941611fc22640ea7f7d0308ab88a9e23059b5c6983bafc2402003d13"}', '0x5a9bd7f5f6723ce51c03beffa310a5bf79c2cf261ddb8622cf407b41d968ef91', '0xe603d341e958521d3f5df8f37b5144b3c003214c481716cffa4e8d6303d9734f', '0', '0', '1753167619', '', '0x2f73e96335a43b678e107b2ef57c7ec0297d88d4a9986c1d6f4e31f1d11fb4f4', '[]');

-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
DELETE FROM l2_block;
-- +goose StatementEnd
46
tests/prover-e2e/Makefile
Normal file
46
tests/prover-e2e/Makefile
Normal file
@@ -0,0 +1,46 @@
.PHONY: clean setup_db test_tool all check_vars

GOOSE_CMD?=goose


all: setup_db test_tool import_data

clean:
	docker compose down

check_vars:
	@if [ -z "$(BEGIN_BLOCK)" ] || [ -z "$(END_BLOCK)" ]; then \
		echo "Error: BEGIN_BLOCK and END_BLOCK must be defined"; \
		echo "Usage: make import_data BEGIN_BLOCK=<start_block> END_BLOCK=<end_block>"; \
		exit 1; \
	fi

setup_db: clean
	docker compose up --detach
	@echo "Waiting for PostgreSQL to be ready..."
	@for i in $$(seq 1 30); do \
		if nc -z localhost 5432 >/dev/null 2>&1; then \
			echo "PostgreSQL port is open!"; \
			sleep 2; \
			break; \
		fi; \
		echo "Waiting for PostgreSQL to start... ($$i/30)"; \
		sleep 2; \
		if [ $$i -eq 30 ]; then \
			echo "Timed out waiting for PostgreSQL to start"; \
			exit 1; \
		fi; \
	done
	${GOOSE_CMD} up
	GOOSE_MIGRATION_DIR=./ ${GOOSE_CMD} up-to 100

test_tool:
	go build -o $(PWD)/build/bin/e2e_tool ../../rollup/tests/integration_tool

build/bin/e2e_tool: test_tool

import_data_euclid: build/bin/e2e_tool check_vars
	build/bin/e2e_tool --config ./config.json --codec 7 ${BEGIN_BLOCK} ${END_BLOCK}

import_data: build/bin/e2e_tool check_vars
	build/bin/e2e_tool --config ./config.json --codec 8 ${BEGIN_BLOCK} ${END_BLOCK}
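For reference, a minimal usage sketch of this Makefile. The block range shown is the one covered by the seeded data and by `dump_block_records.sql` below, and passing the variables on the `make` command line is ordinary make behaviour rather than anything this patch documents:

```bash
cd tests/prover-e2e

# one shot: start PostgreSQL, run the goose migrations, build the e2e tool, import blocks
make all BEGIN_BLOCK=10973700 END_BLOCK=10973730

# or step by step
make setup_db                                               # docker compose up + goose up-to 100
make test_tool                                              # builds build/bin/e2e_tool
make import_data BEGIN_BLOCK=10973700 END_BLOCK=10973730    # codec v8; use import_data_euclid for codec v7
```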
12
tests/prover-e2e/README.md
Normal file
12
tests/prover-e2e/README.md
Normal file
@@ -0,0 +1,12 @@
## A new e2e test tool to set up a local environment for testing the coordinator and prover

It contains data from some blocks on Scroll Sepolia and helps generate a series of chunks/batches/bundles from these blocks, filling the DB for the coordinator, so that an e2e test (from chunk to bundle) can run completely locally.

Steps (a condensed command sketch follows after this list):
1. Run `make all` under `tests/prover-e2e`; it launches a PostgreSQL DB in a local Docker container, ready to be used by the coordinator (including some chunks/batches/bundles waiting to be proven).
2. Download circuit assets with the `download-release.sh` script in `zkvm-prover`.
3. Generate the verifier artifacts corresponding to the downloaded assets with `make gen_verifier_stuff` in `zkvm-prover`.
4. Set up `config.json` and `genesis.json` for the coordinator, and copy the verifier artifacts generated in step 3 into the directory the coordinator loads them from.
5. Build and launch the `coordinator_api` service locally.
6. Set up the `config.json` for the zkvm prover so it connects to the locally launched coordinator API.
7. In `zkvm-prover`, run `make test_e2e_run`, which runs a prover locally, connects to the local coordinator API service according to its `config.json`, and proves all tasks injected into the DB in step 1.
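A condensed sketch of the steps above; the `<start_block>`/`<end_block>` placeholders and the directory changes between commands are illustrative, not prescribed by this README:

```bash
# step 1: seed the local DB (inside tests/prover-e2e)
make all BEGIN_BLOCK=<start_block> END_BLOCK=<end_block>

# steps 2-3: inside zkvm-prover
./download-release.sh feynman    # download circuit assets
make gen_verifier_stuff          # generate the matching verifier artifacts

# steps 4-5: edit the coordinator's config.json/genesis.json, copy the verifier
# artifacts into the directory the coordinator loads them from, then build and
# start the coordinator_api service locally.

# steps 6-7: point the prover's config.json at the local coordinator API, then (inside zkvm-prover)
make test_e2e_run
```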
8
tests/prover-e2e/config.json
Normal file
8
tests/prover-e2e/config.json
Normal file
@@ -0,0 +1,8 @@
{
    "db_config": {
        "driver_name": "postgres",
        "dsn": "postgres://dev:dev@localhost:5432/scroll?sslmode=disable",
        "maxOpenNum": 5,
        "maxIdleNum": 1
    }
}
22
tests/prover-e2e/docker-compose.yml
Normal file
22
tests/prover-e2e/docker-compose.yml
Normal file
@@ -0,0 +1,22 @@
# docker-compose.yml
# This configuration is for local debugging only.
# - PostgreSQL is bound to localhost (127.0.0.1) and not exposed externally.
# - Data can be persisted to ./db relative to the current directory (enable the volumes section below).
# - No production security settings are applied.
# The access URL is postgresql://dev:dev@localhost:5432/scroll

version: '3.8'

services:
  postgres:
    image: postgres
    container_name: local_postgres
    environment:
      POSTGRES_USER: dev
      POSTGRES_PASSWORD: dev
      POSTGRES_DB: scroll
    ports:
      - "127.0.0.1:5432:5432" # Listen only on localhost
    # volumes:
    #   - ./db:/var/lib/postgresql/data # Persist data to local ./db
    restart: unless-stopped
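A quick smoke test for this compose file; the `psql` invocation is illustrative and reuses the DSN from `config.json` above:

```bash
docker compose up --detach
# verify the container accepts connections with the dev credentials
psql "postgres://dev:dev@localhost:5432/scroll?sslmode=disable" -c 'SELECT 1;'
```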
27
tests/prover-e2e/prepare/dump_block_records.sql
Normal file
27
tests/prover-e2e/prepare/dump_block_records.sql
Normal file
@@ -0,0 +1,27 @@
-- Create a file with INSERT statements for the specific records
\o block_export.sql
\t on
\a
SELECT 'INSERT INTO l2_block (number, hash, parent_hash, header, withdraw_root,
state_root, tx_num, gas_used, block_timestamp, row_consumption,
chunk_hash, transactions
) VALUES (' ||
    quote_literal(number) || ', ' ||
    quote_literal(hash) || ', ' ||
    quote_literal(parent_hash) || ', ' ||
    quote_literal(header) || ', ' ||
    quote_literal(withdraw_root) || ', ' ||
    quote_literal(state_root) || ', ' ||
    quote_literal(tx_num) || ', ' ||
    quote_literal(gas_used) || ', ' ||
    quote_literal(block_timestamp) || ', ' ||
    quote_literal(row_consumption) || ', ' ||
    quote_literal(chunk_hash) || ', ' ||
    quote_literal(transactions) ||
');'
FROM l2_block
WHERE number >= 10973700 and number <= 10973730
ORDER BY number ASC;
\t off
\a
\o
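Because the script relies on psql meta-commands (`\o`, `\t`, `\a`), it has to be run through `psql` against a database that already holds the desired `l2_block` rows; the DSN below is just the local example one from `config.json`:

```bash
# writes block_export.sql containing INSERT statements for blocks 10973700..10973730
psql "postgres://dev:dev@localhost:5432/scroll?sslmode=disable" -f prepare/dump_block_records.sql
```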
@@ -1,4 +1,4 @@
.PHONY: prover lint tests_binary
.PHONY: prover prover_cpu lint tests_binary test_e2e_run test_run

RUST_MIN_STACK ?= 16777216
export RUST_MIN_STACK
@@ -32,15 +32,20 @@ endif

ZK_VERSION=${ZKVM_COMMIT}-${PLONKY3_VERSION}

E2E_HANDLE_SET = ../tests/prover-e2e/testset.json
DUMP_DIR = .work
E2E_HANDLE_SET ?= ../tests/prover-e2e/testset.json
DUMP_DIR ?= .work

prover:
	GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZKVM_COMMIT=${ZKVM_COMMIT} $(MAKE) -C ../crates/gpu_override build
	GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo build --locked --release --features cuda -p prover

version:
	echo ${GO_TAG}-${GIT_REV}-${ZK_VERSION}

prover_cpu:
	GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo build --locked --release -p prover

clean:
	cargo clean -Z unstable-options --release -p prover --lockfile-path ../crates/gpu_override/Cargo.lock

tests_binary:
	cargo clean && cargo test --release --no-run
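The switch from `=` to `?=` makes `E2E_HANDLE_SET` and `DUMP_DIR` overridable from the environment or the `make` command line; assuming the `test_e2e_run` target consumes them, an illustrative override looks like:

```bash
# defaults: ../tests/prover-e2e/testset.json and .work
make test_e2e_run

# point the e2e run at a custom handle set and work directory (paths are illustrative)
E2E_HANDLE_SET=/tmp/my-testset.json DUMP_DIR=/tmp/e2e-work make test_e2e_run
```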
@@ -3,7 +3,7 @@
# Define version mapping
declare -A VERSION_MAP
VERSION_MAP["euclid"]="0.4.3"
VERSION_MAP["feynman"]="0.5.0rc1"
VERSION_MAP["feynman"]="0.5.2"

# release version
if [ -z "${SCROLL_ZKVM_VERSION}" ]; then
@@ -14,7 +14,7 @@ if [ -z "${SCROLL_ZKVM_VERSION}" ]; then
    echo "Setting SCROLL_ZKVM_VERSION to ${SCROLL_ZKVM_VERSION} based on '$1' argument"
  else
    # Default version if no argument or not recognized
    SCROLL_ZKVM_VERSION=0.5.0rc0
    SCROLL_ZKVM_VERSION=0.5.2
  fi
fi
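With the bumped version map, the script resolves `SCROLL_ZKVM_VERSION` as sketched below; the actual asset download happens later in the script and is unchanged by this diff:

```bash
./download-release.sh feynman                      # VERSION_MAP: SCROLL_ZKVM_VERSION=0.5.2
./download-release.sh                              # no/unknown argument: default SCROLL_ZKVM_VERSION=0.5.2
SCROLL_ZKVM_VERSION=0.4.3 ./download-release.sh    # a pre-set env value is respected
```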